From 9991f09f9bd350ae29904c80fe7181f8334a33da Mon Sep 17 00:00:00 2001 From: baocheny Date: Sat, 14 Sep 2024 11:12:40 +0800 Subject: [PATCH 01/30] upgrade diffusers --- optimum/habana/diffusers/__init__.py | 1 + .../diffusers/pipelines/flux/pipeline_flux.py | 434 ++++++++++++++++++ setup.py | 2 +- 3 files changed, 436 insertions(+), 1 deletion(-) create mode 100644 optimum/habana/diffusers/pipelines/flux/pipeline_flux.py diff --git a/optimum/habana/diffusers/__init__.py b/optimum/habana/diffusers/__init__.py index d3ec347d07..4ca2933e91 100644 --- a/optimum/habana/diffusers/__init__.py +++ b/optimum/habana/diffusers/__init__.py @@ -1,6 +1,7 @@ from .pipelines.auto_pipeline import AutoPipelineForInpainting, AutoPipelineForText2Image from .pipelines.controlnet.pipeline_controlnet import GaudiStableDiffusionControlNetPipeline from .pipelines.ddpm.pipeline_ddpm import GaudiDDPMPipeline +from .pipelines.flux.pipeline_flux import GaudiFluxPipeline from .pipelines.pipeline_utils import GaudiDiffusionPipeline from .pipelines.stable_diffusion.pipeline_stable_diffusion import GaudiStableDiffusionPipeline from .pipelines.stable_diffusion.pipeline_stable_diffusion_depth2img import GaudiStableDiffusionDepth2ImgPipeline diff --git a/optimum/habana/diffusers/pipelines/flux/pipeline_flux.py b/optimum/habana/diffusers/pipelines/flux/pipeline_flux.py new file mode 100644 index 0000000000..7c620c222b --- /dev/null +++ b/optimum/habana/diffusers/pipelines/flux/pipeline_flux.py @@ -0,0 +1,434 @@ +# Copyright 2024 Black Forest Labs and The HuggingFace Team. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import time +import math +from dataclasses import dataclass +from typing import Any, Callable, Dict, List, Optional, Union + +import numpy as np +import PIL.Image + +import torch + +from transformers import CLIPTextModel, CLIPTokenizer, T5EncoderModel, T5TokenizerFast + +from diffusers.utils import BaseOutput, replace_example_docstring +from diffusers.schedulers import FlowMatchEulerDiscreteScheduler +from diffusers.models.autoencoders import AutoencoderKL +from diffusers.models.transformers import FluxTransformer2DModel +from diffusers.pipelines.flux.pipeline_flux import FluxPipeline, calculate_shift, retrieve_timesteps + +from optimum.utils import logging + +from ....transformers.gaudi_configuration import GaudiConfig +from ....utils import HabanaProfile, speed_metrics, warmup_inference_steps_time_adjustment +from ..pipeline_utils import GaudiDiffusionPipeline + +logger = logging.get_logger(__name__) # pylint: disable=invalid-name + + +@dataclass +class GaudiFluxPipelineOutput(BaseOutput): + """ + Output class for Stable Diffusion pipelines. + + Args: + images (`List[PIL.Image.Image]` or `np.ndarray`) + List of denoised PIL images of length `batch_size` or numpy array of shape `(batch_size, height, width, + num_channels)`. PIL images or numpy array present the denoised images of the diffusion pipeline. 
+ """ + + images: Union[List[PIL.Image.Image], np.ndarray] + + +EXAMPLE_DOC_STRING = """ + Examples: + ```py + >>> import torch + >>> from optimum.habana.diffusers import GaudiFluxPipeline + + >>> pipe = GaudiFluxPipeline.from_pretrained( + ... "black-forest-labs/FLUX.1-schnell", + ... torch_dtype=torch.bfloat16, + ... use_habana=True, + ... use_hpu_graphs=True, + ... gaudi_config="Habana/stable-diffusion", + ... ) + >>> prompt = "A cat holding a sign that says hello world" + >>> # Depending on the variant being used, the pipeline call will slightly vary. + >>> # Refer to the pipeline documentation for more details. + >>> image = pipe(prompt, num_inference_steps=4, guidance_scale=0.0).images[0] + >>> image.save("flux.png") + ``` +""" + + +class GaudiFluxPipeline(GaudiDiffusionPipeline, FluxPipeline): + r""" + Adapted from https://github.com/huggingface/diffusers/blob/main/src/diffusers/pipelines/flux/pipeline_flux.py#L140 + + The Flux pipeline for text-to-image generation. + + Reference: https://blackforestlabs.ai/announcing-black-forest-labs/ + + Args: + transformer ([`FluxTransformer2DModel`]): + Conditional Transformer (MMDiT) architecture to denoise the encoded image latents. + scheduler ([`FlowMatchEulerDiscreteScheduler`]): + A scheduler to be used in combination with `transformer` to denoise the encoded image latents. + vae ([`AutoencoderKL`]): + Variational Auto-Encoder (VAE) Model to encode and decode images to and from latent representations. + text_encoder ([`CLIPTextModel`]): + [CLIP](https://huggingface.co/docs/transformers/model_doc/clip#transformers.CLIPTextModel), specifically + the [clip-vit-large-patch14](https://huggingface.co/openai/clip-vit-large-patch14) variant. + text_encoder_2 ([`T5EncoderModel`]): + [T5](https://huggingface.co/docs/transformers/en/model_doc/t5#transformers.T5EncoderModel), specifically + the [google/t5-v1_1-xxl](https://huggingface.co/google/t5-v1_1-xxl) variant. + tokenizer (`CLIPTokenizer`): + Tokenizer of class + [CLIPTokenizer](https://huggingface.co/docs/transformers/en/model_doc/clip#transformers.CLIPTokenizer). + tokenizer_2 (`T5TokenizerFast`): + Second Tokenizer of class + [T5TokenizerFast](https://huggingface.co/docs/transformers/en/model_doc/t5#transformers.T5TokenizerFast). 
+ """ + + model_cpu_offload_seq = "text_encoder->text_encoder_2->transformer->vae" + _optional_components = [] + _callback_tensor_inputs = ["latents", "prompt_embeds"] + + def __init__( + self, + scheduler: FlowMatchEulerDiscreteScheduler, + vae: AutoencoderKL, + text_encoder: CLIPTextModel, + tokenizer: CLIPTokenizer, + text_encoder_2: T5EncoderModel, + tokenizer_2: T5TokenizerFast, + transformer: FluxTransformer2DModel, + use_habana: bool = False, + use_hpu_graphs: bool = False, + gaudi_config: Union[str, GaudiConfig] = None, + bf16_full_eval: bool = False, + ): + GaudiDiffusionPipeline.__init__( + self, + use_habana, + use_hpu_graphs, + gaudi_config, + bf16_full_eval, + ) + FluxPipeline.__init__( + self, + scheduler=scheduler, + vae=vae, + text_encoder=text_encoder, + tokenizer=tokenizer, + text_encoder_2=text_encoder_2, + tokenizer_2=tokenizer_2, + transformer=transformer, + ) + self.to(self._device) + + @torch.no_grad() + @replace_example_docstring(EXAMPLE_DOC_STRING) + def __call__( + self, + prompt: Union[str, List[str]] = None, + prompt_2: Optional[Union[str, List[str]]] = None, + height: Optional[int] = None, + width: Optional[int] = None, + num_inference_steps: int = 28, + timesteps: List[int] = None, + guidance_scale: float = 3.5, + num_images_per_prompt: Optional[int] = 1, + generator: Optional[Union[torch.Generator, List[torch.Generator]]] = None, + latents: Optional[torch.FloatTensor] = None, + prompt_embeds: Optional[torch.FloatTensor] = None, + pooled_prompt_embeds: Optional[torch.FloatTensor] = None, + output_type: Optional[str] = "pil", + return_dict: bool = True, + joint_attention_kwargs: Optional[Dict[str, Any]] = None, + callback_on_step_end: Optional[Callable[[int, int, Dict], None]] = None, + callback_on_step_end_tensor_inputs: List[str] = ["latents"], + max_sequence_length: int = 512, + profiling_warmup_steps: Optional[int] = 0, + profiling_steps: Optional[int] = 0, + **kwargs + ): + r""" + Adapted from https://github.com/huggingface/diffusers/blob/main/src/diffusers/pipelines/flux/pipeline_flux.py#L531 + Function invoked when calling the pipeline for generation. + + Args: + prompt (`str` or `List[str]`, *optional*): + The prompt or prompts to guide the image generation. If not defined, one has to pass `prompt_embeds`. + instead. + prompt_2 (`str` or `List[str]`, *optional*): + The prompt or prompts to be sent to `tokenizer_2` and `text_encoder_2`. If not defined, `prompt` is + will be used instead + height (`int`, *optional*, defaults to self.unet.config.sample_size * self.vae_scale_factor): + The height in pixels of the generated image. This is set to 1024 by default for the best results. + width (`int`, *optional*, defaults to self.unet.config.sample_size * self.vae_scale_factor): + The width in pixels of the generated image. This is set to 1024 by default for the best results. + num_inference_steps (`int`, *optional*, defaults to 50): + The number of denoising steps. More denoising steps usually lead to a higher quality image at the + expense of slower inference. + timesteps (`List[int]`, *optional*): + Custom timesteps to use for the denoising process with schedulers which support a `timesteps` argument + in their `set_timesteps` method. If not defined, the default behavior when `num_inference_steps` is + passed will be used. Must be in descending order. + guidance_scale (`float`, *optional*, defaults to 7.0): + Guidance scale as defined in [Classifier-Free Diffusion Guidance](https://arxiv.org/abs/2207.12598). 
+ `guidance_scale` is defined as `w` of equation 2. of [Imagen + Paper](https://arxiv.org/pdf/2205.11487.pdf). Guidance scale is enabled by setting `guidance_scale > + 1`. Higher guidance scale encourages to generate images that are closely linked to the text `prompt`, + usually at the expense of lower image quality. + num_images_per_prompt (`int`, *optional*, defaults to 1): + The number of images to generate per prompt. + generator (`torch.Generator` or `List[torch.Generator]`, *optional*): + One or a list of [torch generator(s)](https://pytorch.org/docs/stable/generated/torch.Generator.html) + to make generation deterministic. + latents (`torch.FloatTensor`, *optional*): + Pre-generated noisy latents, sampled from a Gaussian distribution, to be used as inputs for image + generation. Can be used to tweak the same generation with different prompts. If not provided, a latents + tensor will ge generated by sampling using the supplied random `generator`. + prompt_embeds (`torch.FloatTensor`, *optional*): + Pre-generated text embeddings. Can be used to easily tweak text inputs, *e.g.* prompt weighting. If not + provided, text embeddings will be generated from `prompt` input argument. + pooled_prompt_embeds (`torch.FloatTensor`, *optional*): + Pre-generated pooled text embeddings. Can be used to easily tweak text inputs, *e.g.* prompt weighting. + If not provided, pooled text embeddings will be generated from `prompt` input argument. + output_type (`str`, *optional*, defaults to `"pil"`): + The output format of the generate image. Choose between + [PIL](https://pillow.readthedocs.io/en/stable/): `PIL.Image.Image` or `np.array`. + return_dict (`bool`, *optional*, defaults to `True`): + Whether or not to return a [`~pipelines.flux.FluxPipelineOutput`] instead of a plain tuple. + joint_attention_kwargs (`dict`, *optional*): + A kwargs dictionary that if specified is passed along to the `AttentionProcessor` as defined under + `self.processor` in + [diffusers.models.attention_processor](https://github.com/huggingface/diffusers/blob/main/src/diffusers/models/attention_processor.py). + callback_on_step_end (`Callable`, *optional*): + A function that calls at the end of each denoising steps during the inference. The function is called + with the following arguments: `callback_on_step_end(self: DiffusionPipeline, step: int, timestep: int, + callback_kwargs: Dict)`. `callback_kwargs` will include a list of all tensors as specified by + `callback_on_step_end_tensor_inputs`. + callback_on_step_end_tensor_inputs (`List`, *optional*): + The list of tensor inputs for the `callback_on_step_end` function. The tensors specified in the list + will be passed as `callback_kwargs` argument. You will only be able to include variables listed in the + `._callback_tensor_inputs` attribute of your pipeline class. + max_sequence_length (`int` defaults to 512): Maximum sequence length to use with the `prompt`. + profiling_warmup_steps (`int`, *optional*): + Number of steps to ignore for profling. + profiling_steps (`int`, *optional*): + Number of steps to be captured when enabling profiling. + + Examples: + + Returns: + [`~pipelines.flux.FluxPipelineOutput`] or `tuple`: [`~pipelines.flux.FluxPipelineOutput`] if `return_dict` + is True, otherwise a `tuple`. When returning a tuple, the first element is a list with the generated + images. 
+ """ + + import habana_frameworks.torch.core as htcore + + height = height or self.default_sample_size * self.vae_scale_factor + width = width or self.default_sample_size * self.vae_scale_factor + + # 1. Check inputs. Raise error if not correct + self.check_inputs( + prompt, + prompt_2, + height, + width, + prompt_embeds=prompt_embeds, + pooled_prompt_embeds=pooled_prompt_embeds, + callback_on_step_end_tensor_inputs=callback_on_step_end_tensor_inputs, + max_sequence_length=max_sequence_length, + ) + + self._guidance_scale = guidance_scale + self._joint_attention_kwargs = joint_attention_kwargs + self._interrupt = False + + # 2. Define call parameters + if prompt is not None and isinstance(prompt, str): + batch_size = 1 + elif prompt is not None and isinstance(prompt, list): + batch_size = len(prompt) + else: + batch_size = prompt_embeds.shape[0] + + device = self._execution_device + + ( + prompt_embeds, + pooled_prompt_embeds, + text_ids, + ) = self.encode_prompt( + prompt=prompt, + prompt_2=prompt_2, + prompt_embeds=prompt_embeds, + pooled_prompt_embeds=pooled_prompt_embeds, + device=device, + num_images_per_prompt=num_images_per_prompt, + max_sequence_length=max_sequence_length, + ) + + # 4. Prepare latent variables + num_channels_latents = self.transformer.config.in_channels // 4 + latents, latent_image_ids = self.prepare_latents( + batch_size * num_images_per_prompt, + num_channels_latents, + height, + width, + prompt_embeds.dtype, + device, + generator, + latents, + ) + + # 5. Prepare timesteps + sigmas = np.linspace(1.0, 1 / num_inference_steps, num_inference_steps) + image_seq_len = latents.shape[1] + mu = calculate_shift( + image_seq_len, + self.scheduler.config.base_image_seq_len, + self.scheduler.config.max_image_seq_len, + self.scheduler.config.base_shift, + self.scheduler.config.max_shift, + ) + timesteps, num_inference_steps = retrieve_timesteps( + self.scheduler, + num_inference_steps, + device, + timesteps, + sigmas, + mu=mu, + ) + num_warmup_steps = max(len(timesteps) - num_inference_steps * self.scheduler.order, 0) + self._num_timesteps = len(timesteps) + + # handle guidance + if self.transformer.config.guidance_embeds: + guidance = torch.full([1], guidance_scale, device=device, dtype=torch.float32) + guidance = guidance.expand(latents.shape[0]) + else: + guidance = None + + # 5-1. Define call parameters + if prompt is not None and isinstance(prompt, str): + num_prompts = 1 + elif prompt is not None and isinstance(prompt, list): + num_prompts = len(prompt) + else: + num_prompts = prompt_embeds.shape[0] + num_batches = math.ceil((num_images_per_prompt * num_prompts) / batch_size) + logger.info( + f"{num_prompts} prompt(s) received, {num_images_per_prompt} generation(s) per prompt," + f" {batch_size} sample(s) per batch, {num_batches} total batch(es)." + ) + if num_batches < 3: + logger.warning("The first two iterations are slower so it is recommended to feed more batches.") + + throughput_warmup_steps = kwargs.get("throughput_warmup_steps", 3) + + t0 = time.time() + t1 = t0 + + hb_profiler = HabanaProfile( + warmup=profiling_warmup_steps, + active=profiling_steps, + record_shapes=False, + ) + hb_profiler.start() + + # 6. 
Denoising loop + with self.progress_bar(total=num_inference_steps) as progress_bar: + for i, t in enumerate(timesteps): + # because compilation occurs in the first two iterations + if i == throughput_warmup_steps: + t1 = time.time() + if self.interrupt: + continue + + # broadcast to batch dimension in a way that's compatible with ONNX/Core ML + timestep = t.expand(latents.shape[0]).to(latents.dtype) + + noise_pred = self.transformer( + hidden_states=latents, + timestep=timestep / 1000, + guidance=guidance, + pooled_projections=pooled_prompt_embeds, + encoder_hidden_states=prompt_embeds, + txt_ids=text_ids, + img_ids=latent_image_ids, + joint_attention_kwargs=self.joint_attention_kwargs, + return_dict=False, + )[0] + + # compute the previous noisy sample x_t -> x_t-1 + latents_dtype = latents.dtype + latents = self.scheduler.step(noise_pred, t, latents, return_dict=False)[0] + + if latents.dtype != latents_dtype: + if torch.backends.mps.is_available(): + # some platforms (eg. apple mps) misbehave due to a pytorch bug: https://github.com/pytorch/pytorch/pull/99272 + latents = latents.to(latents_dtype) + + if callback_on_step_end is not None: + callback_kwargs = {} + for k in callback_on_step_end_tensor_inputs: + callback_kwargs[k] = locals()[k] + callback_outputs = callback_on_step_end(self, i, t, callback_kwargs) + + latents = callback_outputs.pop("latents", latents) + prompt_embeds = callback_outputs.pop("prompt_embeds", prompt_embeds) + + # call the callback, if provided + if i == len(timesteps) - 1 or ((i + 1) > num_warmup_steps and (i + 1) % self.scheduler.order == 0): + progress_bar.update() + + hb_profiler.step() + htcore.mark_step(sync=True) + + hb_profiler.stop() + t1 = warmup_inference_steps_time_adjustment(t1, t1, num_inference_steps, throughput_warmup_steps) + speed_metrics_prefix = "generation" + speed_measures = speed_metrics( + split=speed_metrics_prefix, + start_time=t0, + num_samples=num_batches * batch_size, + num_steps=num_batches * batch_size * num_inference_steps, + start_time_after_warmup=t1, + ) + logger.info(f"Speed metrics: {speed_measures}") + + if output_type == "latent": + image = latents + + else: + latents = self._unpack_latents(latents, height, width, self.vae_scale_factor) + latents = (latents / self.vae.config.scaling_factor) + self.vae.config.shift_factor + image = self.vae.decode(latents, return_dict=False)[0] + image = self.image_processor.postprocess(image, output_type=output_type) + + # Offload all models + self.maybe_free_model_hooks() + + if not return_dict: + return (image,) + + return GaudiFluxPipelineOutput(images=image) diff --git a/setup.py b/setup.py index cea680353e..c77a241717 100644 --- a/setup.py +++ b/setup.py @@ -33,7 +33,7 @@ "optimum", "torch", "accelerate >= 0.33.0, < 0.34.0", - "diffusers == 0.29.2", + "diffusers >= 0.30.2", "huggingface_hub >= 0.23.2", "sentence-transformers[train] == 3.0.1", ] From 9bbcc1b7c649550c83149ae58f79dbe1a6d76099 Mon Sep 17 00:00:00 2001 From: baocheny Date: Wed, 18 Sep 2024 10:06:45 +0800 Subject: [PATCH 02/30] replace schduler --- .../stable-diffusion/run_flux_pipeline.py | 32 +++++++++++++++++++ .../diffusers/pipelines/flux/pipeline_flux.py | 10 ++---- .../habana/diffusers/schedulers/__init__.py | 1 + .../scheduling_flow_mactch_euler_discrete.py | 25 +++++++++++++++ 4 files changed, 60 insertions(+), 8 deletions(-) create mode 100644 examples/stable-diffusion/run_flux_pipeline.py create mode 100644 optimum/habana/diffusers/schedulers/scheduling_flow_mactch_euler_discrete.py diff --git 
a/examples/stable-diffusion/run_flux_pipeline.py b/examples/stable-diffusion/run_flux_pipeline.py new file mode 100644 index 0000000000..e41ba633a9 --- /dev/null +++ b/examples/stable-diffusion/run_flux_pipeline.py @@ -0,0 +1,32 @@ +import argparse +import torch +from optimum.habana.diffusers import GaudiFluxPipeline + + +parser = argparse.ArgumentParser() +parser.add_argument("--warmup", type=int, default=3, help="warmup iterations") +parser.add_argument("--iterations", type=int, default=3, help="warmup iterations") +parser.add_argument("--use_hpu_graph", action='store_true', help="use hpu graph") +args = parser.parse_args() + + +pipe = GaudiFluxPipeline.from_pretrained( + "black-forest-labs/FLUX.1-schnell", + torch_dtype=torch.bfloat16, + use_habana=True, + use_hpu_graphs=args.use_hpu_graph, + gaudi_config="Habana/stable-diffusion", +) + +if args.use_hpu_graph: + from habana_frameworks.torch.hpu import wrap_in_hpu_graph + pipe.transformer = wrap_in_hpu_graph(pipe.transformer) +prompt = "A cat in a bin, and holding a sign '/bin/cat' " +# Depending on the variant being used, the pipeline call will slightly vary. +# Refer to the pipeline documentation for more details. +print("warmuping...") +for i in range(args.warmup): + image = pipe(prompt, num_inference_steps=28, guidance_scale=0.0).images[0] +torch.hpu.synchronize() +image = pipe(prompt, num_inference_steps=28, guidance_scale=0.0, profiling_warmup_steps=3, profiling_steps=3).images[0] +image.save("flux.png") diff --git a/optimum/habana/diffusers/pipelines/flux/pipeline_flux.py b/optimum/habana/diffusers/pipelines/flux/pipeline_flux.py index 7c620c222b..0b5cfc2db0 100644 --- a/optimum/habana/diffusers/pipelines/flux/pipeline_flux.py +++ b/optimum/habana/diffusers/pipelines/flux/pipeline_flux.py @@ -25,7 +25,6 @@ from transformers import CLIPTextModel, CLIPTokenizer, T5EncoderModel, T5TokenizerFast from diffusers.utils import BaseOutput, replace_example_docstring -from diffusers.schedulers import FlowMatchEulerDiscreteScheduler from diffusers.models.autoencoders import AutoencoderKL from diffusers.models.transformers import FluxTransformer2DModel from diffusers.pipelines.flux.pipeline_flux import FluxPipeline, calculate_shift, retrieve_timesteps @@ -35,6 +34,7 @@ from ....transformers.gaudi_configuration import GaudiConfig from ....utils import HabanaProfile, speed_metrics, warmup_inference_steps_time_adjustment from ..pipeline_utils import GaudiDiffusionPipeline +from ...schedulers import GaudiFlowMatchEulerDiscreteScheduler logger = logging.get_logger(__name__) # pylint: disable=invalid-name @@ -110,7 +110,7 @@ class GaudiFluxPipeline(GaudiDiffusionPipeline, FluxPipeline): def __init__( self, - scheduler: FlowMatchEulerDiscreteScheduler, + scheduler: GaudiFlowMatchEulerDiscreteScheduler, vae: AutoencoderKL, text_encoder: CLIPTextModel, tokenizer: CLIPTokenizer, @@ -380,14 +380,8 @@ def __call__( )[0] # compute the previous noisy sample x_t -> x_t-1 - latents_dtype = latents.dtype latents = self.scheduler.step(noise_pred, t, latents, return_dict=False)[0] - if latents.dtype != latents_dtype: - if torch.backends.mps.is_available(): - # some platforms (eg. 
apple mps) misbehave due to a pytorch bug: https://github.com/pytorch/pytorch/pull/99272 - latents = latents.to(latents_dtype) - if callback_on_step_end is not None: callback_kwargs = {} for k in callback_on_step_end_tensor_inputs: diff --git a/optimum/habana/diffusers/schedulers/__init__.py b/optimum/habana/diffusers/schedulers/__init__.py index 37eb80b1a6..48bf0bd8e9 100644 --- a/optimum/habana/diffusers/schedulers/__init__.py +++ b/optimum/habana/diffusers/schedulers/__init__.py @@ -1,3 +1,4 @@ from .scheduling_ddim import GaudiDDIMScheduler from .scheduling_euler_ancestral_discrete import GaudiEulerAncestralDiscreteScheduler from .scheduling_euler_discrete import GaudiEulerDiscreteScheduler +from .scheduling_flow_mactch_euler_discrete import GaudiFlowMatchEulerDiscreteScheduler diff --git a/optimum/habana/diffusers/schedulers/scheduling_flow_mactch_euler_discrete.py b/optimum/habana/diffusers/schedulers/scheduling_flow_mactch_euler_discrete.py new file mode 100644 index 0000000000..ccc597fa07 --- /dev/null +++ b/optimum/habana/diffusers/schedulers/scheduling_flow_mactch_euler_discrete.py @@ -0,0 +1,25 @@ +from diffusers.schedulers import FlowMatchEulerDiscreteScheduler + + +class GaudiFlowMatchEulerDiscreteScheduler(FlowMatchEulerDiscreteScheduler): + # TODO: overwrite orginal func with following one to fix dyn error in gaudi lazy mode + def index_for_timestep(self, timestep, schedule_timesteps=None): + if schedule_timesteps is None: + schedule_timesteps = self.timesteps + + # indices = (schedule_timesteps == timestep).nonzero() + + # The sigma index that is taken for the **very** first `step` + # is always the second index (or the last index if there is only 1) + # This way we can ensure we don't accidentally skip a sigma in + # case we start in the middle of the denoising schedule (e.g. for image-to-image) + # pos = 1 if len(indices) > 1 else 0 + + # return indices[pos].item() + + masked = (schedule_timesteps == timestep) + tmp = masked.cumsum(dim=0) + pos = (tmp == 0).sum().item() + if masked.sum() > 1: + pos += (tmp == 1).sum().item() + return pos From cb2aaf0f896dc400ed66b2f8ec125e05292022aa Mon Sep 17 00:00:00 2001 From: baocheny Date: Wed, 18 Sep 2024 16:36:02 +0800 Subject: [PATCH 03/30] update wkld entrypoint --- examples/stable-diffusion/README.md | 55 +++++++++++++++++-- .../text_to_image_generation.py | 34 ++++++++++-- optimum/habana/diffusers/__init__.py | 2 +- 3 files changed, 80 insertions(+), 11 deletions(-) diff --git a/examples/stable-diffusion/README.md b/examples/stable-diffusion/README.md index 38ca7ae9d7..f3040b653d 100644 --- a/examples/stable-diffusion/README.md +++ b/examples/stable-diffusion/README.md @@ -20,12 +20,12 @@ This directory contains a script that showcases how to perform text-to-image gen Stable Diffusion was proposed in [Stable Diffusion Announcement](https://stability.ai/blog/stable-diffusion-announcement) by Patrick Esser and Robin Rombach and the Stability AI team. - ## Text-to-image Generation ### Single Prompt Here is how to generate images with one prompt: + ```bash python text_to_image_generation.py \ --model_name_or_path CompVis/stable-diffusion-v1-4 \ @@ -43,10 +43,10 @@ python text_to_image_generation.py \ > The first batch of images entails a performance penalty. All subsequent batches will be generated much faster. > You can enable this mode with `--use_hpu_graphs`. 
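For reference, the command above maps onto the Python API exported by `optimum.habana.diffusers`. A minimal sketch (the prompt and batch settings are only illustrative; the Gaudi-specific arguments follow the pipeline and scheduler classes defined in this repository):

```python
import torch

from optimum.habana.diffusers import GaudiDDIMScheduler, GaudiStableDiffusionPipeline

model_name = "CompVis/stable-diffusion-v1-4"

# Gaudi-tuned DDIM scheduler for this model
scheduler = GaudiDDIMScheduler.from_pretrained(model_name, subfolder="scheduler")

pipeline = GaudiStableDiffusionPipeline.from_pretrained(
    model_name,
    scheduler=scheduler,
    torch_dtype=torch.bfloat16,          # same as --bf16
    use_habana=True,                     # run on HPU
    use_hpu_graphs=True,                 # same as --use_hpu_graphs
    gaudi_config="Habana/stable-diffusion",
)

outputs = pipeline(
    prompt="An image of a squirrel in Picasso style",  # illustrative prompt
    num_images_per_prompt=8,
    batch_size=4,
)
outputs.images[0].save("image.png")
```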
- ### Multiple Prompts Here is how to generate images with several prompts: + ```bash python text_to_image_generation.py \ --model_name_or_path CompVis/stable-diffusion-v1-4 \ @@ -61,7 +61,9 @@ python text_to_image_generation.py \ ``` ### Distributed inference with multiple HPUs + Here is how to generate images with two prompts on two HPUs: + ```bash python ../gaudi_spawn.py \ --world_size 2 text_to_image_generation.py \ @@ -101,10 +103,10 @@ python text_to_image_generation.py \ ``` > There are two different checkpoints for Stable Diffusion 2: +> > - use [stabilityai/stable-diffusion-2-1](https://huggingface.co/stabilityai/stable-diffusion-2-1) for generating 768x768 images > - use [stabilityai/stable-diffusion-2-1-base](https://huggingface.co/stabilityai/stable-diffusion-2-1-base) for generating 512x512 images - ### Latent Diffusion Model for 3D (LDM3D) [LDM3D](https://arxiv.org/abs/2305.10853) generates both image and depth map data from a given text prompt, allowing users to generate RGBD images from text prompts. @@ -127,7 +129,9 @@ python text_to_image_generation.py \ --ldm3d \ --bf16 ``` + Here is how to generate images and depth maps with two prompts on two HPUs: + ```bash python ../gaudi_spawn.py \ --world_size 2 text_to_image_generation.py \ @@ -146,6 +150,7 @@ python ../gaudi_spawn.py \ ``` > There are three different checkpoints for LDM3D: +> > - use [original checkpoint](https://huggingface.co/Intel/ldm3d) to generate outputs from the paper > - use [the latest checkpoint](https://huggingface.co/Intel/ldm3d-4c) for generating improved results > - use [the pano checkpoint](https://huggingface.co/Intel/ldm3d-pano) to generate panoramic view @@ -155,6 +160,7 @@ python ../gaudi_spawn.py \ Stable Diffusion XL was proposed in [SDXL: Improving Latent Diffusion Models for High-Resolution Image Synthesis](https://arxiv.org/pdf/2307.01952.pdf) by the Stability AI team. Here is how to generate SDXL images with a single prompt: + ```bash python text_to_image_generation.py \ --model_name_or_path stabilityai/stable-diffusion-xl-base-1.0 \ @@ -174,6 +180,7 @@ python text_to_image_generation.py \ > You can enable this mode with `--use_hpu_graphs`. Here is how to generate SDXL images with several prompts: + ```bash python text_to_image_generation.py \ --model_name_or_path stabilityai/stable-diffusion-xl-base-1.0 \ @@ -191,6 +198,7 @@ python text_to_image_generation.py \ SDXL combines a second text encoder (OpenCLIP ViT-bigG/14) with the original text encoder to significantly increase the number of parameters. Here is how to generate images with several prompts for both `prompt` and `prompt_2` (2nd text encoder), as well as their negative prompts: + ```bash python text_to_image_generation.py \ --model_name_or_path stabilityai/stable-diffusion-xl-base-1.0 \ @@ -209,6 +217,7 @@ python text_to_image_generation.py \ ``` Here is how to generate SDXL images with two prompts on two HPUs: + ```bash python ../gaudi_spawn.py \ --world_size 2 text_to_image_generation.py \ @@ -227,14 +236,17 @@ python ../gaudi_spawn.py \ --bf16 \ --distributed ``` + > HPU graphs are recommended when generating images by batches to get the fastest possible generations. > The first batch of images entails a performance penalty. All subsequent batches will be generated much faster. > You can enable this mode with `--use_hpu_graphs`. ### SDXL-Turbo + SDXL-Turbo is a distilled version of SDXL 1.0, trained for real-time synthesis. 
Here is how to generate images with multiple prompts: + ```bash python text_to_image_generation.py \ --model_name_or_path stabilityai/sdxl-turbo \ @@ -267,11 +279,13 @@ Before running SD3 pipeline, you need to: 1. Agree to the Terms and Conditions for using SD3 model at [HuggingFace model page](https://huggingface.co/stabilityai/stable-diffusion-3-medium) 2. Authenticate with HuggingFace using your HF Token. For authentication, run: + ```bash huggingface-cli login ``` Here is how to generate SD3 images with a single prompt: + ```bash PT_HPU_MAX_COMPOUND_OP_SIZE=1 \ python text_to_image_generation.py \ @@ -291,12 +305,32 @@ python text_to_image_generation.py \ > For improved performance of the SD3 pipeline on Gaudi, it is recommended to configure the environment > by setting PT_HPU_MAX_COMPOUND_OP_SIZE to 1. +### FLUX.1 + +FLUX.1 was was introduced by Black Forest Labs [here](https://blackforestlabs.ai/announcing-black-forest-labs/) + +```bash +python text_to_image_generation.py \ + --model_name_or_path black-forest-labs/FLUX.1-schnell \ + --prompts "A cat holding a sign that says hello world" \ + --num_images_per_prompt 10 \ + --batch_size 1 \ + --num_inference_steps 28 \ + --image_save_dir /tmp/flux_1_images \ + --scheduler flow_match_euler_discrete\ + --use_habana \ + --use_hpu_graphs \ + --gaudi_config Habana/stable-diffusion \ + --bf16 +``` + ## ControlNet -ControlNet was introduced in [Adding Conditional Control to Text-to-Image Diffusion Models ](https://huggingface.co/papers/2302.05543) by Lvmin Zhang and Maneesh Agrawala. +ControlNet was introduced in [Adding Conditional Control to Text-to-Image Diffusion Models](https://huggingface.co/papers/2302.05543) by Lvmin Zhang and Maneesh Agrawala. It is a type of model for controlling StableDiffusion by conditioning the model with an additional input image. Here is how to generate images conditioned by canny edge model: + ```bash pip install -r requirements.txt python text_to_image_generation.py \ @@ -314,6 +348,7 @@ python text_to_image_generation.py \ ``` Here is how to generate images conditioned by canny edge model and with multiple prompts: + ```bash pip install -r requirements.txt python text_to_image_generation.py \ @@ -331,6 +366,7 @@ python text_to_image_generation.py \ ``` Here is how to generate images conditioned by canny edge model and with two prompts on two HPUs: + ```bash pip install -r requirements.txt python ../gaudi_spawn.py \ @@ -350,6 +386,7 @@ python ../gaudi_spawn.py \ ``` Here is how to generate images conditioned by open pose model: + ```bash pip install -r requirements.txt python text_to_image_generation.py \ @@ -368,6 +405,7 @@ python text_to_image_generation.py \ ``` Here is how to generate images with conditioned by canny edge model using Stable Diffusion 2 + ```bash pip install -r requirements.txt python text_to_image_generation.py \ @@ -392,6 +430,7 @@ Inpainting replaces or edits specific areas of an image. For more details, please refer to [Hugging Face Diffusers doc](https://huggingface.co/docs/diffusers/en/using-diffusers/inpaint). ### Stable Diffusion Inpainting + ```bash python text_to_image_generation.py \ --model_name_or_path stabilityai/stable-diffusion-2-inpainting \ @@ -409,6 +448,7 @@ python text_to_image_generation.py \ ``` ### Stable Diffusion XL Inpainting + ```bash python text_to_image_generation.py \ --model_name_or_path diffusers/stable-diffusion-xl-1.0-inpainting-0.1\ @@ -455,10 +495,10 @@ python image_to_image_generation.py \ > The first batch of images entails a performance penalty. 
All subsequent batches will be generated much faster. > You can enable this mode with `--use_hpu_graphs`. - ### Multiple Prompts Here is how to generate images with several prompts and one image. + ```bash pip install -r requirements.txt python image_to_image_generation.py \ @@ -481,10 +521,10 @@ python image_to_image_generation.py \ > The first batch of images entails a performance penalty. All subsequent batches will be generated much faster. > You can enable this mode with `--use_hpu_graphs`. - ### Stable Diffusion XL Refiner Here is how to generate SDXL images with a single prompt and one image: + ```bash pip install -r requirements.txt python image_to_image_generation.py \ @@ -505,6 +545,7 @@ python image_to_image_generation.py \ ### Stable Diffusion Image Variations Here is how to generate images with one image, it does not accept prompt input + ```bash pip install -r requirements.txt python image_to_image_generation.py \ @@ -565,6 +606,7 @@ Script `image_to_video_generation.py` showcases how to perform image-to-video ge ### Single Image Prompt Here is how to generate video with one image prompt: + ```bash PT_HPU_MAX_COMPOUND_OP_SIZE=1 \ python image_to_video_generation.py \ @@ -585,6 +627,7 @@ python image_to_video_generation.py \ ### Multiple Image Prompts Here is how to generate videos with several image prompts: + ```bash PT_HPU_MAX_COMPOUND_OP_SIZE=1 \ python image_to_video_generation.py \ diff --git a/examples/stable-diffusion/text_to_image_generation.py b/examples/stable-diffusion/text_to_image_generation.py index 8caa659ca6..02168b080f 100755 --- a/examples/stable-diffusion/text_to_image_generation.py +++ b/examples/stable-diffusion/text_to_image_generation.py @@ -26,6 +26,7 @@ GaudiDDIMScheduler, GaudiEulerAncestralDiscreteScheduler, GaudiEulerDiscreteScheduler, + GaudiFlowMatchEulerDiscreteScheduler ) from optimum.habana.utils import set_seed @@ -65,7 +66,7 @@ def main(): parser.add_argument( "--scheduler", default="ddim", - choices=["default", "euler_discrete", "euler_ancestral_discrete", "ddim"], + choices=["default", "euler_discrete", "euler_ancestral_discrete", "ddim", "flow_match_euler_discrete"], type=str, help="Name of scheduler", ) @@ -275,13 +276,16 @@ def main(): # Select stable diffuson pipeline based on input sdxl_models = ["stable-diffusion-xl", "sdxl"] sd3_models = ["stable-diffusion-3"] + flux_models = ["FLUX.1-dev", "FLUX.1-schnell"] sdxl = True if any(model in args.model_name_or_path for model in sdxl_models) else False sd3 = True if any(model in args.model_name_or_path for model in sd3_models) else False + flux = True if any(model in args.model_name_or_path for model in flux_models) else False controlnet = True if args.control_image is not None else False inpainting = True if (args.base_image is not None) and (args.mask_image is not None) else False # Set the scheduler kwargs = {"timestep_spacing": args.timestep_spacing} + if args.scheduler == "euler_discrete": scheduler = GaudiEulerDiscreteScheduler.from_pretrained( args.model_name_or_path, subfolder="scheduler", **kwargs @@ -292,6 +296,10 @@ def main(): ) elif args.scheduler == "ddim": scheduler = GaudiDDIMScheduler.from_pretrained(args.model_name_or_path, subfolder="scheduler", **kwargs) + elif args.scheduler == "flow_match_euler_discrete": + scheduler = GaudiFlowMatchEulerDiscreteScheduler.from_pretrained( + args.model_name_or_path, subfolder="scheduler", **kwargs + ) else: scheduler = None @@ -340,16 +348,18 @@ def main(): negative_prompts = negative_prompt kwargs_call["negative_prompt"] = 
negative_prompts - if sdxl or sd3: + if sdxl or sd3 or flux: prompts_2 = args.prompts_2 - negative_prompts_2 = args.negative_prompts_2 if args.distributed and args.prompts_2 is not None: with distributed_state.split_between_processes(args.prompts_2) as prompt_2: prompts_2 = prompt_2 + kwargs_call["prompt_2"] = prompts_2 + + if sdxl or sd3: + negative_prompts_2 = args.negative_prompts_2 if args.distributed and args.negative_prompts_2 is not None: with distributed_state.split_between_processes(args.negative_prompts_2) as negative_prompt_2: negative_prompts_2 = negative_prompt_2 - kwargs_call["prompt_2"] = prompts_2 kwargs_call["negative_prompt_2"] = negative_prompts_2 if sd3: @@ -428,6 +438,22 @@ def main(): args.model_name_or_path, **kwargs, ) + elif flux: + # SD3 pipelines + if controlnet: + # Import SD3+ControlNet pipeline + raise ValueError("SD3+ControlNet pipeline is not currenly supported") + elif inpainting: + # Import SD3 Inpainting pipeline + raise ValueError("SD3 Inpainting pipeline is not currenly supported") + else: + # Import SD3 pipeline + from optimum.habana.diffusers import GaudiFluxPipeline + + pipeline = GaudiFluxPipeline.from_pretrained( + args.model_name_or_path, + **kwargs, + ) else: # SD pipelines (SD1.x, SD2.x) diff --git a/optimum/habana/diffusers/__init__.py b/optimum/habana/diffusers/__init__.py index 4ca2933e91..3be8537f45 100644 --- a/optimum/habana/diffusers/__init__.py +++ b/optimum/habana/diffusers/__init__.py @@ -21,4 +21,4 @@ from .pipelines.stable_diffusion_xl.pipeline_stable_diffusion_xl_inpaint import GaudiStableDiffusionXLInpaintPipeline from .pipelines.stable_video_diffusion.pipeline_stable_video_diffusion import GaudiStableVideoDiffusionPipeline from .pipelines.text_to_video_synthesis.pipeline_text_to_video_synth import GaudiTextToVideoSDPipeline -from .schedulers import GaudiDDIMScheduler, GaudiEulerAncestralDiscreteScheduler, GaudiEulerDiscreteScheduler +from .schedulers import GaudiDDIMScheduler, GaudiEulerAncestralDiscreteScheduler, GaudiEulerDiscreteScheduler, GaudiFlowMatchEulerDiscreteScheduler From 4fcf181e77b663341ef4a256adebfae1066a05c0 Mon Sep 17 00:00:00 2001 From: baocheny Date: Wed, 18 Sep 2024 16:36:53 +0800 Subject: [PATCH 04/30] rem demo wkld entrypoint --- .../stable-diffusion/run_flux_pipeline.py | 32 ------------------- 1 file changed, 32 deletions(-) delete mode 100644 examples/stable-diffusion/run_flux_pipeline.py diff --git a/examples/stable-diffusion/run_flux_pipeline.py b/examples/stable-diffusion/run_flux_pipeline.py deleted file mode 100644 index e41ba633a9..0000000000 --- a/examples/stable-diffusion/run_flux_pipeline.py +++ /dev/null @@ -1,32 +0,0 @@ -import argparse -import torch -from optimum.habana.diffusers import GaudiFluxPipeline - - -parser = argparse.ArgumentParser() -parser.add_argument("--warmup", type=int, default=3, help="warmup iterations") -parser.add_argument("--iterations", type=int, default=3, help="warmup iterations") -parser.add_argument("--use_hpu_graph", action='store_true', help="use hpu graph") -args = parser.parse_args() - - -pipe = GaudiFluxPipeline.from_pretrained( - "black-forest-labs/FLUX.1-schnell", - torch_dtype=torch.bfloat16, - use_habana=True, - use_hpu_graphs=args.use_hpu_graph, - gaudi_config="Habana/stable-diffusion", -) - -if args.use_hpu_graph: - from habana_frameworks.torch.hpu import wrap_in_hpu_graph - pipe.transformer = wrap_in_hpu_graph(pipe.transformer) -prompt = "A cat in a bin, and holding a sign '/bin/cat' " -# Depending on the variant being used, the pipeline call will slightly 
vary. -# Refer to the pipeline documentation for more details. -print("warmuping...") -for i in range(args.warmup): - image = pipe(prompt, num_inference_steps=28, guidance_scale=0.0).images[0] -torch.hpu.synchronize() -image = pipe(prompt, num_inference_steps=28, guidance_scale=0.0, profiling_warmup_steps=3, profiling_steps=3).images[0] -image.save("flux.png") From 8e0a02f3670735e4385fa6a33cec12c3dec6992b Mon Sep 17 00:00:00 2001 From: baocheny Date: Mon, 23 Sep 2024 14:03:05 +0800 Subject: [PATCH 05/30] add wrap in hpu graph --- .../stable-diffusion/text_to_image_generation.py | 12 ++++++------ optimum/habana/diffusers/pipelines/auto_pipeline.py | 4 ++++ .../habana/diffusers/pipelines/flux/pipeline_flux.py | 3 +++ optimum/habana/diffusers/pipelines/pipeline_utils.py | 1 + 4 files changed, 14 insertions(+), 6 deletions(-) diff --git a/examples/stable-diffusion/text_to_image_generation.py b/examples/stable-diffusion/text_to_image_generation.py index 02168b080f..8425389b4b 100755 --- a/examples/stable-diffusion/text_to_image_generation.py +++ b/examples/stable-diffusion/text_to_image_generation.py @@ -439,15 +439,15 @@ def main(): **kwargs, ) elif flux: - # SD3 pipelines + # Flux pipelines if controlnet: - # Import SD3+ControlNet pipeline - raise ValueError("SD3+ControlNet pipeline is not currenly supported") + # Import Flux+ControlNet pipeline + raise ValueError("Flux+ControlNet pipeline is not currently supported") elif inpainting: - # Import SD3 Inpainting pipeline - raise ValueError("SD3 Inpainting pipeline is not currenly supported") + # Import Flux Inpainting pipeline + raise ValueError("Flux Inpainting pipeline is not currently supported") else: - # Import SD3 pipeline + # Import Flux pipeline from optimum.habana.diffusers import GaudiFluxPipeline pipeline = GaudiFluxPipeline.from_pretrained( args.model_name_or_path, **kwargs, ) diff --git a/optimum/habana/diffusers/pipelines/auto_pipeline.py b/optimum/habana/diffusers/pipelines/auto_pipeline.py index 77171c9502..a7fb5431ca 100644 --- a/optimum/habana/diffusers/pipelines/auto_pipeline.py +++ b/optimum/habana/diffusers/pipelines/auto_pipeline.py @@ -33,6 +33,8 @@ from .stable_diffusion.pipeline_stable_diffusion_inpaint import GaudiStableDiffusionInpaintPipeline from .stable_diffusion_xl.pipeline_stable_diffusion_xl import GaudiStableDiffusionXLPipeline from .stable_diffusion_xl.pipeline_stable_diffusion_xl_inpaint import GaudiStableDiffusionXLInpaintPipeline +from .stable_diffusion_3.pipeline_stable_diffusion_3 import GaudiStableDiffusion3Pipeline +from .flux.pipeline_flux import GaudiFluxPipeline GAUDI_PREFIX_NAME = "Gaudi" @@ -42,6 +44,8 @@ ("stable-diffusion", GaudiStableDiffusionPipeline), ("stable-diffusion-xl", GaudiStableDiffusionXLPipeline), ("stable-diffusion-controlnet", GaudiStableDiffusionControlNetPipeline), + ("stable-diffusion-3", GaudiStableDiffusion3Pipeline), + ("flux", GaudiFluxPipeline), ] ) diff --git a/optimum/habana/diffusers/pipelines/flux/pipeline_flux.py b/optimum/habana/diffusers/pipelines/flux/pipeline_flux.py index 0b5cfc2db0..e2f432c6ac 100644 --- a/optimum/habana/diffusers/pipelines/flux/pipeline_flux.py +++ b/optimum/habana/diffusers/pipelines/flux/pipeline_flux.py @@ -140,6 +140,9 @@ def __init__( transformer=transformer, ) self.to(self._device) + if use_hpu_graphs: + from habana_frameworks.torch.hpu import wrap_in_hpu_graph + transformer = wrap_in_hpu_graph(transformer) @torch.no_grad() @replace_example_docstring(EXAMPLE_DOC_STRING) def __call__( diff --git a/optimum/habana/diffusers/pipelines/pipeline_utils.py
b/optimum/habana/diffusers/pipelines/pipeline_utils.py index 7f36b90ae4..6e659edff4 100644 --- a/optimum/habana/diffusers/pipelines/pipeline_utils.py +++ b/optimum/habana/diffusers/pipelines/pipeline_utils.py @@ -55,6 +55,7 @@ "optimum.habana.diffusers.schedulers": { "GaudiDDIMScheduler": ["save_pretrained", "from_pretrained"], "GaudiEulerDiscreteScheduler": ["save_pretrained", "from_pretrained"], + "GaudiFlowMatchEulerDiscreteScheduler": ["save_pretrained", "from_pretrained"], "GaudiEulerAncestralDiscreteScheduler": ["save_pretrained", "from_pretrained"], }, }
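Taken together, patches 01-05 make FLUX.1 runnable through `text_to_image_generation.py` on Gaudi. Below is a minimal programmatic sketch of the same flow, assuming the classes added in this series (`GaudiFluxPipeline`, `GaudiFlowMatchEulerDiscreteScheduler`); the prompt, step count and guidance value mirror the example docstring and README additions above and are only illustrative:

```python
import torch

from optimum.habana.diffusers import GaudiFlowMatchEulerDiscreteScheduler, GaudiFluxPipeline

model_name = "black-forest-labs/FLUX.1-schnell"

# Gaudi-friendly flow-match scheduler added in patch 02
scheduler = GaudiFlowMatchEulerDiscreteScheduler.from_pretrained(model_name, subfolder="scheduler")

pipeline = GaudiFluxPipeline.from_pretrained(
    model_name,
    scheduler=scheduler,
    torch_dtype=torch.bfloat16,
    use_habana=True,
    use_hpu_graphs=True,  # the transformer is wrapped in an HPU graph at init time (patch 05)
    gaudi_config="Habana/stable-diffusion",
)

image = pipeline(
    "A cat holding a sign that says hello world",
    num_inference_steps=4,  # FLUX.1-schnell is distilled for few-step sampling
    guidance_scale=0.0,
).images[0]
image.save("flux.png")
```

The scheduler override matters on Gaudi because the stock `FlowMatchEulerDiscreteScheduler.index_for_timestep` relies on `nonzero()`, which introduces dynamic shapes in lazy mode; the Gaudi subclass replaces it with a cumsum-based lookup, as shown in patch 02.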
apple mps) misbehave due to a pytorch bug: https://github.com/pytorch/pytorch/pull/99272 - latents = latents.to(latents_dtype) - if callback_on_step_end is not None: callback_kwargs = {} for k in callback_on_step_end_tensor_inputs: diff --git a/optimum/habana/diffusers/schedulers/__init__.py b/optimum/habana/diffusers/schedulers/__init__.py index 37eb80b1a6..48bf0bd8e9 100644 --- a/optimum/habana/diffusers/schedulers/__init__.py +++ b/optimum/habana/diffusers/schedulers/__init__.py @@ -1,3 +1,4 @@ from .scheduling_ddim import GaudiDDIMScheduler from .scheduling_euler_ancestral_discrete import GaudiEulerAncestralDiscreteScheduler from .scheduling_euler_discrete import GaudiEulerDiscreteScheduler +from .scheduling_flow_mactch_euler_discrete import GaudiFlowMatchEulerDiscreteScheduler diff --git a/optimum/habana/diffusers/schedulers/scheduling_flow_mactch_euler_discrete.py b/optimum/habana/diffusers/schedulers/scheduling_flow_mactch_euler_discrete.py new file mode 100644 index 0000000000..ccc597fa07 --- /dev/null +++ b/optimum/habana/diffusers/schedulers/scheduling_flow_mactch_euler_discrete.py @@ -0,0 +1,25 @@ +from diffusers.schedulers import FlowMatchEulerDiscreteScheduler + + +class GaudiFlowMatchEulerDiscreteScheduler(FlowMatchEulerDiscreteScheduler): + # TODO: overwrite orginal func with following one to fix dyn error in gaudi lazy mode + def index_for_timestep(self, timestep, schedule_timesteps=None): + if schedule_timesteps is None: + schedule_timesteps = self.timesteps + + # indices = (schedule_timesteps == timestep).nonzero() + + # The sigma index that is taken for the **very** first `step` + # is always the second index (or the last index if there is only 1) + # This way we can ensure we don't accidentally skip a sigma in + # case we start in the middle of the denoising schedule (e.g. for image-to-image) + # pos = 1 if len(indices) > 1 else 0 + + # return indices[pos].item() + + masked = (schedule_timesteps == timestep) + tmp = masked.cumsum(dim=0) + pos = (tmp == 0).sum().item() + if masked.sum() > 1: + pos += (tmp == 1).sum().item() + return pos From 16848b3639270b8007bd1b701ba68faa01d18b36 Mon Sep 17 00:00:00 2001 From: baocheny Date: Wed, 18 Sep 2024 16:36:02 +0800 Subject: [PATCH 08/30] update wkld entrypoint --- examples/stable-diffusion/README.md | 55 +++++++++++++++++-- .../text_to_image_generation.py | 34 ++++++++++-- optimum/habana/diffusers/__init__.py | 2 +- 3 files changed, 80 insertions(+), 11 deletions(-) diff --git a/examples/stable-diffusion/README.md b/examples/stable-diffusion/README.md index 38ca7ae9d7..f3040b653d 100644 --- a/examples/stable-diffusion/README.md +++ b/examples/stable-diffusion/README.md @@ -20,12 +20,12 @@ This directory contains a script that showcases how to perform text-to-image gen Stable Diffusion was proposed in [Stable Diffusion Announcement](https://stability.ai/blog/stable-diffusion-announcement) by Patrick Esser and Robin Rombach and the Stability AI team. - ## Text-to-image Generation ### Single Prompt Here is how to generate images with one prompt: + ```bash python text_to_image_generation.py \ --model_name_or_path CompVis/stable-diffusion-v1-4 \ @@ -43,10 +43,10 @@ python text_to_image_generation.py \ > The first batch of images entails a performance penalty. All subsequent batches will be generated much faster. > You can enable this mode with `--use_hpu_graphs`. 
- ### Multiple Prompts Here is how to generate images with several prompts: + ```bash python text_to_image_generation.py \ --model_name_or_path CompVis/stable-diffusion-v1-4 \ @@ -61,7 +61,9 @@ python text_to_image_generation.py \ ``` ### Distributed inference with multiple HPUs + Here is how to generate images with two prompts on two HPUs: + ```bash python ../gaudi_spawn.py \ --world_size 2 text_to_image_generation.py \ @@ -101,10 +103,10 @@ python text_to_image_generation.py \ ``` > There are two different checkpoints for Stable Diffusion 2: +> > - use [stabilityai/stable-diffusion-2-1](https://huggingface.co/stabilityai/stable-diffusion-2-1) for generating 768x768 images > - use [stabilityai/stable-diffusion-2-1-base](https://huggingface.co/stabilityai/stable-diffusion-2-1-base) for generating 512x512 images - ### Latent Diffusion Model for 3D (LDM3D) [LDM3D](https://arxiv.org/abs/2305.10853) generates both image and depth map data from a given text prompt, allowing users to generate RGBD images from text prompts. @@ -127,7 +129,9 @@ python text_to_image_generation.py \ --ldm3d \ --bf16 ``` + Here is how to generate images and depth maps with two prompts on two HPUs: + ```bash python ../gaudi_spawn.py \ --world_size 2 text_to_image_generation.py \ @@ -146,6 +150,7 @@ python ../gaudi_spawn.py \ ``` > There are three different checkpoints for LDM3D: +> > - use [original checkpoint](https://huggingface.co/Intel/ldm3d) to generate outputs from the paper > - use [the latest checkpoint](https://huggingface.co/Intel/ldm3d-4c) for generating improved results > - use [the pano checkpoint](https://huggingface.co/Intel/ldm3d-pano) to generate panoramic view @@ -155,6 +160,7 @@ python ../gaudi_spawn.py \ Stable Diffusion XL was proposed in [SDXL: Improving Latent Diffusion Models for High-Resolution Image Synthesis](https://arxiv.org/pdf/2307.01952.pdf) by the Stability AI team. Here is how to generate SDXL images with a single prompt: + ```bash python text_to_image_generation.py \ --model_name_or_path stabilityai/stable-diffusion-xl-base-1.0 \ @@ -174,6 +180,7 @@ python text_to_image_generation.py \ > You can enable this mode with `--use_hpu_graphs`. Here is how to generate SDXL images with several prompts: + ```bash python text_to_image_generation.py \ --model_name_or_path stabilityai/stable-diffusion-xl-base-1.0 \ @@ -191,6 +198,7 @@ python text_to_image_generation.py \ SDXL combines a second text encoder (OpenCLIP ViT-bigG/14) with the original text encoder to significantly increase the number of parameters. Here is how to generate images with several prompts for both `prompt` and `prompt_2` (2nd text encoder), as well as their negative prompts: + ```bash python text_to_image_generation.py \ --model_name_or_path stabilityai/stable-diffusion-xl-base-1.0 \ @@ -209,6 +217,7 @@ python text_to_image_generation.py \ ``` Here is how to generate SDXL images with two prompts on two HPUs: + ```bash python ../gaudi_spawn.py \ --world_size 2 text_to_image_generation.py \ @@ -227,14 +236,17 @@ python ../gaudi_spawn.py \ --bf16 \ --distributed ``` + > HPU graphs are recommended when generating images by batches to get the fastest possible generations. > The first batch of images entails a performance penalty. All subsequent batches will be generated much faster. > You can enable this mode with `--use_hpu_graphs`. ### SDXL-Turbo + SDXL-Turbo is a distilled version of SDXL 1.0, trained for real-time synthesis. 
Here is how to generate images with multiple prompts: + ```bash python text_to_image_generation.py \ --model_name_or_path stabilityai/sdxl-turbo \ @@ -267,11 +279,13 @@ Before running SD3 pipeline, you need to: 1. Agree to the Terms and Conditions for using SD3 model at [HuggingFace model page](https://huggingface.co/stabilityai/stable-diffusion-3-medium) 2. Authenticate with HuggingFace using your HF Token. For authentication, run: + ```bash huggingface-cli login ``` Here is how to generate SD3 images with a single prompt: + ```bash PT_HPU_MAX_COMPOUND_OP_SIZE=1 \ python text_to_image_generation.py \ @@ -291,12 +305,32 @@ python text_to_image_generation.py \ > For improved performance of the SD3 pipeline on Gaudi, it is recommended to configure the environment > by setting PT_HPU_MAX_COMPOUND_OP_SIZE to 1. +### FLUX.1 + +FLUX.1 was was introduced by Black Forest Labs [here](https://blackforestlabs.ai/announcing-black-forest-labs/) + +```bash +python text_to_image_generation.py \ + --model_name_or_path black-forest-labs/FLUX.1-schnell \ + --prompts "A cat holding a sign that says hello world" \ + --num_images_per_prompt 10 \ + --batch_size 1 \ + --num_inference_steps 28 \ + --image_save_dir /tmp/flux_1_images \ + --scheduler flow_match_euler_discrete\ + --use_habana \ + --use_hpu_graphs \ + --gaudi_config Habana/stable-diffusion \ + --bf16 +``` + ## ControlNet -ControlNet was introduced in [Adding Conditional Control to Text-to-Image Diffusion Models ](https://huggingface.co/papers/2302.05543) by Lvmin Zhang and Maneesh Agrawala. +ControlNet was introduced in [Adding Conditional Control to Text-to-Image Diffusion Models](https://huggingface.co/papers/2302.05543) by Lvmin Zhang and Maneesh Agrawala. It is a type of model for controlling StableDiffusion by conditioning the model with an additional input image. Here is how to generate images conditioned by canny edge model: + ```bash pip install -r requirements.txt python text_to_image_generation.py \ @@ -314,6 +348,7 @@ python text_to_image_generation.py \ ``` Here is how to generate images conditioned by canny edge model and with multiple prompts: + ```bash pip install -r requirements.txt python text_to_image_generation.py \ @@ -331,6 +366,7 @@ python text_to_image_generation.py \ ``` Here is how to generate images conditioned by canny edge model and with two prompts on two HPUs: + ```bash pip install -r requirements.txt python ../gaudi_spawn.py \ @@ -350,6 +386,7 @@ python ../gaudi_spawn.py \ ``` Here is how to generate images conditioned by open pose model: + ```bash pip install -r requirements.txt python text_to_image_generation.py \ @@ -368,6 +405,7 @@ python text_to_image_generation.py \ ``` Here is how to generate images with conditioned by canny edge model using Stable Diffusion 2 + ```bash pip install -r requirements.txt python text_to_image_generation.py \ @@ -392,6 +430,7 @@ Inpainting replaces or edits specific areas of an image. For more details, please refer to [Hugging Face Diffusers doc](https://huggingface.co/docs/diffusers/en/using-diffusers/inpaint). ### Stable Diffusion Inpainting + ```bash python text_to_image_generation.py \ --model_name_or_path stabilityai/stable-diffusion-2-inpainting \ @@ -409,6 +448,7 @@ python text_to_image_generation.py \ ``` ### Stable Diffusion XL Inpainting + ```bash python text_to_image_generation.py \ --model_name_or_path diffusers/stable-diffusion-xl-1.0-inpainting-0.1\ @@ -455,10 +495,10 @@ python image_to_image_generation.py \ > The first batch of images entails a performance penalty. 
All subsequent batches will be generated much faster. > You can enable this mode with `--use_hpu_graphs`. - ### Multiple Prompts Here is how to generate images with several prompts and one image. + ```bash pip install -r requirements.txt python image_to_image_generation.py \ @@ -481,10 +521,10 @@ python image_to_image_generation.py \ > The first batch of images entails a performance penalty. All subsequent batches will be generated much faster. > You can enable this mode with `--use_hpu_graphs`. - ### Stable Diffusion XL Refiner Here is how to generate SDXL images with a single prompt and one image: + ```bash pip install -r requirements.txt python image_to_image_generation.py \ @@ -505,6 +545,7 @@ python image_to_image_generation.py \ ### Stable Diffusion Image Variations Here is how to generate images with one image, it does not accept prompt input + ```bash pip install -r requirements.txt python image_to_image_generation.py \ @@ -565,6 +606,7 @@ Script `image_to_video_generation.py` showcases how to perform image-to-video ge ### Single Image Prompt Here is how to generate video with one image prompt: + ```bash PT_HPU_MAX_COMPOUND_OP_SIZE=1 \ python image_to_video_generation.py \ @@ -585,6 +627,7 @@ python image_to_video_generation.py \ ### Multiple Image Prompts Here is how to generate videos with several image prompts: + ```bash PT_HPU_MAX_COMPOUND_OP_SIZE=1 \ python image_to_video_generation.py \ diff --git a/examples/stable-diffusion/text_to_image_generation.py b/examples/stable-diffusion/text_to_image_generation.py index 8caa659ca6..02168b080f 100755 --- a/examples/stable-diffusion/text_to_image_generation.py +++ b/examples/stable-diffusion/text_to_image_generation.py @@ -26,6 +26,7 @@ GaudiDDIMScheduler, GaudiEulerAncestralDiscreteScheduler, GaudiEulerDiscreteScheduler, + GaudiFlowMatchEulerDiscreteScheduler ) from optimum.habana.utils import set_seed @@ -65,7 +66,7 @@ def main(): parser.add_argument( "--scheduler", default="ddim", - choices=["default", "euler_discrete", "euler_ancestral_discrete", "ddim"], + choices=["default", "euler_discrete", "euler_ancestral_discrete", "ddim", "flow_match_euler_discrete"], type=str, help="Name of scheduler", ) @@ -275,13 +276,16 @@ def main(): # Select stable diffuson pipeline based on input sdxl_models = ["stable-diffusion-xl", "sdxl"] sd3_models = ["stable-diffusion-3"] + flux_models = ["FLUX.1-dev", "FLUX.1-schnell"] sdxl = True if any(model in args.model_name_or_path for model in sdxl_models) else False sd3 = True if any(model in args.model_name_or_path for model in sd3_models) else False + flux = True if any(model in args.model_name_or_path for model in flux_models) else False controlnet = True if args.control_image is not None else False inpainting = True if (args.base_image is not None) and (args.mask_image is not None) else False # Set the scheduler kwargs = {"timestep_spacing": args.timestep_spacing} + if args.scheduler == "euler_discrete": scheduler = GaudiEulerDiscreteScheduler.from_pretrained( args.model_name_or_path, subfolder="scheduler", **kwargs @@ -292,6 +296,10 @@ def main(): ) elif args.scheduler == "ddim": scheduler = GaudiDDIMScheduler.from_pretrained(args.model_name_or_path, subfolder="scheduler", **kwargs) + elif args.scheduler == "flow_match_euler_discrete": + scheduler = GaudiFlowMatchEulerDiscreteScheduler.from_pretrained( + args.model_name_or_path, subfolder="scheduler", **kwargs + ) else: scheduler = None @@ -340,16 +348,18 @@ def main(): negative_prompts = negative_prompt kwargs_call["negative_prompt"] = 
negative_prompts - if sdxl or sd3: + if sdxl or sd3 or flux: prompts_2 = args.prompts_2 - negative_prompts_2 = args.negative_prompts_2 if args.distributed and args.prompts_2 is not None: with distributed_state.split_between_processes(args.prompts_2) as prompt_2: prompts_2 = prompt_2 + kwargs_call["prompt_2"] = prompts_2 + + if sdxl or sd3: + negative_prompts_2 = args.negative_prompts_2 if args.distributed and args.negative_prompts_2 is not None: with distributed_state.split_between_processes(args.negative_prompts_2) as negative_prompt_2: negative_prompts_2 = negative_prompt_2 - kwargs_call["prompt_2"] = prompts_2 kwargs_call["negative_prompt_2"] = negative_prompts_2 if sd3: @@ -428,6 +438,22 @@ def main(): args.model_name_or_path, **kwargs, ) + elif flux: + # SD3 pipelines + if controlnet: + # Import SD3+ControlNet pipeline + raise ValueError("SD3+ControlNet pipeline is not currenly supported") + elif inpainting: + # Import SD3 Inpainting pipeline + raise ValueError("SD3 Inpainting pipeline is not currenly supported") + else: + # Import SD3 pipeline + from optimum.habana.diffusers import GaudiFluxPipeline + + pipeline = GaudiFluxPipeline.from_pretrained( + args.model_name_or_path, + **kwargs, + ) else: # SD pipelines (SD1.x, SD2.x) diff --git a/optimum/habana/diffusers/__init__.py b/optimum/habana/diffusers/__init__.py index 4ca2933e91..3be8537f45 100644 --- a/optimum/habana/diffusers/__init__.py +++ b/optimum/habana/diffusers/__init__.py @@ -21,4 +21,4 @@ from .pipelines.stable_diffusion_xl.pipeline_stable_diffusion_xl_inpaint import GaudiStableDiffusionXLInpaintPipeline from .pipelines.stable_video_diffusion.pipeline_stable_video_diffusion import GaudiStableVideoDiffusionPipeline from .pipelines.text_to_video_synthesis.pipeline_text_to_video_synth import GaudiTextToVideoSDPipeline -from .schedulers import GaudiDDIMScheduler, GaudiEulerAncestralDiscreteScheduler, GaudiEulerDiscreteScheduler +from .schedulers import GaudiDDIMScheduler, GaudiEulerAncestralDiscreteScheduler, GaudiEulerDiscreteScheduler, GaudiFlowMatchEulerDiscreteScheduler From 073b6d0d27eea0d7adc8224f1ff050fceb0d767e Mon Sep 17 00:00:00 2001 From: baocheny Date: Wed, 18 Sep 2024 16:36:53 +0800 Subject: [PATCH 09/30] rem demo wkld entrypoint --- .../stable-diffusion/run_flux_pipeline.py | 32 ------------------- 1 file changed, 32 deletions(-) delete mode 100644 examples/stable-diffusion/run_flux_pipeline.py diff --git a/examples/stable-diffusion/run_flux_pipeline.py b/examples/stable-diffusion/run_flux_pipeline.py deleted file mode 100644 index e41ba633a9..0000000000 --- a/examples/stable-diffusion/run_flux_pipeline.py +++ /dev/null @@ -1,32 +0,0 @@ -import argparse -import torch -from optimum.habana.diffusers import GaudiFluxPipeline - - -parser = argparse.ArgumentParser() -parser.add_argument("--warmup", type=int, default=3, help="warmup iterations") -parser.add_argument("--iterations", type=int, default=3, help="warmup iterations") -parser.add_argument("--use_hpu_graph", action='store_true', help="use hpu graph") -args = parser.parse_args() - - -pipe = GaudiFluxPipeline.from_pretrained( - "black-forest-labs/FLUX.1-schnell", - torch_dtype=torch.bfloat16, - use_habana=True, - use_hpu_graphs=args.use_hpu_graph, - gaudi_config="Habana/stable-diffusion", -) - -if args.use_hpu_graph: - from habana_frameworks.torch.hpu import wrap_in_hpu_graph - pipe.transformer = wrap_in_hpu_graph(pipe.transformer) -prompt = "A cat in a bin, and holding a sign '/bin/cat' " -# Depending on the variant being used, the pipeline call will slightly 
vary. -# Refer to the pipeline documentation for more details. -print("warmuping...") -for i in range(args.warmup): - image = pipe(prompt, num_inference_steps=28, guidance_scale=0.0).images[0] -torch.hpu.synchronize() -image = pipe(prompt, num_inference_steps=28, guidance_scale=0.0, profiling_warmup_steps=3, profiling_steps=3).images[0] -image.save("flux.png") From 60c5de3ce2ba2437735ec00a27c1fee433417f7c Mon Sep 17 00:00:00 2001 From: baocheny Date: Mon, 23 Sep 2024 14:03:05 +0800 Subject: [PATCH 10/30] add wrap in hpu graph --- .../stable-diffusion/text_to_image_generation.py | 12 ++++++------ optimum/habana/diffusers/pipelines/auto_pipeline.py | 4 ++++ .../habana/diffusers/pipelines/flux/pipeline_flux.py | 3 +++ optimum/habana/diffusers/pipelines/pipeline_utils.py | 1 + 4 files changed, 14 insertions(+), 6 deletions(-) diff --git a/examples/stable-diffusion/text_to_image_generation.py b/examples/stable-diffusion/text_to_image_generation.py index 02168b080f..8425389b4b 100755 --- a/examples/stable-diffusion/text_to_image_generation.py +++ b/examples/stable-diffusion/text_to_image_generation.py @@ -439,15 +439,15 @@ def main(): **kwargs, ) elif flux: - # SD3 pipelines + # Flux pipelines if controlnet: - # Import SD3+ControlNet pipeline - raise ValueError("SD3+ControlNet pipeline is not currenly supported") + # Import Flux+ControlNet pipeline + raise ValueError("Flux+ControlNet pipeline is not currently supported") elif inpainting: - # Import SD3 Inpainting pipeline - raise ValueError("SD3 Inpainting pipeline is not currenly supported") + # Import Flux Inpainting pipeline + raise ValueError("Flux Inpainting pipeline is not currently supported") else: - # Import SD3 pipeline + # Import Flux pipeline from optimum.habana.diffusers import GaudiFluxPipeline pipeline = GaudiFluxPipeline.from_pretrained( args.model_name_or_path, **kwargs, ) diff --git a/optimum/habana/diffusers/pipelines/auto_pipeline.py b/optimum/habana/diffusers/pipelines/auto_pipeline.py index 77171c9502..a7fb5431ca 100644 --- a/optimum/habana/diffusers/pipelines/auto_pipeline.py +++ b/optimum/habana/diffusers/pipelines/auto_pipeline.py @@ -33,6 +33,8 @@ from .stable_diffusion.pipeline_stable_diffusion_inpaint import GaudiStableDiffusionInpaintPipeline from .stable_diffusion_xl.pipeline_stable_diffusion_xl import GaudiStableDiffusionXLPipeline from .stable_diffusion_xl.pipeline_stable_diffusion_xl_inpaint import GaudiStableDiffusionXLInpaintPipeline +from .stable_diffusion_3.pipeline_stable_diffusion_3 import GaudiStableDiffusion3Pipeline +from .flux.pipeline_flux import GaudiFluxPipeline GAUDI_PREFIX_NAME = "Gaudi" @@ -42,6 +44,8 @@ ("stable-diffusion", GaudiStableDiffusionPipeline), ("stable-diffusion-xl", GaudiStableDiffusionXLPipeline), ("stable-diffusion-controlnet", GaudiStableDiffusionControlNetPipeline), + ("stable-diffusion-3", GaudiStableDiffusion3Pipeline), + ("flux", GaudiFluxPipeline), ] ) diff --git a/optimum/habana/diffusers/pipelines/flux/pipeline_flux.py b/optimum/habana/diffusers/pipelines/flux/pipeline_flux.py index 0b5cfc2db0..e2f432c6ac 100644 --- a/optimum/habana/diffusers/pipelines/flux/pipeline_flux.py +++ b/optimum/habana/diffusers/pipelines/flux/pipeline_flux.py @@ -140,6 +140,9 @@ def __init__( transformer=transformer, ) self.to(self._device) + if use_hpu_graphs: + from habana_frameworks.torch.hpu import wrap_in_hpu_graph + transformer = wrap_in_hpu_graph(transformer) @torch.no_grad() @replace_example_docstring(EXAMPLE_DOC_STRING) diff --git a/optimum/habana/diffusers/pipelines/pipeline_utils.py 
b/optimum/habana/diffusers/pipelines/pipeline_utils.py index 7f36b90ae4..6e659edff4 100644 --- a/optimum/habana/diffusers/pipelines/pipeline_utils.py +++ b/optimum/habana/diffusers/pipelines/pipeline_utils.py @@ -55,6 +55,7 @@ "optimum.habana.diffusers.schedulers": { "GaudiDDIMScheduler": ["save_pretrained", "from_pretrained"], "GaudiEulerDiscreteScheduler": ["save_pretrained", "from_pretrained"], + "GaudiFlowMatchEulerDiscreteScheduler": ["save_pretrained", "from_pretrained"], "GaudiEulerAncestralDiscreteScheduler": ["save_pretrained", "from_pretrained"], }, } From e1879304c12f46dd817369a9fd425c46a7fbcb0a Mon Sep 17 00:00:00 2001 From: Daniel Socek Date: Thu, 26 Sep 2024 12:53:13 +0000 Subject: [PATCH 11/30] Add fp8 to flux and fix timing Signed-off-by: Daniel Socek --- .../measure_all/fp8_hooks_maxabs.json | 8071 +++++++++++++++++ .../quantize/measure_all/fp8_hooks_maxabs.npz | Bin 0 -> 97750 bytes ...fp8_hooks_maxabs_MAXABS_HW_OPT_WEIGHT.json | 7567 ++++++++++++++++ .../fp8_hooks_maxabs_MAXABS_HW_OPT_WEIGHT.npz | Bin 0 -> 124974 bytes .../fp8_hooks_maxabs_mod_list.json | 506 ++ .../measure_all_500/fp8_hooks_maxabs.json | 8071 +++++++++++++++++ .../measure_all_500/fp8_hooks_maxabs.npz | Bin 0 -> 97750 bytes ...fp8_hooks_maxabs_MAXABS_HW_OPT_WEIGHT.json | 7567 ++++++++++++++++ .../fp8_hooks_maxabs_MAXABS_HW_OPT_WEIGHT.npz | Bin 0 -> 124974 bytes .../fp8_hooks_maxabs_mod_list.json | 506 ++ .../quantize/measure_config.json | 5 + .../quantize/quant_config.json | 6 + .../quantize/quant_config_500.json | 6 + .../quantize/quant_config_bmm.json | 7 + examples/stable-diffusion/readme.txt | 12 + examples/stable-diffusion/run_bf16.sh | 13 + examples/stable-diffusion/run_fp8.sh | 16 + examples/stable-diffusion/run_fp8_500.sh | 16 + .../text_to_image_generation.py | 9 + .../unconditional_image_generation.py | 0 .../diffusers/pipelines/flux/pipeline_flux.py | 47 +- 21 files changed, 32414 insertions(+), 11 deletions(-) create mode 100644 examples/stable-diffusion/quantize/measure_all/fp8_hooks_maxabs.json create mode 100644 examples/stable-diffusion/quantize/measure_all/fp8_hooks_maxabs.npz create mode 100644 examples/stable-diffusion/quantize/measure_all/fp8_hooks_maxabs_MAXABS_HW_OPT_WEIGHT.json create mode 100644 examples/stable-diffusion/quantize/measure_all/fp8_hooks_maxabs_MAXABS_HW_OPT_WEIGHT.npz create mode 100644 examples/stable-diffusion/quantize/measure_all/fp8_hooks_maxabs_mod_list.json create mode 100644 examples/stable-diffusion/quantize/measure_all_500/fp8_hooks_maxabs.json create mode 100644 examples/stable-diffusion/quantize/measure_all_500/fp8_hooks_maxabs.npz create mode 100644 examples/stable-diffusion/quantize/measure_all_500/fp8_hooks_maxabs_MAXABS_HW_OPT_WEIGHT.json create mode 100644 examples/stable-diffusion/quantize/measure_all_500/fp8_hooks_maxabs_MAXABS_HW_OPT_WEIGHT.npz create mode 100644 examples/stable-diffusion/quantize/measure_all_500/fp8_hooks_maxabs_mod_list.json create mode 100755 examples/stable-diffusion/quantize/measure_config.json create mode 100755 examples/stable-diffusion/quantize/quant_config.json create mode 100755 examples/stable-diffusion/quantize/quant_config_500.json create mode 100755 examples/stable-diffusion/quantize/quant_config_bmm.json create mode 100644 examples/stable-diffusion/readme.txt create mode 100755 examples/stable-diffusion/run_bf16.sh create mode 100755 examples/stable-diffusion/run_fp8.sh create mode 100755 examples/stable-diffusion/run_fp8_500.sh mode change 100644 => 100755 examples/stable-diffusion/unconditional_image_generation.py 
diff --git a/examples/stable-diffusion/quantize/measure_all/fp8_hooks_maxabs.json b/examples/stable-diffusion/quantize/measure_all/fp8_hooks_maxabs.json new file mode 100644 index 0000000000..8e4c0fb98e --- /dev/null +++ b/examples/stable-diffusion/quantize/measure_all/fp8_hooks_maxabs.json @@ -0,0 +1,8071 @@ +{ + "GlobalRank": null, + "LocalRank": null, + "Mode": "DynamicRange", + "Nodes": { + "time_text_embed.timestep_embedder.linear_1": { + "inputs": [ + [ + [ + 1.0 + ] + ] + ], + "params": { + "weight": [ + [ + 0.28515625 + ] + ] + } + }, + "time_text_embed.timestep_embedder.linear_2": { + "inputs": [ + [ + [ + 3.28125 + ] + ] + ], + "params": { + "weight": [ + [ + 1.1484375 + ] + ] + } + }, + "time_text_embed.guidance_embedder.linear_1": { + "inputs": [ + [ + [ + 1.0 + ] + ] + ], + "params": { + "weight": [ + [ + 0.10400390625 + ] + ] + } + }, + "time_text_embed.guidance_embedder.linear_2": { + "inputs": [ + [ + [ + 0.60546875 + ] + ] + ], + "params": { + "weight": [ + [ + 0.1201171875 + ] + ] + } + }, + "time_text_embed.text_embedder.linear_1": { + "inputs": [ + [ + [ + 4.0625 + ] + ] + ], + "params": { + "weight": [ + [ + 0.263671875 + ] + ] + } + }, + "time_text_embed.text_embedder.linear_2": { + "inputs": [ + [ + [ + 0.373046875 + ] + ] + ], + "params": { + "weight": [ + [ + 1.171875 + ] + ] + } + }, + "context_embedder": { + "inputs": [ + [ + [ + 6.4375 + ] + ] + ], + "params": { + "weight": [ + [ + 0.72265625 + ] + ] + } + }, + "x_embedder": { + "inputs": [ + [ + [ + 5.0 + ] + ] + ], + "params": { + "weight": [ + [ + 0.55859375 + ] + ] + } + }, + "transformer_blocks.0.norm1.linear": { + "inputs": [ + [ + [ + 6.4375 + ] + ] + ], + "params": { + "weight": [ + [ + 0.396484375 + ] + ] + } + }, + "transformer_blocks.0.norm1_context.linear": { + "inputs": [ + [ + [ + 6.4375 + ] + ] + ], + "params": { + "weight": [ + [ + 0.546875 + ] + ] + } + }, + "transformer_blocks.0.attn.to_q": { + "inputs": [ + [ + [ + 15.5 + ] + ] + ], + "params": { + "weight": [ + [ + 2.4375 + ] + ] + } + }, + "transformer_blocks.0.attn.to_k": { + "inputs": [ + [ + [ + 15.5 + ] + ] + ], + "params": { + "weight": [ + [ + 0.5390625 + ] + ] + } + }, + "transformer_blocks.0.attn.to_v": { + "inputs": [ + [ + [ + 15.5 + ] + ] + ], + "params": { + "weight": [ + [ + 0.38671875 + ] + ] + } + }, + "transformer_blocks.0.attn.add_k_proj": { + "inputs": [ + [ + [ + 14.25 + ] + ] + ], + "params": { + "weight": [ + [ + 0.5390625 + ] + ] + } + }, + "transformer_blocks.0.attn.add_v_proj": { + "inputs": [ + [ + [ + 14.25 + ] + ] + ], + "params": { + "weight": [ + [ + 0.236328125 + ] + ] + } + }, + "transformer_blocks.0.attn.add_q_proj": { + "inputs": [ + [ + [ + 14.25 + ] + ] + ], + "params": { + "weight": [ + [ + 0.37890625 + ] + ] + } + }, + "transformer_blocks.0.attn.to_out.0": { + "inputs": [ + [ + [ + 1.578125 + ] + ] + ], + "params": { + "weight": [ + [ + 0.408203125 + ] + ] + } + }, + "transformer_blocks.0.attn.to_add_out": { + "inputs": [ + [ + [ + 7.46875 + ] + ] + ], + "params": { + "weight": [ + [ + 0.40625 + ] + ] + } + }, + "transformer_blocks.0.ff.net.0.proj": { + "inputs": [ + [ + [ + 4.4375 + ] + ] + ], + "params": { + "weight": [ + [ + 0.439453125 + ] + ] + } + }, + "transformer_blocks.0.ff.net.2": { + "inputs": [ + [ + [ + 7.71875 + ] + ] + ], + "params": { + "weight": [ + [ + 0.60546875 + ] + ] + } + }, + "transformer_blocks.0.ff_context.net.0.proj": { + "inputs": [ + [ + [ + 10.5 + ] + ] + ], + "params": { + "weight": [ + [ + 0.419921875 + ] + ] + } + }, + "transformer_blocks.0.ff_context.net.2": { + "inputs": 
[ + [ + [ + 39.75 + ] + ] + ], + "params": { + "weight": [ + [ + 0.50390625 + ] + ] + } + }, + "transformer_blocks.1.norm1.linear": { + "inputs": [ + [ + [ + 6.4375 + ] + ] + ], + "params": { + "weight": [ + [ + 0.71484375 + ] + ] + } + }, + "transformer_blocks.1.norm1_context.linear": { + "inputs": [ + [ + [ + 6.4375 + ] + ] + ], + "params": { + "weight": [ + [ + 0.546875 + ] + ] + } + }, + "transformer_blocks.1.attn.to_q": { + "inputs": [ + [ + [ + 18.0 + ] + ] + ], + "params": { + "weight": [ + [ + 0.72265625 + ] + ] + } + }, + "transformer_blocks.1.attn.to_k": { + "inputs": [ + [ + [ + 18.0 + ] + ] + ], + "params": { + "weight": [ + [ + 1.0 + ] + ] + } + }, + "transformer_blocks.1.attn.to_v": { + "inputs": [ + [ + [ + 18.0 + ] + ] + ], + "params": { + "weight": [ + [ + 0.546875 + ] + ] + } + }, + "transformer_blocks.1.attn.add_k_proj": { + "inputs": [ + [ + [ + 34.75 + ] + ] + ], + "params": { + "weight": [ + [ + 0.3828125 + ] + ] + } + }, + "transformer_blocks.1.attn.add_v_proj": { + "inputs": [ + [ + [ + 34.75 + ] + ] + ], + "params": { + "weight": [ + [ + 0.28125 + ] + ] + } + }, + "transformer_blocks.1.attn.add_q_proj": { + "inputs": [ + [ + [ + 34.75 + ] + ] + ], + "params": { + "weight": [ + [ + 0.376953125 + ] + ] + } + }, + "transformer_blocks.1.attn.to_out.0": { + "inputs": [ + [ + [ + 7.125 + ] + ] + ], + "params": { + "weight": [ + [ + 0.4765625 + ] + ] + } + }, + "transformer_blocks.1.attn.to_add_out": { + "inputs": [ + [ + [ + 9.75 + ] + ] + ], + "params": { + "weight": [ + [ + 0.296875 + ] + ] + } + }, + "transformer_blocks.1.ff.net.0.proj": { + "inputs": [ + [ + [ + 10.25 + ] + ] + ], + "params": { + "weight": [ + [ + 0.47265625 + ] + ] + } + }, + "transformer_blocks.1.ff.net.2": { + "inputs": [ + [ + [ + 11.0 + ] + ] + ], + "params": { + "weight": [ + [ + 0.59765625 + ] + ] + } + }, + "transformer_blocks.1.ff_context.net.0.proj": { + "inputs": [ + [ + [ + 66.5 + ] + ] + ], + "params": { + "weight": [ + [ + 0.40234375 + ] + ] + } + }, + "transformer_blocks.1.ff_context.net.2": { + "inputs": [ + [ + [ + 82.5 + ] + ] + ], + "params": { + "weight": [ + [ + 0.765625 + ] + ] + } + }, + "transformer_blocks.2.norm1.linear": { + "inputs": [ + [ + [ + 6.4375 + ] + ] + ], + "params": { + "weight": [ + [ + 0.82421875 + ] + ] + } + }, + "transformer_blocks.2.norm1_context.linear": { + "inputs": [ + [ + [ + 6.4375 + ] + ] + ], + "params": { + "weight": [ + [ + 0.71484375 + ] + ] + } + }, + "transformer_blocks.2.attn.to_q": { + "inputs": [ + [ + [ + 11.0 + ] + ] + ], + "params": { + "weight": [ + [ + 0.97265625 + ] + ] + } + }, + "transformer_blocks.2.attn.to_k": { + "inputs": [ + [ + [ + 11.0 + ] + ] + ], + "params": { + "weight": [ + [ + 0.7421875 + ] + ] + } + }, + "transformer_blocks.2.attn.to_v": { + "inputs": [ + [ + [ + 11.0 + ] + ] + ], + "params": { + "weight": [ + [ + 0.44140625 + ] + ] + } + }, + "transformer_blocks.2.attn.add_k_proj": { + "inputs": [ + [ + [ + 33.0 + ] + ] + ], + "params": { + "weight": [ + [ + 0.6796875 + ] + ] + } + }, + "transformer_blocks.2.attn.add_v_proj": { + "inputs": [ + [ + [ + 33.0 + ] + ] + ], + "params": { + "weight": [ + [ + 0.26171875 + ] + ] + } + }, + "transformer_blocks.2.attn.add_q_proj": { + "inputs": [ + [ + [ + 33.0 + ] + ] + ], + "params": { + "weight": [ + [ + 0.423828125 + ] + ] + } + }, + "transformer_blocks.2.attn.to_out.0": { + "inputs": [ + [ + [ + 9.5 + ] + ] + ], + "params": { + "weight": [ + [ + 0.59765625 + ] + ] + } + }, + "transformer_blocks.2.attn.to_add_out": { + "inputs": [ + [ + [ + 4.6875 + ] + ] + ], + "params": { + 
"weight": [ + [ + 0.361328125 + ] + ] + } + }, + "transformer_blocks.2.ff.net.0.proj": { + "inputs": [ + [ + [ + 4.71875 + ] + ] + ], + "params": { + "weight": [ + [ + 0.76171875 + ] + ] + } + }, + "transformer_blocks.2.ff.net.2": { + "inputs": [ + [ + [ + 6.4375 + ] + ] + ], + "params": { + "weight": [ + [ + 0.59765625 + ] + ] + } + }, + "transformer_blocks.2.ff_context.net.0.proj": { + "inputs": [ + [ + [ + 65.0 + ] + ] + ], + "params": { + "weight": [ + [ + 0.482421875 + ] + ] + } + }, + "transformer_blocks.2.ff_context.net.2": { + "inputs": [ + [ + [ + 29.0 + ] + ] + ], + "params": { + "weight": [ + [ + 0.609375 + ] + ] + } + }, + "transformer_blocks.3.norm1.linear": { + "inputs": [ + [ + [ + 6.4375 + ] + ] + ], + "params": { + "weight": [ + [ + 0.68359375 + ] + ] + } + }, + "transformer_blocks.3.norm1_context.linear": { + "inputs": [ + [ + [ + 6.4375 + ] + ] + ], + "params": { + "weight": [ + [ + 0.412109375 + ] + ] + } + }, + "transformer_blocks.3.attn.to_q": { + "inputs": [ + [ + [ + 11.4375 + ] + ] + ], + "params": { + "weight": [ + [ + 1.1640625 + ] + ] + } + }, + "transformer_blocks.3.attn.to_k": { + "inputs": [ + [ + [ + 11.4375 + ] + ] + ], + "params": { + "weight": [ + [ + 0.6953125 + ] + ] + } + }, + "transformer_blocks.3.attn.to_v": { + "inputs": [ + [ + [ + 11.4375 + ] + ] + ], + "params": { + "weight": [ + [ + 0.484375 + ] + ] + } + }, + "transformer_blocks.3.attn.add_k_proj": { + "inputs": [ + [ + [ + 25.625 + ] + ] + ], + "params": { + "weight": [ + [ + 0.474609375 + ] + ] + } + }, + "transformer_blocks.3.attn.add_v_proj": { + "inputs": [ + [ + [ + 25.625 + ] + ] + ], + "params": { + "weight": [ + [ + 0.255859375 + ] + ] + } + }, + "transformer_blocks.3.attn.add_q_proj": { + "inputs": [ + [ + [ + 25.625 + ] + ] + ], + "params": { + "weight": [ + [ + 0.41796875 + ] + ] + } + }, + "transformer_blocks.3.attn.to_out.0": { + "inputs": [ + [ + [ + 10.125 + ] + ] + ], + "params": { + "weight": [ + [ + 0.35546875 + ] + ] + } + }, + "transformer_blocks.3.attn.to_add_out": { + "inputs": [ + [ + [ + 3.359375 + ] + ] + ], + "params": { + "weight": [ + [ + 0.390625 + ] + ] + } + }, + "transformer_blocks.3.ff.net.0.proj": { + "inputs": [ + [ + [ + 11.6875 + ] + ] + ], + "params": { + "weight": [ + [ + 0.578125 + ] + ] + } + }, + "transformer_blocks.3.ff.net.2": { + "inputs": [ + [ + [ + 14.25 + ] + ] + ], + "params": { + "weight": [ + [ + 0.765625 + ] + ] + } + }, + "transformer_blocks.3.ff_context.net.0.proj": { + "inputs": [ + [ + [ + 9.375 + ] + ] + ], + "params": { + "weight": [ + [ + 0.5234375 + ] + ] + } + }, + "transformer_blocks.3.ff_context.net.2": { + "inputs": [ + [ + [ + 19.0 + ] + ] + ], + "params": { + "weight": [ + [ + 0.53515625 + ] + ] + } + }, + "transformer_blocks.4.norm1.linear": { + "inputs": [ + [ + [ + 6.4375 + ] + ] + ], + "params": { + "weight": [ + [ + 0.8515625 + ] + ] + } + }, + "transformer_blocks.4.norm1_context.linear": { + "inputs": [ + [ + [ + 6.4375 + ] + ] + ], + "params": { + "weight": [ + [ + 0.40625 + ] + ] + } + }, + "transformer_blocks.4.attn.to_q": { + "inputs": [ + [ + [ + 18.0 + ] + ] + ], + "params": { + "weight": [ + [ + 1.1796875 + ] + ] + } + }, + "transformer_blocks.4.attn.to_k": { + "inputs": [ + [ + [ + 18.0 + ] + ] + ], + "params": { + "weight": [ + [ + 0.75390625 + ] + ] + } + }, + "transformer_blocks.4.attn.to_v": { + "inputs": [ + [ + [ + 18.0 + ] + ] + ], + "params": { + "weight": [ + [ + 0.494140625 + ] + ] + } + }, + "transformer_blocks.4.attn.add_k_proj": { + "inputs": [ + [ + [ + 18.0 + ] + ] + ], + "params": { + "weight": [ + 
[ + 0.4765625 + ] + ] + } + }, + "transformer_blocks.4.attn.add_v_proj": { + "inputs": [ + [ + [ + 18.0 + ] + ] + ], + "params": { + "weight": [ + [ + 0.236328125 + ] + ] + } + }, + "transformer_blocks.4.attn.add_q_proj": { + "inputs": [ + [ + [ + 18.0 + ] + ] + ], + "params": { + "weight": [ + [ + 0.3671875 + ] + ] + } + }, + "transformer_blocks.4.attn.to_out.0": { + "inputs": [ + [ + [ + 12.375 + ] + ] + ], + "params": { + "weight": [ + [ + 0.345703125 + ] + ] + } + }, + "transformer_blocks.4.attn.to_add_out": { + "inputs": [ + [ + [ + 6.75 + ] + ] + ], + "params": { + "weight": [ + [ + 0.390625 + ] + ] + } + }, + "transformer_blocks.4.ff.net.0.proj": { + "inputs": [ + [ + [ + 21.25 + ] + ] + ], + "params": { + "weight": [ + [ + 0.55859375 + ] + ] + } + }, + "transformer_blocks.4.ff.net.2": { + "inputs": [ + [ + [ + 18.375 + ] + ] + ], + "params": { + "weight": [ + [ + 1.09375 + ] + ] + } + }, + "transformer_blocks.4.ff_context.net.0.proj": { + "inputs": [ + [ + [ + 7.5 + ] + ] + ], + "params": { + "weight": [ + [ + 0.6484375 + ] + ] + } + }, + "transformer_blocks.4.ff_context.net.2": { + "inputs": [ + [ + [ + 16.25 + ] + ] + ], + "params": { + "weight": [ + [ + 0.5390625 + ] + ] + } + }, + "transformer_blocks.5.norm1.linear": { + "inputs": [ + [ + [ + 6.4375 + ] + ] + ], + "params": { + "weight": [ + [ + 0.8125 + ] + ] + } + }, + "transformer_blocks.5.norm1_context.linear": { + "inputs": [ + [ + [ + 6.4375 + ] + ] + ], + "params": { + "weight": [ + [ + 0.380859375 + ] + ] + } + }, + "transformer_blocks.5.attn.to_q": { + "inputs": [ + [ + [ + 14.0625 + ] + ] + ], + "params": { + "weight": [ + [ + 0.84765625 + ] + ] + } + }, + "transformer_blocks.5.attn.to_k": { + "inputs": [ + [ + [ + 14.0625 + ] + ] + ], + "params": { + "weight": [ + [ + 0.5078125 + ] + ] + } + }, + "transformer_blocks.5.attn.to_v": { + "inputs": [ + [ + [ + 14.0625 + ] + ] + ], + "params": { + "weight": [ + [ + 0.380859375 + ] + ] + } + }, + "transformer_blocks.5.attn.add_k_proj": { + "inputs": [ + [ + [ + 19.625 + ] + ] + ], + "params": { + "weight": [ + [ + 0.380859375 + ] + ] + } + }, + "transformer_blocks.5.attn.add_v_proj": { + "inputs": [ + [ + [ + 19.625 + ] + ] + ], + "params": { + "weight": [ + [ + 0.298828125 + ] + ] + } + }, + "transformer_blocks.5.attn.add_q_proj": { + "inputs": [ + [ + [ + 19.625 + ] + ] + ], + "params": { + "weight": [ + [ + 0.287109375 + ] + ] + } + }, + "transformer_blocks.5.attn.to_out.0": { + "inputs": [ + [ + [ + 8.0 + ] + ] + ], + "params": { + "weight": [ + [ + 0.28125 + ] + ] + } + }, + "transformer_blocks.5.attn.to_add_out": { + "inputs": [ + [ + [ + 9.4375 + ] + ] + ], + "params": { + "weight": [ + [ + 0.306640625 + ] + ] + } + }, + "transformer_blocks.5.ff.net.0.proj": { + "inputs": [ + [ + [ + 18.5 + ] + ] + ], + "params": { + "weight": [ + [ + 0.6328125 + ] + ] + } + }, + "transformer_blocks.5.ff.net.2": { + "inputs": [ + [ + [ + 27.125 + ] + ] + ], + "params": { + "weight": [ + [ + 1.46875 + ] + ] + } + }, + "transformer_blocks.5.ff_context.net.0.proj": { + "inputs": [ + [ + [ + 11.375 + ] + ] + ], + "params": { + "weight": [ + [ + 0.54296875 + ] + ] + } + }, + "transformer_blocks.5.ff_context.net.2": { + "inputs": [ + [ + [ + 17.625 + ] + ] + ], + "params": { + "weight": [ + [ + 0.55859375 + ] + ] + } + }, + "transformer_blocks.6.norm1.linear": { + "inputs": [ + [ + [ + 6.4375 + ] + ] + ], + "params": { + "weight": [ + [ + 0.78515625 + ] + ] + } + }, + "transformer_blocks.6.norm1_context.linear": { + "inputs": [ + [ + [ + 6.4375 + ] + ] + ], + "params": { + "weight": [ + [ 
+ 0.39453125 + ] + ] + } + }, + "transformer_blocks.6.attn.to_q": { + "inputs": [ + [ + [ + 14.625 + ] + ] + ], + "params": { + "weight": [ + [ + 0.56640625 + ] + ] + } + }, + "transformer_blocks.6.attn.to_k": { + "inputs": [ + [ + [ + 14.625 + ] + ] + ], + "params": { + "weight": [ + [ + 0.53515625 + ] + ] + } + }, + "transformer_blocks.6.attn.to_v": { + "inputs": [ + [ + [ + 14.625 + ] + ] + ], + "params": { + "weight": [ + [ + 0.333984375 + ] + ] + } + }, + "transformer_blocks.6.attn.add_k_proj": { + "inputs": [ + [ + [ + 16.375 + ] + ] + ], + "params": { + "weight": [ + [ + 0.515625 + ] + ] + } + }, + "transformer_blocks.6.attn.add_v_proj": { + "inputs": [ + [ + [ + 16.375 + ] + ] + ], + "params": { + "weight": [ + [ + 0.328125 + ] + ] + } + }, + "transformer_blocks.6.attn.add_q_proj": { + "inputs": [ + [ + [ + 16.375 + ] + ] + ], + "params": { + "weight": [ + [ + 0.3515625 + ] + ] + } + }, + "transformer_blocks.6.attn.to_out.0": { + "inputs": [ + [ + [ + 7.6875 + ] + ] + ], + "params": { + "weight": [ + [ + 0.47265625 + ] + ] + } + }, + "transformer_blocks.6.attn.to_add_out": { + "inputs": [ + [ + [ + 11.875 + ] + ] + ], + "params": { + "weight": [ + [ + 0.259765625 + ] + ] + } + }, + "transformer_blocks.6.ff.net.0.proj": { + "inputs": [ + [ + [ + 8.5625 + ] + ] + ], + "params": { + "weight": [ + [ + 0.515625 + ] + ] + } + }, + "transformer_blocks.6.ff.net.2": { + "inputs": [ + [ + [ + 27.0 + ] + ] + ], + "params": { + "weight": [ + [ + 0.80859375 + ] + ] + } + }, + "transformer_blocks.6.ff_context.net.0.proj": { + "inputs": [ + [ + [ + 10.0625 + ] + ] + ], + "params": { + "weight": [ + [ + 0.62109375 + ] + ] + } + }, + "transformer_blocks.6.ff_context.net.2": { + "inputs": [ + [ + [ + 19.5 + ] + ] + ], + "params": { + "weight": [ + [ + 0.52734375 + ] + ] + } + }, + "transformer_blocks.7.norm1.linear": { + "inputs": [ + [ + [ + 6.4375 + ] + ] + ], + "params": { + "weight": [ + [ + 0.73046875 + ] + ] + } + }, + "transformer_blocks.7.norm1_context.linear": { + "inputs": [ + [ + [ + 6.4375 + ] + ] + ], + "params": { + "weight": [ + [ + 0.41015625 + ] + ] + } + }, + "transformer_blocks.7.attn.to_q": { + "inputs": [ + [ + [ + 14.75 + ] + ] + ], + "params": { + "weight": [ + [ + 0.5859375 + ] + ] + } + }, + "transformer_blocks.7.attn.to_k": { + "inputs": [ + [ + [ + 14.75 + ] + ] + ], + "params": { + "weight": [ + [ + 0.84375 + ] + ] + } + }, + "transformer_blocks.7.attn.to_v": { + "inputs": [ + [ + [ + 14.75 + ] + ] + ], + "params": { + "weight": [ + [ + 0.36328125 + ] + ] + } + }, + "transformer_blocks.7.attn.add_k_proj": { + "inputs": [ + [ + [ + 23.125 + ] + ] + ], + "params": { + "weight": [ + [ + 0.443359375 + ] + ] + } + }, + "transformer_blocks.7.attn.add_v_proj": { + "inputs": [ + [ + [ + 23.125 + ] + ] + ], + "params": { + "weight": [ + [ + 0.365234375 + ] + ] + } + }, + "transformer_blocks.7.attn.add_q_proj": { + "inputs": [ + [ + [ + 23.125 + ] + ] + ], + "params": { + "weight": [ + [ + 0.39453125 + ] + ] + } + }, + "transformer_blocks.7.attn.to_out.0": { + "inputs": [ + [ + [ + 9.875 + ] + ] + ], + "params": { + "weight": [ + [ + 0.37109375 + ] + ] + } + }, + "transformer_blocks.7.attn.to_add_out": { + "inputs": [ + [ + [ + 7.90625 + ] + ] + ], + "params": { + "weight": [ + [ + 0.375 + ] + ] + } + }, + "transformer_blocks.7.ff.net.0.proj": { + "inputs": [ + [ + [ + 13.8125 + ] + ] + ], + "params": { + "weight": [ + [ + 0.435546875 + ] + ] + } + }, + "transformer_blocks.7.ff.net.2": { + "inputs": [ + [ + [ + 47.25 + ] + ] + ], + "params": { + "weight": [ + [ + 0.94140625 + ] + ] 
+ } + }, + "transformer_blocks.7.ff_context.net.0.proj": { + "inputs": [ + [ + [ + 8.75 + ] + ] + ], + "params": { + "weight": [ + [ + 0.5390625 + ] + ] + } + }, + "transformer_blocks.7.ff_context.net.2": { + "inputs": [ + [ + [ + 16.0 + ] + ] + ], + "params": { + "weight": [ + [ + 0.5859375 + ] + ] + } + }, + "transformer_blocks.8.norm1.linear": { + "inputs": [ + [ + [ + 6.4375 + ] + ] + ], + "params": { + "weight": [ + [ + 0.546875 + ] + ] + } + }, + "transformer_blocks.8.norm1_context.linear": { + "inputs": [ + [ + [ + 6.4375 + ] + ] + ], + "params": { + "weight": [ + [ + 0.51953125 + ] + ] + } + }, + "transformer_blocks.8.attn.to_q": { + "inputs": [ + [ + [ + 13.75 + ] + ] + ], + "params": { + "weight": [ + [ + 0.43359375 + ] + ] + } + }, + "transformer_blocks.8.attn.to_k": { + "inputs": [ + [ + [ + 13.75 + ] + ] + ], + "params": { + "weight": [ + [ + 0.640625 + ] + ] + } + }, + "transformer_blocks.8.attn.to_v": { + "inputs": [ + [ + [ + 13.75 + ] + ] + ], + "params": { + "weight": [ + [ + 0.39453125 + ] + ] + } + }, + "transformer_blocks.8.attn.add_k_proj": { + "inputs": [ + [ + [ + 15.625 + ] + ] + ], + "params": { + "weight": [ + [ + 0.3359375 + ] + ] + } + }, + "transformer_blocks.8.attn.add_v_proj": { + "inputs": [ + [ + [ + 15.625 + ] + ] + ], + "params": { + "weight": [ + [ + 0.41015625 + ] + ] + } + }, + "transformer_blocks.8.attn.add_q_proj": { + "inputs": [ + [ + [ + 15.625 + ] + ] + ], + "params": { + "weight": [ + [ + 0.326171875 + ] + ] + } + }, + "transformer_blocks.8.attn.to_out.0": { + "inputs": [ + [ + [ + 10.25 + ] + ] + ], + "params": { + "weight": [ + [ + 0.3203125 + ] + ] + } + }, + "transformer_blocks.8.attn.to_add_out": { + "inputs": [ + [ + [ + 14.4375 + ] + ] + ], + "params": { + "weight": [ + [ + 0.2890625 + ] + ] + } + }, + "transformer_blocks.8.ff.net.0.proj": { + "inputs": [ + [ + [ + 8.0 + ] + ] + ], + "params": { + "weight": [ + [ + 0.51171875 + ] + ] + } + }, + "transformer_blocks.8.ff.net.2": { + "inputs": [ + [ + [ + 19.375 + ] + ] + ], + "params": { + "weight": [ + [ + 0.55078125 + ] + ] + } + }, + "transformer_blocks.8.ff_context.net.0.proj": { + "inputs": [ + [ + [ + 15.0625 + ] + ] + ], + "params": { + "weight": [ + [ + 0.494140625 + ] + ] + } + }, + "transformer_blocks.8.ff_context.net.2": { + "inputs": [ + [ + [ + 15.6875 + ] + ] + ], + "params": { + "weight": [ + [ + 0.44140625 + ] + ] + } + }, + "transformer_blocks.9.norm1.linear": { + "inputs": [ + [ + [ + 6.4375 + ] + ] + ], + "params": { + "weight": [ + [ + 0.59765625 + ] + ] + } + }, + "transformer_blocks.9.norm1_context.linear": { + "inputs": [ + [ + [ + 6.4375 + ] + ] + ], + "params": { + "weight": [ + [ + 0.470703125 + ] + ] + } + }, + "transformer_blocks.9.attn.to_q": { + "inputs": [ + [ + [ + 14.125 + ] + ] + ], + "params": { + "weight": [ + [ + 0.462890625 + ] + ] + } + }, + "transformer_blocks.9.attn.to_k": { + "inputs": [ + [ + [ + 14.125 + ] + ] + ], + "params": { + "weight": [ + [ + 0.427734375 + ] + ] + } + }, + "transformer_blocks.9.attn.to_v": { + "inputs": [ + [ + [ + 14.125 + ] + ] + ], + "params": { + "weight": [ + [ + 0.32421875 + ] + ] + } + }, + "transformer_blocks.9.attn.add_k_proj": { + "inputs": [ + [ + [ + 17.125 + ] + ] + ], + "params": { + "weight": [ + [ + 0.44140625 + ] + ] + } + }, + "transformer_blocks.9.attn.add_v_proj": { + "inputs": [ + [ + [ + 17.125 + ] + ] + ], + "params": { + "weight": [ + [ + 0.310546875 + ] + ] + } + }, + "transformer_blocks.9.attn.add_q_proj": { + "inputs": [ + [ + [ + 17.125 + ] + ] + ], + "params": { + "weight": [ + [ + 0.271484375 + 
] + ] + } + }, + "transformer_blocks.9.attn.to_out.0": { + "inputs": [ + [ + [ + 14.5 + ] + ] + ], + "params": { + "weight": [ + [ + 0.31640625 + ] + ] + } + }, + "transformer_blocks.9.attn.to_add_out": { + "inputs": [ + [ + [ + 7.0625 + ] + ] + ], + "params": { + "weight": [ + [ + 0.302734375 + ] + ] + } + }, + "transformer_blocks.9.ff.net.0.proj": { + "inputs": [ + [ + [ + 10.375 + ] + ] + ], + "params": { + "weight": [ + [ + 0.5859375 + ] + ] + } + }, + "transformer_blocks.9.ff.net.2": { + "inputs": [ + [ + [ + 12.4375 + ] + ] + ], + "params": { + "weight": [ + [ + 0.640625 + ] + ] + } + }, + "transformer_blocks.9.ff_context.net.0.proj": { + "inputs": [ + [ + [ + 12.5625 + ] + ] + ], + "params": { + "weight": [ + [ + 0.5078125 + ] + ] + } + }, + "transformer_blocks.9.ff_context.net.2": { + "inputs": [ + [ + [ + 20.375 + ] + ] + ], + "params": { + "weight": [ + [ + 0.55078125 + ] + ] + } + }, + "transformer_blocks.10.norm1.linear": { + "inputs": [ + [ + [ + 6.4375 + ] + ] + ], + "params": { + "weight": [ + [ + 0.58984375 + ] + ] + } + }, + "transformer_blocks.10.norm1_context.linear": { + "inputs": [ + [ + [ + 6.4375 + ] + ] + ], + "params": { + "weight": [ + [ + 0.3828125 + ] + ] + } + }, + "transformer_blocks.10.attn.to_q": { + "inputs": [ + [ + [ + 13.1875 + ] + ] + ], + "params": { + "weight": [ + [ + 0.380859375 + ] + ] + } + }, + "transformer_blocks.10.attn.to_k": { + "inputs": [ + [ + [ + 13.1875 + ] + ] + ], + "params": { + "weight": [ + [ + 0.478515625 + ] + ] + } + }, + "transformer_blocks.10.attn.to_v": { + "inputs": [ + [ + [ + 13.1875 + ] + ] + ], + "params": { + "weight": [ + [ + 0.310546875 + ] + ] + } + }, + "transformer_blocks.10.attn.add_k_proj": { + "inputs": [ + [ + [ + 14.0625 + ] + ] + ], + "params": { + "weight": [ + [ + 0.333984375 + ] + ] + } + }, + "transformer_blocks.10.attn.add_v_proj": { + "inputs": [ + [ + [ + 14.0625 + ] + ] + ], + "params": { + "weight": [ + [ + 0.24609375 + ] + ] + } + }, + "transformer_blocks.10.attn.add_q_proj": { + "inputs": [ + [ + [ + 14.0625 + ] + ] + ], + "params": { + "weight": [ + [ + 0.376953125 + ] + ] + } + }, + "transformer_blocks.10.attn.to_out.0": { + "inputs": [ + [ + [ + 12.25 + ] + ] + ], + "params": { + "weight": [ + [ + 0.263671875 + ] + ] + } + }, + "transformer_blocks.10.attn.to_add_out": { + "inputs": [ + [ + [ + 7.9375 + ] + ] + ], + "params": { + "weight": [ + [ + 0.25390625 + ] + ] + } + }, + "transformer_blocks.10.ff.net.0.proj": { + "inputs": [ + [ + [ + 12.375 + ] + ] + ], + "params": { + "weight": [ + [ + 0.48828125 + ] + ] + } + }, + "transformer_blocks.10.ff.net.2": { + "inputs": [ + [ + [ + 15.75 + ] + ] + ], + "params": { + "weight": [ + [ + 0.8203125 + ] + ] + } + }, + "transformer_blocks.10.ff_context.net.0.proj": { + "inputs": [ + [ + [ + 62.75 + ] + ] + ], + "params": { + "weight": [ + [ + 0.451171875 + ] + ] + } + }, + "transformer_blocks.10.ff_context.net.2": { + "inputs": [ + [ + [ + 34.5 + ] + ] + ], + "params": { + "weight": [ + [ + 0.71875 + ] + ] + } + }, + "transformer_blocks.11.norm1.linear": { + "inputs": [ + [ + [ + 6.4375 + ] + ] + ], + "params": { + "weight": [ + [ + 0.640625 + ] + ] + } + }, + "transformer_blocks.11.norm1_context.linear": { + "inputs": [ + [ + [ + 6.4375 + ] + ] + ], + "params": { + "weight": [ + [ + 0.455078125 + ] + ] + } + }, + "transformer_blocks.11.attn.to_q": { + "inputs": [ + [ + [ + 15.5625 + ] + ] + ], + "params": { + "weight": [ + [ + 0.52734375 + ] + ] + } + }, + "transformer_blocks.11.attn.to_k": { + "inputs": [ + [ + [ + 15.5625 + ] + ] + ], + "params": { + 
"weight": [ + [ + 0.427734375 + ] + ] + } + }, + "transformer_blocks.11.attn.to_v": { + "inputs": [ + [ + [ + 15.5625 + ] + ] + ], + "params": { + "weight": [ + [ + 0.33203125 + ] + ] + } + }, + "transformer_blocks.11.attn.add_k_proj": { + "inputs": [ + [ + [ + 28.5 + ] + ] + ], + "params": { + "weight": [ + [ + 0.7109375 + ] + ] + } + }, + "transformer_blocks.11.attn.add_v_proj": { + "inputs": [ + [ + [ + 28.5 + ] + ] + ], + "params": { + "weight": [ + [ + 0.2294921875 + ] + ] + } + }, + "transformer_blocks.11.attn.add_q_proj": { + "inputs": [ + [ + [ + 28.5 + ] + ] + ], + "params": { + "weight": [ + [ + 0.3125 + ] + ] + } + }, + "transformer_blocks.11.attn.to_out.0": { + "inputs": [ + [ + [ + 13.375 + ] + ] + ], + "params": { + "weight": [ + [ + 0.37109375 + ] + ] + } + }, + "transformer_blocks.11.attn.to_add_out": { + "inputs": [ + [ + [ + 8.4375 + ] + ] + ], + "params": { + "weight": [ + [ + 0.318359375 + ] + ] + } + }, + "transformer_blocks.11.ff.net.0.proj": { + "inputs": [ + [ + [ + 8.6875 + ] + ] + ], + "params": { + "weight": [ + [ + 0.498046875 + ] + ] + } + }, + "transformer_blocks.11.ff.net.2": { + "inputs": [ + [ + [ + 12.25 + ] + ] + ], + "params": { + "weight": [ + [ + 0.6953125 + ] + ] + } + }, + "transformer_blocks.11.ff_context.net.0.proj": { + "inputs": [ + [ + [ + 39.75 + ] + ] + ], + "params": { + "weight": [ + [ + 0.58203125 + ] + ] + } + }, + "transformer_blocks.11.ff_context.net.2": { + "inputs": [ + [ + [ + 33.0 + ] + ] + ], + "params": { + "weight": [ + [ + 0.470703125 + ] + ] + } + }, + "transformer_blocks.12.norm1.linear": { + "inputs": [ + [ + [ + 6.4375 + ] + ] + ], + "params": { + "weight": [ + [ + 0.68359375 + ] + ] + } + }, + "transformer_blocks.12.norm1_context.linear": { + "inputs": [ + [ + [ + 6.4375 + ] + ] + ], + "params": { + "weight": [ + [ + 0.41796875 + ] + ] + } + }, + "transformer_blocks.12.attn.to_q": { + "inputs": [ + [ + [ + 11.125 + ] + ] + ], + "params": { + "weight": [ + [ + 0.341796875 + ] + ] + } + }, + "transformer_blocks.12.attn.to_k": { + "inputs": [ + [ + [ + 11.125 + ] + ] + ], + "params": { + "weight": [ + [ + 0.4609375 + ] + ] + } + }, + "transformer_blocks.12.attn.to_v": { + "inputs": [ + [ + [ + 11.125 + ] + ] + ], + "params": { + "weight": [ + [ + 0.380859375 + ] + ] + } + }, + "transformer_blocks.12.attn.add_k_proj": { + "inputs": [ + [ + [ + 28.75 + ] + ] + ], + "params": { + "weight": [ + [ + 0.5390625 + ] + ] + } + }, + "transformer_blocks.12.attn.add_v_proj": { + "inputs": [ + [ + [ + 28.75 + ] + ] + ], + "params": { + "weight": [ + [ + 0.322265625 + ] + ] + } + }, + "transformer_blocks.12.attn.add_q_proj": { + "inputs": [ + [ + [ + 28.75 + ] + ] + ], + "params": { + "weight": [ + [ + 0.3359375 + ] + ] + } + }, + "transformer_blocks.12.attn.to_out.0": { + "inputs": [ + [ + [ + 28.125 + ] + ] + ], + "params": { + "weight": [ + [ + 0.322265625 + ] + ] + } + }, + "transformer_blocks.12.attn.to_add_out": { + "inputs": [ + [ + [ + 13.9375 + ] + ] + ], + "params": { + "weight": [ + [ + 0.29296875 + ] + ] + } + }, + "transformer_blocks.12.ff.net.0.proj": { + "inputs": [ + [ + [ + 6.59375 + ] + ] + ], + "params": { + "weight": [ + [ + 0.59375 + ] + ] + } + }, + "transformer_blocks.12.ff.net.2": { + "inputs": [ + [ + [ + 17.125 + ] + ] + ], + "params": { + "weight": [ + [ + 0.8359375 + ] + ] + } + }, + "transformer_blocks.12.ff_context.net.0.proj": { + "inputs": [ + [ + [ + 90.0 + ] + ] + ], + "params": { + "weight": [ + [ + 0.41015625 + ] + ] + } + }, + "transformer_blocks.12.ff_context.net.2": { + "inputs": [ + [ + [ + 25.0 + ] + 
] + ], + "params": { + "weight": [ + [ + 0.5234375 + ] + ] + } + }, + "transformer_blocks.13.norm1.linear": { + "inputs": [ + [ + [ + 6.4375 + ] + ] + ], + "params": { + "weight": [ + [ + 0.65625 + ] + ] + } + }, + "transformer_blocks.13.norm1_context.linear": { + "inputs": [ + [ + [ + 6.4375 + ] + ] + ], + "params": { + "weight": [ + [ + 0.375 + ] + ] + } + }, + "transformer_blocks.13.attn.to_q": { + "inputs": [ + [ + [ + 13.0 + ] + ] + ], + "params": { + "weight": [ + [ + 0.447265625 + ] + ] + } + }, + "transformer_blocks.13.attn.to_k": { + "inputs": [ + [ + [ + 13.0 + ] + ] + ], + "params": { + "weight": [ + [ + 0.83203125 + ] + ] + } + }, + "transformer_blocks.13.attn.to_v": { + "inputs": [ + [ + [ + 13.0 + ] + ] + ], + "params": { + "weight": [ + [ + 0.5078125 + ] + ] + } + }, + "transformer_blocks.13.attn.add_k_proj": { + "inputs": [ + [ + [ + 29.75 + ] + ] + ], + "params": { + "weight": [ + [ + 0.578125 + ] + ] + } + }, + "transformer_blocks.13.attn.add_v_proj": { + "inputs": [ + [ + [ + 29.75 + ] + ] + ], + "params": { + "weight": [ + [ + 0.25 + ] + ] + } + }, + "transformer_blocks.13.attn.add_q_proj": { + "inputs": [ + [ + [ + 29.75 + ] + ] + ], + "params": { + "weight": [ + [ + 0.33203125 + ] + ] + } + }, + "transformer_blocks.13.attn.to_out.0": { + "inputs": [ + [ + [ + 12.0625 + ] + ] + ], + "params": { + "weight": [ + [ + 0.314453125 + ] + ] + } + }, + "transformer_blocks.13.attn.to_add_out": { + "inputs": [ + [ + [ + 13.0625 + ] + ] + ], + "params": { + "weight": [ + [ + 0.27734375 + ] + ] + } + }, + "transformer_blocks.13.ff.net.0.proj": { + "inputs": [ + [ + [ + 7.03125 + ] + ] + ], + "params": { + "weight": [ + [ + 0.5859375 + ] + ] + } + }, + "transformer_blocks.13.ff.net.2": { + "inputs": [ + [ + [ + 18.25 + ] + ] + ], + "params": { + "weight": [ + [ + 1.015625 + ] + ] + } + }, + "transformer_blocks.13.ff_context.net.0.proj": { + "inputs": [ + [ + [ + 138.0 + ] + ] + ], + "params": { + "weight": [ + [ + 0.369140625 + ] + ] + } + }, + "transformer_blocks.13.ff_context.net.2": { + "inputs": [ + [ + [ + 20.125 + ] + ] + ], + "params": { + "weight": [ + [ + 0.46875 + ] + ] + } + }, + "transformer_blocks.14.norm1.linear": { + "inputs": [ + [ + [ + 6.4375 + ] + ] + ], + "params": { + "weight": [ + [ + 0.68359375 + ] + ] + } + }, + "transformer_blocks.14.norm1_context.linear": { + "inputs": [ + [ + [ + 6.4375 + ] + ] + ], + "params": { + "weight": [ + [ + 0.3984375 + ] + ] + } + }, + "transformer_blocks.14.attn.to_q": { + "inputs": [ + [ + [ + 21.875 + ] + ] + ], + "params": { + "weight": [ + [ + 0.46484375 + ] + ] + } + }, + "transformer_blocks.14.attn.to_k": { + "inputs": [ + [ + [ + 21.875 + ] + ] + ], + "params": { + "weight": [ + [ + 0.578125 + ] + ] + } + }, + "transformer_blocks.14.attn.to_v": { + "inputs": [ + [ + [ + 21.875 + ] + ] + ], + "params": { + "weight": [ + [ + 0.73046875 + ] + ] + } + }, + "transformer_blocks.14.attn.add_k_proj": { + "inputs": [ + [ + [ + 16.5 + ] + ] + ], + "params": { + "weight": [ + [ + 0.49609375 + ] + ] + } + }, + "transformer_blocks.14.attn.add_v_proj": { + "inputs": [ + [ + [ + 16.5 + ] + ] + ], + "params": { + "weight": [ + [ + 0.279296875 + ] + ] + } + }, + "transformer_blocks.14.attn.add_q_proj": { + "inputs": [ + [ + [ + 16.5 + ] + ] + ], + "params": { + "weight": [ + [ + 0.392578125 + ] + ] + } + }, + "transformer_blocks.14.attn.to_out.0": { + "inputs": [ + [ + [ + 14.8125 + ] + ] + ], + "params": { + "weight": [ + [ + 0.41015625 + ] + ] + } + }, + "transformer_blocks.14.attn.to_add_out": { + "inputs": [ + [ + [ + 10.25 + ] + ] + 
], + "params": { + "weight": [ + [ + 0.318359375 + ] + ] + } + }, + "transformer_blocks.14.ff.net.0.proj": { + "inputs": [ + [ + [ + 5.0625 + ] + ] + ], + "params": { + "weight": [ + [ + 0.53125 + ] + ] + } + }, + "transformer_blocks.14.ff.net.2": { + "inputs": [ + [ + [ + 17.5 + ] + ] + ], + "params": { + "weight": [ + [ + 0.73046875 + ] + ] + } + }, + "transformer_blocks.14.ff_context.net.0.proj": { + "inputs": [ + [ + [ + 51.25 + ] + ] + ], + "params": { + "weight": [ + [ + 0.44140625 + ] + ] + } + }, + "transformer_blocks.14.ff_context.net.2": { + "inputs": [ + [ + [ + 30.625 + ] + ] + ], + "params": { + "weight": [ + [ + 0.7265625 + ] + ] + } + }, + "transformer_blocks.15.norm1.linear": { + "inputs": [ + [ + [ + 6.4375 + ] + ] + ], + "params": { + "weight": [ + [ + 0.78125 + ] + ] + } + }, + "transformer_blocks.15.norm1_context.linear": { + "inputs": [ + [ + [ + 6.4375 + ] + ] + ], + "params": { + "weight": [ + [ + 0.435546875 + ] + ] + } + }, + "transformer_blocks.15.attn.to_q": { + "inputs": [ + [ + [ + 14.25 + ] + ] + ], + "params": { + "weight": [ + [ + 0.56640625 + ] + ] + } + }, + "transformer_blocks.15.attn.to_k": { + "inputs": [ + [ + [ + 14.25 + ] + ] + ], + "params": { + "weight": [ + [ + 0.625 + ] + ] + } + }, + "transformer_blocks.15.attn.to_v": { + "inputs": [ + [ + [ + 14.25 + ] + ] + ], + "params": { + "weight": [ + [ + 0.515625 + ] + ] + } + }, + "transformer_blocks.15.attn.add_k_proj": { + "inputs": [ + [ + [ + 17.75 + ] + ] + ], + "params": { + "weight": [ + [ + 0.5625 + ] + ] + } + }, + "transformer_blocks.15.attn.add_v_proj": { + "inputs": [ + [ + [ + 17.75 + ] + ] + ], + "params": { + "weight": [ + [ + 0.34765625 + ] + ] + } + }, + "transformer_blocks.15.attn.add_q_proj": { + "inputs": [ + [ + [ + 17.75 + ] + ] + ], + "params": { + "weight": [ + [ + 0.2890625 + ] + ] + } + }, + "transformer_blocks.15.attn.to_out.0": { + "inputs": [ + [ + [ + 13.5 + ] + ] + ], + "params": { + "weight": [ + [ + 0.365234375 + ] + ] + } + }, + "transformer_blocks.15.attn.to_add_out": { + "inputs": [ + [ + [ + 7.21875 + ] + ] + ], + "params": { + "weight": [ + [ + 0.275390625 + ] + ] + } + }, + "transformer_blocks.15.ff.net.0.proj": { + "inputs": [ + [ + [ + 4.9375 + ] + ] + ], + "params": { + "weight": [ + [ + 0.45703125 + ] + ] + } + }, + "transformer_blocks.15.ff.net.2": { + "inputs": [ + [ + [ + 21.625 + ] + ] + ], + "params": { + "weight": [ + [ + 0.8671875 + ] + ] + } + }, + "transformer_blocks.15.ff_context.net.0.proj": { + "inputs": [ + [ + [ + 44.75 + ] + ] + ], + "params": { + "weight": [ + [ + 0.5390625 + ] + ] + } + }, + "transformer_blocks.15.ff_context.net.2": { + "inputs": [ + [ + [ + 24.875 + ] + ] + ], + "params": { + "weight": [ + [ + 0.46875 + ] + ] + } + }, + "transformer_blocks.16.norm1.linear": { + "inputs": [ + [ + [ + 6.4375 + ] + ] + ], + "params": { + "weight": [ + [ + 0.890625 + ] + ] + } + }, + "transformer_blocks.16.norm1_context.linear": { + "inputs": [ + [ + [ + 6.4375 + ] + ] + ], + "params": { + "weight": [ + [ + 0.52734375 + ] + ] + } + }, + "transformer_blocks.16.attn.to_q": { + "inputs": [ + [ + [ + 19.375 + ] + ] + ], + "params": { + "weight": [ + [ + 0.42578125 + ] + ] + } + }, + "transformer_blocks.16.attn.to_k": { + "inputs": [ + [ + [ + 19.375 + ] + ] + ], + "params": { + "weight": [ + [ + 0.68359375 + ] + ] + } + }, + "transformer_blocks.16.attn.to_v": { + "inputs": [ + [ + [ + 19.375 + ] + ] + ], + "params": { + "weight": [ + [ + 0.396484375 + ] + ] + } + }, + "transformer_blocks.16.attn.add_k_proj": { + "inputs": [ + [ + [ + 24.375 + ] + ] + 
], + "params": { + "weight": [ + [ + 0.5859375 + ] + ] + } + }, + "transformer_blocks.16.attn.add_v_proj": { + "inputs": [ + [ + [ + 24.375 + ] + ] + ], + "params": { + "weight": [ + [ + 0.330078125 + ] + ] + } + }, + "transformer_blocks.16.attn.add_q_proj": { + "inputs": [ + [ + [ + 24.375 + ] + ] + ], + "params": { + "weight": [ + [ + 0.30078125 + ] + ] + } + }, + "transformer_blocks.16.attn.to_out.0": { + "inputs": [ + [ + [ + 16.25 + ] + ] + ], + "params": { + "weight": [ + [ + 0.298828125 + ] + ] + } + }, + "transformer_blocks.16.attn.to_add_out": { + "inputs": [ + [ + [ + 12.1875 + ] + ] + ], + "params": { + "weight": [ + [ + 0.3125 + ] + ] + } + }, + "transformer_blocks.16.ff.net.0.proj": { + "inputs": [ + [ + [ + 5.5625 + ] + ] + ], + "params": { + "weight": [ + [ + 0.66796875 + ] + ] + } + }, + "transformer_blocks.16.ff.net.2": { + "inputs": [ + [ + [ + 24.25 + ] + ] + ], + "params": { + "weight": [ + [ + 1.0546875 + ] + ] + } + }, + "transformer_blocks.16.ff_context.net.0.proj": { + "inputs": [ + [ + [ + 34.25 + ] + ] + ], + "params": { + "weight": [ + [ + 1.0390625 + ] + ] + } + }, + "transformer_blocks.16.ff_context.net.2": { + "inputs": [ + [ + [ + 75.5 + ] + ] + ], + "params": { + "weight": [ + [ + 0.71875 + ] + ] + } + }, + "transformer_blocks.17.norm1.linear": { + "inputs": [ + [ + [ + 6.4375 + ] + ] + ], + "params": { + "weight": [ + [ + 0.9296875 + ] + ] + } + }, + "transformer_blocks.17.norm1_context.linear": { + "inputs": [ + [ + [ + 6.4375 + ] + ] + ], + "params": { + "weight": [ + [ + 0.83203125 + ] + ] + } + }, + "transformer_blocks.17.attn.to_q": { + "inputs": [ + [ + [ + 16.375 + ] + ] + ], + "params": { + "weight": [ + [ + 0.55859375 + ] + ] + } + }, + "transformer_blocks.17.attn.to_k": { + "inputs": [ + [ + [ + 16.375 + ] + ] + ], + "params": { + "weight": [ + [ + 0.86328125 + ] + ] + } + }, + "transformer_blocks.17.attn.to_v": { + "inputs": [ + [ + [ + 16.375 + ] + ] + ], + "params": { + "weight": [ + [ + 0.423828125 + ] + ] + } + }, + "transformer_blocks.17.attn.add_k_proj": { + "inputs": [ + [ + [ + 33.5 + ] + ] + ], + "params": { + "weight": [ + [ + 0.6796875 + ] + ] + } + }, + "transformer_blocks.17.attn.add_v_proj": { + "inputs": [ + [ + [ + 33.5 + ] + ] + ], + "params": { + "weight": [ + [ + 0.47265625 + ] + ] + } + }, + "transformer_blocks.17.attn.add_q_proj": { + "inputs": [ + [ + [ + 33.5 + ] + ] + ], + "params": { + "weight": [ + [ + 0.328125 + ] + ] + } + }, + "transformer_blocks.17.attn.to_out.0": { + "inputs": [ + [ + [ + 15.9375 + ] + ] + ], + "params": { + "weight": [ + [ + 0.34765625 + ] + ] + } + }, + "transformer_blocks.17.attn.to_add_out": { + "inputs": [ + [ + [ + 19.0 + ] + ] + ], + "params": { + "weight": [ + [ + 0.24609375 + ] + ] + } + }, + "transformer_blocks.17.ff.net.0.proj": { + "inputs": [ + [ + [ + 7.03125 + ] + ] + ], + "params": { + "weight": [ + [ + 0.5234375 + ] + ] + } + }, + "transformer_blocks.17.ff.net.2": { + "inputs": [ + [ + [ + 53.25 + ] + ] + ], + "params": { + "weight": [ + [ + 1.15625 + ] + ] + } + }, + "transformer_blocks.17.ff_context.net.0.proj": { + "inputs": [ + [ + [ + 33.5 + ] + ] + ], + "params": { + "weight": [ + [ + 0.6328125 + ] + ] + } + }, + "transformer_blocks.17.ff_context.net.2": { + "inputs": [ + [ + [ + 68.0 + ] + ] + ], + "params": { + "weight": [ + [ + 0.75390625 + ] + ] + } + }, + "transformer_blocks.18.norm1.linear": { + "inputs": [ + [ + [ + 6.4375 + ] + ] + ], + "params": { + "weight": [ + [ + 1.5234375 + ] + ] + } + }, + "transformer_blocks.18.norm1_context.linear": { + "inputs": [ + [ + [ + 
6.4375 + ] + ] + ], + "params": { + "weight": [ + [ + 0.7421875 + ] + ] + } + }, + "transformer_blocks.18.attn.to_q": { + "inputs": [ + [ + [ + 15.25 + ] + ] + ], + "params": { + "weight": [ + [ + 0.546875 + ] + ] + } + }, + "transformer_blocks.18.attn.to_k": { + "inputs": [ + [ + [ + 15.25 + ] + ] + ], + "params": { + "weight": [ + [ + 0.94921875 + ] + ] + } + }, + "transformer_blocks.18.attn.to_v": { + "inputs": [ + [ + [ + 15.25 + ] + ] + ], + "params": { + "weight": [ + [ + 0.375 + ] + ] + } + }, + "transformer_blocks.18.attn.add_k_proj": { + "inputs": [ + [ + [ + 22.375 + ] + ] + ], + "params": { + "weight": [ + [ + 0.4453125 + ] + ] + } + }, + "transformer_blocks.18.attn.add_v_proj": { + "inputs": [ + [ + [ + 22.375 + ] + ] + ], + "params": { + "weight": [ + [ + 0.369140625 + ] + ] + } + }, + "transformer_blocks.18.attn.add_q_proj": { + "inputs": [ + [ + [ + 22.375 + ] + ] + ], + "params": { + "weight": [ + [ + 0.384765625 + ] + ] + } + }, + "transformer_blocks.18.attn.to_out.0": { + "inputs": [ + [ + [ + 27.125 + ] + ] + ], + "params": { + "weight": [ + [ + 0.44140625 + ] + ] + } + }, + "transformer_blocks.18.attn.to_add_out": { + "inputs": [ + [ + [ + 15.75 + ] + ] + ], + "params": { + "weight": [ + [ + 0.36328125 + ] + ] + } + }, + "transformer_blocks.18.ff.net.0.proj": { + "inputs": [ + [ + [ + 10.0 + ] + ] + ], + "params": { + "weight": [ + [ + 0.69921875 + ] + ] + } + }, + "transformer_blocks.18.ff.net.2": { + "inputs": [ + [ + [ + 202.0 + ] + ] + ], + "params": { + "weight": [ + [ + 1.4609375 + ] + ] + } + }, + "transformer_blocks.18.ff_context.net.0.proj": { + "inputs": [ + [ + [ + 114.5 + ] + ] + ], + "params": { + "weight": [ + [ + 0.9375 + ] + ] + } + }, + "transformer_blocks.18.ff_context.net.2": { + "inputs": [ + [ + [ + 224.0 + ] + ] + ], + "params": { + "weight": [ + [ + 0.80859375 + ] + ] + } + }, + "single_transformer_blocks.0.norm.linear": { + "inputs": [ + [ + [ + 6.4375 + ] + ] + ], + "params": { + "weight": [ + [ + 0.59375 + ] + ] + } + }, + "single_transformer_blocks.0.proj_mlp": { + "inputs": [ + [ + [ + 43.75 + ] + ] + ], + "params": { + "weight": [ + [ + 0.5390625 + ] + ] + } + }, + "single_transformer_blocks.0.proj_out": { + "inputs": [ + [ + [ + 13.375 + ] + ] + ], + "params": { + "weight": [ + [ + 1.3671875 + ] + ] + } + }, + "single_transformer_blocks.0.attn.to_q": { + "inputs": [ + [ + [ + 43.75 + ] + ] + ], + "params": { + "weight": [ + [ + 0.384765625 + ] + ] + } + }, + "single_transformer_blocks.0.attn.to_k": { + "inputs": [ + [ + [ + 43.75 + ] + ] + ], + "params": { + "weight": [ + [ + 0.51953125 + ] + ] + } + }, + "single_transformer_blocks.0.attn.to_v": { + "inputs": [ + [ + [ + 43.75 + ] + ] + ], + "params": { + "weight": [ + [ + 0.279296875 + ] + ] + } + }, + "single_transformer_blocks.1.norm.linear": { + "inputs": [ + [ + [ + 6.4375 + ] + ] + ], + "params": { + "weight": [ + [ + 1.0390625 + ] + ] + } + }, + "single_transformer_blocks.1.proj_mlp": { + "inputs": [ + [ + [ + 35.5 + ] + ] + ], + "params": { + "weight": [ + [ + 0.51171875 + ] + ] + } + }, + "single_transformer_blocks.1.proj_out": { + "inputs": [ + [ + [ + 15.8125 + ] + ] + ], + "params": { + "weight": [ + [ + 1.5 + ] + ] + } + }, + "single_transformer_blocks.1.attn.to_q": { + "inputs": [ + [ + [ + 35.5 + ] + ] + ], + "params": { + "weight": [ + [ + 0.373046875 + ] + ] + } + }, + "single_transformer_blocks.1.attn.to_k": { + "inputs": [ + [ + [ + 35.5 + ] + ] + ], + "params": { + "weight": [ + [ + 0.484375 + ] + ] + } + }, + "single_transformer_blocks.1.attn.to_v": { + "inputs": [ + [ 
+ [ + 35.5 + ] + ] + ], + "params": { + "weight": [ + [ + 0.318359375 + ] + ] + } + }, + "single_transformer_blocks.2.norm.linear": { + "inputs": [ + [ + [ + 6.4375 + ] + ] + ], + "params": { + "weight": [ + [ + 1.203125 + ] + ] + } + }, + "single_transformer_blocks.2.proj_mlp": { + "inputs": [ + [ + [ + 22.625 + ] + ] + ], + "params": { + "weight": [ + [ + 0.5234375 + ] + ] + } + }, + "single_transformer_blocks.2.proj_out": { + "inputs": [ + [ + [ + 16.625 + ] + ] + ], + "params": { + "weight": [ + [ + 1.4609375 + ] + ] + } + }, + "single_transformer_blocks.2.attn.to_q": { + "inputs": [ + [ + [ + 22.625 + ] + ] + ], + "params": { + "weight": [ + [ + 0.380859375 + ] + ] + } + }, + "single_transformer_blocks.2.attn.to_k": { + "inputs": [ + [ + [ + 22.625 + ] + ] + ], + "params": { + "weight": [ + [ + 0.478515625 + ] + ] + } + }, + "single_transformer_blocks.2.attn.to_v": { + "inputs": [ + [ + [ + 22.625 + ] + ] + ], + "params": { + "weight": [ + [ + 0.359375 + ] + ] + } + }, + "single_transformer_blocks.3.norm.linear": { + "inputs": [ + [ + [ + 6.4375 + ] + ] + ], + "params": { + "weight": [ + [ + 1.359375 + ] + ] + } + }, + "single_transformer_blocks.3.proj_mlp": { + "inputs": [ + [ + [ + 30.625 + ] + ] + ], + "params": { + "weight": [ + [ + 0.484375 + ] + ] + } + }, + "single_transformer_blocks.3.proj_out": { + "inputs": [ + [ + [ + 16.75 + ] + ] + ], + "params": { + "weight": [ + [ + 0.91796875 + ] + ] + } + }, + "single_transformer_blocks.3.attn.to_q": { + "inputs": [ + [ + [ + 30.625 + ] + ] + ], + "params": { + "weight": [ + [ + 0.40625 + ] + ] + } + }, + "single_transformer_blocks.3.attn.to_k": { + "inputs": [ + [ + [ + 30.625 + ] + ] + ], + "params": { + "weight": [ + [ + 0.58203125 + ] + ] + } + }, + "single_transformer_blocks.3.attn.to_v": { + "inputs": [ + [ + [ + 30.625 + ] + ] + ], + "params": { + "weight": [ + [ + 0.34375 + ] + ] + } + }, + "single_transformer_blocks.4.norm.linear": { + "inputs": [ + [ + [ + 6.4375 + ] + ] + ], + "params": { + "weight": [ + [ + 1.6015625 + ] + ] + } + }, + "single_transformer_blocks.4.proj_mlp": { + "inputs": [ + [ + [ + 32.75 + ] + ] + ], + "params": { + "weight": [ + [ + 0.458984375 + ] + ] + } + }, + "single_transformer_blocks.4.proj_out": { + "inputs": [ + [ + [ + 17.0 + ] + ] + ], + "params": { + "weight": [ + [ + 1.2421875 + ] + ] + } + }, + "single_transformer_blocks.4.attn.to_q": { + "inputs": [ + [ + [ + 32.75 + ] + ] + ], + "params": { + "weight": [ + [ + 0.333984375 + ] + ] + } + }, + "single_transformer_blocks.4.attn.to_k": { + "inputs": [ + [ + [ + 32.75 + ] + ] + ], + "params": { + "weight": [ + [ + 0.63671875 + ] + ] + } + }, + "single_transformer_blocks.4.attn.to_v": { + "inputs": [ + [ + [ + 32.75 + ] + ] + ], + "params": { + "weight": [ + [ + 0.234375 + ] + ] + } + }, + "single_transformer_blocks.5.norm.linear": { + "inputs": [ + [ + [ + 6.4375 + ] + ] + ], + "params": { + "weight": [ + [ + 1.6640625 + ] + ] + } + }, + "single_transformer_blocks.5.proj_mlp": { + "inputs": [ + [ + [ + 25.5 + ] + ] + ], + "params": { + "weight": [ + [ + 0.5390625 + ] + ] + } + }, + "single_transformer_blocks.5.proj_out": { + "inputs": [ + [ + [ + 13.8125 + ] + ] + ], + "params": { + "weight": [ + [ + 0.77734375 + ] + ] + } + }, + "single_transformer_blocks.5.attn.to_q": { + "inputs": [ + [ + [ + 25.5 + ] + ] + ], + "params": { + "weight": [ + [ + 0.3984375 + ] + ] + } + }, + "single_transformer_blocks.5.attn.to_k": { + "inputs": [ + [ + [ + 25.5 + ] + ] + ], + "params": { + "weight": [ + [ + 0.5078125 + ] + ] + } + }, + 
"single_transformer_blocks.5.attn.to_v": { + "inputs": [ + [ + [ + 25.5 + ] + ] + ], + "params": { + "weight": [ + [ + 0.23046875 + ] + ] + } + }, + "single_transformer_blocks.6.norm.linear": { + "inputs": [ + [ + [ + 6.4375 + ] + ] + ], + "params": { + "weight": [ + [ + 2.015625 + ] + ] + } + }, + "single_transformer_blocks.6.proj_mlp": { + "inputs": [ + [ + [ + 25.625 + ] + ] + ], + "params": { + "weight": [ + [ + 0.55078125 + ] + ] + } + }, + "single_transformer_blocks.6.proj_out": { + "inputs": [ + [ + [ + 15.1875 + ] + ] + ], + "params": { + "weight": [ + [ + 1.2578125 + ] + ] + } + }, + "single_transformer_blocks.6.attn.to_q": { + "inputs": [ + [ + [ + 25.625 + ] + ] + ], + "params": { + "weight": [ + [ + 0.32421875 + ] + ] + } + }, + "single_transformer_blocks.6.attn.to_k": { + "inputs": [ + [ + [ + 25.625 + ] + ] + ], + "params": { + "weight": [ + [ + 0.6015625 + ] + ] + } + }, + "single_transformer_blocks.6.attn.to_v": { + "inputs": [ + [ + [ + 25.625 + ] + ] + ], + "params": { + "weight": [ + [ + 0.2451171875 + ] + ] + } + }, + "single_transformer_blocks.7.norm.linear": { + "inputs": [ + [ + [ + 6.4375 + ] + ] + ], + "params": { + "weight": [ + [ + 1.796875 + ] + ] + } + }, + "single_transformer_blocks.7.proj_mlp": { + "inputs": [ + [ + [ + 25.625 + ] + ] + ], + "params": { + "weight": [ + [ + 0.51171875 + ] + ] + } + }, + "single_transformer_blocks.7.proj_out": { + "inputs": [ + [ + [ + 12.6875 + ] + ] + ], + "params": { + "weight": [ + [ + 0.75 + ] + ] + } + }, + "single_transformer_blocks.7.attn.to_q": { + "inputs": [ + [ + [ + 25.625 + ] + ] + ], + "params": { + "weight": [ + [ + 0.35546875 + ] + ] + } + }, + "single_transformer_blocks.7.attn.to_k": { + "inputs": [ + [ + [ + 25.625 + ] + ] + ], + "params": { + "weight": [ + [ + 0.5703125 + ] + ] + } + }, + "single_transformer_blocks.7.attn.to_v": { + "inputs": [ + [ + [ + 25.625 + ] + ] + ], + "params": { + "weight": [ + [ + 0.2578125 + ] + ] + } + }, + "single_transformer_blocks.8.norm.linear": { + "inputs": [ + [ + [ + 6.4375 + ] + ] + ], + "params": { + "weight": [ + [ + 2.078125 + ] + ] + } + }, + "single_transformer_blocks.8.proj_mlp": { + "inputs": [ + [ + [ + 19.5 + ] + ] + ], + "params": { + "weight": [ + [ + 0.57421875 + ] + ] + } + }, + "single_transformer_blocks.8.proj_out": { + "inputs": [ + [ + [ + 14.1875 + ] + ] + ], + "params": { + "weight": [ + [ + 1.5546875 + ] + ] + } + }, + "single_transformer_blocks.8.attn.to_q": { + "inputs": [ + [ + [ + 19.5 + ] + ] + ], + "params": { + "weight": [ + [ + 0.423828125 + ] + ] + } + }, + "single_transformer_blocks.8.attn.to_k": { + "inputs": [ + [ + [ + 19.5 + ] + ] + ], + "params": { + "weight": [ + [ + 0.65234375 + ] + ] + } + }, + "single_transformer_blocks.8.attn.to_v": { + "inputs": [ + [ + [ + 19.5 + ] + ] + ], + "params": { + "weight": [ + [ + 0.3203125 + ] + ] + } + }, + "single_transformer_blocks.9.norm.linear": { + "inputs": [ + [ + [ + 6.4375 + ] + ] + ], + "params": { + "weight": [ + [ + 2.34375 + ] + ] + } + }, + "single_transformer_blocks.9.proj_mlp": { + "inputs": [ + [ + [ + 20.0 + ] + ] + ], + "params": { + "weight": [ + [ + 0.890625 + ] + ] + } + }, + "single_transformer_blocks.9.proj_out": { + "inputs": [ + [ + [ + 17.875 + ] + ] + ], + "params": { + "weight": [ + [ + 1.234375 + ] + ] + } + }, + "single_transformer_blocks.9.attn.to_q": { + "inputs": [ + [ + [ + 20.0 + ] + ] + ], + "params": { + "weight": [ + [ + 0.53125 + ] + ] + } + }, + "single_transformer_blocks.9.attn.to_k": { + "inputs": [ + [ + [ + 20.0 + ] + ] + ], + "params": { + "weight": [ + [ 
+ 0.5390625 + ] + ] + } + }, + "single_transformer_blocks.9.attn.to_v": { + "inputs": [ + [ + [ + 20.0 + ] + ] + ], + "params": { + "weight": [ + [ + 0.396484375 + ] + ] + } + }, + "single_transformer_blocks.10.norm.linear": { + "inputs": [ + [ + [ + 6.4375 + ] + ] + ], + "params": { + "weight": [ + [ + 2.203125 + ] + ] + } + }, + "single_transformer_blocks.10.proj_mlp": { + "inputs": [ + [ + [ + 14.9375 + ] + ] + ], + "params": { + "weight": [ + [ + 0.65234375 + ] + ] + } + }, + "single_transformer_blocks.10.proj_out": { + "inputs": [ + [ + [ + 13.0 + ] + ] + ], + "params": { + "weight": [ + [ + 1.109375 + ] + ] + } + }, + "single_transformer_blocks.10.attn.to_q": { + "inputs": [ + [ + [ + 14.9375 + ] + ] + ], + "params": { + "weight": [ + [ + 0.439453125 + ] + ] + } + }, + "single_transformer_blocks.10.attn.to_k": { + "inputs": [ + [ + [ + 14.9375 + ] + ] + ], + "params": { + "weight": [ + [ + 0.55859375 + ] + ] + } + }, + "single_transformer_blocks.10.attn.to_v": { + "inputs": [ + [ + [ + 14.9375 + ] + ] + ], + "params": { + "weight": [ + [ + 0.251953125 + ] + ] + } + }, + "single_transformer_blocks.11.norm.linear": { + "inputs": [ + [ + [ + 6.4375 + ] + ] + ], + "params": { + "weight": [ + [ + 1.8125 + ] + ] + } + }, + "single_transformer_blocks.11.proj_mlp": { + "inputs": [ + [ + [ + 21.875 + ] + ] + ], + "params": { + "weight": [ + [ + 0.6640625 + ] + ] + } + }, + "single_transformer_blocks.11.proj_out": { + "inputs": [ + [ + [ + 19.875 + ] + ] + ], + "params": { + "weight": [ + [ + 1.046875 + ] + ] + } + }, + "single_transformer_blocks.11.attn.to_q": { + "inputs": [ + [ + [ + 21.875 + ] + ] + ], + "params": { + "weight": [ + [ + 0.4296875 + ] + ] + } + }, + "single_transformer_blocks.11.attn.to_k": { + "inputs": [ + [ + [ + 21.875 + ] + ] + ], + "params": { + "weight": [ + [ + 0.53125 + ] + ] + } + }, + "single_transformer_blocks.11.attn.to_v": { + "inputs": [ + [ + [ + 21.875 + ] + ] + ], + "params": { + "weight": [ + [ + 0.4375 + ] + ] + } + }, + "single_transformer_blocks.12.norm.linear": { + "inputs": [ + [ + [ + 6.4375 + ] + ] + ], + "params": { + "weight": [ + [ + 1.7578125 + ] + ] + } + }, + "single_transformer_blocks.12.proj_mlp": { + "inputs": [ + [ + [ + 19.125 + ] + ] + ], + "params": { + "weight": [ + [ + 0.8125 + ] + ] + } + }, + "single_transformer_blocks.12.proj_out": { + "inputs": [ + [ + [ + 13.375 + ] + ] + ], + "params": { + "weight": [ + [ + 0.953125 + ] + ] + } + }, + "single_transformer_blocks.12.attn.to_q": { + "inputs": [ + [ + [ + 19.125 + ] + ] + ], + "params": { + "weight": [ + [ + 0.419921875 + ] + ] + } + }, + "single_transformer_blocks.12.attn.to_k": { + "inputs": [ + [ + [ + 19.125 + ] + ] + ], + "params": { + "weight": [ + [ + 0.486328125 + ] + ] + } + }, + "single_transformer_blocks.12.attn.to_v": { + "inputs": [ + [ + [ + 19.125 + ] + ] + ], + "params": { + "weight": [ + [ + 0.44921875 + ] + ] + } + }, + "single_transformer_blocks.13.norm.linear": { + "inputs": [ + [ + [ + 6.4375 + ] + ] + ], + "params": { + "weight": [ + [ + 1.609375 + ] + ] + } + }, + "single_transformer_blocks.13.proj_mlp": { + "inputs": [ + [ + [ + 20.875 + ] + ] + ], + "params": { + "weight": [ + [ + 0.85546875 + ] + ] + } + }, + "single_transformer_blocks.13.proj_out": { + "inputs": [ + [ + [ + 19.0 + ] + ] + ], + "params": { + "weight": [ + [ + 1.0078125 + ] + ] + } + }, + "single_transformer_blocks.13.attn.to_q": { + "inputs": [ + [ + [ + 20.875 + ] + ] + ], + "params": { + "weight": [ + [ + 0.5390625 + ] + ] + } + }, + "single_transformer_blocks.13.attn.to_k": { + "inputs": 
[ + [ + [ + 20.875 + ] + ] + ], + "params": { + "weight": [ + [ + 0.44140625 + ] + ] + } + }, + "single_transformer_blocks.13.attn.to_v": { + "inputs": [ + [ + [ + 20.875 + ] + ] + ], + "params": { + "weight": [ + [ + 0.45703125 + ] + ] + } + }, + "single_transformer_blocks.14.norm.linear": { + "inputs": [ + [ + [ + 6.4375 + ] + ] + ], + "params": { + "weight": [ + [ + 2.015625 + ] + ] + } + }, + "single_transformer_blocks.14.proj_mlp": { + "inputs": [ + [ + [ + 26.0 + ] + ] + ], + "params": { + "weight": [ + [ + 1.109375 + ] + ] + } + }, + "single_transformer_blocks.14.proj_out": { + "inputs": [ + [ + [ + 13.875 + ] + ] + ], + "params": { + "weight": [ + [ + 1.0390625 + ] + ] + } + }, + "single_transformer_blocks.14.attn.to_q": { + "inputs": [ + [ + [ + 26.0 + ] + ] + ], + "params": { + "weight": [ + [ + 0.392578125 + ] + ] + } + }, + "single_transformer_blocks.14.attn.to_k": { + "inputs": [ + [ + [ + 26.0 + ] + ] + ], + "params": { + "weight": [ + [ + 0.52734375 + ] + ] + } + }, + "single_transformer_blocks.14.attn.to_v": { + "inputs": [ + [ + [ + 26.0 + ] + ] + ], + "params": { + "weight": [ + [ + 0.5234375 + ] + ] + } + }, + "single_transformer_blocks.15.norm.linear": { + "inputs": [ + [ + [ + 6.4375 + ] + ] + ], + "params": { + "weight": [ + [ + 2.03125 + ] + ] + } + }, + "single_transformer_blocks.15.proj_mlp": { + "inputs": [ + [ + [ + 18.875 + ] + ] + ], + "params": { + "weight": [ + [ + 0.78125 + ] + ] + } + }, + "single_transformer_blocks.15.proj_out": { + "inputs": [ + [ + [ + 15.375 + ] + ] + ], + "params": { + "weight": [ + [ + 1.2578125 + ] + ] + } + }, + "single_transformer_blocks.15.attn.to_q": { + "inputs": [ + [ + [ + 18.875 + ] + ] + ], + "params": { + "weight": [ + [ + 0.49609375 + ] + ] + } + }, + "single_transformer_blocks.15.attn.to_k": { + "inputs": [ + [ + [ + 18.875 + ] + ] + ], + "params": { + "weight": [ + [ + 0.58203125 + ] + ] + } + }, + "single_transformer_blocks.15.attn.to_v": { + "inputs": [ + [ + [ + 18.875 + ] + ] + ], + "params": { + "weight": [ + [ + 0.57421875 + ] + ] + } + }, + "single_transformer_blocks.16.norm.linear": { + "inputs": [ + [ + [ + 6.4375 + ] + ] + ], + "params": { + "weight": [ + [ + 2.140625 + ] + ] + } + }, + "single_transformer_blocks.16.proj_mlp": { + "inputs": [ + [ + [ + 22.625 + ] + ] + ], + "params": { + "weight": [ + [ + 1.1796875 + ] + ] + } + }, + "single_transformer_blocks.16.proj_out": { + "inputs": [ + [ + [ + 13.25 + ] + ] + ], + "params": { + "weight": [ + [ + 1.140625 + ] + ] + } + }, + "single_transformer_blocks.16.attn.to_q": { + "inputs": [ + [ + [ + 22.625 + ] + ] + ], + "params": { + "weight": [ + [ + 0.625 + ] + ] + } + }, + "single_transformer_blocks.16.attn.to_k": { + "inputs": [ + [ + [ + 22.625 + ] + ] + ], + "params": { + "weight": [ + [ + 0.5546875 + ] + ] + } + }, + "single_transformer_blocks.16.attn.to_v": { + "inputs": [ + [ + [ + 22.625 + ] + ] + ], + "params": { + "weight": [ + [ + 0.546875 + ] + ] + } + }, + "single_transformer_blocks.17.norm.linear": { + "inputs": [ + [ + [ + 6.4375 + ] + ] + ], + "params": { + "weight": [ + [ + 1.6015625 + ] + ] + } + }, + "single_transformer_blocks.17.proj_mlp": { + "inputs": [ + [ + [ + 19.625 + ] + ] + ], + "params": { + "weight": [ + [ + 0.7265625 + ] + ] + } + }, + "single_transformer_blocks.17.proj_out": { + "inputs": [ + [ + [ + 20.75 + ] + ] + ], + "params": { + "weight": [ + [ + 1.140625 + ] + ] + } + }, + "single_transformer_blocks.17.attn.to_q": { + "inputs": [ + [ + [ + 19.625 + ] + ] + ], + "params": { + "weight": [ + [ + 0.609375 + ] + ] + } + }, + 
"single_transformer_blocks.17.attn.to_k": { + "inputs": [ + [ + [ + 19.625 + ] + ] + ], + "params": { + "weight": [ + [ + 0.65625 + ] + ] + } + }, + "single_transformer_blocks.17.attn.to_v": { + "inputs": [ + [ + [ + 19.625 + ] + ] + ], + "params": { + "weight": [ + [ + 0.81640625 + ] + ] + } + }, + "single_transformer_blocks.18.norm.linear": { + "inputs": [ + [ + [ + 6.4375 + ] + ] + ], + "params": { + "weight": [ + [ + 1.8203125 + ] + ] + } + }, + "single_transformer_blocks.18.proj_mlp": { + "inputs": [ + [ + [ + 24.875 + ] + ] + ], + "params": { + "weight": [ + [ + 1.125 + ] + ] + } + }, + "single_transformer_blocks.18.proj_out": { + "inputs": [ + [ + [ + 14.8125 + ] + ] + ], + "params": { + "weight": [ + [ + 1.09375 + ] + ] + } + }, + "single_transformer_blocks.18.attn.to_q": { + "inputs": [ + [ + [ + 24.875 + ] + ] + ], + "params": { + "weight": [ + [ + 0.5390625 + ] + ] + } + }, + "single_transformer_blocks.18.attn.to_k": { + "inputs": [ + [ + [ + 24.875 + ] + ] + ], + "params": { + "weight": [ + [ + 0.62109375 + ] + ] + } + }, + "single_transformer_blocks.18.attn.to_v": { + "inputs": [ + [ + [ + 24.875 + ] + ] + ], + "params": { + "weight": [ + [ + 0.51171875 + ] + ] + } + }, + "single_transformer_blocks.19.norm.linear": { + "inputs": [ + [ + [ + 6.4375 + ] + ] + ], + "params": { + "weight": [ + [ + 1.7578125 + ] + ] + } + }, + "single_transformer_blocks.19.proj_mlp": { + "inputs": [ + [ + [ + 15.0625 + ] + ] + ], + "params": { + "weight": [ + [ + 0.87109375 + ] + ] + } + }, + "single_transformer_blocks.19.proj_out": { + "inputs": [ + [ + [ + 13.75 + ] + ] + ], + "params": { + "weight": [ + [ + 1.4296875 + ] + ] + } + }, + "single_transformer_blocks.19.attn.to_q": { + "inputs": [ + [ + [ + 15.0625 + ] + ] + ], + "params": { + "weight": [ + [ + 0.55078125 + ] + ] + } + }, + "single_transformer_blocks.19.attn.to_k": { + "inputs": [ + [ + [ + 15.0625 + ] + ] + ], + "params": { + "weight": [ + [ + 0.55078125 + ] + ] + } + }, + "single_transformer_blocks.19.attn.to_v": { + "inputs": [ + [ + [ + 15.0625 + ] + ] + ], + "params": { + "weight": [ + [ + 0.54296875 + ] + ] + } + }, + "single_transformer_blocks.20.norm.linear": { + "inputs": [ + [ + [ + 6.4375 + ] + ] + ], + "params": { + "weight": [ + [ + 2.046875 + ] + ] + } + }, + "single_transformer_blocks.20.proj_mlp": { + "inputs": [ + [ + [ + 17.375 + ] + ] + ], + "params": { + "weight": [ + [ + 0.87890625 + ] + ] + } + }, + "single_transformer_blocks.20.proj_out": { + "inputs": [ + [ + [ + 10.0 + ] + ] + ], + "params": { + "weight": [ + [ + 1.1484375 + ] + ] + } + }, + "single_transformer_blocks.20.attn.to_q": { + "inputs": [ + [ + [ + 17.375 + ] + ] + ], + "params": { + "weight": [ + [ + 0.57421875 + ] + ] + } + }, + "single_transformer_blocks.20.attn.to_k": { + "inputs": [ + [ + [ + 17.375 + ] + ] + ], + "params": { + "weight": [ + [ + 0.50390625 + ] + ] + } + }, + "single_transformer_blocks.20.attn.to_v": { + "inputs": [ + [ + [ + 17.375 + ] + ] + ], + "params": { + "weight": [ + [ + 0.55078125 + ] + ] + } + }, + "single_transformer_blocks.21.norm.linear": { + "inputs": [ + [ + [ + 6.4375 + ] + ] + ], + "params": { + "weight": [ + [ + 1.5234375 + ] + ] + } + }, + "single_transformer_blocks.21.proj_mlp": { + "inputs": [ + [ + [ + 19.125 + ] + ] + ], + "params": { + "weight": [ + [ + 0.9140625 + ] + ] + } + }, + "single_transformer_blocks.21.proj_out": { + "inputs": [ + [ + [ + 10.875 + ] + ] + ], + "params": { + "weight": [ + [ + 1.4140625 + ] + ] + } + }, + "single_transformer_blocks.21.attn.to_q": { + "inputs": [ + [ + [ + 19.125 + 
] + ] + ], + "params": { + "weight": [ + [ + 0.482421875 + ] + ] + } + }, + "single_transformer_blocks.21.attn.to_k": { + "inputs": [ + [ + [ + 19.125 + ] + ] + ], + "params": { + "weight": [ + [ + 0.46875 + ] + ] + } + }, + "single_transformer_blocks.21.attn.to_v": { + "inputs": [ + [ + [ + 19.125 + ] + ] + ], + "params": { + "weight": [ + [ + 0.42578125 + ] + ] + } + }, + "single_transformer_blocks.22.norm.linear": { + "inputs": [ + [ + [ + 6.4375 + ] + ] + ], + "params": { + "weight": [ + [ + 1.2265625 + ] + ] + } + }, + "single_transformer_blocks.22.proj_mlp": { + "inputs": [ + [ + [ + 13.875 + ] + ] + ], + "params": { + "weight": [ + [ + 0.703125 + ] + ] + } + }, + "single_transformer_blocks.22.proj_out": { + "inputs": [ + [ + [ + 9.6875 + ] + ] + ], + "params": { + "weight": [ + [ + 0.87109375 + ] + ] + } + }, + "single_transformer_blocks.22.attn.to_q": { + "inputs": [ + [ + [ + 13.875 + ] + ] + ], + "params": { + "weight": [ + [ + 0.455078125 + ] + ] + } + }, + "single_transformer_blocks.22.attn.to_k": { + "inputs": [ + [ + [ + 13.875 + ] + ] + ], + "params": { + "weight": [ + [ + 0.6015625 + ] + ] + } + }, + "single_transformer_blocks.22.attn.to_v": { + "inputs": [ + [ + [ + 13.875 + ] + ] + ], + "params": { + "weight": [ + [ + 0.609375 + ] + ] + } + }, + "single_transformer_blocks.23.norm.linear": { + "inputs": [ + [ + [ + 6.4375 + ] + ] + ], + "params": { + "weight": [ + [ + 1.6640625 + ] + ] + } + }, + "single_transformer_blocks.23.proj_mlp": { + "inputs": [ + [ + [ + 19.75 + ] + ] + ], + "params": { + "weight": [ + [ + 0.71484375 + ] + ] + } + }, + "single_transformer_blocks.23.proj_out": { + "inputs": [ + [ + [ + 8.8125 + ] + ] + ], + "params": { + "weight": [ + [ + 1.0546875 + ] + ] + } + }, + "single_transformer_blocks.23.attn.to_q": { + "inputs": [ + [ + [ + 19.75 + ] + ] + ], + "params": { + "weight": [ + [ + 0.498046875 + ] + ] + } + }, + "single_transformer_blocks.23.attn.to_k": { + "inputs": [ + [ + [ + 19.75 + ] + ] + ], + "params": { + "weight": [ + [ + 0.7578125 + ] + ] + } + }, + "single_transformer_blocks.23.attn.to_v": { + "inputs": [ + [ + [ + 19.75 + ] + ] + ], + "params": { + "weight": [ + [ + 0.57421875 + ] + ] + } + }, + "single_transformer_blocks.24.norm.linear": { + "inputs": [ + [ + [ + 6.4375 + ] + ] + ], + "params": { + "weight": [ + [ + 1.7890625 + ] + ] + } + }, + "single_transformer_blocks.24.proj_mlp": { + "inputs": [ + [ + [ + 15.5 + ] + ] + ], + "params": { + "weight": [ + [ + 1.09375 + ] + ] + } + }, + "single_transformer_blocks.24.proj_out": { + "inputs": [ + [ + [ + 10.0 + ] + ] + ], + "params": { + "weight": [ + [ + 0.95703125 + ] + ] + } + }, + "single_transformer_blocks.24.attn.to_q": { + "inputs": [ + [ + [ + 15.5 + ] + ] + ], + "params": { + "weight": [ + [ + 0.453125 + ] + ] + } + }, + "single_transformer_blocks.24.attn.to_k": { + "inputs": [ + [ + [ + 15.5 + ] + ] + ], + "params": { + "weight": [ + [ + 0.57421875 + ] + ] + } + }, + "single_transformer_blocks.24.attn.to_v": { + "inputs": [ + [ + [ + 15.5 + ] + ] + ], + "params": { + "weight": [ + [ + 0.345703125 + ] + ] + } + }, + "single_transformer_blocks.25.norm.linear": { + "inputs": [ + [ + [ + 6.4375 + ] + ] + ], + "params": { + "weight": [ + [ + 1.078125 + ] + ] + } + }, + "single_transformer_blocks.25.proj_mlp": { + "inputs": [ + [ + [ + 15.0625 + ] + ] + ], + "params": { + "weight": [ + [ + 1.15625 + ] + ] + } + }, + "single_transformer_blocks.25.proj_out": { + "inputs": [ + [ + [ + 11.9375 + ] + ] + ], + "params": { + "weight": [ + [ + 1.3359375 + ] + ] + } + }, + 
"single_transformer_blocks.25.attn.to_q": { + "inputs": [ + [ + [ + 15.0625 + ] + ] + ], + "params": { + "weight": [ + [ + 0.50390625 + ] + ] + } + }, + "single_transformer_blocks.25.attn.to_k": { + "inputs": [ + [ + [ + 15.0625 + ] + ] + ], + "params": { + "weight": [ + [ + 0.515625 + ] + ] + } + }, + "single_transformer_blocks.25.attn.to_v": { + "inputs": [ + [ + [ + 15.0625 + ] + ] + ], + "params": { + "weight": [ + [ + 0.3984375 + ] + ] + } + }, + "single_transformer_blocks.26.norm.linear": { + "inputs": [ + [ + [ + 6.4375 + ] + ] + ], + "params": { + "weight": [ + [ + 1.28125 + ] + ] + } + }, + "single_transformer_blocks.26.proj_mlp": { + "inputs": [ + [ + [ + 15.0625 + ] + ] + ], + "params": { + "weight": [ + [ + 0.90234375 + ] + ] + } + }, + "single_transformer_blocks.26.proj_out": { + "inputs": [ + [ + [ + 11.625 + ] + ] + ], + "params": { + "weight": [ + [ + 1.453125 + ] + ] + } + }, + "single_transformer_blocks.26.attn.to_q": { + "inputs": [ + [ + [ + 15.0625 + ] + ] + ], + "params": { + "weight": [ + [ + 0.490234375 + ] + ] + } + }, + "single_transformer_blocks.26.attn.to_k": { + "inputs": [ + [ + [ + 15.0625 + ] + ] + ], + "params": { + "weight": [ + [ + 0.455078125 + ] + ] + } + }, + "single_transformer_blocks.26.attn.to_v": { + "inputs": [ + [ + [ + 15.0625 + ] + ] + ], + "params": { + "weight": [ + [ + 0.51171875 + ] + ] + } + }, + "single_transformer_blocks.27.norm.linear": { + "inputs": [ + [ + [ + 6.4375 + ] + ] + ], + "params": { + "weight": [ + [ + 1.109375 + ] + ] + } + }, + "single_transformer_blocks.27.proj_mlp": { + "inputs": [ + [ + [ + 16.125 + ] + ] + ], + "params": { + "weight": [ + [ + 0.75 + ] + ] + } + }, + "single_transformer_blocks.27.proj_out": { + "inputs": [ + [ + [ + 9.0625 + ] + ] + ], + "params": { + "weight": [ + [ + 0.80859375 + ] + ] + } + }, + "single_transformer_blocks.27.attn.to_q": { + "inputs": [ + [ + [ + 16.125 + ] + ] + ], + "params": { + "weight": [ + [ + 0.57421875 + ] + ] + } + }, + "single_transformer_blocks.27.attn.to_k": { + "inputs": [ + [ + [ + 16.125 + ] + ] + ], + "params": { + "weight": [ + [ + 0.6953125 + ] + ] + } + }, + "single_transformer_blocks.27.attn.to_v": { + "inputs": [ + [ + [ + 16.125 + ] + ] + ], + "params": { + "weight": [ + [ + 0.59765625 + ] + ] + } + }, + "single_transformer_blocks.28.norm.linear": { + "inputs": [ + [ + [ + 6.4375 + ] + ] + ], + "params": { + "weight": [ + [ + 1.1953125 + ] + ] + } + }, + "single_transformer_blocks.28.proj_mlp": { + "inputs": [ + [ + [ + 23.25 + ] + ] + ], + "params": { + "weight": [ + [ + 0.87109375 + ] + ] + } + }, + "single_transformer_blocks.28.proj_out": { + "inputs": [ + [ + [ + 12.5 + ] + ] + ], + "params": { + "weight": [ + [ + 0.8828125 + ] + ] + } + }, + "single_transformer_blocks.28.attn.to_q": { + "inputs": [ + [ + [ + 23.25 + ] + ] + ], + "params": { + "weight": [ + [ + 0.55859375 + ] + ] + } + }, + "single_transformer_blocks.28.attn.to_k": { + "inputs": [ + [ + [ + 23.25 + ] + ] + ], + "params": { + "weight": [ + [ + 1.875 + ] + ] + } + }, + "single_transformer_blocks.28.attn.to_v": { + "inputs": [ + [ + [ + 23.25 + ] + ] + ], + "params": { + "weight": [ + [ + 0.48828125 + ] + ] + } + }, + "single_transformer_blocks.29.norm.linear": { + "inputs": [ + [ + [ + 6.4375 + ] + ] + ], + "params": { + "weight": [ + [ + 1.171875 + ] + ] + } + }, + "single_transformer_blocks.29.proj_mlp": { + "inputs": [ + [ + [ + 17.125 + ] + ] + ], + "params": { + "weight": [ + [ + 0.6953125 + ] + ] + } + }, + "single_transformer_blocks.29.proj_out": { + "inputs": [ + [ + [ + 10.25 + ] + ] 
+ ], + "params": { + "weight": [ + [ + 1.109375 + ] + ] + } + }, + "single_transformer_blocks.29.attn.to_q": { + "inputs": [ + [ + [ + 17.125 + ] + ] + ], + "params": { + "weight": [ + [ + 0.53515625 + ] + ] + } + }, + "single_transformer_blocks.29.attn.to_k": { + "inputs": [ + [ + [ + 17.125 + ] + ] + ], + "params": { + "weight": [ + [ + 0.48828125 + ] + ] + } + }, + "single_transformer_blocks.29.attn.to_v": { + "inputs": [ + [ + [ + 17.125 + ] + ] + ], + "params": { + "weight": [ + [ + 0.52734375 + ] + ] + } + }, + "single_transformer_blocks.30.norm.linear": { + "inputs": [ + [ + [ + 6.4375 + ] + ] + ], + "params": { + "weight": [ + [ + 1.546875 + ] + ] + } + }, + "single_transformer_blocks.30.proj_mlp": { + "inputs": [ + [ + [ + 16.75 + ] + ] + ], + "params": { + "weight": [ + [ + 0.6328125 + ] + ] + } + }, + "single_transformer_blocks.30.proj_out": { + "inputs": [ + [ + [ + 11.375 + ] + ] + ], + "params": { + "weight": [ + [ + 1.3984375 + ] + ] + } + }, + "single_transformer_blocks.30.attn.to_q": { + "inputs": [ + [ + [ + 16.75 + ] + ] + ], + "params": { + "weight": [ + [ + 0.515625 + ] + ] + } + }, + "single_transformer_blocks.30.attn.to_k": { + "inputs": [ + [ + [ + 16.75 + ] + ] + ], + "params": { + "weight": [ + [ + 1.5859375 + ] + ] + } + }, + "single_transformer_blocks.30.attn.to_v": { + "inputs": [ + [ + [ + 16.75 + ] + ] + ], + "params": { + "weight": [ + [ + 0.54296875 + ] + ] + } + }, + "single_transformer_blocks.31.norm.linear": { + "inputs": [ + [ + [ + 6.4375 + ] + ] + ], + "params": { + "weight": [ + [ + 1.5859375 + ] + ] + } + }, + "single_transformer_blocks.31.proj_mlp": { + "inputs": [ + [ + [ + 20.75 + ] + ] + ], + "params": { + "weight": [ + [ + 0.65625 + ] + ] + } + }, + "single_transformer_blocks.31.proj_out": { + "inputs": [ + [ + [ + 13.3125 + ] + ] + ], + "params": { + "weight": [ + [ + 1.390625 + ] + ] + } + }, + "single_transformer_blocks.31.attn.to_q": { + "inputs": [ + [ + [ + 20.75 + ] + ] + ], + "params": { + "weight": [ + [ + 0.640625 + ] + ] + } + }, + "single_transformer_blocks.31.attn.to_k": { + "inputs": [ + [ + [ + 20.75 + ] + ] + ], + "params": { + "weight": [ + [ + 0.47265625 + ] + ] + } + }, + "single_transformer_blocks.31.attn.to_v": { + "inputs": [ + [ + [ + 20.75 + ] + ] + ], + "params": { + "weight": [ + [ + 0.73828125 + ] + ] + } + }, + "single_transformer_blocks.32.norm.linear": { + "inputs": [ + [ + [ + 6.4375 + ] + ] + ], + "params": { + "weight": [ + [ + 1.5078125 + ] + ] + } + }, + "single_transformer_blocks.32.proj_mlp": { + "inputs": [ + [ + [ + 20.5 + ] + ] + ], + "params": { + "weight": [ + [ + 0.7578125 + ] + ] + } + }, + "single_transformer_blocks.32.proj_out": { + "inputs": [ + [ + [ + 14.0625 + ] + ] + ], + "params": { + "weight": [ + [ + 1.171875 + ] + ] + } + }, + "single_transformer_blocks.32.attn.to_q": { + "inputs": [ + [ + [ + 20.5 + ] + ] + ], + "params": { + "weight": [ + [ + 0.55859375 + ] + ] + } + }, + "single_transformer_blocks.32.attn.to_k": { + "inputs": [ + [ + [ + 20.5 + ] + ] + ], + "params": { + "weight": [ + [ + 1.421875 + ] + ] + } + }, + "single_transformer_blocks.32.attn.to_v": { + "inputs": [ + [ + [ + 20.5 + ] + ] + ], + "params": { + "weight": [ + [ + 0.52734375 + ] + ] + } + }, + "single_transformer_blocks.33.norm.linear": { + "inputs": [ + [ + [ + 6.4375 + ] + ] + ], + "params": { + "weight": [ + [ + 1.6796875 + ] + ] + } + }, + "single_transformer_blocks.33.proj_mlp": { + "inputs": [ + [ + [ + 19.5 + ] + ] + ], + "params": { + "weight": [ + [ + 0.90234375 + ] + ] + } + }, + 
"single_transformer_blocks.33.proj_out": { + "inputs": [ + [ + [ + 11.125 + ] + ] + ], + "params": { + "weight": [ + [ + 1.5 + ] + ] + } + }, + "single_transformer_blocks.33.attn.to_q": { + "inputs": [ + [ + [ + 19.5 + ] + ] + ], + "params": { + "weight": [ + [ + 0.5078125 + ] + ] + } + }, + "single_transformer_blocks.33.attn.to_k": { + "inputs": [ + [ + [ + 19.5 + ] + ] + ], + "params": { + "weight": [ + [ + 1.3828125 + ] + ] + } + }, + "single_transformer_blocks.33.attn.to_v": { + "inputs": [ + [ + [ + 19.5 + ] + ] + ], + "params": { + "weight": [ + [ + 0.4609375 + ] + ] + } + }, + "single_transformer_blocks.34.norm.linear": { + "inputs": [ + [ + [ + 6.4375 + ] + ] + ], + "params": { + "weight": [ + [ + 1.640625 + ] + ] + } + }, + "single_transformer_blocks.34.proj_mlp": { + "inputs": [ + [ + [ + 19.75 + ] + ] + ], + "params": { + "weight": [ + [ + 0.9765625 + ] + ] + } + }, + "single_transformer_blocks.34.proj_out": { + "inputs": [ + [ + [ + 29.125 + ] + ] + ], + "params": { + "weight": [ + [ + 3.109375 + ] + ] + } + }, + "single_transformer_blocks.34.attn.to_q": { + "inputs": [ + [ + [ + 19.75 + ] + ] + ], + "params": { + "weight": [ + [ + 0.498046875 + ] + ] + } + }, + "single_transformer_blocks.34.attn.to_k": { + "inputs": [ + [ + [ + 19.75 + ] + ] + ], + "params": { + "weight": [ + [ + 2.015625 + ] + ] + } + }, + "single_transformer_blocks.34.attn.to_v": { + "inputs": [ + [ + [ + 19.75 + ] + ] + ], + "params": { + "weight": [ + [ + 0.447265625 + ] + ] + } + }, + "single_transformer_blocks.35.norm.linear": { + "inputs": [ + [ + [ + 6.4375 + ] + ] + ], + "params": { + "weight": [ + [ + 1.8125 + ] + ] + } + }, + "single_transformer_blocks.35.proj_mlp": { + "inputs": [ + [ + [ + 24.625 + ] + ] + ], + "params": { + "weight": [ + [ + 0.8828125 + ] + ] + } + }, + "single_transformer_blocks.35.proj_out": { + "inputs": [ + [ + [ + 16.625 + ] + ] + ], + "params": { + "weight": [ + [ + 3.0625 + ] + ] + } + }, + "single_transformer_blocks.35.attn.to_q": { + "inputs": [ + [ + [ + 24.625 + ] + ] + ], + "params": { + "weight": [ + [ + 0.63671875 + ] + ] + } + }, + "single_transformer_blocks.35.attn.to_k": { + "inputs": [ + [ + [ + 24.625 + ] + ] + ], + "params": { + "weight": [ + [ + 0.62890625 + ] + ] + } + }, + "single_transformer_blocks.35.attn.to_v": { + "inputs": [ + [ + [ + 24.625 + ] + ] + ], + "params": { + "weight": [ + [ + 0.5078125 + ] + ] + } + }, + "single_transformer_blocks.36.norm.linear": { + "inputs": [ + [ + [ + 6.4375 + ] + ] + ], + "params": { + "weight": [ + [ + 1.171875 + ] + ] + } + }, + "single_transformer_blocks.36.proj_mlp": { + "inputs": [ + [ + [ + 24.375 + ] + ] + ], + "params": { + "weight": [ + [ + 0.796875 + ] + ] + } + }, + "single_transformer_blocks.36.proj_out": { + "inputs": [ + [ + [ + 27.875 + ] + ] + ], + "params": { + "weight": [ + [ + 1.1015625 + ] + ] + } + }, + "single_transformer_blocks.36.attn.to_q": { + "inputs": [ + [ + [ + 24.375 + ] + ] + ], + "params": { + "weight": [ + [ + 0.53125 + ] + ] + } + }, + "single_transformer_blocks.36.attn.to_k": { + "inputs": [ + [ + [ + 24.375 + ] + ] + ], + "params": { + "weight": [ + [ + 0.6328125 + ] + ] + } + }, + "single_transformer_blocks.36.attn.to_v": { + "inputs": [ + [ + [ + 24.375 + ] + ] + ], + "params": { + "weight": [ + [ + 0.59375 + ] + ] + } + }, + "single_transformer_blocks.37.norm.linear": { + "inputs": [ + [ + [ + 6.4375 + ] + ] + ], + "params": { + "weight": [ + [ + 0.546875 + ] + ] + } + }, + "single_transformer_blocks.37.proj_mlp": { + "inputs": [ + [ + [ + 28.625 + ] + ] + ], + "params": { + 
"weight": [ + [ + 0.46875 + ] + ] + } + }, + "single_transformer_blocks.37.proj_out": { + "inputs": [ + [ + [ + 23.875 + ] + ] + ], + "params": { + "weight": [ + [ + 0.88671875 + ] + ] + } + }, + "single_transformer_blocks.37.attn.to_q": { + "inputs": [ + [ + [ + 28.625 + ] + ] + ], + "params": { + "weight": [ + [ + 0.46484375 + ] + ] + } + }, + "single_transformer_blocks.37.attn.to_k": { + "inputs": [ + [ + [ + 28.625 + ] + ] + ], + "params": { + "weight": [ + [ + 0.6015625 + ] + ] + } + }, + "single_transformer_blocks.37.attn.to_v": { + "inputs": [ + [ + [ + 28.625 + ] + ] + ], + "params": { + "weight": [ + [ + 0.361328125 + ] + ] + } + }, + "norm_out.linear": { + "inputs": [ + [ + [ + 6.4375 + ] + ] + ], + "params": { + "weight": [ + [ + 0.345703125 + ] + ] + } + }, + "proj_out": { + "inputs": [ + [ + [ + 24.375 + ] + ] + ], + "params": { + "weight": [ + [ + 0.1376953125 + ] + ] + } + } + } +} \ No newline at end of file diff --git a/examples/stable-diffusion/quantize/measure_all/fp8_hooks_maxabs.npz b/examples/stable-diffusion/quantize/measure_all/fp8_hooks_maxabs.npz new file mode 100644 index 0000000000000000000000000000000000000000..d08514bfc03a9d4adc6ccf4c73648b115f9c3b72 GIT binary patch literal 97750 zcmbrH54_E1cJBQdR7O!5)KoDth>UxP3`T0sIT%zXgYI0!T;p#J5rX}*9V6r4m13kR zt_()17%3`EQ7LMws1!9ds9aQ{y@ z$jt=cqeHEKeVNrsOfIw5(XME`To;o~Ni z@V`%barnd%_~WhHG@IV+|L^}EnN@Y<_{lF%oLVq)LQ+!j^5k)uG3+~h>LWvwl93a} zPs${dM`j*LSF%%&jDNwZ>B`@(G$&o9d*u=7s`JucNTsVi{_V=Gvxi>DOr2QrNV@v- zRS?+s8|fPJe>gAwYpHb2$G=_mdC8uZexy&IKF{3c{{nowD3ku;i22`3*P5RG^~axo zgd@GiO&BqJTx$6EvFX}+5e$VpXa~8|L~m&FO;O~K3|LUTT{mme|gME zj*l)$|Hkvx_@DYvos_OOJ^kq8&p!drjCr|aXr|=*nV}^wk0^PeKzvfBWTH|22^WkT zGrnYaGW5yxm!_xdKmL4m9vU-#;^fSvbc64u8@^cS#i02&Uqo=DRQk&Z*8iKE{;d~H z>8P)#8%ybLr_xQL(k|(y_bRQFNjDoYzdQTSDoTGPlWsm@c$hSCcryIuN$JO?r@tEb zl#(%{U(BRi#IY?kwpA+qxQ}i9Vw2Ekabfz2O!{jhCQW`go$b?Yrlm!E0$ci0a^u0RjiB75XH~fjt*(cs8NH?bV}fubz6MS1SFx{zUKW z6UV=qekPOt_CxR0KK<Hltf^)K6NfSwqb zNTT&h|Q7oPITv{(YN1r)>IMJuxqp{-HlHKl?=IqV#K-^dD%tUZ%e9 ze|*!gPf!10n7bg(U8pXLQtAKYT^47%tS(N!kxBo^<}N9lyHrmsOQnD8Pb|+qQKu;V zW+wedn)?;~>EJ&MllTwB%O%Os5#uI|96PCi|HQzb%)X4JS4>a;aad<%TxXS-5C>RQgZ-i4EB&{-H4ab|(F2TBmh|>kQ3)ed1d0Oi%xL zSZiZkYm)lNHFKw-@WovEI6Wdeizw#$`WS^*2l>TWZ{nuKnNv^eq zXENgpG82ZT(>tf9|0e9REAFyeo%f{Df9sw1W;=hsYx=!R`tNL)ePz4s*AoX)>A&|U z4rZTdT%7({CjAe2?J_ofXnOkRVVA>kmm}(YG?o5G?|iImm-jR2|82V*FWcpWo;aCG z|C2v)D*MDEMd=SR>3_~^m(uiy)6@Sa>~cEpaz>rcrqaLg&gaT@`6!eArR{RQY?ljq z;$kZOFaE@(>=XM7(;sKj|EgWSTH)7d_zN!#9XoVlGU284<>~2v3maXD8(mfZYpL|V zd;ja%{x5V*f09Z6hi!DDY@?fc;#MmCpZ>({vVS_CX43yRzl}<5qtB+N|0`^CCvJ3? z{z)aSH>spD7^fsvAii2s6_!c90EPvVYO2QnnkLo7Xj6ljN`4LWHZ@hU|KcZw$s@2# z@{}!tH{w>95(~6i%9tXWmYn80cKNcoW zz%t3#zz2T?*k6B`(U7#^f$7PA1A~GBlBl4qaz?i!rjjRhBr0gHl8wGvoIC}~Bwq(j zK?hY`K}Rv#bRwpbZ-CyWvr5+HXkqd+ER*~WSm9S-)Tn~-CH%uL(7%aE7miO)z6pl? 

Hhiz` zQy7K;%v*t8kv`)A`~nyh+#!hy?kZ<=r7vl8Wza`gfn-O2oA)EZ@C%?RsHUnbs4hmE z8btg8=xu7MWD1J$3o!fw_~3sR%6{M?oz&t9`~nyx)Fz1%>L_P+T_S!#N8%Tuo=W~j zcoc?T08K)DRb4^@G1@dF;uk<~(?}(2vkkuh!!LjjBVlaPm?v-xV35#+BuZ$ioY~EY zxCI@F5}K>z5*~x$7C@8GLRFX0Qj9jOh`0sN+q726+SJ1>z;FxT!$>Gi+VBK+0Spof zNTP(c%9-7ch+WW;D51SdF5xK{b^$aA9aMD*9mQzViHKbQy-jD8tj+K9{RJ3y0hEM_ zUyUjR+QWK~^4sTggh5b+D3x0$Mvwc!UL zUxDElz=x5*`;mA8w*UqSGf1L@naY_xi-=p$ktkudN-p757;XVH33F6+33J70GmnT{ z0KLt8m8?xGzP|v&Er1Urf%hZv1a<)o5*Cm|2@91odl3=4pd(SjVwGIN8!+qwXcCsF z>JpZU(PkMDy8wEdH50>1zT35Q9dgd@tCeUyk_(2*$N zm`eUdcprve08PSiRb9dfG1{CY;uk<~b4n#^!^ata0K+eU47l*NE5!9f=aItK-mvB?;|5bL!F`JKBn#ZfkVrtf4 z8BFz9Wy)Z5q?oc)bSxDXt4FA15xt@`L5QicWs4;iO_1ufMOZ4;F$ja`6%0Xi1WORT zg6JSRf+0m$M-YPO6}6x1ce{`0{bT!io|(*b>{Jod4KKl-tSw+d?m6Aw8ng$ zRvY8jHvK`xF3=$S%^$sL`?oVMZGYns%RmQ;ZzY{#V{+?fZI|`4X|125_-?&d(k3dF zfi{Xw(=Ce4QW>+k$TH9xGdHb9@n=g`{;7&(pyNLX6m>^(3ny3xIv{K*=?GgTw|i@m zWymA_gV-jmwtIyeNh+3sHiYfcErjh;8MA}PGSC_`FReDlf4lua#WK*z5%N;n(FvA; z4hTC*I>P+qcJC~*40)s@EJ&;E&R?IVVi{;d*frfk*e#VYyNfIXtucF~)yAx08B{C- z4MKi~;s1Ya+o$mTTWL>6cm_H!ER=MHy^>qLx5zW(kPP|_I=N^bcQk!8puo#EiLis29y%Rn2$ zq3IUGVX2HcTx1z&jX5H%HpZ_T`LT*+pi?opmE;K1KnI4SB%R^t-pj4YV;Fmu@i}pURjMM5ck(m=n`#V}9nl3@WC9hGEQmY39}KlN@3k=s#i?nv_1;UTsTc>^C{9neD9%V_%$XwNKx@odX|*xGV;oeB0}aI=|3QYo z9l5)GjuVUn9T1jFI>L(NcAqOU4tb;_tW2xz_7}LEr(zsvLs*q=A)KGemK0aAmrM za8)W}t`->wT4PqH)y7=KIH(v0I>Fb&=@mpX%)jAD#n2}hC9C}3T5XJf;`^nFaiCK%xSQk%>p%yF`y`#= z{^XWFAhHg5q%%C2Rxv!JVjXB>csSi+cqElEkBY1Vtuc?K)yBN*dkreqfreqsduiqc z?#CTsALu~wgrrkEncVuPMD`(%bc(0bYU{n1o>8$6v{5{pZc#j!%9!Ux_JP)z7t(5D z{+WGHu@5v9W8O<6H@IJPf_I>LKtwcUQq z?R^#dKpVpPbPM5wRK|QLvJbSzY)Gq(`7i8)ihZCHd_9cb;r_@G{(%k*A4@vJC&?}U zROBD>NN4ygt@;=~SMd+DF?^A3F?^ZIn6E_sf!3I>(`sYh;2%`{1D%S&1tmwg2Rbl( zE9ndylUqJ(`z)VLYxx|-_Y9k;xCh!8Hcht}HcMs9<|6k%Ys}oV8pGFqHIj;Zpi?op zq2vhrKnI2`C7ofbAx@tv2QZ z-*Hf}4>Sy8-b*vDaqs952SEplog|%Nesb$~7CDGK(kT|C)z*72?V{o!XrtIQ-J;kn zl`*@E90aW~d!+T<7!E?kLC{c)c`uFJ`Eiw>!q$4a!tL?tq z9VHb5K^wxp=@!C%sf<}HG7z-J?4MQ}^Pk*NQZW#8a)i8>4se2hpaa5zl8$gta=Vv^ z{6ik;2nVOtcKbbBhp6}m+7J#+w-63XWz69s|3GWZ5oxtCKjR-%`~#if>tXaJ_mPh9 z4|HHSO41pQPHy>9k$=b|o#B|Y>SH)o#Xr!-a9p~@aC|CbP7wJAT4PR3tBuJIL96%& zIu(O!N{(<3bYM7H(ixT|xBL{5d&nc5;ncK>;WQQZKpVs9=@!Eosf;;OTg5%lsTkZ-a)f=L1H*DjXIPQk@^eM@A&+#1m1z~jc`EjSHilK{7Q^|ejJZH$ zA83uaFs(MmUk`DSihZDA81r75d71lShd2m2P+TJE6qhEq{xXq+$RnNN^0eA|@1-kL z90Y9?SEgGOSEVxMYLSDWHD+~MZA>mIsW=E4iZSn{k=xwYIKe>B0bz}#BV3!@?(0Mb zB9C;0>(gqxUvX1O#X!)8aAUfKa8oK{)`|=StugD;YGeG%nO~?F2s$}J-b*(-!9UOe z;TB0pxHY-mw~72i9_a|Tr`2|!?5dKAf1nNF&U6dmu2ja{E%Fbv#@v%u8TB}r#^Il1Mpi0nfi=?t%?RSdsTu@AH{ zyq0b;yq?OKH$?V<)|fZbYGeEW=xLC}HXZAqtiC%N_SiX229 z=@jp!)z*72y|3aRXrox4Zc%)Y%9syD4uaO04QaJ8bC#_9y^4dNp&0XC8oAT`krNCA z9S}a2bc9cm+x@A?K;)5*@L5`I_gcR`NyR|WhVVtYh45u6W4;m@2wG#lPOFXam-+oc z#X!)>5%OO8#tHs`4hY{$I>N@}cF)=&yJyqdJxB4~?(Ax@tv1Hr6Z``e|3If=aAnC6?tu;rJ4rgj{N$GJEOHNdq%$l? 
zs~C1saSyaH?3!*d?3T)y-9_$!)|frgYGeH9+YeRT1D%S&oh3)u2Rbk;lyru@l3TvF z$Ufwe&af!0V%SH;KG4RnZ@R^>Un*l3i|hlfG5e?0#<(o=BNh8V!!YK(H1lHj0S<8x zbf7p;(kTu~Zv7IGgUBPD;^4H}dhew}R2&3t6o;l;6o;iU=5UdNpf%=*wAz@tE-k4z z2pWnp@1>EO-A6jXK+pl<|Y*2Z~!Ho#NKy*54*_5P76i+@4lj@4a+~ii4ny;?8u7;;vN2+%0kt zw8q?%RvY7oa(}7fAZRGYyq8Aqci-y-13?Fb`y?IV{^WK)ATkhnq$50-R@?m#?k=eq z2-*-HPPY&qNoCBVA_GBd%wuV_G5O1zRSX2393k(e$DQCG=z#Eqq$50;-0r7D{vnTa zgs0PLyYF&&NyR_VhVX2gvmjok9y!U+a~4hUOHI>J`T?cQ2sAo586 zAht=X?e^DqZmVJ-XhYa8-9p$tl`%Vr35%ONz(Fy*64hTC* zI>P+qcJD0m4|${`EJ&;E{(JYARQv;N2)m|R2)m^+W_OW)pfzTXwAz?!_y-mLKqvTm z7`^Acrz89W9T*l$I>TPcE#F(@AM!|NSd> znEZ&eihrO}F}T0v2=_n-h65#?;h^M}FA=$iJkl8sPOBIWQE?BnF&vt1F&vi4n8QWx zf!3HK(rRPo`~HK9d!SP>xWD8G`#=YVqa>Z-=;W3!71@V8(ix6Ps~C<|u@AH{9G7k} z9G}XV6GZlb)|eC1YGeEu)=yOI0}aEN_tMO(-X}T4LC}HXWJ#x3mfZSNL=Ga4bc$2c zYU{n1PE&CZv{9U%Zc&_(%9t}n4uaO0v(jo~+!Huk#X-35%OMo%L)F04hU~cI>I~2?S5C}AM!{?crUHC+g}{;zKVaK4Pkw{h44Wt zV?Gr52U=q`q}9gUz(1(?2Ri=i!Kd*3eeaJP;UDP0@Uf&be3IPqPeuMAk93C5(yEW) za~1zU8^agr7Q>gRjQL9BA83vFI;}Rwzi<76ihrO}F}T0v2=_n-hHoXEVPkU3XU)s< z*|e6=QGCy^iHdumjbYPti(#`=#%wNf546V2O{+1?<{nhs1D%S&{Ut}(2RblpDd`Mb zCAWNQk$uP`{d3qRt@<3cRk07WF>IG^F>Ig8m>op+f!3ILX|*x_`{4&F_JM|B%zJ6( zeeWF|;vnciv6G}z%ujCp&LRhqM>@rVwAy;_rCn4U1Z@<%rdt%dr7~uBk%OQ$W{G3LEAa^HJTCm0AiAS{%0guRm6y|>6fsgGwtJ!b zODg_>HiSddEri2T8FRSEKhPR;L|Sdk!~BDaf1ne5J&fM>KGG5Xfes8uNjk&P$t_6nBBb6~{irfROF=wUK#(e7jl8SqvQ!%)| z!j*Lmuf2E7K~5^Hl5uZ49f@Er#<`8FPWiKF}I-VOnj>Z{1%~ zu@5v1W8Op7$y>x|&gP@J#%5;n3s#L~Y zEpiaF#;i`Ojrj)-Ld8MQP>gvmjokOX#t8<34hU-`9pT#Kc3&ql5P75{T%T6k?efnJ zDh7f!gd5W>gqu@mPX%)jWD(-HhXT5U{z z*^-KTpi?opzvKw}KnI4GB%R^q zW8O@wjmh^PRO|x{!}oB@1@n&doR7O;vi_F zSf6fDe2~hR4@C}w)|d@xwK0ArlHBf3MFt{| zbcD~+YP)Z7e@Vqa(1!3ux`ps%Dr3G983KhbcU^xTfVi(Kje}AF>I4oeGJ>G_y^h;woA7dwohfu4kG_RYs|c~ z+L(>*FRAzkIu(QaOO9|4bYR#?(i!F_w|r-jd&nc5VL@8Ou#1X&pp9YIbc%Er_QE?EoQ5>3XQ5=@an8QU5g4UQL(rRPYa}X*Hf`($uduimp_mNI85OhE|O41RI zPHy*7k%7n~9pRX?+V21A{*sD;pbg=;bPM75RK}bjG7z-JoS0S{ zPI7{Opaa6ml8&$}x!tFT{6ik;2&bmicKi2Br>Xb{+7M1pw-C-qWz3l(|3GWZS!uN~ zt9V+P;rvv_ zTp;ofw8mVRRvYs>{z1h*(5V>QUvh+dpaa7tlFo2xa?39hxraQ`87@z&7_Lxp5415{ znQk#$mCBf_Mec#tnAK^uF}|jMuHqi(R1EGfIl?~BfnklLGhCb8^6NzQA&+#1>(eTR z8&vEAZ45W2TMRd)GG?vFKF}JoF0D4^pZxr^ihZDA81r75dEfhHhd2m2P~0Nv6t^a~ z{x*?=$RnNN_O#l1@1;9b90Y9?ccxnuccn7sZjpnaHRhhQ+L)hn5GoFWhGNWnY2?25 zy-qL?bU?UI(h=@YZubKs1Cd8M!h>nG-M%09kcxqz4dLN*3*nJe#yl!A5VXcTmR1|{ zKYas2#X!)>5%OMo+zI}H4hT<3I>M95?S4w+AM!{?csi}N`ylt1RQv;N2+yWl2+yT5 z=6R8Spf%=&wAz^e#XqR{2Rgym!{~kQ7aidr=)mxjq%*vn-11jM{vnTahF8<7kKs2e z{(&}z*U~M9*HanuhR8qA8uMmaZH#|s{96_OK&N7Gf5{QAx@tv2RN-+)jt5Oi{cyq9)#f`6a`!cLNoFh9B7JB$279_a`R(rUYZ(C_hJ}*Ouvc=+_ZInw zJkl8!rBxrpJ}Ul!HimuEEr$J48M9dAA83u)KdmiwAz^5UsACT zGz??jOEd3#pX3k+K?jPHC7ohfa_dhKIfy*cDNaqRt@mC!O~pabMsa$&MR7(dW6l&g z2wG##N~?|WL$7D6I0zbwG4G|3``+g`!9dUfVY#FutVnM6xgrCRM>@jFwA${Uxxb`h zAZSBam2M%NpURjEL$+yfmLZjp3`Ta#OUo5(%nkw z=CQQen0f4jihZDA81r75dEfhShd2m2P&^^&6i+6%{wa}z$RnNN>9pE<@1CZ_pjVvQZW#;A-tAuA-tZ-m^VZQg4UQf(`sXUGwZi127*qGkoVGCPVf(OKzLiy z5#C8|_q!thkViVgdug@Z|K9y275_jR!uoUz;e%Ahd?@k{w8m^mtBpC8e^Bucbb_yk z(fi&XIl@2Cf#G9GXZR$!<)4cDLmuf2pQTkF!{;jgfi{LO(k+HBQyKG>$Uo2;^L1Kn zjNkS62NnN7r($q_$r0{>4h-K)I>W}~me1NT%V*PCK1cCA!zL>3fi{Ls(=CS0QW>+k z$UV>+GdHcq@G1A8;vVQ!4DK&E!amS}VM|G8*ebc@TZ`;N9_gRMHfh!8u&s)Hpp9X> zbc}oB z3({)qy_a@TaS*go?3!*-?3T)y-9-+9)|frgYGZEVAXFR#4aJ!E(#U=9J)K}6=zy?L z(h>GbZuj0I1Cd8M!lJa=?$z!usTc^_5cW;C5cW%D%wmy&pfzUywAvW|CHEs013@Q8 z$b0DkC-?_CARH*^2nQv%dx^+Dq$8fBQf1r)wxO9u* z_*BN6Ao35i#+;Z|8#9-GQ1K6RDhBtL9N`}5z;LppGb~GP`6(jzkViVhsc99%X)5l4 zHipyFErv5v8FQw{JS7aaZNM~4? 
zRxzBXVjpN@Se0%uoS({=3q*WWFTmb zS(jEDvw(q6F%WceguIt-c7lJP1Hvtmj&N&oyKfWuhdj~|ZcnT2-p~Cd75_jR!ky_B z!dcy7#>Mw%%dXzKx@onX|*x_o$;?!`~#he!TlvixCc5gJR#`}PbRng zDUo}~Bc0*tw2I*w756|J!?WoY!*i*Od0ylmXpMOxtv2TWa1Sc(flkHX{*oi?105J% zl5~cblUx3Z$Ufwe&hTnl#qb*y`#>ARYv~rl>#2-+Lu4Oljd?SzHpb7P{8q(2&@ha7 zFU`E~{gy)<1RW^emUN1Dl3V|-$U)?hPVrt^ZN2x>`zj8CHj4G>7R3jtjQLRHAZU%* zkXC&XzgKY(G!$drOC$HaKXQVBpaa6kl8*36a=Skj8Hha65k5<+?e@d?pQ{)M+7P}- zw-CNeWz1J113_!d*J-seehB;zDh7g1j*$1#H%{;mbU^r4(h)W$w|myV&hFW?cF$3K zxBEEvmsI=%Z3vsDTL_z_GG=p;f1ovHZdwiDLHAx@tv2SbefvShKhUWd++T8p zd!PfuPLj?rKe^>Qi`+vV=?n|fDu!KD+yiY4yQW(VyQMN_caeLbHD-^r+L+G{T=_#4 z_dus&aDT}W_JIxz3niUlujH2REwT@Jq%$l^s~GlCu@AH{?3->e?3c=z#UlGaYs~&> zwK4yeeNeFvGz??jOEd3#AK(xNK?jNhC7t4+sXDw)>~XK&N7Gf5{QN`Fs-)R-)8xcih-aF;o)=(;gM9vJSs8}w8lJ^RvY7WFPWKXZS3wV)$Ie zKG4SSMY_fCWh!I764?h@W4=zSjrqv^B^CQX!!YK(H1odqHx6+SbfEZF(kV73w|>@6 zSwEZB`ZqLx5z)_k$+yfmL4wQ6;gOXdmMC2awNM|@W ztztMt#XZo*aA>;4a9Ap14i~uxT4Rn#tBvv7?tiS}9_UmI?k_pQKG1>TC`o5HI=SUb zMfM?&bcSQnDu!cK>;r8K$E8~g$EPyp1d)B9HRi;$+L-lzepcN~O@ zgP@@p^IjUc?|qIF34vyq$8|MtL=W){UsFxK^wxVbPM79RK{E& zG7z-JT$olHlQ1K76G2EDLG2E2On6)DRKx@ppwAz?m_y-mLK&N7Gf5{Q}oBPp8$^doMks;vi_FcsAXlcrKMO&x;%c ztuZg8)yDWY8^2a@5Hu8H-b*9*yKfuIAzOOlT8a&o(05gCX)(h*)wtL=Wq{UsFx zK^wwr=@!E4sf>9;WFTmbc{8mx=12xY#X!)>5%OMo%L)F04hU~cI>I~2?S5C}AM!{? zcrUHC`yKa}RQv;N2FKLtv2R=@DD2ffll!CFnZtnBS-iLIxu`J z=?tGFxBOF)f5;=9;j^^rWB6RfKhVbTMY_fCWh!I768Q&OW4=zSjrkMbeo*lbbSeh- zmmJ|9=)mx;q%&+xZuzYFSw5TA@;Qp{88%UI5416Cnr<;{mdcpTMec#tn7L^+h6UV% zihH0_F}T0v2>U<>hAkzXVXNepZ!NM9d8B_1+oV;W!?r5+fi{Ni(k+JVQyH^^$Ue{- zGcT<+<}cU>75hNLFy_59^S<|v4sj54px8;$Dds1)erJ(`$RnL%L0WCS_tGvZ4uUp{ zUDGX!-BKB|yU0P%8nZ`QZH(Vi{zDZ9K|?X-y)<&)drv1A2s$7vlyro>lH0wv$Ux+g zj<6`LwtKz%ODYC}HiUiCErk728M9bqAZU%*Kdm;#FU|Rpih-b$BjmkwfD`-!9S{zb zbcBPF+r32OAM!{?I5@4g`$_kgRQv;N2#2Oy2#2LI=5UdJpf%=*wAz@{eD^`cKhO!j z9!BqbAL$7HKnI4SB%R^t4h$ztI>WN$mY*VW4|$|BoSIfKoTlO)Xk$1%-C{T+l`&_E z+ykvKXQkD~_&q0QtGEX`6@&Xrj<64OU|25c3@egbey+$qGOc1bPsKja#;_{g zVmLpQF&BvJ1FbO^rq#wg=I5tX>;nzMR{l=;6)Hx7wmDC|is#L~YEiwwU#;i`OjqyXq zKUXmdG=JDX@rNC5BIV2T8vh|HK!-o&8cF|CUYp$b>qJ%{kMuv~^=Y;7{>kSC6)Qm7 zpYq0Z>rZ)8Dr44)tN^Vs>(Xjt{>+a;t5^Z*Pq}K2y2+C1J5iw^mXUv#$R?Imo z=A3iR|2L&ryLRir^Tyv}yg7T;+BIv`kj>QuU%_$t%F(&8$NNUwb-O_i)lNy zXw$gGi18L|@VG(aZP;)vZ#8(##Nn(@965NxaCFO7Z5o#w|Ns4?-t;;{$4weLVQSl< z<864`v6IGxQS>`_YP~@=eCYUb6N60}8tRp5E&Qt9xM2&crP_UK%`7dkdF>&kIQ2)JX{($ce!oK;#TD=piyZ=HdPJ*`x)U%!6a{l!22 zhQ2*RX}KY@_b$~hmzHliupXamJ!brn!DF()<3^WO=r?d#R<<8MG%l>zZ(v;(b{Icw zc&Wj_CHbd2QyN}c>F zT1vKdR%)~;*`}p+ipgq)(z-)tZ%)6{dz97-rN%>MmYNJ)nj^+f3V)x`bkP~>>x^bu zX#>vaTYF}y`N&2KH+Czv2&I;6Z1s0<69(Jhu@g(J%cTwf)&~q9wcp54YO}~^Bl)z= zN*hO?c8h$vm)eI?&5((c{%`-f)Ra1uOC4K|^nWor>445zsmtI0V#FqUl)8peH=5jJ zq$ldG4>!$9Jz^)D<(=>o@5RTxM-rpYNbh}f?bsqKZ5elLwP;6=($=B0O{L!Vn>1?J z;BiBT|I5#G+j42Ur1!o$VEe4JL$UXMp|m6I^v`?WNgrleY3JCV06|`|jGYM^@T1?$~S5j_#$sLusG?bMG~!LFLlEN$-Poz>usowAlNwP#R7Vc&(eXRbP%mf%lj_t!-KQZA+d|;c^8L<(hNR6Ono1o_dQcP zj>t;0;*KNp9kWB}sQ**nHKn7=rDKx5kCpjxS?Tz+?;fQSLg_?WIVtb^WPNx_Rys9y zaa!KR>7g`-kIzuwXXbsMr5$HyrE}tra~JLCQ93V_&R5?J_}801f%8AP_?NhJLAi8c z(&0t2xi~9bl6Kg$G&htk<QI`;$JeOCYx54T(~j%2 z(hYIPjroq7Lg{98s6VHt_`l^!x0Fk_CcWJzgWI#x9mU@645hm`?(V#|d-UPGS?Rub z(*5~K4}{WuK7LTWJ(TzMuy#C>l^%^d9?N$;9!gKBwHVbj59I%0R{E&e`p2R4 z3Eg~}xBi(v{5>5gYk3Z{({``kIfwQS0C4t$(K--)E&C;*KBl9Y2NA&uV?$O05qL zVO-lVeo(3OOS$xG((i9F{XHxFQSA56Q2J}gOsmC|;4NEisXDW42{^Ml;KEL=wMyL8JxYP2eU@L%Cg&S=v@@d%`fO$k) z38}+YhBI45MGX(}l%Cd*m0+ua^t&3ACth8Ot!u!UtqI0H*HR%mbhoup!5V=$tfLZ% z*A;u>^`vlW3}@B^j808eaHsWA!J2_pC7x^>@M+nagGu5RNFCM^&a9P+g~Y8{3AQ0f zzipsA@kUZ?Z3}0%F&O)7r-Jv{9u=$x#G!*qBI`Sr1&mHzRdA9b 
zO5AL*zrcfs4I4Ci&;%R5zjbFr*)|1}+&z#wax*xyo+=h{_hKbjZ;-D0KzZ)XrP#U! zoY|IO>~<>^yxXl&!L|W$*j6QSZzuNLeWh^P9?onBFgo>9!JT$Q1?vx1mwWQU+&i(M zY#Es3-WjPQcY!n8RmDQ?0jvbu4Ww&L{&{Y$Jb!Q91I}zuFm}6_3f}GBs9^hmI1EyW z-1~|>_h2cS7y@TD6pT*8RB)%^s9+<&>T;JB=H8DDWg7`5xkn*&A9>Yqo zu^?TKgYw+trPw+F&a4E+ZcPR6b|NZR0CAY461gXfJ@)}pI8A{wn+is!166RRgHXYy zfy&)MV3vqM!(QunYKtOPp@r02t-JoQW|wjKdz zHVcfs9;rg?wVTaG1v?7F;b@geeT>*sA1j5^ad2kGgVE^(6+HEcs9-07|NYY@wf+rG zayyv~Wjh5-a-WLSk*C3#ovvb%yQj@zCD<7tU7rc%xzCbf>)CK-=YX->b5-ze&qD<} zAH?ATmB@Xe*mGYbh111wW|x4`X|4+HbSWy>W#D3e+9O7^9XFgGblboGOuL-V%XS5r zq`nfV1FwQJyIRFU>UpdLy9T7^YoR>#by94-9?t9rF!p+*3f}8Ys9-mPINYKVsc#i~ z>f5Anx*g8!4lp|1se(J*g$i~zsMPCJ`W=s{YwRArEZe;V-Ef#

    _L!rAA<704@cRBuT)*Ugzr^e*R{UY1S_7a$+e;KKRUx71wRmDR31*`;n4W#$ip*;N? zQfz$_&g?BP_WZUA-t#-CVDExByr&ZB-xquO52SGV5YFr)Fgks#f;)YJ3ic_e^cz(E zaTn6p*k|l0+vi{s{0pSc{1VRWD-{dDzh))aHy~|)3+2JTlVa=laArS%vE3h4@OFPf z1^XGq;TM$%{;Sx7|0adg?{H>+fYIqs72N4BRIpn7Z8ZgN^6$ah7p^!~o3G2Z1ek=b zgVcF-;mnp)u@HJGR)Q@J((*D;9(q|Rw$_6gp~2W{eHFabmG1=3)=uLJFsrFrzXUom#8lP8*_v zwE-2r!ZoXXTfb)6MtoYfwqTNYW26pi2WQq^#X{m5R)Tc^>9-@4C+;N0*3K}~G#LBr zs)F~~4aGDK;?P|s5^pN@#66^N+6-n$2BT9i72K&eDp((|s>Gvhb3QHG7GRQiOQa6l z3eIe66$^>CVI|nMApLF!<%#=Bv2}ZxIU0<8_EW+8+!4hb4dSqqN+iz2o_J>|oOXd3 zkHP3PKm~W&4HZm(v%IRrlWliCE!!Sol6X&~4%-XPY;P3{iT7b8*dUO8_l5GrgQeIy z1ZIK;W1quR@IHs5n4m!%_EU+(BgLM0loU?;!wkk?bQ+_AJB>vJ8wVPLNP&uIFwZ) z_rYS%eTWoJ(_uzqFgndp!JQ661v?z9E_b|p&16H_jsTO~vyeLSNI0|EDi(4d#Y(WF zLApK$%5xtp#n$6sW@s>Wdx8qy?TIL6Xb^{!RU-E(V$Xf56i%nXjK*Menxld{oq-B= zCRknWc=tMs4P`qUOmd%t)RE`HnVqL%A@}*L1iJvF>kFYg_eD}{y%=VO24lB#Rq$>v zMKMEzI9#q0xvvm=?klBmx(a4E2BXtF72N3>RIqD7)Px{lAwc0HJ+ zz5%HNZ-g_uNyS3yn^_5V3rNqmLV4=jq}X~p%nS|2Uhh=Fd%X+A3=QINk4mJzSL~_p zlfvnKmXSr=8rr9%e(?9s!fwk0N#CV{m4Vt60eW1S`Rw1nK%I zD9`=06kDHx8KuG4?Q<%4x6h*(r9m8CREgX#i9PqrQaHT=GaG}^X@Lsv^cpJI>)>L4 z+Qqxq8+=~2H^C(JTSy)FHk{czDi%_|%Sy2KKze>3%2R(J#numDMrbhh`mqY$>nA8i zXb^|bR3i1~Vo&{r6i#2l48~w|`dS5d`UVy3TTrPh+`Zbj^}E-1d|9^d!6fhxNS*Z~ zoY_w*76SjwO0Zu*+Wi&E1OFz)*56@9XE3(;rwZQYUnoZBI)6LVh7@=Su?Marg;QOa zp%{!#OR3;aOQV7<11fNZyO#p{Fq+)GmStPn>VZl6<&ZkKKAhR|DkkZB+X}1%TM;CE z11L|wk`!B4hMA$k*mFY_V$VHnRTMKch{NhCk$w%ar(aVFr?p_FVlX;2Qo+-&g9^4T zsPq-?UjIm6W9zY_Y>mMrcoU?~Yzk+#zKVt5%~%Px0Z7};p*(mCDYmwRnV-SfZfg}h z_=YIvXAp;tR3dm=u?OE+3a553!!a10YE$GcZwc9m^=FbTf{Qs?%AGuu(cLiqly1ltLu z^$g0x?<~dEU0|kZFt$8E1#fvb6w@?_!|o~(eh;yS-%|>wy^m`}?#1WXbSMe49&aAw0*EF>PmO0fMv`W*@7iAPDXb$^&?8jO98QNjBh zi(;AvaTu=>i6@9XaY+g%gBg;+=oD0Nr%9+_lfkMIkG2E&v}{wrB=J>w2j ziKnp=tPIlc!BC#~5Gl4!hnb_n*yjutywAf>%+VkYGgTt-5n@j~OA4nWVa8)HIvu5g zI~|P*b_`fm;>mU_pO)=7FiCtoQiq)YXLh2Bg~TVZ!rxp5>Gu>UPkgErTTg?TpuyPZ z92LCJGf+&>AP#4#MB=l>p7#qpb}TOd&S@Cnor<_mxs?y$WW624lDLRPb)EK`}vtI9#U^xvv*{ z?i-|Vx)EkH2BXu>D!9`vs9?8()#Z+NuiMyAw%frZ_Z>(bc_*CNT`CrG-_1&}dqBFr z7s_+rC&kwLVPMlni*I4r9Yx$B8N_i|D=)rXml!RWMt3huNbDp&(>u|MtN z-D@R2FWbssl6n=S4r~Z#wyKJS)T^-)Y;};H*MRcWYf7F7<>|MS zV(V5gGc*``-bRJka}V1V#S9JN&{rkWZ!h-rJ4oTw4`wO`qf>tsJpE3nU>T_N74BaD zNMB<+v!iUgfJyLOkvek#oY`(FCc%4}{wvnOb_Z#D4=4}5rxaWFf|;Me*zP_mc)Np8 z%+DYWgHbx;< zW@A+>gdWFAu<;-*Pk{2!B`LNV%m@v}R)Y%O>Le5+G>F3iDiL~$*h5d1!s$Sm`525& z(^PP$GAh`?ph8!;d&SWH>mKi3hp?+`)4?SCp-7!O1J3L)6${}HXC>H7kk*fY^6;~y z*m@+)G!4d!s!HO$0xEokyH|(g z?sY1kmhCh!Nqjm|hs}XAJ43}n;xkzZb{0s#XG3}7bEMdMF3dCy#y-ziA@p=Rw9?BEnAjQ@jVdiKs_Ia}k{*$={#T*UdaGOdbzFq8z?~uajPMGl+j81o} z;7<3Tg53*Nm3Xq<$ERhxA50QIfYf30;mjUXv5@#7R)ReY((fZsp7>EIwmt?kL4&c+ zCsgo0pF}Z1gE%~`5{aJ?d*WxMaC#1AFb1R33o5wNi>P2PflA!$-)GYJTbnt zUICNbuOfBi0ywkRR4n9vot0p3fOP#Pl;?g+imh+MOweHL_FWac+xJjR&>#*Us6_4$ z#h&{kDV#or8I8f{^r;H&^cgDH=U{cYq!6f&RNFBKpoY~SU7IH7cO0Z=?x~>Q1xtEh-Ykim*8jRhppn~UK z5ycD*;;@oRLyZbZ3;6(gR$3UDtNCOpqQaS99pPE>Xu?p-AW3l z)-V$?7@gXv;7%K%g0%(z`=_1Uy*6e;+1i0g?)FF>Sp#R*LB%9@59`QEuudRdcZTxZ zU8LCB6=swMW4D{A5WDSW-BFCvAPzlLBKKxu&)riBr(Q6#F&LftsNlIbM+MsgTBQzL$?XQCOx)X{K z8pL5|l}Npd*i-K+h0_3-!5EBA`d=63PP?Om?Exxvg}YaW{O+|UUzTkzFbTXjQfKW0 zXEsR1Lg0N_;eW*f((Vu_4?I+gt;1kOXE3%oLIn@JABxc##9@?51m0ilfk#W>GzMlU z2BXtB72Ih&D%b>2fh*j-6xfH+00#g4Ka2`0g3BX#CcaArrVSO|U$E5VKhY5O=R4}QE9TTg(QpTXGfNh)}|C!?63 zK^#t1iQuP+J^1NTIL(0>j=|`3rV8$K7An};pn_Mpdvz?{z0Tq5vYiViq0d9=yz}AA zE>N)$`a)KMT?Eqd#ZVsl5-GOMg&Cp2*y?2}c&nGA7@`<`1{2k{sAeR=EIE2V03y&1$TNF73>jE;Vay| zIwp6oNBOjDkAX?z$B{bh2{^MSRV*Zaij`nbgY^3hlqY^ximlJVOw(ZO^92=RpFQkF z6w@?_!^_!Y4yepL#m1u#Q07@b~M!JXbf1$z^$D)DH0i%-k;Hkc%S2dTr}g)@6k 
z#X{ouSqb(5NWULKdE$?x*!nTd91X@kKUKkhGM}NCqd^?LP>IA}iaqgHQaF7LGaiG{ z>01@t={r=g@4>1PPqrWUv}`|uN#dW7I_zgSvtLv!B>t6^V84O%`#Y2;{zHnbf5J@A zVC=IN|MUC2&)O&^Xb^`wDv`LZ*b^@)h0{_ngE1JLmQleIFN+FR4^-l2|NaMf{H;#z zUdyqeZ1uq;_wq;`xdNQoiYgXzH(({$N+4aY4CT33kz#8@mgL`XSRWgh1|_q z3DyFn>y}WSyOk7MTf@xIVC=Sy3f}ETC}wC7hmBPtcRR7?ZZCyX4a{f^MyHM{xKk%o zu+Ct0x#Qid3meMT6-;t>L+Z#);LN(KSjfF8E5UkzbiEmr=k6)R)?P3(G#I<>qk?z4 zIf@w?#9>R7$i0==b8jt$(>5@}F&Le;Q^B43qJnJ?DtCpuSN^B>!n@ZFd|tMGV3K-A zqz>#4XSS1yh13}8y{i;k2f)nGVC+@@4=eIscSkWpgE;J|5~=qRd+NQV zaM}lELI$JLzAAX?!Kh$E!2kYfCwH%*Y$)3>Fv&d}sUt_gneC@yA@@jDf{g;{dVeU- zJz9#bV_-&UFm^jm1@Cq|icuQGp`;SIP3*ZRO5qe>W@9iqO;*934nPH)0xtHaUA%iu z<@2%~2qviyLh8V2aAsu{3#kugCDe*5_9R)KOgVE_272N4qRIuYfrLJ)I>X_fXj_1p=od70*Pekghli<)I6?M^TWeiu?_-VJAVkBWuh_p%b~K9IKWhw|VLNU?Q3%=`?-b{|r~+kF_t{0!po zs7eHXOzgoQm%`}@nBf?VPEV=ePEVtPJp(Fug}Ya$;@#_6zAoEyU=sRyq|SQ*&g?}M z3!z_PCD_X#Ex!WgpjIb&8jP*Ju7bDv28t0H#NjQK2>rI$L%$=1)4MS9F&LfR zSHYb=Kn42{ROkwKuNc~Y-Q(TsBX*VTV=xK-2~y{N3TO72iiPl>vl8qJkk-G1^6+0t zvGr@1X&Q_zf2)GG{2hvE8pPoTl?eZ%*u(!Mh11V4qcRwsepSJpenSQO9aQ)Vcdt&# z-RloNE!&@9lK3yA4y(ogpQ+4hgT=&4uoA2eNWXQVJn@oJY+VXwng(N^%c$UeE{kHC z260$UB@)*cd*bD#a9ROoNCu-*0~Oq9B~-AL!KxCEwpIAFYz@IA@v2B2wi=w->M9m~ z#%r(=Y)z1U*MjoIYfG`U5zHJ7#y;0o!TVef#T*Ud&_pE?Hx+y0^`&rX1~VRm(W$u# z?$iPmtR+}g;>p&EPs`RCOcHO1)M0Jl%r;W7khm=?!8Qizw;hxxZZE~w8kh+hjD2=g z!TaolVuA*7=%NydyNW$=Hz}MpffdA((^#YUJ zy^%Vy51iTNDi(5Y!Ah_#LAu@w%5!fm#nx?LCTK8ryPXQ&ZC?}F3vDv`UN*mLhF zg;Rf+(HM+QnF{W-Gb-3FV0F3U-D_7ilx+Z*T<`s z*Jw7BZ48*?9*fkG(NcdtYFylgYTB=uoP9e6mL*-RA+ zsgGbK*esBqkA(8nv!&R26wC|_#$Jz6!FxRx#S9JNaJ)*SK0)lMPn5#xB$x>qj83Ph z;7+Haf}IBb_fI>yd!5dPvdsaL+-D$l&H?HATqw_do)lZphZ&{8 z*zJWXc()g!7^OiRE>VfxbH$$fQYoA+gPD!N=yZh&?sO$8*j3n*VTMpws~NZ z`WmDTycW*vIu#44uV*FL4In+=2<547l49%4Fe5Y=d%aZ!@AWnmBQ%J^9V(IfPO+!H zOA4pEVFqI`I^C;+JKcv0c0Z`p74BZ0^1Igqd|9^nU=sL2q|SN>&g@|o3xOYDCD@}N z?LG$OfghJ*>k}}eGZ@=^N`=^F4|^KL=nUfUtV#raPV9l7m%`}5a{D<@biWwTj;Ukqu|FPK9e!JWQ91^W_I`U-cif26Omuh>zx zufZhvH%OiNEu7hRDi(r&&q}Z#K-&Hh%7gzT#nzu;=4UXr`>P7x?r$jOXAp-!R3iAF zVh{e86i&64kyC9jIxWFJ&7JC?g4G2Tyu#h9bMfxABwv?pDKH7WG*ag+1826ZiiObi zSP8ZqNXzx1JoNHXY+V6nga%`)4OH;ZE1?*nK^#_5iO>zj9(q+NoK}OGkHP4)h6?Vq zCMwukph8!;d&SWH>mKi3YqP6tjld-QI!K+nE}YqVDi*>wW+hk?kk*?*dHD6E*xC$c zng(Ob%~kN0TcDVxK^$7CMEKTX55J)lPHkXDWiUFmRl%J$Mg?mJDtv{zSLfvJ)t*nw zRs$x9J0NvfM>w-iDi#uVW+hk`kbb*DdE#zTY~2KAng(N^o2uY__CPUBgE;h5iNw9c zp18LZPJLj8WH35yp@KVYi3+wASXJWDwl$xYZ5uF2ye(3PZ3k!8SH(i&?O6%714zI9 zpgi%8Qf%!HGe?85&rAjHb7vHDG>F5lDv@}A*c0z2h0{Qo@feIwd#K<}d!mBv1y+@K zvhB^MW!nc#5)VS^uzlgo2CG;|JcN~CLqYl-2IYx|OR;qX%mfX_K1ZtHeU3sgL4!Dq zR*A%8#GZJp6i(w{24gTfO;Ev|N~mB4DshFoSNyF`?p_nwP__UjxhElY1){|gnXfSqr ziVEKCsVHV>5Qo!MBKI7z=RQLUr!!$jV=y|Mt%5t9g9>&oSY7UT_d1UaWjh~Ca$kVd zkr%?5U8G_m_r-022Xup2?;u5kCt|MXsX_qvJC%XTxEq`n2I18;>hyG_MH>f2cfb_Yn$ zcS3pUyQJ89H_QwT#$NAL!F#n*E@V(ws*lK^?OJi_&%K3 z2Pzg)f5=L(k3f3<7|K(BBE{BEVMb^$_WHRB-s=}AMraU+uT&!S*J4lojTBDb!VJb> zboyQeclrSp>_wEHKN2mVWn ztvqYN*4kifa|!-w-ew&Xqce!Zk}45+DX|A$S_-FSV1{BaI@MFb122aPRv%R03U@CB z_F*)+do9nlvaJ9n=~qPR;0ADJE2&sWzcMSqRsrd~A(W?IRf?^v!OYNL?0F3pyyrDh z%+MeXYpX>1Mq*FDjucMo!c4_rbZV@EJ2gQCYYHlTg}c{3(%0Df>?m6^FbTc^QfD@Y zGi#w@A$Utxg0%u^yET*t-%yIJZD8hSFt*!P1#fp_6!SBPLwl77UL*G49i(vT2s0dm z(W$cv?$iYptShMC74BYLig&MWd|kFpz$A2cq|VzE&a8)uh0vR^609dk%e|mHbZ;rP z_JJ9p!Px2+DtN0~q8On;9JW@8(A$VT^tMtsZ3i`)W~)Tvqr{&0XepeIff zNF8zObUG#L9lTLtg)9265Yh{Jg*k@$SEC%!-m zrwd^QV=y{htb#jTf(kYlRN@MEulQS?+`TSkL)k6^liZghb>tOrW>>0M$bA(n!LA1B zdLESLzDA0z*TPKDVC?pK6};OUP)yJu4mYVp?wiG)`xYsjZiN|*!RU0m3hs0VD%hQ1 zb-Cl+>n=8w?QSs1eGgJc-V0}TpNfUt_p=i00g$feLwW88rP%rq%nS|2ZXZ#>yL}YJ 
z3=QJ&xJu-HLhQMpl)~vLn9&%FPS2>|PS2u(JqK2oJKnvXXG7Uu0F&G=B6Z|TaAq&7 zSjhbfE5Tj`>3RW_=YCC!t*^t(&|vKLO%=S`w@}Q`AP(=SMDBORp8Gv1oZg2Sj=|{k zp$hKw5h~cnpmJBZd*y$6FT8tw!sliC6iiZohSY(d!cxg{(xeJ266aFB~t$^_SC;f;q)uagbYTf-&Js@KTyH`1poV|o!q_t zVnf+#)hpz#jnt7#z&vrHVj*{3R)Q@F()ChMo_lF2wk`uRN`tZ6dMbFg%b^&hK^&G> ziQFrQJ@<-II5mKojlt-&vI_3B3MyDbaIrt_;@xXiJ}=v9VDi&m9jOD?fO+Ob#X{<} zSP8Z^NY9O+JoP$KY+V;-ga%`;jaBeoo1hq>K^)dsiPX)+o_Yf*oSMT7#$a@6se(JT zLIrCLDs_dsSC{dL?1Ex+265=75`i}nd*JR;IBg0u6ob)eGZoyaCn{JkP=Q-lnn(Zr?$w)ZW$Obb z={HB};4NU@I#IEZek)djZ4J`dopwb98vrVOg}ax2+&+ry=2>IAv7>By8Y>CDJ5p!v0rS|2iiO~N zu@Y==khb@M^5BD{*t##w{0zo+hp6D~4n;9PgE$OViQpr|9(+G3oJPV7$6$2YUj=s> zjS4mfRPYLSudctvV_8jP(@ zQNdfCieiKYaX3gNLQfNW=&}?}2gA(AV04D5hx;hf7r=@nvF9e7O`(SHKL(V05}l1$Vj{ z6>J_@RpQZh4WE|nS};j`9a4u~5A)24iiN~CvJ&hjkbZB5^2E1DvGrD%IU0<8-mZf8 zc?XI)8pPo)l}LQI*c0C)h10z-<1rYW?pMK`9zX?~4_1|UvOUPBWqSxr5lTvJb3TA=^W1r8c;C()eVuA*7cwQwEzaaL+FG}I`63k!> zMyFR)aHm&M!4`l@T;c8&f2)(b*K2Gj+v{MG`wgUyd=ut{6BP@&-)1G)J0M-Z3+1`r zlVa=pFcUNwyZulF@Ae}U6Euj!Cn}NqQ?cj%ObVyZVMb#xI(?~vJAH);_BB{t?s)h5 zh7D!=7EE$~ht!eZ!#r@JVj=gBtOWZBr0budJohhBZ2c8xh6ZD|zpLQg{()kK266aH zC34qV4))x&rEppTW;6z)Q(YC@X-O2X|AE!zj(4x6*}yA-V3K=Tq>iiy^Tvsah1~U7 z3AQ{)*DFAI?iHoj+5l#T24lA?tKi+Pf?|dSaadI)a<3-#+^b9Bv^NDtCpuSN^B>!n@Zxe9lXOV3K-0qz-Hh^SFtMh15-13AR2+&&{Ab^#)RGZ4NU- zgR$3^DtNE0P|VOE4jZaO>Na9ey^$17ZDA&4FgmqU!JXQpcn1*t@1J&Z_v*j~-U|ei z+?|j*vNOyRCn^?lcV#75H;}G3f%4qlrP#VD%qR`UZZ}iGyX}c$lm>C=trEHWh&}h_ zQaEh^GaG}^X)6`nX=@bk0D_DCX&3Ka+wwWD1%gTHzDOOoJo#SABtjz25~q{CDI=*_VhEQa5@5J zDh8v|kt(>;Y!uG`f=XZE?xi2MkKzmOUPrTo_X5Er__0Wxc^u4RCn^?#pTJ766G7TO z3Ce??EXCGSVCH8qwtJci-tOrr=4TLxGgKn@nPLxqmK08B!wkn@bUIfBcRCNnJAj~q zSGaq1E8e{>;A08FTqUHVC?f16}-<^QB2by4zH<1;@8EV_zfwX-h>&F!RYk13hwj{ie~`9 zsuGX3_xO~D0>LEl2S^?EAGa@$`gMk#n#VZ=4deX`K1cp=T|7^ zXb^{QR3h=WVo&^?6i(m6jK^Sf`cVaU`U%DRe_&OKC)+Q4$_s&DlK3~I4*MPEof8!c ziT`9J*k2(1)~e6PO-I%mS-WqonO0khtxLd6&|vJdt_t4gk|-u<5Qn8zBJna}PrR%Y zPW500V=y|^SHYc@NAdU{sKgcSUh%g&xqGe11|A6nliVvIb>zx0FPx}Y$lZ_?UeN~W zdNnA|y}A@z*MOOz!PxCuDtNbRqnMyU9M(~Z-0O-x_j*z|HHI0D!RXXf1$SB>#k+rC zb-Cl+YXdg$J|LLnZh_R1EnyxwQL&J_H7mTO4bpWRD9^o-6kFTE%+O%$ww(&zZF>|m zG>Ah7mB`&u?72Hh;nW#sGzOznR~6i;8;aNe!0K|xyH|HM@Jb+<Xw@NTz8F++nmY^xHvw-bBrzEU`C4>KHt(W##b z?zAI{HvmE9u5kCt|MXsX_u7fic_|P~Qtyn^fxEywZlYo#^#E3QNgJf+fl!`$cPX~+ z0W(8`vDdv+@Lu;uF++nm3{r{I`-(mFU@4r2z)Z+sbQ-3DI}Jzi4j}m7Kkel1wI3UJ zFAz*}k3#Cm{b8OsQL&JF3@f~(4bt^ED9=4!imel1Mrkm1YbtoR6H$!PAP$pMBKKsm z=RQCRrztSAF&Lc=RKcANLh%kDxY(a|@$OaTb6yJslhlVGb>MWEXHHZsq@KYFFKL7H zd^nV+o+-uFBVa~oF!p++3f}8%6eBcLEksYso58q6yvDi#9IVTC8OLE1eN$^)Mz#n!W7MrSa#d9DiH z=6NVaXAp-AR3h+&Vh?q{nyZ34U5euQKTv@y+`SaohtcG}b9g!1cq|Z1 z(qD-by94-9%hCHW6w9L;62}jVul8BxJ4z>-zxU> zw@Kl2JIquJMyESfaHqRaJOcY<&b~eg&C4xUC_TW!T;q(m5a12JL=TvZ~=TW=^ z2rlN|-R;{IFJCY6Ij;qRN$Qu8I`9>kr%qHXq+Y-ZPicen{5q7UenX0_Z^F#bVC?m6 z6};DXP|VOE4)3W%>i5N-`U5GPK7<*N!RYj{3hwj?igy6P#r(1_O6|Y!@%Hr@+jun) zOwxaW)WKiEJb9vGA^q2^@SZkE@83ds`tPLJ`aR4%4aS~-RKa`x3B^1O;_!<~r2kdy z>3@^L>35i68H`SUs^Ct4p?DN<`Gx7%`}e=iLi$Aybe60(-|}uCm_)9F)M<5LUOQ2- z5P2z9cugCm;bova^0HEFtp_tugR#;2DtM#IqZp_`99C3`$PL6Ec_k^FR)(3A!RXXb z1$SB%#Y=#oe&GKdd9%aICXv@f>a?|Bo;p#n5V;X6yrB)!@VZbQc|9q%Hins{ z!Psb16}-{)QOwdH4jZUM-YBu@Je26&}$BX}BYlNA4uW*3K|vG#DH0s)9G#4aFD@;?P|sB5x}8$UUTR z+6-no2BT9i72K&eiWdMuMXvCt_@ZCzH=hp*;9tDYg!Q8KuG4?l2X+-Qg%k zX%L6~R3i9Du?HU|h132p6EYZ`#;D*h<%FKW1lL8(`hiHG8moasNha#pm-1vtm5aL+``Uc2QLSL zN$_)!I`dqZ$4*o%1V5h@-qZ$Z`$8xWevuSgFNPVa!PxFx6};U`Q4G}}4wtJ$@GHa~ z{7NaDu7a78!RRzk1$Vjz#fyO8;(y-7YuI&s&%=RW68i?EPP`H3#S;|^v2SLDN3}tk zz7@)2-zLS@+hL|@FgAOq3f}BpD5hx;hkH~a_Pt_{eV-Ig_ruJ{V04oYK8G#LAQ 
zP6hAtc@$$bh{KC2k@zLCCw^H9r&nO+V=y``P{EyEL-7Y;`g?9S>au6kmlcq^7tP}vGqfkc^ZsOf2=}mx`%y&Vx9(Z_)I0@ ze=hd;Ur6EfCCq>fMyIb;@c7@LcoPs*{N~lhud(mg!s~%xlKcmx4*e14=@S)`#8WmX%L6iRU-8oVo$xM6i#cwjLKkiYNUcYt%KrKKv1bG{B4ItssHyT zwjSGfLl8{TH$m#)rZ5kns8~qfj1^wj2I;*yl&5bY#nzTElQkH7Zmoj%ydjFo8pL5E zl}O)K?CCd_!l@n1)C@+a8Wr5B1BxdCL8Y&BEoq%vPCXshQ>a>0^51*)5h}@qQUepF@ID_)YJ4>;37nqqEjExRZ z!5iHT#Y_$2u)9h`-b3t>_mskEFPKRgj86Ng;7)^3ya)(Z7kRP`=35>O1e3@^kveS{ z%+n_-79x*eg(tN^8XgJdkw;0fb$^&?8jOvOQNbG>i(;AvaTu=>ktc{fa!Cp&gPD=R z=oD0Nr%5Q@0|XVh(oHP>VkbAT1K7dCfnXARDpF@22=nTRiiP0QSm9-DkhTwo^5BO^ zv2{AkG!4dfXQ<%q9)@C?2633F62XrUd+=FOI2{QyC47l4XcTV&f>j2OH?d>c z!J~m-68w0i&O8C;)e{v9!B1j^2ev`lJ_X8ypDM-H(_rRlFt$5K1#kBZ6!SEQ!&xd3 z{A{rYKSv6ub77`rFgl&Df;(M+;z>ZT%HZ)Pb`d*xI}l8QUxL(`b75XSQLzyGGFEtD z8>H^8pV0YNZ{eFst}-U;*OiHe2TceBC++aOKf3+1uzlVa=rF!MAR zo1L$MH~S!pc^bswVU>veh}dI4DuvTyFvBt!ot{v^ot{MTJRn%b&pWw^JGggDK-32OmyRV@bt3e#zP>JAgiaq#S zQaHU0GbV%4>0K4v={*$h1A>eHc^7YDAMicT2!cuMkB~a?W0?0(R4l~)log)X25I_p zD3ASx6kET98K=S6?AI!Iv)`Z?r$HRPQ;FE$i#_%aQaJqxGb4l1>1P$(=@%5w1A>ZO z=_b}bzlr_Er#v7CCW-$*>aagy-ak>Xkhs>0I;A#9ze_-Q;yO}ntqU_ogR#%0RPa8R zMlnW%I4r9YiR+0y@p4i))rXmn!RWMt3huNbiiZI~C9ZT6Q(~V>lmAF;CHC=-Aeh8o z1*wx8!aRebVj=!&tnkV!$;uY+Qq260$VCE_<0d;BI+ zI5mYCkiqEGOa*t^0L2S|pyF4$iTxvfjkRD4ZwZ1)@>WP4+8X8|6cr1}+pxlO+aP_n zh4SPZOR=>b%q$JYerr_lemkI;r9m7zsYLS5Vo%;h3a73xb21p6Hc`Qyx}$g_5LEI? zH?f-HO{@o>^O7K#r0$8-fxTd!K~b@gx(_QnvklVo7EqpgODVQ)1v5^AvDa->@Lsn? zF;0Ux^i_$}+lxK*4pKPvgBg{<=+s{YciIWX8-buwSGtMC)czYE|1+_j*~W{4V3K}U zqz)bc^Bjtbh4cei;jL|u-uHm=^m|INbuXC78jL;fqk{K52*qR#;xJex(hm`P`k_)d z4TG7Q!RRzX1$WvH#e;#M(pS2P)g(8uQGCmrf?yJPG*YLHfq4l<#X{t9tnkt{NW&AL zJaS2jtp+n$gR#+|f;Tz|#bgcQaDYlgo+9?hQ>Ab^5N2Kmqti4M+^LM>l|Zn%$fNBL zzU3i7Fo}F9Qm4&;c?LzrLgd3);eBn8hL3>q$g`x_dL+zD4aP=~Qo$QN8pTWv;&7}= zL_SXJk&l(Wb8H`RRso+j0qj(<>tS<6oJC$#FLJ&+MpN`aNb6_4qQLzyDOjdYc z8>Hd0p*-?AQfxgJW|{_LqvxyOjb4Cang(&WNF^d)EcVEkNZ~XWW=001(`72S)8!~$ z2Lu(l(oHP>VkbATE7`#_f?yK-YNXDb2lEDsiiP0UvcfytAZ=d{<-u=|V(X1C(=-^{ zy;%is_ZAe>G>F4(DiQp4u?N3H3a2|^rerWW-K~N<-Gky~K(NZ-@g{a3J9t76OoBgv z)S2^P-at{Y5d0xlcy1e{?MI+I_@h#6eGF!v24lNVsNn5BiDI4xad=uKf8T3`VCHRB)#kQ9KX`RvA3r#9n3xFA0K4@K=#Ka{`|B%AzPncmDj83&0q)xR_JQfI6@$*h@Vs+TTtAbz>d`YCvTngqP z6cr1>mtlqXwn5si2j#(+lVWRqn6Vm+?XIAL2VW7zSPkN^l1c<$S?s}Ak;17V%$N*D zr`1$&r`1us76>l>=Uu#st;zR1DhMXA*GB5ZMli3Ts91=-E-O5^4bpUDD39Glimgpy z#%VA%+e`&-b^{dSG>AhBm5AL^?6F%(;nW&tMh2r(8x`DXBNUGXf{I<~CRUT*#5U$r zo)ZL<#O;wftOn*a6cr1JJF>!4+aUdRhVsN+q}bXOW{d`7pPQ)QeRfANMuRx?P>IBw zi9K;oDV%!2%*SAK>Z5`?ZI0rZKv0P*-NcmG=hEaRwk7*`RS-<#Z;jN++rT`EqGA%i zw{6D?Z*GG$zde-4-$9D4{b1&4FgD#^h1hft+X=-y4dSq~O2pqq?D2P%!f61^fDA^b zfhu_X-BG+32r7Q1o7g|%*VvwH;blQENxnByhwcOOEQ*Rr@}9OYD?Gjp()SQ3Pd-$N zt;1kuX)yLXLIv-4KNPbxh{GtANWQ<=laH3dX$;Jq3`VDMD!9{l6fXvXN?z$E)}eS4 zEAct+3W7=MiAWt7U>-$Lv5NF96>%;P93 z7SbQX3NLSi^nM(ar$1hbttY@t)?n=UBo(~plTl38AP%RhMEcXjp8j+xoaVqx&0uso zQw4WA3&rz+pwd^miFHVBV(0KJFARc7w+i0qJt$^s5QqCzBJ%xW zkNkiXPV-?VWiUEDq=Gv=jN-LGu)4^T?NPqvK|wHy{5VplJpuD9ii(BEPqD&t+aL`; z1LcvQm1662Fw-;`8+}0qZ}deJ(=>>~%PJB16|qNtRSKsCFf%e3onBYLo!&t4Rv@Uz zm2P737dyF$y~Pe56$F#u?;v&NyD%@Js8|U8J}bPs4bt|9P#*jvDYkwLGfjiB-A`5U zc0WTgO@lamp%THr6npTmq;UEgW=aO5)3++P(|0J|2?VPQ9&chlu!9E$!6f)kNS*mJ z%!?>07J~oE3XgAtwEa7j2meEgt$)JI(_n13)=Fu+wNcE|AP#j@B6wY~2VYVOr=?(~ zWH34{qk;!t7R7UcV3on+O>8-K@U9@31YaJhGgp9l6-C8D@CK~#`Zh@0D?@qkRixP3 z5N4hRW4o)V;O(xCVx9(ZSW_i}uO;^2YfItO2xd+Oqtm)7xYK$lUJV2lywXi9|1*5y zO{@vu^SB_G#9kk%6Pv-jhoWL3c5_yEd>f?cmQWtMl@wcB!_3oQY_^RG-t0yw=4lXz zja4FcJF&-ZFNISL%&-hbr;aMPQzsNp2ZB}nypx+)7k2Q*AeaR2hSZsxz&wkhVj=jZ ztnm6aNZXr1dGMZ6Z0!XzR)ewKJ}P*-o1+-3K^(SJiQrp_J^0pAIBf$nCWFywI~Cli 
zFN(JV!Nvc)i#M?y_?{;Q!6f#MNS)Xp=4})e3$Zg+czzqC>0O{a_O4QF9RM>yY2XhVm(o3xY}F z;Yb}e0_JTL6$^<+vckjLApPzS<%vg2v2_g07!Afg$Eo0bjz=*@gE*8_BC&}*@kA+{ z0?d32MyJUtxYGeB9t{MQxYA8biG40jZeml}#~XuS68|8iPM!wyM2d<@{N8pjE4;uB z()@HNkAJ8XTW7$`(_n1+a1~H{kpgj5MQf!?AGfRW9 z-!oP4e$PTNOM^I^qY}x_6?^jYq;NVPW=;m9(}gOy(?uxW4+NFG(oL*m@g{Z&pYzHf zn54cGsRJ*Ac_Kx{Lh376;R$Y#p09%P)K^Qfbso$(4aQ!tRl$3`4#hYP;&6jXq`pz? zsc(|P>1LQw8H`T1s^Ctyp?E(KRO(7Mv6$L_24z}^yAef}T3#o(ehIuMQ#X|af zS>YXSklycy^7Ic#v2{MoWDUliA5y`4ei+4M4dU>qN~C{G?CBqu!s!W^sTqt;PpRNe zPosED5LEg~H?fY%P3&2|<-I{LiTpfLr@a94N{Whw$S<+NE8HLrzXIivUzK9(0+`7f zjE%mof;ajGipd(p;VqSj{I=L5zaxdyyD;-I7@gi%!JR%p@q!>&UF6aB5#RF2Aecn{ z1gXf$%kcPj6^2lFFvGr@1nHr3Zeyf5v`W=dy8pPoTm5BVK*dzZW zh11V4lQI~cepSJpenatgAXr`G$@T}|^1L9JME(n@(`v1((`tjo$V;%oeIDVS**jEyd%f;YM>ifJ0eVL6qETwm;wmzTn61(+Ebj7|+yaHo|}yc`HBa;2MC z{KZagVym!&CkDYJ_^L>qxf;y-C@L0y)@!iB8{8mmuLb47*Op>yBbaF#jP0(gg15UK zifJ0ep@~WaZz}fS>r3I(3}#9Oqf>Jg+^GeMR|CN+gU6d#D|Ya_AeaQ-5UDfUz`T#5 zVj*~2R(OgVr0sT49=yF2TWetEX)w0iQ3Y?e6N-5n#G#8y1n(;L;N7Hf+5~1w2BXua zD!5Y*6psgjRR)hYv7YQGTQ4vP-W#bi`@p=BqGBQV7Oe0VH%QxCL3!}4rP#U+%sdUo zcDGZ(+wF^Do(6H)K_!Cs6MOI-rEuyGGbe-5DO16nc1H1rAgJJ#ZesbL;R|nKyYf9x z4T4GR-H;C=?F~f>r#ylbhIRcJSgLm;@h-)S2U89!XKL5PSkFyu}UD zwn2IDiBfD0Fk>|s+nua}w|fAJu^Pl-s!9YuQ0&1ElEP^k%$N*Dr-M~+r$bP@BnU44 z=Uu#s9m@ASI0z=O4@2t2!(m=ZQLzyF2v&HE8>HzYp*;3%DYhO3GfsoC*<)1jW{*WN zPJ=icuM)9O5PR$srEoe4W=001(qkUIHVmSHKY!F z9p=Fl6$`1~WQ7O0L3(~0%2U51#nyLW#%VD2`o0R@>jx;tX%L5xR3i1qVo&{v6i%PQ zjLKki`dkHf`U1skf}m1Yx{1Zq{u>`}VqdY1w+F!_{WnM*{4LDGDJmAyf6of9a)b2# zBb2BANs6sM!%Ws-?Dw{ntd1<6hTL$LM6cr1R>#@R{+#n6thw{kFOR;qYn8_N9jW$rh zBd>&FvIcQjMI|CP6no@VrEppeW?lxP(;6zc)0!yW69lV^JlfXgTb>*QlgR5Jb=tZx z52mPCh}@VJUg8F6xG9uJUSEo>&0uC~FgDs;1#h$kikTY3p_NKRZY}o68%p8S24+$Q zqf=WI+-YMJFA0LxMV@T!`Ig59!6b4Aq)zJy^JI#Og~***;VEvAhPy&}jVQhIuhX#X|5Ytne~7NZSWOdGLdz*g6eno(5yP2dm)i9)efZ|<2aPdFy;!W%( zzUTQtFo}H&QYYRD^KOcYh1j>V!qeO!P2UOSvG0;%>)kNpG#Hz`R|RkOJ{03Lh{FRa z5qrMaV?QW`(?c*bG8mm6QNf)aMe(d4sMwWmVx97v*yDW4!-HUw_(`M=dkW^=6cr1J zpJ9bZxk37U4$2chFU8guV8&=L_W6PUrOxBmzKh58JIa4j864b@Z`&(cv}!u@=7S-AU!vP^3N%w!G5Mz>JG8{HDcWDVl5 zwMsD8BJaWq z?{b4QJOIig?Q7#rO~1#fgu6f-r5!`>TXqtg@>+-WL`R|P>uu5=TNzu3u5>>zgV{2-VFFC%s4 z!7y*9s8|R-ofY2a25EZ+lm|adimivkOw(X&_Xrid-B~E6X%L6mDiQoBu?Ig|3a4XW zrerWW9jAgj9gpHgL9oi;@g{a6J9v5!OoE?`)S0Kiyq%(AA^2&m@Ju&I+jF2i_!&}c zJric024lNttKjXPgJPZraX3#Uf}bz;;1@{YbRo=?3`VDmRdAvQ&cPjzls&!=>}pJM zo5ddd7Ac%=g_)DV=ybaZ?sNx=_XR-(uXGd3{|sMv6T6G=d4>>7V&8+*iTA?1oT6eO z_Wi8zOgBi=^PxQUgHmjL2xguJW3!K_;LSdYVx9(Zcw8l7KOy$mPfFqR6wI&;MyF>~ zaHnTcJTeGY@$*h@V$ZXK*9gHR_=`xL`4Y?nDk>I&zrqUdbc3|L0Lp{ECdJm*Va94O zw)>_E-tJo{#%d6UcT^(yyJ8Rio)k{+!;Hybbox*Qclrp$D}&(Tf8NEL*e86?V}xK5 z`!l3Y{2b;L6%`Ayzhs4ny21Zf*LjZT1VwI1M>`m6F(BhimL_qBwnv&B(AgeV;;g`hl`!5GMA|{U)|x^PAXD z{^^0eVJ7G#9+u3*hQqd~q}WM3f)gvcp?*g~P2!QGTpa})qhaZDv=nNeqf^Fc$YIQs zBpxeV6OSFmX&h`mhQ(>z6spsBl=XrziGS`lu_Uf@Y3nz!3EZ=d5Vk(>iO4*8Vr-d8 zikcTvWBJSEmNpHZ$+7`A%|^Jl78E8O}||fr|q$+ z85XAadZDQgE|(*N9VVhgo?6WiIpwV4pMBJWD(X}e+jR8s6j?m4lq8)|q@)I{DZ z%GEw>vWBJ6eNw26?n{}hA&31_68V5|jeKAfr-QJ085XBQQm9UcQg#i({zV?#JKVpu zju5sYA4%qEM`6oUQtU)N#)&Q6P{YTeCi3x7uAYF+)UY&qQVO-v-%w_1$l;WfL_RfK zBcB$<>2z#ThQ;a36sps2DO(0%|04JG&h~FDB808T=aPBadDuFY6g!d6cVbO9)bItU ziF{#{s~2I@G%Sr?l0t3t50q&da=0udkuML|$X7&hx)Pg_VR5=Th3fQ2%8o&p$Upa+ zSot4z>o>7KxnUV0Yz4oL%rmdYHmRi834Ws!+q$8)Z$?eHZX|(*u#%Wq=+6cd&UHmUiDw zp|<-TWuAr{{+W{CAB1c0e?@Wn5Sx=>ar!8Q>hvGVwn3QSKlhth^A7*-Z(^VL_m&dE zR_xEnJn?gEi%N=}*k3xav>R&rYt+R4Cd$=sv3VMnX1`CNHv0o*o`xKLN=fWtCd4)N zuu+_b!-i#8oJL5YI*mwKISBjldAEKO8`%xJ31KVvsAQfw8n#O%#ZK@soY>k8wLKPU zf{z{L>Nwa~4NJS@rcm1*k1|$64ils#_=Mpae4;2$6JujCEKZZAP@N{D>>Pwc{=B=t 
ziA~|(TTKXCv8N*Q#Hq1;Dk*kiPwT|mZm8+$Q4@QHC|761#%WlZojHZt>@1XV8giH| zC9!7@*VuDJahemGkzsL~JB8{r4`t;bOzfZgO>Cj&H?jHrQ%eb9EAawk9=0I1P9?=o z;)R`9*bVi&C~6Wf7Uk;V*cc5*h8)&UN&H`jYy1tOIBkdx$gntVoI-WlgtB!ICjQU;CiefupSQP} zD+l&AhppsWka_5q*jAMkTgjL1ZSBMgZ>aBWQImYTC|9?~W@%XZ-7$sI@1Wjb$}9~z z?3|M1yM$}lQk?opP54aq<%}8tRaW9Q6}%+qei_Nk=UiF~IM zJG-HV??z4Jd!k&u7n`YJY4rXSYNHQOW@^ac&nb!gP`E~ZIEvFF*rW`L(_<-Ar^hKf z2Vwsr_w}CiZ>=SSt;kQ4dD=7BLX{Ldk)Ly7X*bmH3#f_wVw9^dVbe4$jlP^hZS=2{ zX&Q3)TS_9o8m^K59>wW3Y(|E~>5UYs)0>oqgD{bQ?l-aWKkU|TVsE=)Ga+mR{|A|8 zzKg9=NwE|BeJ6HzLv4S6n&AJ6a`i)Onuev_k5Z`Z{)aM6Lk^#$B>1P{8vL^;PM>2_ zGAvGCrcj-}qU;)k{S01y6Z^&uO9^2s_;+NU`8~EtCB;tgADvj?4YfVYME-HJ!aK^< z;jnocmUc%-p|(3BWuAr{MovlaQNlI&s8O6o!=_|doW@9@I*m!$I0*Y0y!<9Mwi{Lx z!dCEKka^~~*fNzAJHf|yVuv@>_JpVjK2emb6JzrEb()T{cMvA{&;2IWyu<(do7fEgz4e5!6?-N!Pn;RsqLN}K_N-2< z@P?Y69W}A%h;nsKY@UXt*|}4w&CWxary+;=QWAUqaE-k{6sHBTVHp;ug;S_bi%@nC z!hU?-t>45JbHkEC*a|*~%rlq3cB!P;3BHsQJG`N`mqAVNWushO4jZdsX?KMbYP-Lr zjMb3CN+}7xa<~RxC5qFk*q989)9NWyr!^?M2jP%E@9uA6zw+;`D1@!pYm<56I@msy z6g#okb7F}%)by`W6MKUwS2x7QX;_-wIEC8mCX{g+a@Z^-u{RIb*jq$#+7g?QVR70z zh3d2oW%D3R?4SEhY~kiNvF-d*y9r?{@eX7jwj;JqCB;_arF%O$vA!GXcNf$o-ZjeA z-LNqlmOgtals=d2?Lir%A&0$ElDIEi6Ym|xX&-DphQ(>W6pGWJ-u{%8gD{DI?l-X{ zu5)SYH?afVv!M{S;vY=r$%kM|RZ{H4Kg@|e-ca*LpeFv2QLY|^&C{?neM}0q>0>GL zG~{r6O5&douJKQd;&c)=Aj9HxatieWKZUY-5GMZ5{U-MR#hOYJu48vV8?PPb$8GAvGarcj;kqU;}pi9Y>MqVLw*b;r$i-f6eqJuV#ByBD@{ z-$&+=_hXAyQtae@(1~Ts~likjSi zk8<@jY`TV}+c#1u-45!#Ntv!8hqqIb`<-yj{f{V4?_x7EEKcvIP@VosSw9GqJ9GMw z{-@rowf>hI_7uWa@PCte=116Ol@vR{KXzi1H`Mm0s0sdAl&ha((={yZewjl3n7*P+ z*O0?EDGB~SQKMWP4V$iEX?KhiYP(}nrfbMy?34r_CtQR7B8tI{*mji^JGobMV!1ce^~$Kp zy-Jj;t6~E-EZwf2LhW`9%76_y{3<26*9zC%Ye#Wf2OFGWaau2h>a;#(5h3h%?!47E zaKXYt*vh>TnMZDn?Nv#!lY3Jqc6&ozZ;qPWTSU3KB{pHh((TqMlx_$0wxLYeki&K< z$-RBJ=H4NS(~j8q42#oFDO9JODccC)(0|68^=iBKc6GzrLf8tvJDF$pu?LO25-#g0HeXv0rmUj0`p?*yJQwD9w;lPvxKPX&-9~{N$5NvRU#p$pVs?*_= zZG>+0*b06$nP(n@?N~{%6Z|+QwtGWupMaX+Cq}t?5;kJP((cJA)OJsy zjM$LFX(c z_ytaE_lDZO2sObkj&k)9Y{G`6-Ahxb?OsNiupx&lQWE^ia1DM{6sN1PsTmfhYf`9A zf1+$7gb6<6xB7oKF#YeXew{lO7s6Kb8^}EMMr_wgik;{;JF(##YW!BzM87S{)!VTN z87iGeR9PUX;^n1fK`h8KH?#BjaSezbAp*sDUvXKxb`jBV!d7HQT!!Fod z2wS-yCG*I~uvIH5c5*-A#GY@c>!(nY`{^iGpTQ<4y9*W;(VXiT&PC+ux!l_;*pReveJqu(bPQ3iV_9i85hN z@_!D)q68lS&(oEF=*l42+L^iFK|hT5JHHNj_!a&=~G!iJ^YSyQO( z&PJKAA%{6q5`4~Z4L(;Cr@67I85XB`Q>ae!Q8p671RwHS{lD8B{`Xd2z#YpAVJrGV zWS+V(wss}OPV_~c*zyfEzBp>44~lYi32efKrQxMgC=CzlElruQA%|sC5`DRFjlO&o zrxmcl85XA%Q>acWQPvW|L?7~7eZJUu5I zr*Cd$YFz&q6spr^)SkV~VZU?d zt-gf|78k-+?ybl?a%*hcN{XG_+d8rD8|r#{)a2eF%GDjQ2^*GfcS@mlyEA3Nh8%WH zN$%akHTUjOoO;;Y42#pADO9JuC|e2P(0|68|Lptjy}jMAz7V#8?@Q*H`(X=LQtSjj zz=?g|P}>KgCiuZot{#F-*s!#FSPHe>!zmLs%%qp4N;tK#HMCgoNi8` zI^9CqPzV!z$Zz%k{+FTu_g25n9m@=1EBYN|o_Z&?c_qb8^t+wd`3*IGFKVLS7v<{x z*n|yB!w;rV8Xna9GiAbt93DWG_Se%|np*lTD*-;1+eaLV1`J1=; z(=OO$2wSZI6&4NJR|r%>CSf-+%44pXHh_|)MVe3~du(_(WoEKbv> zP@QI=Y$=2T3EtP6$qnlaVJrA7WS%)IwtpqXPVm{C*!c~$Jtt~{&lTnB+}MN-OS|)? 
zP}`l4GGRjw3#26Yg5er`p(suZV^cFMPK%~cofe}kD})I?zG}Ee zUoDE$>e%26i_@AZRHt8278SxoAM#s$f#$8gwhI;-!dC8e$vkpBYz0e-o!q~6;!o31 z*BhcH_eN2!Zj4RXuyng=3iX59j51+E4qK!o_m<(Bd#fl;TVsPWEKb{|P@T4;tSN;3 z&YidV4lY<{2wS-alX>J$*dmq`JGpmpV(&N9^=_!iy?d0aJ#4~;rQ1DIsNL>GnXn;; zy;G8VpK#5+ZxpBfu(=r)rvp-`P6tvJ6~dwaj5q%|y4`yRyK!Le5ZDTSD4Ay-hOJ;p zu@n3VC$@h>Z6Af2;73QfdJHyU!_w|?Db#k4r%c$8!-**gep0vw|4kI9ld-uO7N=8F zs7|L*b`-*a1RvZx!wt&}VJrA=$vpEcYy(S*o#5v%}x`#jVH|6P=;=VKE#EbU&9 zLT&d#%7hI$T%3~NmxOEZKSXi56q}o2ak@N(>U0HVQz0Bk@V?$vZdhpuTfzTG=9$-E zOIT9u1i#ja9pF&g*P|x*4N28(328M1RPME#Oe&kDw;{ zqfxFthE3S8H2g#grQt!nCn*y)kq}a** zsS{hkp{_qiP3|wET>TQ8uwm);>lEsz^bKXgh8(_2N$&5%HTMruoPNaSW>}ntnXK(J zEM;pU9Qx0A^Pjfay*IoY)*HfB@Da&8b0lmdONyP~qd2hz9BO+s)C3~i6~nO;Xr~9?oHx`^@gw&d@?f6 zoE+Q8l42+LluqmchuWSRHNmHea&=m4!iJ^Y=~Jlf&On*4A%~e#5`5-x4L(a0r&+PN z85XD6Q>adJQ1%wWfdudC&Eg7OMwzf7he0U`zC^eNUowi*QrOfCi_|K94$ zyJN*6Y(@VinWwIZ?PN)@6MbbTR)Ir}uZo)Jt3|oGIyPa$((sxol!mPWr%c$8!`dl{ zzD~GCUpI==df4C$i_@=Ds7@PDb{E1#AM#s$q2{f=kqdSl!dC80$UJgWY$;2Mo!px{ z@uz91>n%}}d#fl{x5g%HSi0Rdh5A8lN13o8haFOqd&h9iJvfTfPT1fKi_Ko!os+tOAF+-Ul_g_l_)TP< zc{8?_CB;tgTby@xVkLk{<)B>4T|8vKDMP7h*J zGb~OIrBIz7rtB?*2|nbv`hUMk`@gsPqwZL72wTw~C-c-Nu)QoPcA`J!#Aa})@n=vI z{n;p2pTj0>SQ>sIh0^e#-iwq88*=zdN}|6UuF?M*#pxAnaE8U{)fB4J-zobGVWJQD zt-f&cR)5`v1AA}4R_-^+Jn}7UGfRq{-0wK?r)j9`cTto3y(m}T$0lr8y8R%9`a%7R zGGRjw|4vEnkHR(gf1)^jj1A7PIDMKzb^465#1Qs7ci!q>xM0~KY~}un%pZuzei2(AEI3S5u31K>2{dOn{Eg7hNVo{ki+mP$vr~2<{mMM(@5Cd42#n! zDb&wxGj)iSzNwE`r94GdILv4?Xn&9I_xjH^J zVZ+kygelaIX(Gym4LMAblHikuYw*dUI8Bbt&9FF4nL>4%irTX`H5^Fr!M$nRu<{VL zf=@^0nbTwYSyJo-pV5iU;85E$qbB$)QLfI4P1vxsJ9`SX-8m=|Hsml@N`lWFuEFPt z;xsQdH^bsIe+t!U0m>#rIFR6dy@lMc@({LyFGA*-i(*?^QtSj@+=;mhQ(>+6sprIlud>(!H4`-U*!M( zs}TQttFPvcrH8N;eGM{CT@%~Wl42+NT25>ShZJ-r#^D-$lPFG`VuLd*PMfDtowlHCGlYpgL*t=!v?dE~a( znwAthxwm)XPt#D>JEA7{;3!vj!X|83y4@v(`a$hVnXn;;-BXgg7p}SYh~l&-HaNrL z)R#ha+MBY>5cWHF-s=0hVCf-j<=&snBM-o~w4~U{eUKB&!J)1XK~3&Mqg*`_@PX7 zA{^@cWz=N3C4@@01Z6a(F!@+207)>~BVKdJCJJVR3pVh3fPV z%1%Q#^bcM3`aPug+_3x*wu1kY%rig0wzZ_#3I3rIf1HNe{s=X}{}biv$JmSwOS_+@ zP(P~AC^I(X@I^|3e;Ka9zl!4YH8wiK;`D6_)#*FRIz!n1;CZqC;D)`2uoe6#GS3`l ziac{z*bP3M6YIjEwnsos@DZb29SIwU&bu zB%dtG)yc6b8Su z3x{j+MWQ$@iVe@OI4zz+bs9ujaR>*Lysx*UE0!U`R`R9EJaidsXG@BmI{WZ3{CB;tm4V_pU4t2gUYO-$<PY z*b0{vJHdBz;!o62+db3--y_P^J+Vm}mUjD6s2|ndlt~+M*f%A?_Y2qH`$us)02`oT zaXKi4>U1!*XYUZ$|KNGIAL@pEh_Ds>a5B$40$bsdVkh`fPAm?G+CBy~!H8Rg_JKa6rii_x|XLortiN{7+;adM&oZCB;ti>z&vg4)uK_YLed+ zA1;n*6RPPIqG?G%QZ{rcj;kqbxgw14`c4d%zWI z5n(I&pUFJ*A#9IJik;+-II%k%>iaR&B!4{0)hDoN82L}x3Fm&mX6;^p>#Z`_Ycan4LQ7*lI-t?YxaLearyw8pTrB>0cv8vLgyPQ&=`Sec<=aT?D5**FbPS$GKhA3X2&5#6vE5w?PlOy-%R zU^`t>>;xapi522d+hd?6_?S_yj)hIzu(Uf)3N`pIDAP9NFkVW6j~}kVCy3%SAvQw8 z;xusz)oBvS&O_LbpY!&2O(%23c0|}pJ_VVFPKm8^NwJfBY9}^{Lw!$+n&i_(xjH>I zZNt*e)9ly?4U5y9DO9JqDEkiKfRYdH&EtyAh_ID> zJ~9uTA6w^=Vkh~6PAn9M`d%0{$rp)obx~~EhNa)dQ>gt8qDx`RHx-BYY*XolK1s~>52u3u$6ozG7nuDTj`QwC;6&QY!rw3UL7^b*NAd; zO>EkRrQfwusQs=@nYJN^byJdjy>LyweiWzt=jsg5usCg)LUr1Rvi%Sy`B2{N-LIQ& z;*w2?u$6r?GLPLHTj-KvC;OI8EER`3-x@X9w~2CfTWs2frQ_{WC>;;#?Le8fA&0>! 
z$-Yy#X5TrA(=ONy4U5xmDO9K3DQgg6vJd6m-uk}j9&Xr|2wTDTBJ<2XY^_U*o#6X8 z@#ku&?fp;_eE%p{55T5vSlT@(h5At)OqsSJheJ~m{IGBhes~n8Bd{477N?_9s7^;y z)*!O|*m9Q?JHbzKVy!sT_Q|LTeoB<9r()AKEbX42LT&d9 z%CrqR{5B=Q&kEPzXGd{52OFVbaXK%B>hwFx8bsKSpY!%NPJi#pfxQc0EBS?F9(obB z<0ZvT@=Ki9D-QL2DQc2m7Uk;Y*t88xzgMPE`@M=XZ9@)!OiA)5e_K%;NDHH*q8`g$!{U^&|9%3FDZ7C-|oa}aj5S*QIq_xC|B>srfpdI zy*Gv0?|qbM8*+FcCCMKQ*W`bW;`9(ULc`+pND9^IQOYVrIH2Txy~ka#G!eFvKS}1H zPhneLQtTvu#)%cCjVO$r&qB7 z8WyM5Qm9U^Q+xK_fJr`-cYF6sr*FDsZz61Ef1Au>-@z8Wq}a*+t`qykq0Zk&P4<6A zx%vS%ZNt*>hbfef2lf6PM@bxoxY$fM1;vcly`gU zTc=;SVRIsE1^_A32KCDA)`Qi_>T+)DL}h%3eg+|KNGIkLiX5im(-YY%g7uLYcN9hsjbBeDZJ&K1CF#DX|e67N@CG zs7}*RHX_1){G7MHcRHObb|=DC@)^iHbVh93ONyQ3Gdrk)FhuR%GKGiX&aV) z=S-pYI~Qfzh8*TeN%DEaHTir|oaV?B{uiCyDR-|L|!`T9|=W{-tw z85oOwj95zWw@=e1v`DRg^Hpd2NSe&*@p*n3v*@y^}d?@es?pIH@amn^X z*vh^gna6IAt$azblYK`gmX1T6?}VD{J4d;?3pQ=T((!I7l#U1WcBf3+ki#A+$-ZZ} zX5TA{Qy(@%!{W403e{;}%4$TI>_d6Cx4wJ2zZ-Ta!dCDD$vpEQZ0k#ko#2N!@fT~T z?ZZ$L{O~AOkHDsFSlT@*h5At)O_{bKhhtL`{J3xpetZcswWsO`y7 z6MTv&SEt0LZCKi!I)&QqG?Zx@a+oe9!KV+`;4?&Vnh_hJVR4!{h3Yg5Wnm)h$Ip5D zJE*g{VznY{C7*-LL+8YH!KBzpKDQJ5$DzLGMNRVgqFkLHo3>%;cfk~DzY9^OZOCDf zlq6p?T$3*r#c6SDgoedwi4>~Sl9a`Xa6rii_m+0WibdE;zATxCE{83HNwJfB1t)fp zLw&D^n&c}*xwguJMVYoC zhiy`leA{qMzFicj?XdwG7N;Fks7`|^I}>4&&pgZ!|6hf5zlOTAOZF?mR`y-VJa#v1 zGfaw|>^&zIkwcyDiJI(tMY-CCP1~?^yiW?H<3YWBDbqIOuzyOj9}uqD4~*h;5H>@@ z;&ey~)#*^m=0uq6GYm77?DhW*5?EoEsU?0@jQ+s}5x zc173adVq^wSa{rEX=e-rgju2`}NTgk5@^U&+DjW8*8lHcgWR&uEC zn^BYemMB+m#ingo`n^4c+V35dX&Z95D<#SA4%g)OL~*(o8=+xwx<7^L^Z;dpA{%;_n8!Gzt2*pZOGyI zlq7#4T$8^T#pxw%goefGI{O@EQ`Wm(sCdE$jH=Ni@ z4)y&OYLdSl(6BgtltOj-4`q`g zO!AqB8RGk``(@NmT(Wf$wz7Xl=CPk+i(yjiWdG8M<>XN3U!x}bH&L#Bi%r|Gbo_k^ zrQ<=pA1Ko{s~_YpCt9P!oLYC|Ae9rfpc-9XEygQH@8LwjqZJQWAW^a1B0D z6sL)?85$O+NmHm!lTnr_!u|)(yL}2bY+Zz{;8T%#=G53?m=rs~r*&dIIn?&_s0ltp zl&dph(>5&a&YVJRcNWUD4LQt~lHjw4Yw$UuIL(QT(6BhookDe*hq6u)_T%Tg{e9H= zT(Ni&wvsPE=AjE>dtp-SBwyHxHRVv>i=rm^Vo|Oxj!oOJ^t(g~wcjNv(>CO=bV`yh z6RydZjpDQ%HbTSVv_cBi>6es!if};52lrNT#pXrWO1=u2hpvikhDouLe03)_ltX>5 ziJIiUigI-=Y}$sU-*r-`{jN)ywjqc0Qdj6HleIkgab<6 z*W1h$%NJoQ`4(gzx+S(7CdE$jt)19V4)wh)YLagkv<*wYJEl#Z`cPM4rh8zx0N%kYcHT#iKoQ}d~Xjq(%NufF& zOWCUklYJ=f_SSb&k9WfYM%W5|BAI8Ngsq23u@n4cC;nm$wS6jTf}a-U>gm|D4NJRc zrcghs-%_S+$l>gi1V1NSgP$A4={#(PhQ;ap6sptjsXcoa!2So%yZu5p>|cbf;1`p5 z<|WvAm=rs~FLh#3In?&$s0n^Wl&e=_(>5&aUY$byoc>6ewjqZOaXZlWwzg#GwAZ+|QG7FVoagstSak$LFt*mjr{JIU{KVqZDb_uZ&TeovIE z_hQpFEdAb}Lj9l~piJA4!=F=<{Go77{%{nhN3anZ7N^Hjs7{Yl)+)jQB_G^-(iICB zVJrF5WFGnqwjUhvaMu_7E$^1j~Nu2{hcTgm@H=ArLm8)8!IB!AzDz2#8fAD|}r zzoK0I5SzAP>Gz`)YQO)XOxuvdCn-t(X}Bi;EQ-_T*Z>WS)0Zh!r>`iR6=9MO`Q1Km z_j{?|xMT|>Y-RtB%wxaDmc*pk$^N4gtIMIzhndztZdR8^xjGy+ZNt*>2q~0~2lYmz zOxuvd$SKJ_O1NepHHy<{*bEJe(-d+nt9pZ9@+8r6l7NM+GghT&1_lY;(OI^$rOBi7* z`5-b6T>{$?lVT_NQci3yhx%RyHOZHaa&dy6;i1E{*p3nLk=sYB>BqWntYWg zPOD-gG%QZ5r%;{Npe$E}14=%)_bXSdVT7&ZYm<5CI@p$&6g$b+b7Fxx)c3DZlYE0H zS2x6_ZCLu+7cU~VR70zh3d2oWxXOCQ1ZUscCJ{& z2wTZ_AoI{2u{|*WS(|##br~N7W6=9MO<=x)>Uh07^*~JK3*$*c3*h8>YF)4PkALhgsbExwp zP?P=0C|8fdrfpa{J|>0I@u1$ZlxZ7sI6fuWPYBoSCq{8P37er|aXLAL>U0Wa#Uf1h zp}gB$-%CBs4Z9d&EBF~?o_Qv=DkjBF@Uxuwi#62tIj9MKZj`I%VbeA&?Vg`P{iuFV znYJN^3sVyOqHqm9OYdFIvFs+bfz z!LM;*g*nvrwWtYxU6iZWW79S)?cSI|{hV&1OxuvdEh!0pYq$o#EsE3a*a!`a)14_) zr@JT%7GXbr&fDKhy~h=+7-1{Lb{+4NJd| zrBFYp$0^e`($ zjO~g^v6K82C)SuleZPvDhu$3%i?tZO+J)&d-r>(!@6W6 zBWz_Kp3GxMz!t`&*vUSU6RXUj&PPE__EDo;9SxhdVd;2`6l%v~Ql@RlVeFJ-A17S1 z|00UhxY!I0i_`ciRHq3jixy!&UU9AOrB39Ajf}7rd=fIxoD^FalVT_M8ftq= z)C8X@%GIf{X&aVyr%j=@I~`@(h8$){N$?rNHTXPW z%%Q$lMoscnqFh}So3>%;cl8u%ziUvYZOGwQDM`LoxF%maiqksS2n~zVdMQ+=^(l)M 
z;ee74?rq?Tm5i{Jd?PXs-5A>#lVT_NrcNv~hx*dyty8G|ZbO;2 zA&2c!l6?DcO};}Erya2o8WyLWQm9TlQ&ug)0VVJ2?dpo9jIfn_cQOy{VLM||>?GgQ ziEZXk-+ib_zIT+X`(V>HEdB16LhW~d%CrqR9GH^i2Zd|$gQGYdf(_8HI31Qkbvm4~ zYY`^-P~Pp`@1-8;lD&+umHlWkk39xk81Ar0X5lAjB@oPY}$sUi%CrqRT#=ICSB7iwtD-nv zjm^-oI9-!Mb@~%!-6HIN@VwivbHiRn*b06FnP=XJt&K^s6Z~c;)|o?X--?>xw?(;n zJ2q{@((aup)OPQpOxuvdJt+x(Z@31(FN)Ls*a!`a(}O8gr$1AcEy8~MoVUN1`mih3 zGQw8!N69?&F>Gs0ik;+7II++i>ia3wB!4=})n~A28Jr$r4NJRArBK^lnlf!e4$Gz__;TSIeEBF&D_|ou zEKVz?P@PtytXzcs_&INXFLf1HEN6tR5&qu9rgXcYVsV4LNL(lH?nPYx0evIBkrL(6BgdnnHEjjIwkQ4k-EH-WIM{&j?$| zw<7b%?MnsPFAjlYECLS9ipwZCLuR}@^EKYl-P@VRotX+fyO5WGo+Z78MVJrE*WFER7wm&AtPVxhs*lZ5kV&zV{ZuEGn?s$Sj+*ReM7eqdwYf`A~{)sYeLk`!aB>45=8vKSRPB&sRG%QXxr%;`4p{!nn z{STgZ`)zL6(Fj|??;!KcJFyiqDRzS2?Zj$xsO@`E6a2m?SMSHBZCKiUFon{t)#j9G z8*+F!CBYvF*Wiyvae53JpP0x9B$nTNiQ?T|^all%iG)|*3pe~6mo|BiC?BW&7+rQeTJ zsQrFInYJN^&r*{7^Kec6MHHtmu@M>;r>|3}PTx?LFTw#O@9TZ%iZzX}mHY=X5B(9_ zB9me#`7mB1_M1a}4~LrM!$-L~0yb^K((gzq)P6^%Oxuvds3}Q4TDT@3J&My9*Z>WS z(^x4~r?Dy97h#eQ<=x)>Ug|GgvZ)cavX4jRvEyTlWK!&8pU{aF=TPSpqbB<#QLavk zP1~?^Jb4PG<3YVCDAP9NFjY#jPaUq=r-|Y;EjB~L;xv5<)oBLG0!EnZXbAr`?|EC_ zOP$F-wW$%dg3m(cnX_VxWK!$|pWTVSSVL{kiJIVZMY%dRHf_Vw?z}0~k7_>3v<*2d zkdojFhHLPJqBt##&CswoEt*1gT8y%Q5%xcL-tB|ju&EKYf-gztnM+}dWK!$|U&e_A z=TO_rp(gn9QLe6lP1~@vyJ8CUb6SZqZ9@*Lq$K#N;Tn9kC{C+mBQz{dYo<`0ennZo z2>bDK-u_Y zGs?6LIc$-VV)*e012 zJIQx(V#ztw_im_3zI&9bJ#5;BrQbbMsQvCmnYJN^y;G8WpKwjSZxpBfun`&-rvp-` zP6tv}Fv0;P@9Q1xie-(kmHbdL4?PUqC6i(&`4LWRIfwc_3N^`(j&k)FY}$sU-{VrK z{T@%5wjqZTQ0PyPNVE#gh@V>cYF7Hsb{!kUn6W~|1Fuv zo`tQGNwJgt948i?L!F<8n(V)ea`k*{+J>d$3sR^ZUr3p@A%}}olKql!&HjfdPM2ac zG%QY+r%;`)psZnp$&QBfZg1^Jy~;neuMxI_|B=iyuff*Iq}U05trLH-hT6U!HNkI) za`i@R+J>dwn^UOm-a?tSA&1*i68!dX4Sq)yr#rD38WyL!Q>ae&P}VTQ{s+&y{XRGB zYlN-f50H80gV;Km6g$Bma$?On)b=B&3I1r5tB+yRHZ1KvkwR_vNy@YhIXs<`;Ln6> z@MohqJ%^3ZusFStLUnqPvV;-#lS#3Y{8c9wokM-U zhMMHBN4fe2Hf_Vw?^`LCfb8*=zA zCCR@J*W^D$arzM(pBp)|ilaCk0X?$#ehQ(>Z6sprilue8< z$%pc8?|v_J5|?akgstq8k$LRo*g}~UJK3jnV%<5^`P8V%K24OX(_+&$EFDjuLhX14 z%CrqR%#@PsGly&TS)w@2ip|ilIL)3yb(({+j1eZg|7{HE-QL=gI+q)^Ho{i$dB{9- zUTmLCik;x|JMkB5sO<$&6MUg4R~N>nZCKh}G==Ju@ih{CzhQ5&qZks|)z8z)Sh8%WCN%9@THTmEu zPCH>EG%QZLq)?r9r7UBF14=%)x4SFWHo{i&J;*$CPi&h^ik;+rPAoi!`rZdM$@h(N zbw6y{hNa&FQYifn>K#a#wjqauQmBWi z#f`9){8%y%Jq}wZlVT_N2~KQ0hx$GVHOYSy0OpsnaOaHso+dN|K)$ zuE~EJ#px_;fQH5CoD{0lxs-j3Fv*AVZts3C^>;4W-3VLRe^2JI7hnrzQtV{E$cd%r zQ0JGRCi@?vT)h;Vwqfb`@)T;vS5T&H$l2%(4U5yYDO9KHC@UFZ zvJd6m-uhnZ4Q|-o2wTB#BJ<3fv3)Wrc7osP#9yqTwr@vG@H?Vhy%U?ZVQKg76l%No zP^N9j;l7jvzdu}qKM=*~L2QPG#p$6Gs?)=im5i|e!Sil^)D62EVJrCKWS;p1woWF+ zPVlFkSa}Y${S0b?KO5!hbJ(;EOS>ic`tB>y4G)gQ5G8zrVe4d4>?9w@iM{7g-{Ybt`FK&Tj*m^- zu=G1&3bo&fDAP9NFiA?1Pa3YtCyU}VIW|DU;xuIn)oCiqMn;(ALw>i<*Zp4VG%nfR z2wT~wBlFnlv4t`zcCydt#OiaX^O;eTeU>O!XT_#%SUR3Ph0^h$-W-%^8*-Q{CE4c= z*X;8|aheyKp5&aE|Wt2sFtNn+mOTZDG9zpxCZ}a6sHxj85$O+l~brrt58-l z!u|)(yL~k`>~4gu;A@b1=9<_#nG`$0*K%U5&aW*z(g+O_zc zGHpW+8>J-p#^D-#lPFG`Vk0yxPMfDtKk+Rn`xxQSf6jg4&G%Bba>eFG*h;<)nTKwR zEt5&HlYDz8R-Z$C?}(b@gQHyC37fWI>35eD>IbzeW!i=uc27z2UbrUTBZ||W*a!`a zQ(p?zX>ZCxMmV74gM0hBVs#^ICEuUSLl3~V$)wmxevlK(&!N5#K~3^Qqg*`%; z_lOkgCv_xc+J+pCPD%1(!ZrD^QJjv$Mrc@^PDr6Tok-cp2nUqBulE~Q>~4guzUxF(-2iqrJi01b=Nj44#7nJ60>VUnXE{LOj3?)Oq>@vm)fgstqe zk$LRw*g}~UJK5)SVgowV`P`_sr zg8wqg)fKU68E6sptOl$DII|H1Qa zU)K%08(}N>`edG&|M6HSlVT_MhE8lhhuYp4HNiKDa&=Q|+J>dw%~PoDZb6y0A&0F} z5`61$4Zckjr){wj8WyMRQ>acmQ1&sxe*B!bzn41L6`LDjEBVf39=Z#*OeV!n^4*+R ze-8ECLrwBMqFmh*o3>%;w=ad-@7|PY8*-*W~+0aXJ7SpM_}7zQtTu@%8C8wP~XR(Ci$^Zt{#U?+pzR|LJFndLA?_x z(>CPro0KF!Ib4&U62<9MY=nlz>GTw;(;1YFjBr57`+C21#qLJfN`5w(hn|D2lS#3Y 
z{5&W2pF@40kDBDak82`?PWPrzo$jOTWQ55+ly`gUd#Mk&VR<8L z1^+XdXFi1OlS#1?{1GSqVhy$Z7;1t)9_8v2*t88xyHBN1KdPrG(>CPrY)XPZ7p}pd zkK*(KHbcYW^im4d=`WO(jIjT~^KSpE8+JFsR`9=(dFHFwI++wZ!C!M?`#IG18>k8X zW|XUMVbeA&?Y@&j{haVI6Zxe>OKe?sP=pJK~oQtTxE+==z)P~TsoCiz!Uu6~V8+pzTeZ3^{+`i?SfLk>Tr zB>9iwn*66IPQ%O+r(t1n8qWXOI1Nu($Os3Nd~k0>SFCP?t>hz;r-@UjPLohJ zGQt5R@9RzGirtN{m3#^^51kTQCzE0)`P5GAKZp9B7B$JIi*j{(Y}$sU-x*V=pZQFb zX&Z8wB_+vc4cFweMRA%P8=zrvnlpv!G#6zfBTRDtI~mfuz5Bh?d0eu+5w^0=N9M8f zV+&||fii4Evb=L@4I`yx@UE{aXtuynk53iY!ZM47fBhb2>zeW`HGzH}6)Ww03< z7N_M>s7}jMb~3_bAIiJE^}WEkRrQNkss2|nZlxZ7sST`lX*9+I+>ql|=H8w-T;j}i9c=e+&B)ID6Wxe>OK??vXJeb_RY6g$cHabo>B)c1a< zNxpxSs|R4yHZ1)fltTTW4yH`oki(%VNq$(kCOdTR8I)-oa`$gsqcFv6K7~C-$F1 zeP4>2l?G{7%$lzbnesyRm5-mX7aDp?+5P zQKoIk;enK7e=uCL|2c}&L)Z)ri_;@1RHsKNI~ifJ59Qt7`d;eeZdl$3Tfv_s^USBP zeKIL_fUX=c8PG0h_jAY4@cR>PPh#%CrqR{52)PUkTUXe~aSuDmFvI z;`CYy)#-K0N=DfK;CZ*d>4x2nuoe7mGS7SmTPKrZC-}QgY(IzEejhc#{~6`#2iUX? zOS>PYP(P=CQ>Jam;Xf$}{&Bbl|0Igjr`QM$i__;RRHrW}`xs$Ae$Lz9Oa00fn;T&( z`8Q-9`YpChCdE$j@10nG4)y&bYLfpHaUA%_uDl6<6a zO+IoIr%|vG8WyL~Qm7yK=#+(wa6rii_r`R^>PFZ~J~o+$j)QHJNwJfBTqpLQLw%2r zn&cBixjG>>ZNt*<#3|H%C!tK+ki%ptNj`bFCZ8gT)0Ef<4U5y%DO9IvC>t5!fRgw1 zrgO#aM%YR|1DS`;h^>=Jv6FmeC-$F1eb0)T_d6Cx4xIUh8vbQ!dCEKk$L7?*glyQJHgj+;xE=v+v}kw`1(<<{u-OMVQF{6 z6zWH{5oOwj95zWw@J+)t_-0X@HpgaYSe&*@p*n3vS;+|dA3X2&ZQQWC5w?PFN9LK^ zW9wv6>;&J@iS6f5+dH8q_|8$T?t)F*u(Z2d3bozcDbqIOut!RQ?-{Pa_ln}whmFv% zIPH@{b=sG*j}i9c=e+&B)cswtxe>OKA4uk*2Vu)(QtTu@#EJFiP~V54Ci&q}t{#C+ z+pzR|R0_4JzTWAwqa@a^Au{kUr?rP z$l04}shQ;ao6spq?lzoh_A3x{q@1_3aip`C)b&?ND=Apx3%VbjQ zBp<CNVW=fKe6|Tw0j^Z>9HbTSVG;Rvj zX*|k8MmV74gL@OWVs#^IedZI9dFaI0HklMV$tQJU|2fq6~4gu5&qE|@~?cOlBO4LK~5lH`kqYx2dSI4zD1(6Bfy zkwSG^lCqHzCi&R@-oDGOy`4AnlRW26JM6N}7QNomYL+y@R>Wn=xG#t8jY+W+aRnz9 zokI<-h?rz%P z!bBWJ#Q*z$sh*eB`VOsOgsp5Fkg?qm+YFOpC)>tO{OuX)Y*W-^+bqh}&9S)|mX5Ye zp?0(tWp0KXwn<60ZNoL&c2S(R$EIOeoOVp1It`{QSA??dwbvF_zU2R&bInzT9d7*L z$Cow#pJ|3U4D0{95Ps7? 
diff --git a/examples/stable-diffusion/quantize/measure_all/fp8_hooks_maxabs_mod_list.json b/examples/stable-diffusion/quantize/measure_all/fp8_hooks_maxabs_mod_list.json
new file mode 100644
index 0000000000..4cd94ad59e
--- /dev/null
+++ b/examples/stable-diffusion/quantize/measure_all/fp8_hooks_maxabs_mod_list.json
@@ -0,0 +1,506 @@
+[
+ "time_text_embed.timestep_embedder.linear_1",
+ "time_text_embed.timestep_embedder.linear_2",
+ "time_text_embed.guidance_embedder.linear_1",
+ "time_text_embed.guidance_embedder.linear_2",
+ "time_text_embed.text_embedder.linear_1",
+ "time_text_embed.text_embedder.linear_2",
+ "context_embedder",
+ "x_embedder",
+ "transformer_blocks.0.norm1.linear",
+ "transformer_blocks.0.norm1_context.linear",
+ "transformer_blocks.0.attn.to_q",
+ "transformer_blocks.0.attn.to_k",
+ "transformer_blocks.0.attn.to_v",
+ "transformer_blocks.0.attn.add_k_proj",
+ "transformer_blocks.0.attn.add_v_proj",
+ "transformer_blocks.0.attn.add_q_proj",
+ "transformer_blocks.0.attn.to_out.0",
+ "transformer_blocks.0.attn.to_add_out",
+ "transformer_blocks.0.ff.net.0.proj",
+ "transformer_blocks.0.ff.net.2",
+ "transformer_blocks.0.ff_context.net.0.proj",
+ "transformer_blocks.0.ff_context.net.2",
+ "transformer_blocks.1.norm1.linear",
+ "transformer_blocks.1.norm1_context.linear",
+ "transformer_blocks.1.attn.to_q",
+ "transformer_blocks.1.attn.to_k",
+ "transformer_blocks.1.attn.to_v",
+ "transformer_blocks.1.attn.add_k_proj",
+ "transformer_blocks.1.attn.add_v_proj",
+ "transformer_blocks.1.attn.add_q_proj",
+ "transformer_blocks.1.attn.to_out.0",
+ "transformer_blocks.1.attn.to_add_out",
+ "transformer_blocks.1.ff.net.0.proj",
+ "transformer_blocks.1.ff.net.2",
+ "transformer_blocks.1.ff_context.net.0.proj",
+ "transformer_blocks.1.ff_context.net.2",
+ "transformer_blocks.2.norm1.linear",
+ "transformer_blocks.2.norm1_context.linear",
+ "transformer_blocks.2.attn.to_q",
+ "transformer_blocks.2.attn.to_k",
+ "transformer_blocks.2.attn.to_v",
+ "transformer_blocks.2.attn.add_k_proj",
+ "transformer_blocks.2.attn.add_v_proj",
+ "transformer_blocks.2.attn.add_q_proj",
+ "transformer_blocks.2.attn.to_out.0",
+ "transformer_blocks.2.attn.to_add_out",
+ "transformer_blocks.2.ff.net.0.proj",
+ "transformer_blocks.2.ff.net.2",
+ "transformer_blocks.2.ff_context.net.0.proj",
+ "transformer_blocks.2.ff_context.net.2",
+ "transformer_blocks.3.norm1.linear",
+ "transformer_blocks.3.norm1_context.linear",
+ "transformer_blocks.3.attn.to_q",
+ "transformer_blocks.3.attn.to_k",
+ "transformer_blocks.3.attn.to_v",
+ "transformer_blocks.3.attn.add_k_proj",
+ "transformer_blocks.3.attn.add_v_proj",
+ "transformer_blocks.3.attn.add_q_proj",
+ "transformer_blocks.3.attn.to_out.0",
+ "transformer_blocks.3.attn.to_add_out",
+ "transformer_blocks.3.ff.net.0.proj",
+ "transformer_blocks.3.ff.net.2",
+ "transformer_blocks.3.ff_context.net.0.proj",
+ "transformer_blocks.3.ff_context.net.2",
+ "transformer_blocks.4.norm1.linear",
+ "transformer_blocks.4.norm1_context.linear",
+ "transformer_blocks.4.attn.to_q",
+ "transformer_blocks.4.attn.to_k",
+ "transformer_blocks.4.attn.to_v",
+ "transformer_blocks.4.attn.add_k_proj",
+ "transformer_blocks.4.attn.add_v_proj",
+ "transformer_blocks.4.attn.add_q_proj",
+ "transformer_blocks.4.attn.to_out.0",
+ "transformer_blocks.4.attn.to_add_out",
+ "transformer_blocks.4.ff.net.0.proj",
+ "transformer_blocks.4.ff.net.2",
+ "transformer_blocks.4.ff_context.net.0.proj",
+ "transformer_blocks.4.ff_context.net.2",
+ "transformer_blocks.5.norm1.linear",
+ "transformer_blocks.5.norm1_context.linear",
+ "transformer_blocks.5.attn.to_q",
+ "transformer_blocks.5.attn.to_k",
+ "transformer_blocks.5.attn.to_v",
+ "transformer_blocks.5.attn.add_k_proj",
+ "transformer_blocks.5.attn.add_v_proj",
+ "transformer_blocks.5.attn.add_q_proj",
+ "transformer_blocks.5.attn.to_out.0",
+ "transformer_blocks.5.attn.to_add_out",
+ "transformer_blocks.5.ff.net.0.proj",
+ "transformer_blocks.5.ff.net.2",
+ "transformer_blocks.5.ff_context.net.0.proj",
+ "transformer_blocks.5.ff_context.net.2",
+ "transformer_blocks.6.norm1.linear",
+ "transformer_blocks.6.norm1_context.linear",
+ "transformer_blocks.6.attn.to_q",
+ "transformer_blocks.6.attn.to_k",
+ "transformer_blocks.6.attn.to_v",
+ "transformer_blocks.6.attn.add_k_proj",
+ "transformer_blocks.6.attn.add_v_proj",
+ "transformer_blocks.6.attn.add_q_proj",
+ "transformer_blocks.6.attn.to_out.0",
+ "transformer_blocks.6.attn.to_add_out",
+ "transformer_blocks.6.ff.net.0.proj",
+ "transformer_blocks.6.ff.net.2",
+ "transformer_blocks.6.ff_context.net.0.proj",
+ "transformer_blocks.6.ff_context.net.2",
+ "transformer_blocks.7.norm1.linear",
+ "transformer_blocks.7.norm1_context.linear",
+ "transformer_blocks.7.attn.to_q",
+ "transformer_blocks.7.attn.to_k",
+ "transformer_blocks.7.attn.to_v",
+ "transformer_blocks.7.attn.add_k_proj",
+ "transformer_blocks.7.attn.add_v_proj",
+ "transformer_blocks.7.attn.add_q_proj",
+ "transformer_blocks.7.attn.to_out.0",
+ "transformer_blocks.7.attn.to_add_out",
+ "transformer_blocks.7.ff.net.0.proj",
+ "transformer_blocks.7.ff.net.2",
+ "transformer_blocks.7.ff_context.net.0.proj",
+ "transformer_blocks.7.ff_context.net.2",
+ "transformer_blocks.8.norm1.linear",
+ "transformer_blocks.8.norm1_context.linear",
+ "transformer_blocks.8.attn.to_q",
+ "transformer_blocks.8.attn.to_k",
+ "transformer_blocks.8.attn.to_v",
+ "transformer_blocks.8.attn.add_k_proj",
+ "transformer_blocks.8.attn.add_v_proj",
+ "transformer_blocks.8.attn.add_q_proj",
+ "transformer_blocks.8.attn.to_out.0",
+ "transformer_blocks.8.attn.to_add_out",
+ "transformer_blocks.8.ff.net.0.proj",
+ "transformer_blocks.8.ff.net.2",
+ "transformer_blocks.8.ff_context.net.0.proj",
+ "transformer_blocks.8.ff_context.net.2",
+ "transformer_blocks.9.norm1.linear",
+ "transformer_blocks.9.norm1_context.linear",
+ "transformer_blocks.9.attn.to_q",
+ "transformer_blocks.9.attn.to_k",
+ "transformer_blocks.9.attn.to_v",
+ "transformer_blocks.9.attn.add_k_proj",
+ "transformer_blocks.9.attn.add_v_proj",
+ "transformer_blocks.9.attn.add_q_proj",
+ "transformer_blocks.9.attn.to_out.0",
+ "transformer_blocks.9.attn.to_add_out",
+ "transformer_blocks.9.ff.net.0.proj",
+ "transformer_blocks.9.ff.net.2",
+ "transformer_blocks.9.ff_context.net.0.proj",
+ "transformer_blocks.9.ff_context.net.2",
+ "transformer_blocks.10.norm1.linear",
+ "transformer_blocks.10.norm1_context.linear",
+ "transformer_blocks.10.attn.to_q",
+ "transformer_blocks.10.attn.to_k",
+ "transformer_blocks.10.attn.to_v",
+ "transformer_blocks.10.attn.add_k_proj",
+ "transformer_blocks.10.attn.add_v_proj",
+ "transformer_blocks.10.attn.add_q_proj",
+ "transformer_blocks.10.attn.to_out.0",
+ "transformer_blocks.10.attn.to_add_out",
+ "transformer_blocks.10.ff.net.0.proj",
+ "transformer_blocks.10.ff.net.2",
+ "transformer_blocks.10.ff_context.net.0.proj",
+ "transformer_blocks.10.ff_context.net.2",
+ "transformer_blocks.11.norm1.linear",
+ "transformer_blocks.11.norm1_context.linear",
+ "transformer_blocks.11.attn.to_q",
+ "transformer_blocks.11.attn.to_k",
+ "transformer_blocks.11.attn.to_v",
+ "transformer_blocks.11.attn.add_k_proj",
+ "transformer_blocks.11.attn.add_v_proj",
+ "transformer_blocks.11.attn.add_q_proj",
+ "transformer_blocks.11.attn.to_out.0",
+ "transformer_blocks.11.attn.to_add_out",
+ "transformer_blocks.11.ff.net.0.proj",
+ "transformer_blocks.11.ff.net.2",
+ "transformer_blocks.11.ff_context.net.0.proj",
+ "transformer_blocks.11.ff_context.net.2",
+ "transformer_blocks.12.norm1.linear",
+ "transformer_blocks.12.norm1_context.linear",
+ "transformer_blocks.12.attn.to_q",
+ "transformer_blocks.12.attn.to_k",
+ "transformer_blocks.12.attn.to_v",
+ "transformer_blocks.12.attn.add_k_proj",
+ "transformer_blocks.12.attn.add_v_proj",
+ "transformer_blocks.12.attn.add_q_proj",
+ "transformer_blocks.12.attn.to_out.0",
+ "transformer_blocks.12.attn.to_add_out",
+ "transformer_blocks.12.ff.net.0.proj",
+ "transformer_blocks.12.ff.net.2",
+ "transformer_blocks.12.ff_context.net.0.proj",
+ "transformer_blocks.12.ff_context.net.2",
+ "transformer_blocks.13.norm1.linear",
+ "transformer_blocks.13.norm1_context.linear",
+ "transformer_blocks.13.attn.to_q",
+ "transformer_blocks.13.attn.to_k",
+ "transformer_blocks.13.attn.to_v",
+ "transformer_blocks.13.attn.add_k_proj",
+ "transformer_blocks.13.attn.add_v_proj",
+ "transformer_blocks.13.attn.add_q_proj",
+ "transformer_blocks.13.attn.to_out.0",
+ "transformer_blocks.13.attn.to_add_out",
+ "transformer_blocks.13.ff.net.0.proj",
+ "transformer_blocks.13.ff.net.2",
+ "transformer_blocks.13.ff_context.net.0.proj",
+ "transformer_blocks.13.ff_context.net.2",
+ "transformer_blocks.14.norm1.linear",
+ "transformer_blocks.14.norm1_context.linear",
+ "transformer_blocks.14.attn.to_q",
+ "transformer_blocks.14.attn.to_k",
+ "transformer_blocks.14.attn.to_v",
+ "transformer_blocks.14.attn.add_k_proj",
+ "transformer_blocks.14.attn.add_v_proj",
+ "transformer_blocks.14.attn.add_q_proj",
+ "transformer_blocks.14.attn.to_out.0",
+ "transformer_blocks.14.attn.to_add_out",
+ "transformer_blocks.14.ff.net.0.proj",
+ "transformer_blocks.14.ff.net.2",
+ "transformer_blocks.14.ff_context.net.0.proj",
+ "transformer_blocks.14.ff_context.net.2",
+ "transformer_blocks.15.norm1.linear",
+ "transformer_blocks.15.norm1_context.linear",
+ "transformer_blocks.15.attn.to_q",
+ "transformer_blocks.15.attn.to_k",
+ "transformer_blocks.15.attn.to_v",
+ "transformer_blocks.15.attn.add_k_proj",
+ "transformer_blocks.15.attn.add_v_proj",
+ "transformer_blocks.15.attn.add_q_proj",
+ "transformer_blocks.15.attn.to_out.0",
+ "transformer_blocks.15.attn.to_add_out",
+ "transformer_blocks.15.ff.net.0.proj",
+ "transformer_blocks.15.ff.net.2",
+ "transformer_blocks.15.ff_context.net.0.proj",
+ "transformer_blocks.15.ff_context.net.2",
+ "transformer_blocks.16.norm1.linear",
+ "transformer_blocks.16.norm1_context.linear",
+ "transformer_blocks.16.attn.to_q",
+ "transformer_blocks.16.attn.to_k",
+ "transformer_blocks.16.attn.to_v",
+ "transformer_blocks.16.attn.add_k_proj",
+ "transformer_blocks.16.attn.add_v_proj",
+ "transformer_blocks.16.attn.add_q_proj",
+ "transformer_blocks.16.attn.to_out.0",
+ "transformer_blocks.16.attn.to_add_out",
+ "transformer_blocks.16.ff.net.0.proj",
+ "transformer_blocks.16.ff.net.2",
+ "transformer_blocks.16.ff_context.net.0.proj",
+ "transformer_blocks.16.ff_context.net.2",
+ "transformer_blocks.17.norm1.linear",
+ "transformer_blocks.17.norm1_context.linear",
+ "transformer_blocks.17.attn.to_q",
+ "transformer_blocks.17.attn.to_k",
+ "transformer_blocks.17.attn.to_v",
+ "transformer_blocks.17.attn.add_k_proj",
+ "transformer_blocks.17.attn.add_v_proj",
+ "transformer_blocks.17.attn.add_q_proj",
+ "transformer_blocks.17.attn.to_out.0",
+ "transformer_blocks.17.attn.to_add_out",
+ "transformer_blocks.17.ff.net.0.proj",
+ "transformer_blocks.17.ff.net.2",
+ "transformer_blocks.17.ff_context.net.0.proj",
+ "transformer_blocks.17.ff_context.net.2",
+ "transformer_blocks.18.norm1.linear",
+ "transformer_blocks.18.norm1_context.linear",
+ "transformer_blocks.18.attn.to_q",
+ "transformer_blocks.18.attn.to_k",
+ "transformer_blocks.18.attn.to_v",
+ "transformer_blocks.18.attn.add_k_proj",
+ "transformer_blocks.18.attn.add_v_proj",
+ "transformer_blocks.18.attn.add_q_proj",
+ "transformer_blocks.18.attn.to_out.0",
+ "transformer_blocks.18.attn.to_add_out",
+ "transformer_blocks.18.ff.net.0.proj",
+ "transformer_blocks.18.ff.net.2",
+ "transformer_blocks.18.ff_context.net.0.proj",
+ "transformer_blocks.18.ff_context.net.2",
+ "single_transformer_blocks.0.norm.linear",
+ "single_transformer_blocks.0.proj_mlp",
+ "single_transformer_blocks.0.proj_out",
+ "single_transformer_blocks.0.attn.to_q",
+ "single_transformer_blocks.0.attn.to_k",
+ "single_transformer_blocks.0.attn.to_v",
+ "single_transformer_blocks.1.norm.linear",
+ "single_transformer_blocks.1.proj_mlp",
+ "single_transformer_blocks.1.proj_out",
+ "single_transformer_blocks.1.attn.to_q",
+ "single_transformer_blocks.1.attn.to_k",
+ "single_transformer_blocks.1.attn.to_v",
+ "single_transformer_blocks.2.norm.linear",
+ "single_transformer_blocks.2.proj_mlp",
+ "single_transformer_blocks.2.proj_out",
+ "single_transformer_blocks.2.attn.to_q",
+ "single_transformer_blocks.2.attn.to_k",
+ "single_transformer_blocks.2.attn.to_v",
+ "single_transformer_blocks.3.norm.linear",
+ "single_transformer_blocks.3.proj_mlp",
+ "single_transformer_blocks.3.proj_out",
+ "single_transformer_blocks.3.attn.to_q",
+ "single_transformer_blocks.3.attn.to_k",
+ "single_transformer_blocks.3.attn.to_v",
+ "single_transformer_blocks.4.norm.linear",
+ "single_transformer_blocks.4.proj_mlp",
+ "single_transformer_blocks.4.proj_out",
+ "single_transformer_blocks.4.attn.to_q",
+ "single_transformer_blocks.4.attn.to_k",
+ "single_transformer_blocks.4.attn.to_v",
+ "single_transformer_blocks.5.norm.linear",
+ "single_transformer_blocks.5.proj_mlp",
+ "single_transformer_blocks.5.proj_out",
+ "single_transformer_blocks.5.attn.to_q",
+ "single_transformer_blocks.5.attn.to_k",
+ "single_transformer_blocks.5.attn.to_v",
+ "single_transformer_blocks.6.norm.linear",
+ "single_transformer_blocks.6.proj_mlp",
+ "single_transformer_blocks.6.proj_out",
+ "single_transformer_blocks.6.attn.to_q",
+ "single_transformer_blocks.6.attn.to_k",
+ "single_transformer_blocks.6.attn.to_v",
+ "single_transformer_blocks.7.norm.linear",
+ "single_transformer_blocks.7.proj_mlp",
+ "single_transformer_blocks.7.proj_out",
+ "single_transformer_blocks.7.attn.to_q",
+ "single_transformer_blocks.7.attn.to_k",
+ "single_transformer_blocks.7.attn.to_v",
+ "single_transformer_blocks.8.norm.linear",
+ "single_transformer_blocks.8.proj_mlp",
+ "single_transformer_blocks.8.proj_out",
+ "single_transformer_blocks.8.attn.to_q",
+ "single_transformer_blocks.8.attn.to_k",
+ "single_transformer_blocks.8.attn.to_v",
+ "single_transformer_blocks.9.norm.linear",
+ "single_transformer_blocks.9.proj_mlp",
+ "single_transformer_blocks.9.proj_out",
+ "single_transformer_blocks.9.attn.to_q",
+ "single_transformer_blocks.9.attn.to_k",
+ "single_transformer_blocks.9.attn.to_v",
+ "single_transformer_blocks.10.norm.linear",
+ "single_transformer_blocks.10.proj_mlp",
+ "single_transformer_blocks.10.proj_out",
+ "single_transformer_blocks.10.attn.to_q",
+ "single_transformer_blocks.10.attn.to_k",
+ "single_transformer_blocks.10.attn.to_v",
+ "single_transformer_blocks.11.norm.linear",
+ "single_transformer_blocks.11.proj_mlp",
+ "single_transformer_blocks.11.proj_out",
+ "single_transformer_blocks.11.attn.to_q",
+ "single_transformer_blocks.11.attn.to_k",
+ "single_transformer_blocks.11.attn.to_v",
+ "single_transformer_blocks.12.norm.linear",
+ "single_transformer_blocks.12.proj_mlp",
+ "single_transformer_blocks.12.proj_out",
+ "single_transformer_blocks.12.attn.to_q",
+ "single_transformer_blocks.12.attn.to_k",
+ "single_transformer_blocks.12.attn.to_v",
+ "single_transformer_blocks.13.norm.linear",
+ "single_transformer_blocks.13.proj_mlp",
+ "single_transformer_blocks.13.proj_out",
+ "single_transformer_blocks.13.attn.to_q",
+ "single_transformer_blocks.13.attn.to_k",
+ "single_transformer_blocks.13.attn.to_v",
+ "single_transformer_blocks.14.norm.linear",
+ "single_transformer_blocks.14.proj_mlp",
+ "single_transformer_blocks.14.proj_out",
+ "single_transformer_blocks.14.attn.to_q",
+ "single_transformer_blocks.14.attn.to_k",
+ "single_transformer_blocks.14.attn.to_v",
+ "single_transformer_blocks.15.norm.linear",
+ "single_transformer_blocks.15.proj_mlp",
+ "single_transformer_blocks.15.proj_out",
+ "single_transformer_blocks.15.attn.to_q",
+ "single_transformer_blocks.15.attn.to_k",
+ "single_transformer_blocks.15.attn.to_v",
+ "single_transformer_blocks.16.norm.linear",
+ "single_transformer_blocks.16.proj_mlp",
+ "single_transformer_blocks.16.proj_out",
+ "single_transformer_blocks.16.attn.to_q",
+ "single_transformer_blocks.16.attn.to_k",
+ "single_transformer_blocks.16.attn.to_v",
+ "single_transformer_blocks.17.norm.linear",
+ "single_transformer_blocks.17.proj_mlp",
+ "single_transformer_blocks.17.proj_out",
+ "single_transformer_blocks.17.attn.to_q",
+ "single_transformer_blocks.17.attn.to_k",
+ "single_transformer_blocks.17.attn.to_v",
+ "single_transformer_blocks.18.norm.linear",
+ "single_transformer_blocks.18.proj_mlp",
+ "single_transformer_blocks.18.proj_out",
+ "single_transformer_blocks.18.attn.to_q",
+ "single_transformer_blocks.18.attn.to_k",
+ "single_transformer_blocks.18.attn.to_v",
+ "single_transformer_blocks.19.norm.linear",
+ "single_transformer_blocks.19.proj_mlp",
+ "single_transformer_blocks.19.proj_out",
+ "single_transformer_blocks.19.attn.to_q",
+ "single_transformer_blocks.19.attn.to_k",
+ "single_transformer_blocks.19.attn.to_v",
+ "single_transformer_blocks.20.norm.linear",
+ "single_transformer_blocks.20.proj_mlp",
+ "single_transformer_blocks.20.proj_out",
+ "single_transformer_blocks.20.attn.to_q",
+ "single_transformer_blocks.20.attn.to_k",
+ "single_transformer_blocks.20.attn.to_v",
+ "single_transformer_blocks.21.norm.linear",
+ "single_transformer_blocks.21.proj_mlp",
+ "single_transformer_blocks.21.proj_out",
+ "single_transformer_blocks.21.attn.to_q",
+ "single_transformer_blocks.21.attn.to_k",
+ "single_transformer_blocks.21.attn.to_v",
+ "single_transformer_blocks.22.norm.linear",
+ "single_transformer_blocks.22.proj_mlp",
+ "single_transformer_blocks.22.proj_out",
+ "single_transformer_blocks.22.attn.to_q",
+ "single_transformer_blocks.22.attn.to_k",
+ "single_transformer_blocks.22.attn.to_v",
+ "single_transformer_blocks.23.norm.linear",
+
"single_transformer_blocks.23.proj_mlp", + "single_transformer_blocks.23.proj_out", + "single_transformer_blocks.23.attn.to_q", + "single_transformer_blocks.23.attn.to_k", + "single_transformer_blocks.23.attn.to_v", + "single_transformer_blocks.24.norm.linear", + "single_transformer_blocks.24.proj_mlp", + "single_transformer_blocks.24.proj_out", + "single_transformer_blocks.24.attn.to_q", + "single_transformer_blocks.24.attn.to_k", + "single_transformer_blocks.24.attn.to_v", + "single_transformer_blocks.25.norm.linear", + "single_transformer_blocks.25.proj_mlp", + "single_transformer_blocks.25.proj_out", + "single_transformer_blocks.25.attn.to_q", + "single_transformer_blocks.25.attn.to_k", + "single_transformer_blocks.25.attn.to_v", + "single_transformer_blocks.26.norm.linear", + "single_transformer_blocks.26.proj_mlp", + "single_transformer_blocks.26.proj_out", + "single_transformer_blocks.26.attn.to_q", + "single_transformer_blocks.26.attn.to_k", + "single_transformer_blocks.26.attn.to_v", + "single_transformer_blocks.27.norm.linear", + "single_transformer_blocks.27.proj_mlp", + "single_transformer_blocks.27.proj_out", + "single_transformer_blocks.27.attn.to_q", + "single_transformer_blocks.27.attn.to_k", + "single_transformer_blocks.27.attn.to_v", + "single_transformer_blocks.28.norm.linear", + "single_transformer_blocks.28.proj_mlp", + "single_transformer_blocks.28.proj_out", + "single_transformer_blocks.28.attn.to_q", + "single_transformer_blocks.28.attn.to_k", + "single_transformer_blocks.28.attn.to_v", + "single_transformer_blocks.29.norm.linear", + "single_transformer_blocks.29.proj_mlp", + "single_transformer_blocks.29.proj_out", + "single_transformer_blocks.29.attn.to_q", + "single_transformer_blocks.29.attn.to_k", + "single_transformer_blocks.29.attn.to_v", + "single_transformer_blocks.30.norm.linear", + "single_transformer_blocks.30.proj_mlp", + "single_transformer_blocks.30.proj_out", + "single_transformer_blocks.30.attn.to_q", + "single_transformer_blocks.30.attn.to_k", + "single_transformer_blocks.30.attn.to_v", + "single_transformer_blocks.31.norm.linear", + "single_transformer_blocks.31.proj_mlp", + "single_transformer_blocks.31.proj_out", + "single_transformer_blocks.31.attn.to_q", + "single_transformer_blocks.31.attn.to_k", + "single_transformer_blocks.31.attn.to_v", + "single_transformer_blocks.32.norm.linear", + "single_transformer_blocks.32.proj_mlp", + "single_transformer_blocks.32.proj_out", + "single_transformer_blocks.32.attn.to_q", + "single_transformer_blocks.32.attn.to_k", + "single_transformer_blocks.32.attn.to_v", + "single_transformer_blocks.33.norm.linear", + "single_transformer_blocks.33.proj_mlp", + "single_transformer_blocks.33.proj_out", + "single_transformer_blocks.33.attn.to_q", + "single_transformer_blocks.33.attn.to_k", + "single_transformer_blocks.33.attn.to_v", + "single_transformer_blocks.34.norm.linear", + "single_transformer_blocks.34.proj_mlp", + "single_transformer_blocks.34.proj_out", + "single_transformer_blocks.34.attn.to_q", + "single_transformer_blocks.34.attn.to_k", + "single_transformer_blocks.34.attn.to_v", + "single_transformer_blocks.35.norm.linear", + "single_transformer_blocks.35.proj_mlp", + "single_transformer_blocks.35.proj_out", + "single_transformer_blocks.35.attn.to_q", + "single_transformer_blocks.35.attn.to_k", + "single_transformer_blocks.35.attn.to_v", + "single_transformer_blocks.36.norm.linear", + "single_transformer_blocks.36.proj_mlp", + "single_transformer_blocks.36.proj_out", + 
"single_transformer_blocks.36.attn.to_q", + "single_transformer_blocks.36.attn.to_k", + "single_transformer_blocks.36.attn.to_v", + "single_transformer_blocks.37.norm.linear", + "single_transformer_blocks.37.proj_mlp", + "single_transformer_blocks.37.proj_out", + "single_transformer_blocks.37.attn.to_q", + "single_transformer_blocks.37.attn.to_k", + "single_transformer_blocks.37.attn.to_v", + "norm_out.linear", + "proj_out" +] \ No newline at end of file diff --git a/examples/stable-diffusion/quantize/measure_all_500/fp8_hooks_maxabs.json b/examples/stable-diffusion/quantize/measure_all_500/fp8_hooks_maxabs.json new file mode 100644 index 0000000000..775d712618 --- /dev/null +++ b/examples/stable-diffusion/quantize/measure_all_500/fp8_hooks_maxabs.json @@ -0,0 +1,8071 @@ +{ + "GlobalRank": null, + "LocalRank": null, + "Mode": "DynamicRange", + "Nodes": { + "time_text_embed.timestep_embedder.linear_1": { + "inputs": [ + [ + [ + 1.0 + ] + ] + ], + "params": { + "weight": [ + [ + 0.28515625 + ] + ] + } + }, + "time_text_embed.timestep_embedder.linear_2": { + "inputs": [ + [ + [ + 3.28125 + ] + ] + ], + "params": { + "weight": [ + [ + 1.1484375 + ] + ] + } + }, + "time_text_embed.guidance_embedder.linear_1": { + "inputs": [ + [ + [ + 1.0 + ] + ] + ], + "params": { + "weight": [ + [ + 0.10400390625 + ] + ] + } + }, + "time_text_embed.guidance_embedder.linear_2": { + "inputs": [ + [ + [ + 0.60546875 + ] + ] + ], + "params": { + "weight": [ + [ + 0.1201171875 + ] + ] + } + }, + "time_text_embed.text_embedder.linear_1": { + "inputs": [ + [ + [ + 4.0625 + ] + ] + ], + "params": { + "weight": [ + [ + 0.263671875 + ] + ] + } + }, + "time_text_embed.text_embedder.linear_2": { + "inputs": [ + [ + [ + 0.373046875 + ] + ] + ], + "params": { + "weight": [ + [ + 1.171875 + ] + ] + } + }, + "context_embedder": { + "inputs": [ + [ + [ + 6.4375 + ] + ] + ], + "params": { + "weight": [ + [ + 0.72265625 + ] + ] + } + }, + "x_embedder": { + "inputs": [ + [ + [ + 5.15625 + ] + ] + ], + "params": { + "weight": [ + [ + 0.55859375 + ] + ] + } + }, + "transformer_blocks.0.norm1.linear": { + "inputs": [ + [ + [ + 6.4375 + ] + ] + ], + "params": { + "weight": [ + [ + 0.396484375 + ] + ] + } + }, + "transformer_blocks.0.norm1_context.linear": { + "inputs": [ + [ + [ + 6.4375 + ] + ] + ], + "params": { + "weight": [ + [ + 0.546875 + ] + ] + } + }, + "transformer_blocks.0.attn.to_q": { + "inputs": [ + [ + [ + 20.125 + ] + ] + ], + "params": { + "weight": [ + [ + 2.4375 + ] + ] + } + }, + "transformer_blocks.0.attn.to_k": { + "inputs": [ + [ + [ + 20.125 + ] + ] + ], + "params": { + "weight": [ + [ + 0.5390625 + ] + ] + } + }, + "transformer_blocks.0.attn.to_v": { + "inputs": [ + [ + [ + 20.125 + ] + ] + ], + "params": { + "weight": [ + [ + 0.38671875 + ] + ] + } + }, + "transformer_blocks.0.attn.add_k_proj": { + "inputs": [ + [ + [ + 14.25 + ] + ] + ], + "params": { + "weight": [ + [ + 0.5390625 + ] + ] + } + }, + "transformer_blocks.0.attn.add_v_proj": { + "inputs": [ + [ + [ + 14.25 + ] + ] + ], + "params": { + "weight": [ + [ + 0.236328125 + ] + ] + } + }, + "transformer_blocks.0.attn.add_q_proj": { + "inputs": [ + [ + [ + 14.25 + ] + ] + ], + "params": { + "weight": [ + [ + 0.37890625 + ] + ] + } + }, + "transformer_blocks.0.attn.to_out.0": { + "inputs": [ + [ + [ + 1.625 + ] + ] + ], + "params": { + "weight": [ + [ + 0.408203125 + ] + ] + } + }, + "transformer_blocks.0.attn.to_add_out": { + "inputs": [ + [ + [ + 7.46875 + ] + ] + ], + "params": { + "weight": [ + [ + 0.40625 + ] + ] + } + }, + 
"transformer_blocks.0.ff.net.0.proj": { + "inputs": [ + [ + [ + 5.46875 + ] + ] + ], + "params": { + "weight": [ + [ + 0.439453125 + ] + ] + } + }, + "transformer_blocks.0.ff.net.2": { + "inputs": [ + [ + [ + 9.375 + ] + ] + ], + "params": { + "weight": [ + [ + 0.60546875 + ] + ] + } + }, + "transformer_blocks.0.ff_context.net.0.proj": { + "inputs": [ + [ + [ + 10.5625 + ] + ] + ], + "params": { + "weight": [ + [ + 0.419921875 + ] + ] + } + }, + "transformer_blocks.0.ff_context.net.2": { + "inputs": [ + [ + [ + 39.75 + ] + ] + ], + "params": { + "weight": [ + [ + 0.50390625 + ] + ] + } + }, + "transformer_blocks.1.norm1.linear": { + "inputs": [ + [ + [ + 6.4375 + ] + ] + ], + "params": { + "weight": [ + [ + 0.71484375 + ] + ] + } + }, + "transformer_blocks.1.norm1_context.linear": { + "inputs": [ + [ + [ + 6.4375 + ] + ] + ], + "params": { + "weight": [ + [ + 0.546875 + ] + ] + } + }, + "transformer_blocks.1.attn.to_q": { + "inputs": [ + [ + [ + 21.0 + ] + ] + ], + "params": { + "weight": [ + [ + 0.72265625 + ] + ] + } + }, + "transformer_blocks.1.attn.to_k": { + "inputs": [ + [ + [ + 21.0 + ] + ] + ], + "params": { + "weight": [ + [ + 1.0 + ] + ] + } + }, + "transformer_blocks.1.attn.to_v": { + "inputs": [ + [ + [ + 21.0 + ] + ] + ], + "params": { + "weight": [ + [ + 0.546875 + ] + ] + } + }, + "transformer_blocks.1.attn.add_k_proj": { + "inputs": [ + [ + [ + 36.0 + ] + ] + ], + "params": { + "weight": [ + [ + 0.3828125 + ] + ] + } + }, + "transformer_blocks.1.attn.add_v_proj": { + "inputs": [ + [ + [ + 36.0 + ] + ] + ], + "params": { + "weight": [ + [ + 0.28125 + ] + ] + } + }, + "transformer_blocks.1.attn.add_q_proj": { + "inputs": [ + [ + [ + 36.0 + ] + ] + ], + "params": { + "weight": [ + [ + 0.376953125 + ] + ] + } + }, + "transformer_blocks.1.attn.to_out.0": { + "inputs": [ + [ + [ + 8.0625 + ] + ] + ], + "params": { + "weight": [ + [ + 0.4765625 + ] + ] + } + }, + "transformer_blocks.1.attn.to_add_out": { + "inputs": [ + [ + [ + 9.75 + ] + ] + ], + "params": { + "weight": [ + [ + 0.296875 + ] + ] + } + }, + "transformer_blocks.1.ff.net.0.proj": { + "inputs": [ + [ + [ + 10.5625 + ] + ] + ], + "params": { + "weight": [ + [ + 0.47265625 + ] + ] + } + }, + "transformer_blocks.1.ff.net.2": { + "inputs": [ + [ + [ + 13.3125 + ] + ] + ], + "params": { + "weight": [ + [ + 0.59765625 + ] + ] + } + }, + "transformer_blocks.1.ff_context.net.0.proj": { + "inputs": [ + [ + [ + 67.5 + ] + ] + ], + "params": { + "weight": [ + [ + 0.40234375 + ] + ] + } + }, + "transformer_blocks.1.ff_context.net.2": { + "inputs": [ + [ + [ + 83.0 + ] + ] + ], + "params": { + "weight": [ + [ + 0.765625 + ] + ] + } + }, + "transformer_blocks.2.norm1.linear": { + "inputs": [ + [ + [ + 6.4375 + ] + ] + ], + "params": { + "weight": [ + [ + 0.82421875 + ] + ] + } + }, + "transformer_blocks.2.norm1_context.linear": { + "inputs": [ + [ + [ + 6.4375 + ] + ] + ], + "params": { + "weight": [ + [ + 0.71484375 + ] + ] + } + }, + "transformer_blocks.2.attn.to_q": { + "inputs": [ + [ + [ + 14.875 + ] + ] + ], + "params": { + "weight": [ + [ + 0.97265625 + ] + ] + } + }, + "transformer_blocks.2.attn.to_k": { + "inputs": [ + [ + [ + 14.875 + ] + ] + ], + "params": { + "weight": [ + [ + 0.7421875 + ] + ] + } + }, + "transformer_blocks.2.attn.to_v": { + "inputs": [ + [ + [ + 14.875 + ] + ] + ], + "params": { + "weight": [ + [ + 0.44140625 + ] + ] + } + }, + "transformer_blocks.2.attn.add_k_proj": { + "inputs": [ + [ + [ + 34.25 + ] + ] + ], + "params": { + "weight": [ + [ + 0.6796875 + ] + ] + } + }, + 
"transformer_blocks.2.attn.add_v_proj": { + "inputs": [ + [ + [ + 34.25 + ] + ] + ], + "params": { + "weight": [ + [ + 0.26171875 + ] + ] + } + }, + "transformer_blocks.2.attn.add_q_proj": { + "inputs": [ + [ + [ + 34.25 + ] + ] + ], + "params": { + "weight": [ + [ + 0.423828125 + ] + ] + } + }, + "transformer_blocks.2.attn.to_out.0": { + "inputs": [ + [ + [ + 11.25 + ] + ] + ], + "params": { + "weight": [ + [ + 0.59765625 + ] + ] + } + }, + "transformer_blocks.2.attn.to_add_out": { + "inputs": [ + [ + [ + 6.09375 + ] + ] + ], + "params": { + "weight": [ + [ + 0.361328125 + ] + ] + } + }, + "transformer_blocks.2.ff.net.0.proj": { + "inputs": [ + [ + [ + 6.78125 + ] + ] + ], + "params": { + "weight": [ + [ + 0.76171875 + ] + ] + } + }, + "transformer_blocks.2.ff.net.2": { + "inputs": [ + [ + [ + 7.9375 + ] + ] + ], + "params": { + "weight": [ + [ + 0.59765625 + ] + ] + } + }, + "transformer_blocks.2.ff_context.net.0.proj": { + "inputs": [ + [ + [ + 66.0 + ] + ] + ], + "params": { + "weight": [ + [ + 0.482421875 + ] + ] + } + }, + "transformer_blocks.2.ff_context.net.2": { + "inputs": [ + [ + [ + 30.625 + ] + ] + ], + "params": { + "weight": [ + [ + 0.609375 + ] + ] + } + }, + "transformer_blocks.3.norm1.linear": { + "inputs": [ + [ + [ + 6.4375 + ] + ] + ], + "params": { + "weight": [ + [ + 0.68359375 + ] + ] + } + }, + "transformer_blocks.3.norm1_context.linear": { + "inputs": [ + [ + [ + 6.4375 + ] + ] + ], + "params": { + "weight": [ + [ + 0.412109375 + ] + ] + } + }, + "transformer_blocks.3.attn.to_q": { + "inputs": [ + [ + [ + 15.5625 + ] + ] + ], + "params": { + "weight": [ + [ + 1.1640625 + ] + ] + } + }, + "transformer_blocks.3.attn.to_k": { + "inputs": [ + [ + [ + 15.5625 + ] + ] + ], + "params": { + "weight": [ + [ + 0.6953125 + ] + ] + } + }, + "transformer_blocks.3.attn.to_v": { + "inputs": [ + [ + [ + 15.5625 + ] + ] + ], + "params": { + "weight": [ + [ + 0.484375 + ] + ] + } + }, + "transformer_blocks.3.attn.add_k_proj": { + "inputs": [ + [ + [ + 27.75 + ] + ] + ], + "params": { + "weight": [ + [ + 0.474609375 + ] + ] + } + }, + "transformer_blocks.3.attn.add_v_proj": { + "inputs": [ + [ + [ + 27.75 + ] + ] + ], + "params": { + "weight": [ + [ + 0.255859375 + ] + ] + } + }, + "transformer_blocks.3.attn.add_q_proj": { + "inputs": [ + [ + [ + 27.75 + ] + ] + ], + "params": { + "weight": [ + [ + 0.41796875 + ] + ] + } + }, + "transformer_blocks.3.attn.to_out.0": { + "inputs": [ + [ + [ + 12.4375 + ] + ] + ], + "params": { + "weight": [ + [ + 0.35546875 + ] + ] + } + }, + "transformer_blocks.3.attn.to_add_out": { + "inputs": [ + [ + [ + 4.21875 + ] + ] + ], + "params": { + "weight": [ + [ + 0.390625 + ] + ] + } + }, + "transformer_blocks.3.ff.net.0.proj": { + "inputs": [ + [ + [ + 13.8125 + ] + ] + ], + "params": { + "weight": [ + [ + 0.578125 + ] + ] + } + }, + "transformer_blocks.3.ff.net.2": { + "inputs": [ + [ + [ + 20.375 + ] + ] + ], + "params": { + "weight": [ + [ + 0.765625 + ] + ] + } + }, + "transformer_blocks.3.ff_context.net.0.proj": { + "inputs": [ + [ + [ + 9.5 + ] + ] + ], + "params": { + "weight": [ + [ + 0.5234375 + ] + ] + } + }, + "transformer_blocks.3.ff_context.net.2": { + "inputs": [ + [ + [ + 19.25 + ] + ] + ], + "params": { + "weight": [ + [ + 0.53515625 + ] + ] + } + }, + "transformer_blocks.4.norm1.linear": { + "inputs": [ + [ + [ + 6.4375 + ] + ] + ], + "params": { + "weight": [ + [ + 0.8515625 + ] + ] + } + }, + "transformer_blocks.4.norm1_context.linear": { + "inputs": [ + [ + [ + 6.4375 + ] + ] + ], + "params": { + "weight": [ + [ + 0.40625 + ] + ] + 
} + }, + "transformer_blocks.4.attn.to_q": { + "inputs": [ + [ + [ + 28.125 + ] + ] + ], + "params": { + "weight": [ + [ + 1.1796875 + ] + ] + } + }, + "transformer_blocks.4.attn.to_k": { + "inputs": [ + [ + [ + 28.125 + ] + ] + ], + "params": { + "weight": [ + [ + 0.75390625 + ] + ] + } + }, + "transformer_blocks.4.attn.to_v": { + "inputs": [ + [ + [ + 28.125 + ] + ] + ], + "params": { + "weight": [ + [ + 0.494140625 + ] + ] + } + }, + "transformer_blocks.4.attn.add_k_proj": { + "inputs": [ + [ + [ + 18.125 + ] + ] + ], + "params": { + "weight": [ + [ + 0.4765625 + ] + ] + } + }, + "transformer_blocks.4.attn.add_v_proj": { + "inputs": [ + [ + [ + 18.125 + ] + ] + ], + "params": { + "weight": [ + [ + 0.236328125 + ] + ] + } + }, + "transformer_blocks.4.attn.add_q_proj": { + "inputs": [ + [ + [ + 18.125 + ] + ] + ], + "params": { + "weight": [ + [ + 0.3671875 + ] + ] + } + }, + "transformer_blocks.4.attn.to_out.0": { + "inputs": [ + [ + [ + 14.6875 + ] + ] + ], + "params": { + "weight": [ + [ + 0.345703125 + ] + ] + } + }, + "transformer_blocks.4.attn.to_add_out": { + "inputs": [ + [ + [ + 6.9375 + ] + ] + ], + "params": { + "weight": [ + [ + 0.390625 + ] + ] + } + }, + "transformer_blocks.4.ff.net.0.proj": { + "inputs": [ + [ + [ + 24.75 + ] + ] + ], + "params": { + "weight": [ + [ + 0.55859375 + ] + ] + } + }, + "transformer_blocks.4.ff.net.2": { + "inputs": [ + [ + [ + 21.0 + ] + ] + ], + "params": { + "weight": [ + [ + 1.09375 + ] + ] + } + }, + "transformer_blocks.4.ff_context.net.0.proj": { + "inputs": [ + [ + [ + 8.125 + ] + ] + ], + "params": { + "weight": [ + [ + 0.6484375 + ] + ] + } + }, + "transformer_blocks.4.ff_context.net.2": { + "inputs": [ + [ + [ + 16.5 + ] + ] + ], + "params": { + "weight": [ + [ + 0.5390625 + ] + ] + } + }, + "transformer_blocks.5.norm1.linear": { + "inputs": [ + [ + [ + 6.4375 + ] + ] + ], + "params": { + "weight": [ + [ + 0.8125 + ] + ] + } + }, + "transformer_blocks.5.norm1_context.linear": { + "inputs": [ + [ + [ + 6.4375 + ] + ] + ], + "params": { + "weight": [ + [ + 0.380859375 + ] + ] + } + }, + "transformer_blocks.5.attn.to_q": { + "inputs": [ + [ + [ + 18.75 + ] + ] + ], + "params": { + "weight": [ + [ + 0.84765625 + ] + ] + } + }, + "transformer_blocks.5.attn.to_k": { + "inputs": [ + [ + [ + 18.75 + ] + ] + ], + "params": { + "weight": [ + [ + 0.5078125 + ] + ] + } + }, + "transformer_blocks.5.attn.to_v": { + "inputs": [ + [ + [ + 18.75 + ] + ] + ], + "params": { + "weight": [ + [ + 0.380859375 + ] + ] + } + }, + "transformer_blocks.5.attn.add_k_proj": { + "inputs": [ + [ + [ + 19.75 + ] + ] + ], + "params": { + "weight": [ + [ + 0.380859375 + ] + ] + } + }, + "transformer_blocks.5.attn.add_v_proj": { + "inputs": [ + [ + [ + 19.75 + ] + ] + ], + "params": { + "weight": [ + [ + 0.298828125 + ] + ] + } + }, + "transformer_blocks.5.attn.add_q_proj": { + "inputs": [ + [ + [ + 19.75 + ] + ] + ], + "params": { + "weight": [ + [ + 0.287109375 + ] + ] + } + }, + "transformer_blocks.5.attn.to_out.0": { + "inputs": [ + [ + [ + 10.0625 + ] + ] + ], + "params": { + "weight": [ + [ + 0.28125 + ] + ] + } + }, + "transformer_blocks.5.attn.to_add_out": { + "inputs": [ + [ + [ + 9.6875 + ] + ] + ], + "params": { + "weight": [ + [ + 0.306640625 + ] + ] + } + }, + "transformer_blocks.5.ff.net.0.proj": { + "inputs": [ + [ + [ + 24.75 + ] + ] + ], + "params": { + "weight": [ + [ + 0.6328125 + ] + ] + } + }, + "transformer_blocks.5.ff.net.2": { + "inputs": [ + [ + [ + 30.875 + ] + ] + ], + "params": { + "weight": [ + [ + 1.46875 + ] + ] + } + }, + 
"transformer_blocks.5.ff_context.net.0.proj": { + "inputs": [ + [ + [ + 12.25 + ] + ] + ], + "params": { + "weight": [ + [ + 0.54296875 + ] + ] + } + }, + "transformer_blocks.5.ff_context.net.2": { + "inputs": [ + [ + [ + 18.0 + ] + ] + ], + "params": { + "weight": [ + [ + 0.55859375 + ] + ] + } + }, + "transformer_blocks.6.norm1.linear": { + "inputs": [ + [ + [ + 6.4375 + ] + ] + ], + "params": { + "weight": [ + [ + 0.78515625 + ] + ] + } + }, + "transformer_blocks.6.norm1_context.linear": { + "inputs": [ + [ + [ + 6.4375 + ] + ] + ], + "params": { + "weight": [ + [ + 0.39453125 + ] + ] + } + }, + "transformer_blocks.6.attn.to_q": { + "inputs": [ + [ + [ + 18.0 + ] + ] + ], + "params": { + "weight": [ + [ + 0.56640625 + ] + ] + } + }, + "transformer_blocks.6.attn.to_k": { + "inputs": [ + [ + [ + 18.0 + ] + ] + ], + "params": { + "weight": [ + [ + 0.53515625 + ] + ] + } + }, + "transformer_blocks.6.attn.to_v": { + "inputs": [ + [ + [ + 18.0 + ] + ] + ], + "params": { + "weight": [ + [ + 0.333984375 + ] + ] + } + }, + "transformer_blocks.6.attn.add_k_proj": { + "inputs": [ + [ + [ + 16.75 + ] + ] + ], + "params": { + "weight": [ + [ + 0.515625 + ] + ] + } + }, + "transformer_blocks.6.attn.add_v_proj": { + "inputs": [ + [ + [ + 16.75 + ] + ] + ], + "params": { + "weight": [ + [ + 0.328125 + ] + ] + } + }, + "transformer_blocks.6.attn.add_q_proj": { + "inputs": [ + [ + [ + 16.75 + ] + ] + ], + "params": { + "weight": [ + [ + 0.3515625 + ] + ] + } + }, + "transformer_blocks.6.attn.to_out.0": { + "inputs": [ + [ + [ + 9.5 + ] + ] + ], + "params": { + "weight": [ + [ + 0.47265625 + ] + ] + } + }, + "transformer_blocks.6.attn.to_add_out": { + "inputs": [ + [ + [ + 12.1875 + ] + ] + ], + "params": { + "weight": [ + [ + 0.259765625 + ] + ] + } + }, + "transformer_blocks.6.ff.net.0.proj": { + "inputs": [ + [ + [ + 9.8125 + ] + ] + ], + "params": { + "weight": [ + [ + 0.515625 + ] + ] + } + }, + "transformer_blocks.6.ff.net.2": { + "inputs": [ + [ + [ + 32.5 + ] + ] + ], + "params": { + "weight": [ + [ + 0.80859375 + ] + ] + } + }, + "transformer_blocks.6.ff_context.net.0.proj": { + "inputs": [ + [ + [ + 10.6875 + ] + ] + ], + "params": { + "weight": [ + [ + 0.62109375 + ] + ] + } + }, + "transformer_blocks.6.ff_context.net.2": { + "inputs": [ + [ + [ + 19.875 + ] + ] + ], + "params": { + "weight": [ + [ + 0.52734375 + ] + ] + } + }, + "transformer_blocks.7.norm1.linear": { + "inputs": [ + [ + [ + 6.4375 + ] + ] + ], + "params": { + "weight": [ + [ + 0.73046875 + ] + ] + } + }, + "transformer_blocks.7.norm1_context.linear": { + "inputs": [ + [ + [ + 6.4375 + ] + ] + ], + "params": { + "weight": [ + [ + 0.41015625 + ] + ] + } + }, + "transformer_blocks.7.attn.to_q": { + "inputs": [ + [ + [ + 22.25 + ] + ] + ], + "params": { + "weight": [ + [ + 0.5859375 + ] + ] + } + }, + "transformer_blocks.7.attn.to_k": { + "inputs": [ + [ + [ + 22.25 + ] + ] + ], + "params": { + "weight": [ + [ + 0.84375 + ] + ] + } + }, + "transformer_blocks.7.attn.to_v": { + "inputs": [ + [ + [ + 22.25 + ] + ] + ], + "params": { + "weight": [ + [ + 0.36328125 + ] + ] + } + }, + "transformer_blocks.7.attn.add_k_proj": { + "inputs": [ + [ + [ + 24.625 + ] + ] + ], + "params": { + "weight": [ + [ + 0.443359375 + ] + ] + } + }, + "transformer_blocks.7.attn.add_v_proj": { + "inputs": [ + [ + [ + 24.625 + ] + ] + ], + "params": { + "weight": [ + [ + 0.365234375 + ] + ] + } + }, + "transformer_blocks.7.attn.add_q_proj": { + "inputs": [ + [ + [ + 24.625 + ] + ] + ], + "params": { + "weight": [ + [ + 0.39453125 + ] + ] + } + }, + 
"transformer_blocks.7.attn.to_out.0": { + "inputs": [ + [ + [ + 12.875 + ] + ] + ], + "params": { + "weight": [ + [ + 0.37109375 + ] + ] + } + }, + "transformer_blocks.7.attn.to_add_out": { + "inputs": [ + [ + [ + 8.0 + ] + ] + ], + "params": { + "weight": [ + [ + 0.375 + ] + ] + } + }, + "transformer_blocks.7.ff.net.0.proj": { + "inputs": [ + [ + [ + 15.5625 + ] + ] + ], + "params": { + "weight": [ + [ + 0.435546875 + ] + ] + } + }, + "transformer_blocks.7.ff.net.2": { + "inputs": [ + [ + [ + 49.0 + ] + ] + ], + "params": { + "weight": [ + [ + 0.94140625 + ] + ] + } + }, + "transformer_blocks.7.ff_context.net.0.proj": { + "inputs": [ + [ + [ + 9.375 + ] + ] + ], + "params": { + "weight": [ + [ + 0.5390625 + ] + ] + } + }, + "transformer_blocks.7.ff_context.net.2": { + "inputs": [ + [ + [ + 16.75 + ] + ] + ], + "params": { + "weight": [ + [ + 0.5859375 + ] + ] + } + }, + "transformer_blocks.8.norm1.linear": { + "inputs": [ + [ + [ + 6.4375 + ] + ] + ], + "params": { + "weight": [ + [ + 0.546875 + ] + ] + } + }, + "transformer_blocks.8.norm1_context.linear": { + "inputs": [ + [ + [ + 6.4375 + ] + ] + ], + "params": { + "weight": [ + [ + 0.51953125 + ] + ] + } + }, + "transformer_blocks.8.attn.to_q": { + "inputs": [ + [ + [ + 19.875 + ] + ] + ], + "params": { + "weight": [ + [ + 0.43359375 + ] + ] + } + }, + "transformer_blocks.8.attn.to_k": { + "inputs": [ + [ + [ + 19.875 + ] + ] + ], + "params": { + "weight": [ + [ + 0.640625 + ] + ] + } + }, + "transformer_blocks.8.attn.to_v": { + "inputs": [ + [ + [ + 19.875 + ] + ] + ], + "params": { + "weight": [ + [ + 0.39453125 + ] + ] + } + }, + "transformer_blocks.8.attn.add_k_proj": { + "inputs": [ + [ + [ + 17.25 + ] + ] + ], + "params": { + "weight": [ + [ + 0.3359375 + ] + ] + } + }, + "transformer_blocks.8.attn.add_v_proj": { + "inputs": [ + [ + [ + 17.25 + ] + ] + ], + "params": { + "weight": [ + [ + 0.41015625 + ] + ] + } + }, + "transformer_blocks.8.attn.add_q_proj": { + "inputs": [ + [ + [ + 17.25 + ] + ] + ], + "params": { + "weight": [ + [ + 0.326171875 + ] + ] + } + }, + "transformer_blocks.8.attn.to_out.0": { + "inputs": [ + [ + [ + 12.9375 + ] + ] + ], + "params": { + "weight": [ + [ + 0.3203125 + ] + ] + } + }, + "transformer_blocks.8.attn.to_add_out": { + "inputs": [ + [ + [ + 14.6875 + ] + ] + ], + "params": { + "weight": [ + [ + 0.2890625 + ] + ] + } + }, + "transformer_blocks.8.ff.net.0.proj": { + "inputs": [ + [ + [ + 10.9375 + ] + ] + ], + "params": { + "weight": [ + [ + 0.51171875 + ] + ] + } + }, + "transformer_blocks.8.ff.net.2": { + "inputs": [ + [ + [ + 20.25 + ] + ] + ], + "params": { + "weight": [ + [ + 0.55078125 + ] + ] + } + }, + "transformer_blocks.8.ff_context.net.0.proj": { + "inputs": [ + [ + [ + 15.4375 + ] + ] + ], + "params": { + "weight": [ + [ + 0.494140625 + ] + ] + } + }, + "transformer_blocks.8.ff_context.net.2": { + "inputs": [ + [ + [ + 17.75 + ] + ] + ], + "params": { + "weight": [ + [ + 0.44140625 + ] + ] + } + }, + "transformer_blocks.9.norm1.linear": { + "inputs": [ + [ + [ + 6.4375 + ] + ] + ], + "params": { + "weight": [ + [ + 0.59765625 + ] + ] + } + }, + "transformer_blocks.9.norm1_context.linear": { + "inputs": [ + [ + [ + 6.4375 + ] + ] + ], + "params": { + "weight": [ + [ + 0.470703125 + ] + ] + } + }, + "transformer_blocks.9.attn.to_q": { + "inputs": [ + [ + [ + 17.75 + ] + ] + ], + "params": { + "weight": [ + [ + 0.462890625 + ] + ] + } + }, + "transformer_blocks.9.attn.to_k": { + "inputs": [ + [ + [ + 17.75 + ] + ] + ], + "params": { + "weight": [ + [ + 0.427734375 + ] + ] + } + }, + 
"transformer_blocks.9.attn.to_v": { + "inputs": [ + [ + [ + 17.75 + ] + ] + ], + "params": { + "weight": [ + [ + 0.32421875 + ] + ] + } + }, + "transformer_blocks.9.attn.add_k_proj": { + "inputs": [ + [ + [ + 18.75 + ] + ] + ], + "params": { + "weight": [ + [ + 0.44140625 + ] + ] + } + }, + "transformer_blocks.9.attn.add_v_proj": { + "inputs": [ + [ + [ + 18.75 + ] + ] + ], + "params": { + "weight": [ + [ + 0.310546875 + ] + ] + } + }, + "transformer_blocks.9.attn.add_q_proj": { + "inputs": [ + [ + [ + 18.75 + ] + ] + ], + "params": { + "weight": [ + [ + 0.271484375 + ] + ] + } + }, + "transformer_blocks.9.attn.to_out.0": { + "inputs": [ + [ + [ + 15.9375 + ] + ] + ], + "params": { + "weight": [ + [ + 0.31640625 + ] + ] + } + }, + "transformer_blocks.9.attn.to_add_out": { + "inputs": [ + [ + [ + 7.5 + ] + ] + ], + "params": { + "weight": [ + [ + 0.302734375 + ] + ] + } + }, + "transformer_blocks.9.ff.net.0.proj": { + "inputs": [ + [ + [ + 12.0 + ] + ] + ], + "params": { + "weight": [ + [ + 0.5859375 + ] + ] + } + }, + "transformer_blocks.9.ff.net.2": { + "inputs": [ + [ + [ + 17.875 + ] + ] + ], + "params": { + "weight": [ + [ + 0.640625 + ] + ] + } + }, + "transformer_blocks.9.ff_context.net.0.proj": { + "inputs": [ + [ + [ + 13.6875 + ] + ] + ], + "params": { + "weight": [ + [ + 0.5078125 + ] + ] + } + }, + "transformer_blocks.9.ff_context.net.2": { + "inputs": [ + [ + [ + 22.625 + ] + ] + ], + "params": { + "weight": [ + [ + 0.55078125 + ] + ] + } + }, + "transformer_blocks.10.norm1.linear": { + "inputs": [ + [ + [ + 6.4375 + ] + ] + ], + "params": { + "weight": [ + [ + 0.58984375 + ] + ] + } + }, + "transformer_blocks.10.norm1_context.linear": { + "inputs": [ + [ + [ + 6.4375 + ] + ] + ], + "params": { + "weight": [ + [ + 0.3828125 + ] + ] + } + }, + "transformer_blocks.10.attn.to_q": { + "inputs": [ + [ + [ + 14.5 + ] + ] + ], + "params": { + "weight": [ + [ + 0.380859375 + ] + ] + } + }, + "transformer_blocks.10.attn.to_k": { + "inputs": [ + [ + [ + 14.5 + ] + ] + ], + "params": { + "weight": [ + [ + 0.478515625 + ] + ] + } + }, + "transformer_blocks.10.attn.to_v": { + "inputs": [ + [ + [ + 14.5 + ] + ] + ], + "params": { + "weight": [ + [ + 0.310546875 + ] + ] + } + }, + "transformer_blocks.10.attn.add_k_proj": { + "inputs": [ + [ + [ + 14.5625 + ] + ] + ], + "params": { + "weight": [ + [ + 0.333984375 + ] + ] + } + }, + "transformer_blocks.10.attn.add_v_proj": { + "inputs": [ + [ + [ + 14.5625 + ] + ] + ], + "params": { + "weight": [ + [ + 0.24609375 + ] + ] + } + }, + "transformer_blocks.10.attn.add_q_proj": { + "inputs": [ + [ + [ + 14.5625 + ] + ] + ], + "params": { + "weight": [ + [ + 0.376953125 + ] + ] + } + }, + "transformer_blocks.10.attn.to_out.0": { + "inputs": [ + [ + [ + 14.125 + ] + ] + ], + "params": { + "weight": [ + [ + 0.263671875 + ] + ] + } + }, + "transformer_blocks.10.attn.to_add_out": { + "inputs": [ + [ + [ + 8.3125 + ] + ] + ], + "params": { + "weight": [ + [ + 0.25390625 + ] + ] + } + }, + "transformer_blocks.10.ff.net.0.proj": { + "inputs": [ + [ + [ + 13.6875 + ] + ] + ], + "params": { + "weight": [ + [ + 0.48828125 + ] + ] + } + }, + "transformer_blocks.10.ff.net.2": { + "inputs": [ + [ + [ + 17.625 + ] + ] + ], + "params": { + "weight": [ + [ + 0.8203125 + ] + ] + } + }, + "transformer_blocks.10.ff_context.net.0.proj": { + "inputs": [ + [ + [ + 70.5 + ] + ] + ], + "params": { + "weight": [ + [ + 0.451171875 + ] + ] + } + }, + "transformer_blocks.10.ff_context.net.2": { + "inputs": [ + [ + [ + 34.75 + ] + ] + ], + "params": { + "weight": [ + [ + 0.71875 
+ ] + ] + } + }, + "transformer_blocks.11.norm1.linear": { + "inputs": [ + [ + [ + 6.4375 + ] + ] + ], + "params": { + "weight": [ + [ + 0.640625 + ] + ] + } + }, + "transformer_blocks.11.norm1_context.linear": { + "inputs": [ + [ + [ + 6.4375 + ] + ] + ], + "params": { + "weight": [ + [ + 0.455078125 + ] + ] + } + }, + "transformer_blocks.11.attn.to_q": { + "inputs": [ + [ + [ + 17.375 + ] + ] + ], + "params": { + "weight": [ + [ + 0.52734375 + ] + ] + } + }, + "transformer_blocks.11.attn.to_k": { + "inputs": [ + [ + [ + 17.375 + ] + ] + ], + "params": { + "weight": [ + [ + 0.427734375 + ] + ] + } + }, + "transformer_blocks.11.attn.to_v": { + "inputs": [ + [ + [ + 17.375 + ] + ] + ], + "params": { + "weight": [ + [ + 0.33203125 + ] + ] + } + }, + "transformer_blocks.11.attn.add_k_proj": { + "inputs": [ + [ + [ + 31.625 + ] + ] + ], + "params": { + "weight": [ + [ + 0.7109375 + ] + ] + } + }, + "transformer_blocks.11.attn.add_v_proj": { + "inputs": [ + [ + [ + 31.625 + ] + ] + ], + "params": { + "weight": [ + [ + 0.2294921875 + ] + ] + } + }, + "transformer_blocks.11.attn.add_q_proj": { + "inputs": [ + [ + [ + 31.625 + ] + ] + ], + "params": { + "weight": [ + [ + 0.3125 + ] + ] + } + }, + "transformer_blocks.11.attn.to_out.0": { + "inputs": [ + [ + [ + 16.25 + ] + ] + ], + "params": { + "weight": [ + [ + 0.37109375 + ] + ] + } + }, + "transformer_blocks.11.attn.to_add_out": { + "inputs": [ + [ + [ + 9.75 + ] + ] + ], + "params": { + "weight": [ + [ + 0.318359375 + ] + ] + } + }, + "transformer_blocks.11.ff.net.0.proj": { + "inputs": [ + [ + [ + 10.9375 + ] + ] + ], + "params": { + "weight": [ + [ + 0.498046875 + ] + ] + } + }, + "transformer_blocks.11.ff.net.2": { + "inputs": [ + [ + [ + 15.75 + ] + ] + ], + "params": { + "weight": [ + [ + 0.6953125 + ] + ] + } + }, + "transformer_blocks.11.ff_context.net.0.proj": { + "inputs": [ + [ + [ + 42.75 + ] + ] + ], + "params": { + "weight": [ + [ + 0.58203125 + ] + ] + } + }, + "transformer_blocks.11.ff_context.net.2": { + "inputs": [ + [ + [ + 34.75 + ] + ] + ], + "params": { + "weight": [ + [ + 0.470703125 + ] + ] + } + }, + "transformer_blocks.12.norm1.linear": { + "inputs": [ + [ + [ + 6.4375 + ] + ] + ], + "params": { + "weight": [ + [ + 0.68359375 + ] + ] + } + }, + "transformer_blocks.12.norm1_context.linear": { + "inputs": [ + [ + [ + 6.4375 + ] + ] + ], + "params": { + "weight": [ + [ + 0.41796875 + ] + ] + } + }, + "transformer_blocks.12.attn.to_q": { + "inputs": [ + [ + [ + 15.6875 + ] + ] + ], + "params": { + "weight": [ + [ + 0.341796875 + ] + ] + } + }, + "transformer_blocks.12.attn.to_k": { + "inputs": [ + [ + [ + 15.6875 + ] + ] + ], + "params": { + "weight": [ + [ + 0.4609375 + ] + ] + } + }, + "transformer_blocks.12.attn.to_v": { + "inputs": [ + [ + [ + 15.6875 + ] + ] + ], + "params": { + "weight": [ + [ + 0.380859375 + ] + ] + } + }, + "transformer_blocks.12.attn.add_k_proj": { + "inputs": [ + [ + [ + 32.5 + ] + ] + ], + "params": { + "weight": [ + [ + 0.5390625 + ] + ] + } + }, + "transformer_blocks.12.attn.add_v_proj": { + "inputs": [ + [ + [ + 32.5 + ] + ] + ], + "params": { + "weight": [ + [ + 0.322265625 + ] + ] + } + }, + "transformer_blocks.12.attn.add_q_proj": { + "inputs": [ + [ + [ + 32.5 + ] + ] + ], + "params": { + "weight": [ + [ + 0.3359375 + ] + ] + } + }, + "transformer_blocks.12.attn.to_out.0": { + "inputs": [ + [ + [ + 29.375 + ] + ] + ], + "params": { + "weight": [ + [ + 0.322265625 + ] + ] + } + }, + "transformer_blocks.12.attn.to_add_out": { + "inputs": [ + [ + [ + 15.125 + ] + ] + ], + "params": { + 
"weight": [ + [ + 0.29296875 + ] + ] + } + }, + "transformer_blocks.12.ff.net.0.proj": { + "inputs": [ + [ + [ + 8.375 + ] + ] + ], + "params": { + "weight": [ + [ + 0.59375 + ] + ] + } + }, + "transformer_blocks.12.ff.net.2": { + "inputs": [ + [ + [ + 21.5 + ] + ] + ], + "params": { + "weight": [ + [ + 0.8359375 + ] + ] + } + }, + "transformer_blocks.12.ff_context.net.0.proj": { + "inputs": [ + [ + [ + 97.5 + ] + ] + ], + "params": { + "weight": [ + [ + 0.41015625 + ] + ] + } + }, + "transformer_blocks.12.ff_context.net.2": { + "inputs": [ + [ + [ + 25.375 + ] + ] + ], + "params": { + "weight": [ + [ + 0.5234375 + ] + ] + } + }, + "transformer_blocks.13.norm1.linear": { + "inputs": [ + [ + [ + 6.4375 + ] + ] + ], + "params": { + "weight": [ + [ + 0.65625 + ] + ] + } + }, + "transformer_blocks.13.norm1_context.linear": { + "inputs": [ + [ + [ + 6.4375 + ] + ] + ], + "params": { + "weight": [ + [ + 0.375 + ] + ] + } + }, + "transformer_blocks.13.attn.to_q": { + "inputs": [ + [ + [ + 15.375 + ] + ] + ], + "params": { + "weight": [ + [ + 0.447265625 + ] + ] + } + }, + "transformer_blocks.13.attn.to_k": { + "inputs": [ + [ + [ + 15.375 + ] + ] + ], + "params": { + "weight": [ + [ + 0.83203125 + ] + ] + } + }, + "transformer_blocks.13.attn.to_v": { + "inputs": [ + [ + [ + 15.375 + ] + ] + ], + "params": { + "weight": [ + [ + 0.5078125 + ] + ] + } + }, + "transformer_blocks.13.attn.add_k_proj": { + "inputs": [ + [ + [ + 32.75 + ] + ] + ], + "params": { + "weight": [ + [ + 0.578125 + ] + ] + } + }, + "transformer_blocks.13.attn.add_v_proj": { + "inputs": [ + [ + [ + 32.75 + ] + ] + ], + "params": { + "weight": [ + [ + 0.25 + ] + ] + } + }, + "transformer_blocks.13.attn.add_q_proj": { + "inputs": [ + [ + [ + 32.75 + ] + ] + ], + "params": { + "weight": [ + [ + 0.33203125 + ] + ] + } + }, + "transformer_blocks.13.attn.to_out.0": { + "inputs": [ + [ + [ + 16.5 + ] + ] + ], + "params": { + "weight": [ + [ + 0.314453125 + ] + ] + } + }, + "transformer_blocks.13.attn.to_add_out": { + "inputs": [ + [ + [ + 13.75 + ] + ] + ], + "params": { + "weight": [ + [ + 0.27734375 + ] + ] + } + }, + "transformer_blocks.13.ff.net.0.proj": { + "inputs": [ + [ + [ + 7.84375 + ] + ] + ], + "params": { + "weight": [ + [ + 0.5859375 + ] + ] + } + }, + "transformer_blocks.13.ff.net.2": { + "inputs": [ + [ + [ + 21.375 + ] + ] + ], + "params": { + "weight": [ + [ + 1.015625 + ] + ] + } + }, + "transformer_blocks.13.ff_context.net.0.proj": { + "inputs": [ + [ + [ + 138.0 + ] + ] + ], + "params": { + "weight": [ + [ + 0.369140625 + ] + ] + } + }, + "transformer_blocks.13.ff_context.net.2": { + "inputs": [ + [ + [ + 20.25 + ] + ] + ], + "params": { + "weight": [ + [ + 0.46875 + ] + ] + } + }, + "transformer_blocks.14.norm1.linear": { + "inputs": [ + [ + [ + 6.4375 + ] + ] + ], + "params": { + "weight": [ + [ + 0.68359375 + ] + ] + } + }, + "transformer_blocks.14.norm1_context.linear": { + "inputs": [ + [ + [ + 6.4375 + ] + ] + ], + "params": { + "weight": [ + [ + 0.3984375 + ] + ] + } + }, + "transformer_blocks.14.attn.to_q": { + "inputs": [ + [ + [ + 23.625 + ] + ] + ], + "params": { + "weight": [ + [ + 0.46484375 + ] + ] + } + }, + "transformer_blocks.14.attn.to_k": { + "inputs": [ + [ + [ + 23.625 + ] + ] + ], + "params": { + "weight": [ + [ + 0.578125 + ] + ] + } + }, + "transformer_blocks.14.attn.to_v": { + "inputs": [ + [ + [ + 23.625 + ] + ] + ], + "params": { + "weight": [ + [ + 0.73046875 + ] + ] + } + }, + "transformer_blocks.14.attn.add_k_proj": { + "inputs": [ + [ + [ + 17.375 + ] + ] + ], + "params": { + 
"weight": [ + [ + 0.49609375 + ] + ] + } + }, + "transformer_blocks.14.attn.add_v_proj": { + "inputs": [ + [ + [ + 17.375 + ] + ] + ], + "params": { + "weight": [ + [ + 0.279296875 + ] + ] + } + }, + "transformer_blocks.14.attn.add_q_proj": { + "inputs": [ + [ + [ + 17.375 + ] + ] + ], + "params": { + "weight": [ + [ + 0.392578125 + ] + ] + } + }, + "transformer_blocks.14.attn.to_out.0": { + "inputs": [ + [ + [ + 16.625 + ] + ] + ], + "params": { + "weight": [ + [ + 0.41015625 + ] + ] + } + }, + "transformer_blocks.14.attn.to_add_out": { + "inputs": [ + [ + [ + 11.375 + ] + ] + ], + "params": { + "weight": [ + [ + 0.318359375 + ] + ] + } + }, + "transformer_blocks.14.ff.net.0.proj": { + "inputs": [ + [ + [ + 6.34375 + ] + ] + ], + "params": { + "weight": [ + [ + 0.53125 + ] + ] + } + }, + "transformer_blocks.14.ff.net.2": { + "inputs": [ + [ + [ + 20.75 + ] + ] + ], + "params": { + "weight": [ + [ + 0.73046875 + ] + ] + } + }, + "transformer_blocks.14.ff_context.net.0.proj": { + "inputs": [ + [ + [ + 51.75 + ] + ] + ], + "params": { + "weight": [ + [ + 0.44140625 + ] + ] + } + }, + "transformer_blocks.14.ff_context.net.2": { + "inputs": [ + [ + [ + 32.25 + ] + ] + ], + "params": { + "weight": [ + [ + 0.7265625 + ] + ] + } + }, + "transformer_blocks.15.norm1.linear": { + "inputs": [ + [ + [ + 6.4375 + ] + ] + ], + "params": { + "weight": [ + [ + 0.78125 + ] + ] + } + }, + "transformer_blocks.15.norm1_context.linear": { + "inputs": [ + [ + [ + 6.4375 + ] + ] + ], + "params": { + "weight": [ + [ + 0.435546875 + ] + ] + } + }, + "transformer_blocks.15.attn.to_q": { + "inputs": [ + [ + [ + 15.375 + ] + ] + ], + "params": { + "weight": [ + [ + 0.56640625 + ] + ] + } + }, + "transformer_blocks.15.attn.to_k": { + "inputs": [ + [ + [ + 15.375 + ] + ] + ], + "params": { + "weight": [ + [ + 0.625 + ] + ] + } + }, + "transformer_blocks.15.attn.to_v": { + "inputs": [ + [ + [ + 15.375 + ] + ] + ], + "params": { + "weight": [ + [ + 0.515625 + ] + ] + } + }, + "transformer_blocks.15.attn.add_k_proj": { + "inputs": [ + [ + [ + 18.875 + ] + ] + ], + "params": { + "weight": [ + [ + 0.5625 + ] + ] + } + }, + "transformer_blocks.15.attn.add_v_proj": { + "inputs": [ + [ + [ + 18.875 + ] + ] + ], + "params": { + "weight": [ + [ + 0.34765625 + ] + ] + } + }, + "transformer_blocks.15.attn.add_q_proj": { + "inputs": [ + [ + [ + 18.875 + ] + ] + ], + "params": { + "weight": [ + [ + 0.2890625 + ] + ] + } + }, + "transformer_blocks.15.attn.to_out.0": { + "inputs": [ + [ + [ + 15.875 + ] + ] + ], + "params": { + "weight": [ + [ + 0.365234375 + ] + ] + } + }, + "transformer_blocks.15.attn.to_add_out": { + "inputs": [ + [ + [ + 9.125 + ] + ] + ], + "params": { + "weight": [ + [ + 0.275390625 + ] + ] + } + }, + "transformer_blocks.15.ff.net.0.proj": { + "inputs": [ + [ + [ + 5.6875 + ] + ] + ], + "params": { + "weight": [ + [ + 0.45703125 + ] + ] + } + }, + "transformer_blocks.15.ff.net.2": { + "inputs": [ + [ + [ + 24.125 + ] + ] + ], + "params": { + "weight": [ + [ + 0.8671875 + ] + ] + } + }, + "transformer_blocks.15.ff_context.net.0.proj": { + "inputs": [ + [ + [ + 45.25 + ] + ] + ], + "params": { + "weight": [ + [ + 0.5390625 + ] + ] + } + }, + "transformer_blocks.15.ff_context.net.2": { + "inputs": [ + [ + [ + 26.375 + ] + ] + ], + "params": { + "weight": [ + [ + 0.46875 + ] + ] + } + }, + "transformer_blocks.16.norm1.linear": { + "inputs": [ + [ + [ + 6.4375 + ] + ] + ], + "params": { + "weight": [ + [ + 0.890625 + ] + ] + } + }, + "transformer_blocks.16.norm1_context.linear": { + "inputs": [ + [ + [ + 6.4375 + ] + 
] + ], + "params": { + "weight": [ + [ + 0.52734375 + ] + ] + } + }, + "transformer_blocks.16.attn.to_q": { + "inputs": [ + [ + [ + 20.75 + ] + ] + ], + "params": { + "weight": [ + [ + 0.42578125 + ] + ] + } + }, + "transformer_blocks.16.attn.to_k": { + "inputs": [ + [ + [ + 20.75 + ] + ] + ], + "params": { + "weight": [ + [ + 0.68359375 + ] + ] + } + }, + "transformer_blocks.16.attn.to_v": { + "inputs": [ + [ + [ + 20.75 + ] + ] + ], + "params": { + "weight": [ + [ + 0.396484375 + ] + ] + } + }, + "transformer_blocks.16.attn.add_k_proj": { + "inputs": [ + [ + [ + 25.625 + ] + ] + ], + "params": { + "weight": [ + [ + 0.5859375 + ] + ] + } + }, + "transformer_blocks.16.attn.add_v_proj": { + "inputs": [ + [ + [ + 25.625 + ] + ] + ], + "params": { + "weight": [ + [ + 0.330078125 + ] + ] + } + }, + "transformer_blocks.16.attn.add_q_proj": { + "inputs": [ + [ + [ + 25.625 + ] + ] + ], + "params": { + "weight": [ + [ + 0.30078125 + ] + ] + } + }, + "transformer_blocks.16.attn.to_out.0": { + "inputs": [ + [ + [ + 23.875 + ] + ] + ], + "params": { + "weight": [ + [ + 0.298828125 + ] + ] + } + }, + "transformer_blocks.16.attn.to_add_out": { + "inputs": [ + [ + [ + 13.0 + ] + ] + ], + "params": { + "weight": [ + [ + 0.3125 + ] + ] + } + }, + "transformer_blocks.16.ff.net.0.proj": { + "inputs": [ + [ + [ + 8.0625 + ] + ] + ], + "params": { + "weight": [ + [ + 0.66796875 + ] + ] + } + }, + "transformer_blocks.16.ff.net.2": { + "inputs": [ + [ + [ + 34.5 + ] + ] + ], + "params": { + "weight": [ + [ + 1.0546875 + ] + ] + } + }, + "transformer_blocks.16.ff_context.net.0.proj": { + "inputs": [ + [ + [ + 44.25 + ] + ] + ], + "params": { + "weight": [ + [ + 1.0390625 + ] + ] + } + }, + "transformer_blocks.16.ff_context.net.2": { + "inputs": [ + [ + [ + 76.0 + ] + ] + ], + "params": { + "weight": [ + [ + 0.71875 + ] + ] + } + }, + "transformer_blocks.17.norm1.linear": { + "inputs": [ + [ + [ + 6.4375 + ] + ] + ], + "params": { + "weight": [ + [ + 0.9296875 + ] + ] + } + }, + "transformer_blocks.17.norm1_context.linear": { + "inputs": [ + [ + [ + 6.4375 + ] + ] + ], + "params": { + "weight": [ + [ + 0.83203125 + ] + ] + } + }, + "transformer_blocks.17.attn.to_q": { + "inputs": [ + [ + [ + 18.25 + ] + ] + ], + "params": { + "weight": [ + [ + 0.55859375 + ] + ] + } + }, + "transformer_blocks.17.attn.to_k": { + "inputs": [ + [ + [ + 18.25 + ] + ] + ], + "params": { + "weight": [ + [ + 0.86328125 + ] + ] + } + }, + "transformer_blocks.17.attn.to_v": { + "inputs": [ + [ + [ + 18.25 + ] + ] + ], + "params": { + "weight": [ + [ + 0.423828125 + ] + ] + } + }, + "transformer_blocks.17.attn.add_k_proj": { + "inputs": [ + [ + [ + 35.25 + ] + ] + ], + "params": { + "weight": [ + [ + 0.6796875 + ] + ] + } + }, + "transformer_blocks.17.attn.add_v_proj": { + "inputs": [ + [ + [ + 35.25 + ] + ] + ], + "params": { + "weight": [ + [ + 0.47265625 + ] + ] + } + }, + "transformer_blocks.17.attn.add_q_proj": { + "inputs": [ + [ + [ + 35.25 + ] + ] + ], + "params": { + "weight": [ + [ + 0.328125 + ] + ] + } + }, + "transformer_blocks.17.attn.to_out.0": { + "inputs": [ + [ + [ + 20.5 + ] + ] + ], + "params": { + "weight": [ + [ + 0.34765625 + ] + ] + } + }, + "transformer_blocks.17.attn.to_add_out": { + "inputs": [ + [ + [ + 20.5 + ] + ] + ], + "params": { + "weight": [ + [ + 0.24609375 + ] + ] + } + }, + "transformer_blocks.17.ff.net.0.proj": { + "inputs": [ + [ + [ + 7.65625 + ] + ] + ], + "params": { + "weight": [ + [ + 0.5234375 + ] + ] + } + }, + "transformer_blocks.17.ff.net.2": { + "inputs": [ + [ + [ + 65.5 + ] + ] + ], + 
"params": { + "weight": [ + [ + 1.15625 + ] + ] + } + }, + "transformer_blocks.17.ff_context.net.0.proj": { + "inputs": [ + [ + [ + 42.0 + ] + ] + ], + "params": { + "weight": [ + [ + 0.6328125 + ] + ] + } + }, + "transformer_blocks.17.ff_context.net.2": { + "inputs": [ + [ + [ + 71.5 + ] + ] + ], + "params": { + "weight": [ + [ + 0.75390625 + ] + ] + } + }, + "transformer_blocks.18.norm1.linear": { + "inputs": [ + [ + [ + 6.4375 + ] + ] + ], + "params": { + "weight": [ + [ + 1.5234375 + ] + ] + } + }, + "transformer_blocks.18.norm1_context.linear": { + "inputs": [ + [ + [ + 6.4375 + ] + ] + ], + "params": { + "weight": [ + [ + 0.7421875 + ] + ] + } + }, + "transformer_blocks.18.attn.to_q": { + "inputs": [ + [ + [ + 16.125 + ] + ] + ], + "params": { + "weight": [ + [ + 0.546875 + ] + ] + } + }, + "transformer_blocks.18.attn.to_k": { + "inputs": [ + [ + [ + 16.125 + ] + ] + ], + "params": { + "weight": [ + [ + 0.94921875 + ] + ] + } + }, + "transformer_blocks.18.attn.to_v": { + "inputs": [ + [ + [ + 16.125 + ] + ] + ], + "params": { + "weight": [ + [ + 0.375 + ] + ] + } + }, + "transformer_blocks.18.attn.add_k_proj": { + "inputs": [ + [ + [ + 25.125 + ] + ] + ], + "params": { + "weight": [ + [ + 0.4453125 + ] + ] + } + }, + "transformer_blocks.18.attn.add_v_proj": { + "inputs": [ + [ + [ + 25.125 + ] + ] + ], + "params": { + "weight": [ + [ + 0.369140625 + ] + ] + } + }, + "transformer_blocks.18.attn.add_q_proj": { + "inputs": [ + [ + [ + 25.125 + ] + ] + ], + "params": { + "weight": [ + [ + 0.384765625 + ] + ] + } + }, + "transformer_blocks.18.attn.to_out.0": { + "inputs": [ + [ + [ + 29.5 + ] + ] + ], + "params": { + "weight": [ + [ + 0.44140625 + ] + ] + } + }, + "transformer_blocks.18.attn.to_add_out": { + "inputs": [ + [ + [ + 17.125 + ] + ] + ], + "params": { + "weight": [ + [ + 0.36328125 + ] + ] + } + }, + "transformer_blocks.18.ff.net.0.proj": { + "inputs": [ + [ + [ + 11.8125 + ] + ] + ], + "params": { + "weight": [ + [ + 0.69921875 + ] + ] + } + }, + "transformer_blocks.18.ff.net.2": { + "inputs": [ + [ + [ + 217.0 + ] + ] + ], + "params": { + "weight": [ + [ + 1.4609375 + ] + ] + } + }, + "transformer_blocks.18.ff_context.net.0.proj": { + "inputs": [ + [ + [ + 138.0 + ] + ] + ], + "params": { + "weight": [ + [ + 0.9375 + ] + ] + } + }, + "transformer_blocks.18.ff_context.net.2": { + "inputs": [ + [ + [ + 225.0 + ] + ] + ], + "params": { + "weight": [ + [ + 0.80859375 + ] + ] + } + }, + "single_transformer_blocks.0.norm.linear": { + "inputs": [ + [ + [ + 6.4375 + ] + ] + ], + "params": { + "weight": [ + [ + 0.59375 + ] + ] + } + }, + "single_transformer_blocks.0.proj_mlp": { + "inputs": [ + [ + [ + 48.75 + ] + ] + ], + "params": { + "weight": [ + [ + 0.5390625 + ] + ] + } + }, + "single_transformer_blocks.0.proj_out": { + "inputs": [ + [ + [ + 20.625 + ] + ] + ], + "params": { + "weight": [ + [ + 1.3671875 + ] + ] + } + }, + "single_transformer_blocks.0.attn.to_q": { + "inputs": [ + [ + [ + 48.75 + ] + ] + ], + "params": { + "weight": [ + [ + 0.384765625 + ] + ] + } + }, + "single_transformer_blocks.0.attn.to_k": { + "inputs": [ + [ + [ + 48.75 + ] + ] + ], + "params": { + "weight": [ + [ + 0.51953125 + ] + ] + } + }, + "single_transformer_blocks.0.attn.to_v": { + "inputs": [ + [ + [ + 48.75 + ] + ] + ], + "params": { + "weight": [ + [ + 0.279296875 + ] + ] + } + }, + "single_transformer_blocks.1.norm.linear": { + "inputs": [ + [ + [ + 6.4375 + ] + ] + ], + "params": { + "weight": [ + [ + 1.0390625 + ] + ] + } + }, + "single_transformer_blocks.1.proj_mlp": { + "inputs": [ + [ + [ 
+ 39.25 + ] + ] + ], + "params": { + "weight": [ + [ + 0.51171875 + ] + ] + } + }, + "single_transformer_blocks.1.proj_out": { + "inputs": [ + [ + [ + 18.375 + ] + ] + ], + "params": { + "weight": [ + [ + 1.5 + ] + ] + } + }, + "single_transformer_blocks.1.attn.to_q": { + "inputs": [ + [ + [ + 39.25 + ] + ] + ], + "params": { + "weight": [ + [ + 0.373046875 + ] + ] + } + }, + "single_transformer_blocks.1.attn.to_k": { + "inputs": [ + [ + [ + 39.25 + ] + ] + ], + "params": { + "weight": [ + [ + 0.484375 + ] + ] + } + }, + "single_transformer_blocks.1.attn.to_v": { + "inputs": [ + [ + [ + 39.25 + ] + ] + ], + "params": { + "weight": [ + [ + 0.318359375 + ] + ] + } + }, + "single_transformer_blocks.2.norm.linear": { + "inputs": [ + [ + [ + 6.4375 + ] + ] + ], + "params": { + "weight": [ + [ + 1.203125 + ] + ] + } + }, + "single_transformer_blocks.2.proj_mlp": { + "inputs": [ + [ + [ + 24.75 + ] + ] + ], + "params": { + "weight": [ + [ + 0.5234375 + ] + ] + } + }, + "single_transformer_blocks.2.proj_out": { + "inputs": [ + [ + [ + 19.0 + ] + ] + ], + "params": { + "weight": [ + [ + 1.4609375 + ] + ] + } + }, + "single_transformer_blocks.2.attn.to_q": { + "inputs": [ + [ + [ + 24.75 + ] + ] + ], + "params": { + "weight": [ + [ + 0.380859375 + ] + ] + } + }, + "single_transformer_blocks.2.attn.to_k": { + "inputs": [ + [ + [ + 24.75 + ] + ] + ], + "params": { + "weight": [ + [ + 0.478515625 + ] + ] + } + }, + "single_transformer_blocks.2.attn.to_v": { + "inputs": [ + [ + [ + 24.75 + ] + ] + ], + "params": { + "weight": [ + [ + 0.359375 + ] + ] + } + }, + "single_transformer_blocks.3.norm.linear": { + "inputs": [ + [ + [ + 6.4375 + ] + ] + ], + "params": { + "weight": [ + [ + 1.359375 + ] + ] + } + }, + "single_transformer_blocks.3.proj_mlp": { + "inputs": [ + [ + [ + 34.0 + ] + ] + ], + "params": { + "weight": [ + [ + 0.484375 + ] + ] + } + }, + "single_transformer_blocks.3.proj_out": { + "inputs": [ + [ + [ + 18.125 + ] + ] + ], + "params": { + "weight": [ + [ + 0.91796875 + ] + ] + } + }, + "single_transformer_blocks.3.attn.to_q": { + "inputs": [ + [ + [ + 34.0 + ] + ] + ], + "params": { + "weight": [ + [ + 0.40625 + ] + ] + } + }, + "single_transformer_blocks.3.attn.to_k": { + "inputs": [ + [ + [ + 34.0 + ] + ] + ], + "params": { + "weight": [ + [ + 0.58203125 + ] + ] + } + }, + "single_transformer_blocks.3.attn.to_v": { + "inputs": [ + [ + [ + 34.0 + ] + ] + ], + "params": { + "weight": [ + [ + 0.34375 + ] + ] + } + }, + "single_transformer_blocks.4.norm.linear": { + "inputs": [ + [ + [ + 6.4375 + ] + ] + ], + "params": { + "weight": [ + [ + 1.6015625 + ] + ] + } + }, + "single_transformer_blocks.4.proj_mlp": { + "inputs": [ + [ + [ + 35.75 + ] + ] + ], + "params": { + "weight": [ + [ + 0.458984375 + ] + ] + } + }, + "single_transformer_blocks.4.proj_out": { + "inputs": [ + [ + [ + 18.375 + ] + ] + ], + "params": { + "weight": [ + [ + 1.2421875 + ] + ] + } + }, + "single_transformer_blocks.4.attn.to_q": { + "inputs": [ + [ + [ + 35.75 + ] + ] + ], + "params": { + "weight": [ + [ + 0.333984375 + ] + ] + } + }, + "single_transformer_blocks.4.attn.to_k": { + "inputs": [ + [ + [ + 35.75 + ] + ] + ], + "params": { + "weight": [ + [ + 0.63671875 + ] + ] + } + }, + "single_transformer_blocks.4.attn.to_v": { + "inputs": [ + [ + [ + 35.75 + ] + ] + ], + "params": { + "weight": [ + [ + 0.234375 + ] + ] + } + }, + "single_transformer_blocks.5.norm.linear": { + "inputs": [ + [ + [ + 6.4375 + ] + ] + ], + "params": { + "weight": [ + [ + 1.6640625 + ] + ] + } + }, + "single_transformer_blocks.5.proj_mlp": 
{ + "inputs": [ + [ + [ + 27.875 + ] + ] + ], + "params": { + "weight": [ + [ + 0.5390625 + ] + ] + } + }, + "single_transformer_blocks.5.proj_out": { + "inputs": [ + [ + [ + 16.375 + ] + ] + ], + "params": { + "weight": [ + [ + 0.77734375 + ] + ] + } + }, + "single_transformer_blocks.5.attn.to_q": { + "inputs": [ + [ + [ + 27.875 + ] + ] + ], + "params": { + "weight": [ + [ + 0.3984375 + ] + ] + } + }, + "single_transformer_blocks.5.attn.to_k": { + "inputs": [ + [ + [ + 27.875 + ] + ] + ], + "params": { + "weight": [ + [ + 0.5078125 + ] + ] + } + }, + "single_transformer_blocks.5.attn.to_v": { + "inputs": [ + [ + [ + 27.875 + ] + ] + ], + "params": { + "weight": [ + [ + 0.23046875 + ] + ] + } + }, + "single_transformer_blocks.6.norm.linear": { + "inputs": [ + [ + [ + 6.4375 + ] + ] + ], + "params": { + "weight": [ + [ + 2.015625 + ] + ] + } + }, + "single_transformer_blocks.6.proj_mlp": { + "inputs": [ + [ + [ + 28.25 + ] + ] + ], + "params": { + "weight": [ + [ + 0.55078125 + ] + ] + } + }, + "single_transformer_blocks.6.proj_out": { + "inputs": [ + [ + [ + 19.125 + ] + ] + ], + "params": { + "weight": [ + [ + 1.2578125 + ] + ] + } + }, + "single_transformer_blocks.6.attn.to_q": { + "inputs": [ + [ + [ + 28.25 + ] + ] + ], + "params": { + "weight": [ + [ + 0.32421875 + ] + ] + } + }, + "single_transformer_blocks.6.attn.to_k": { + "inputs": [ + [ + [ + 28.25 + ] + ] + ], + "params": { + "weight": [ + [ + 0.6015625 + ] + ] + } + }, + "single_transformer_blocks.6.attn.to_v": { + "inputs": [ + [ + [ + 28.25 + ] + ] + ], + "params": { + "weight": [ + [ + 0.2451171875 + ] + ] + } + }, + "single_transformer_blocks.7.norm.linear": { + "inputs": [ + [ + [ + 6.4375 + ] + ] + ], + "params": { + "weight": [ + [ + 1.796875 + ] + ] + } + }, + "single_transformer_blocks.7.proj_mlp": { + "inputs": [ + [ + [ + 27.75 + ] + ] + ], + "params": { + "weight": [ + [ + 0.51171875 + ] + ] + } + }, + "single_transformer_blocks.7.proj_out": { + "inputs": [ + [ + [ + 15.125 + ] + ] + ], + "params": { + "weight": [ + [ + 0.75 + ] + ] + } + }, + "single_transformer_blocks.7.attn.to_q": { + "inputs": [ + [ + [ + 27.75 + ] + ] + ], + "params": { + "weight": [ + [ + 0.35546875 + ] + ] + } + }, + "single_transformer_blocks.7.attn.to_k": { + "inputs": [ + [ + [ + 27.75 + ] + ] + ], + "params": { + "weight": [ + [ + 0.5703125 + ] + ] + } + }, + "single_transformer_blocks.7.attn.to_v": { + "inputs": [ + [ + [ + 27.75 + ] + ] + ], + "params": { + "weight": [ + [ + 0.2578125 + ] + ] + } + }, + "single_transformer_blocks.8.norm.linear": { + "inputs": [ + [ + [ + 6.4375 + ] + ] + ], + "params": { + "weight": [ + [ + 2.078125 + ] + ] + } + }, + "single_transformer_blocks.8.proj_mlp": { + "inputs": [ + [ + [ + 21.125 + ] + ] + ], + "params": { + "weight": [ + [ + 0.57421875 + ] + ] + } + }, + "single_transformer_blocks.8.proj_out": { + "inputs": [ + [ + [ + 16.5 + ] + ] + ], + "params": { + "weight": [ + [ + 1.5546875 + ] + ] + } + }, + "single_transformer_blocks.8.attn.to_q": { + "inputs": [ + [ + [ + 21.125 + ] + ] + ], + "params": { + "weight": [ + [ + 0.423828125 + ] + ] + } + }, + "single_transformer_blocks.8.attn.to_k": { + "inputs": [ + [ + [ + 21.125 + ] + ] + ], + "params": { + "weight": [ + [ + 0.65234375 + ] + ] + } + }, + "single_transformer_blocks.8.attn.to_v": { + "inputs": [ + [ + [ + 21.125 + ] + ] + ], + "params": { + "weight": [ + [ + 0.3203125 + ] + ] + } + }, + "single_transformer_blocks.9.norm.linear": { + "inputs": [ + [ + [ + 6.4375 + ] + ] + ], + "params": { + "weight": [ + [ + 2.34375 + ] + ] + } + }, + 
"single_transformer_blocks.9.proj_mlp": { + "inputs": [ + [ + [ + 21.375 + ] + ] + ], + "params": { + "weight": [ + [ + 0.890625 + ] + ] + } + }, + "single_transformer_blocks.9.proj_out": { + "inputs": [ + [ + [ + 21.875 + ] + ] + ], + "params": { + "weight": [ + [ + 1.234375 + ] + ] + } + }, + "single_transformer_blocks.9.attn.to_q": { + "inputs": [ + [ + [ + 21.375 + ] + ] + ], + "params": { + "weight": [ + [ + 0.53125 + ] + ] + } + }, + "single_transformer_blocks.9.attn.to_k": { + "inputs": [ + [ + [ + 21.375 + ] + ] + ], + "params": { + "weight": [ + [ + 0.5390625 + ] + ] + } + }, + "single_transformer_blocks.9.attn.to_v": { + "inputs": [ + [ + [ + 21.375 + ] + ] + ], + "params": { + "weight": [ + [ + 0.396484375 + ] + ] + } + }, + "single_transformer_blocks.10.norm.linear": { + "inputs": [ + [ + [ + 6.4375 + ] + ] + ], + "params": { + "weight": [ + [ + 2.203125 + ] + ] + } + }, + "single_transformer_blocks.10.proj_mlp": { + "inputs": [ + [ + [ + 15.875 + ] + ] + ], + "params": { + "weight": [ + [ + 0.65234375 + ] + ] + } + }, + "single_transformer_blocks.10.proj_out": { + "inputs": [ + [ + [ + 18.625 + ] + ] + ], + "params": { + "weight": [ + [ + 1.109375 + ] + ] + } + }, + "single_transformer_blocks.10.attn.to_q": { + "inputs": [ + [ + [ + 15.875 + ] + ] + ], + "params": { + "weight": [ + [ + 0.439453125 + ] + ] + } + }, + "single_transformer_blocks.10.attn.to_k": { + "inputs": [ + [ + [ + 15.875 + ] + ] + ], + "params": { + "weight": [ + [ + 0.55859375 + ] + ] + } + }, + "single_transformer_blocks.10.attn.to_v": { + "inputs": [ + [ + [ + 15.875 + ] + ] + ], + "params": { + "weight": [ + [ + 0.251953125 + ] + ] + } + }, + "single_transformer_blocks.11.norm.linear": { + "inputs": [ + [ + [ + 6.4375 + ] + ] + ], + "params": { + "weight": [ + [ + 1.8125 + ] + ] + } + }, + "single_transformer_blocks.11.proj_mlp": { + "inputs": [ + [ + [ + 23.125 + ] + ] + ], + "params": { + "weight": [ + [ + 0.6640625 + ] + ] + } + }, + "single_transformer_blocks.11.proj_out": { + "inputs": [ + [ + [ + 22.625 + ] + ] + ], + "params": { + "weight": [ + [ + 1.046875 + ] + ] + } + }, + "single_transformer_blocks.11.attn.to_q": { + "inputs": [ + [ + [ + 23.125 + ] + ] + ], + "params": { + "weight": [ + [ + 0.4296875 + ] + ] + } + }, + "single_transformer_blocks.11.attn.to_k": { + "inputs": [ + [ + [ + 23.125 + ] + ] + ], + "params": { + "weight": [ + [ + 0.53125 + ] + ] + } + }, + "single_transformer_blocks.11.attn.to_v": { + "inputs": [ + [ + [ + 23.125 + ] + ] + ], + "params": { + "weight": [ + [ + 0.4375 + ] + ] + } + }, + "single_transformer_blocks.12.norm.linear": { + "inputs": [ + [ + [ + 6.4375 + ] + ] + ], + "params": { + "weight": [ + [ + 1.7578125 + ] + ] + } + }, + "single_transformer_blocks.12.proj_mlp": { + "inputs": [ + [ + [ + 19.875 + ] + ] + ], + "params": { + "weight": [ + [ + 0.8125 + ] + ] + } + }, + "single_transformer_blocks.12.proj_out": { + "inputs": [ + [ + [ + 15.625 + ] + ] + ], + "params": { + "weight": [ + [ + 0.953125 + ] + ] + } + }, + "single_transformer_blocks.12.attn.to_q": { + "inputs": [ + [ + [ + 19.875 + ] + ] + ], + "params": { + "weight": [ + [ + 0.419921875 + ] + ] + } + }, + "single_transformer_blocks.12.attn.to_k": { + "inputs": [ + [ + [ + 19.875 + ] + ] + ], + "params": { + "weight": [ + [ + 0.486328125 + ] + ] + } + }, + "single_transformer_blocks.12.attn.to_v": { + "inputs": [ + [ + [ + 19.875 + ] + ] + ], + "params": { + "weight": [ + [ + 0.44921875 + ] + ] + } + }, + "single_transformer_blocks.13.norm.linear": { + "inputs": [ + [ + [ + 6.4375 + ] + ] + ], + 
"params": { + "weight": [ + [ + 1.609375 + ] + ] + } + }, + "single_transformer_blocks.13.proj_mlp": { + "inputs": [ + [ + [ + 21.75 + ] + ] + ], + "params": { + "weight": [ + [ + 0.85546875 + ] + ] + } + }, + "single_transformer_blocks.13.proj_out": { + "inputs": [ + [ + [ + 23.25 + ] + ] + ], + "params": { + "weight": [ + [ + 1.0078125 + ] + ] + } + }, + "single_transformer_blocks.13.attn.to_q": { + "inputs": [ + [ + [ + 21.75 + ] + ] + ], + "params": { + "weight": [ + [ + 0.5390625 + ] + ] + } + }, + "single_transformer_blocks.13.attn.to_k": { + "inputs": [ + [ + [ + 21.75 + ] + ] + ], + "params": { + "weight": [ + [ + 0.44140625 + ] + ] + } + }, + "single_transformer_blocks.13.attn.to_v": { + "inputs": [ + [ + [ + 21.75 + ] + ] + ], + "params": { + "weight": [ + [ + 0.45703125 + ] + ] + } + }, + "single_transformer_blocks.14.norm.linear": { + "inputs": [ + [ + [ + 6.4375 + ] + ] + ], + "params": { + "weight": [ + [ + 2.015625 + ] + ] + } + }, + "single_transformer_blocks.14.proj_mlp": { + "inputs": [ + [ + [ + 26.75 + ] + ] + ], + "params": { + "weight": [ + [ + 1.109375 + ] + ] + } + }, + "single_transformer_blocks.14.proj_out": { + "inputs": [ + [ + [ + 14.6875 + ] + ] + ], + "params": { + "weight": [ + [ + 1.0390625 + ] + ] + } + }, + "single_transformer_blocks.14.attn.to_q": { + "inputs": [ + [ + [ + 26.75 + ] + ] + ], + "params": { + "weight": [ + [ + 0.392578125 + ] + ] + } + }, + "single_transformer_blocks.14.attn.to_k": { + "inputs": [ + [ + [ + 26.75 + ] + ] + ], + "params": { + "weight": [ + [ + 0.52734375 + ] + ] + } + }, + "single_transformer_blocks.14.attn.to_v": { + "inputs": [ + [ + [ + 26.75 + ] + ] + ], + "params": { + "weight": [ + [ + 0.5234375 + ] + ] + } + }, + "single_transformer_blocks.15.norm.linear": { + "inputs": [ + [ + [ + 6.4375 + ] + ] + ], + "params": { + "weight": [ + [ + 2.03125 + ] + ] + } + }, + "single_transformer_blocks.15.proj_mlp": { + "inputs": [ + [ + [ + 19.625 + ] + ] + ], + "params": { + "weight": [ + [ + 0.78125 + ] + ] + } + }, + "single_transformer_blocks.15.proj_out": { + "inputs": [ + [ + [ + 18.375 + ] + ] + ], + "params": { + "weight": [ + [ + 1.2578125 + ] + ] + } + }, + "single_transformer_blocks.15.attn.to_q": { + "inputs": [ + [ + [ + 19.625 + ] + ] + ], + "params": { + "weight": [ + [ + 0.49609375 + ] + ] + } + }, + "single_transformer_blocks.15.attn.to_k": { + "inputs": [ + [ + [ + 19.625 + ] + ] + ], + "params": { + "weight": [ + [ + 0.58203125 + ] + ] + } + }, + "single_transformer_blocks.15.attn.to_v": { + "inputs": [ + [ + [ + 19.625 + ] + ] + ], + "params": { + "weight": [ + [ + 0.57421875 + ] + ] + } + }, + "single_transformer_blocks.16.norm.linear": { + "inputs": [ + [ + [ + 6.4375 + ] + ] + ], + "params": { + "weight": [ + [ + 2.140625 + ] + ] + } + }, + "single_transformer_blocks.16.proj_mlp": { + "inputs": [ + [ + [ + 23.5 + ] + ] + ], + "params": { + "weight": [ + [ + 1.1796875 + ] + ] + } + }, + "single_transformer_blocks.16.proj_out": { + "inputs": [ + [ + [ + 15.4375 + ] + ] + ], + "params": { + "weight": [ + [ + 1.140625 + ] + ] + } + }, + "single_transformer_blocks.16.attn.to_q": { + "inputs": [ + [ + [ + 23.5 + ] + ] + ], + "params": { + "weight": [ + [ + 0.625 + ] + ] + } + }, + "single_transformer_blocks.16.attn.to_k": { + "inputs": [ + [ + [ + 23.5 + ] + ] + ], + "params": { + "weight": [ + [ + 0.5546875 + ] + ] + } + }, + "single_transformer_blocks.16.attn.to_v": { + "inputs": [ + [ + [ + 23.5 + ] + ] + ], + "params": { + "weight": [ + [ + 0.546875 + ] + ] + } + }, + 
"single_transformer_blocks.17.norm.linear": { + "inputs": [ + [ + [ + 6.4375 + ] + ] + ], + "params": { + "weight": [ + [ + 1.6015625 + ] + ] + } + }, + "single_transformer_blocks.17.proj_mlp": { + "inputs": [ + [ + [ + 19.875 + ] + ] + ], + "params": { + "weight": [ + [ + 0.7265625 + ] + ] + } + }, + "single_transformer_blocks.17.proj_out": { + "inputs": [ + [ + [ + 23.375 + ] + ] + ], + "params": { + "weight": [ + [ + 1.140625 + ] + ] + } + }, + "single_transformer_blocks.17.attn.to_q": { + "inputs": [ + [ + [ + 19.875 + ] + ] + ], + "params": { + "weight": [ + [ + 0.609375 + ] + ] + } + }, + "single_transformer_blocks.17.attn.to_k": { + "inputs": [ + [ + [ + 19.875 + ] + ] + ], + "params": { + "weight": [ + [ + 0.65625 + ] + ] + } + }, + "single_transformer_blocks.17.attn.to_v": { + "inputs": [ + [ + [ + 19.875 + ] + ] + ], + "params": { + "weight": [ + [ + 0.81640625 + ] + ] + } + }, + "single_transformer_blocks.18.norm.linear": { + "inputs": [ + [ + [ + 6.4375 + ] + ] + ], + "params": { + "weight": [ + [ + 1.8203125 + ] + ] + } + }, + "single_transformer_blocks.18.proj_mlp": { + "inputs": [ + [ + [ + 26.0 + ] + ] + ], + "params": { + "weight": [ + [ + 1.125 + ] + ] + } + }, + "single_transformer_blocks.18.proj_out": { + "inputs": [ + [ + [ + 15.8125 + ] + ] + ], + "params": { + "weight": [ + [ + 1.09375 + ] + ] + } + }, + "single_transformer_blocks.18.attn.to_q": { + "inputs": [ + [ + [ + 26.0 + ] + ] + ], + "params": { + "weight": [ + [ + 0.5390625 + ] + ] + } + }, + "single_transformer_blocks.18.attn.to_k": { + "inputs": [ + [ + [ + 26.0 + ] + ] + ], + "params": { + "weight": [ + [ + 0.62109375 + ] + ] + } + }, + "single_transformer_blocks.18.attn.to_v": { + "inputs": [ + [ + [ + 26.0 + ] + ] + ], + "params": { + "weight": [ + [ + 0.51171875 + ] + ] + } + }, + "single_transformer_blocks.19.norm.linear": { + "inputs": [ + [ + [ + 6.4375 + ] + ] + ], + "params": { + "weight": [ + [ + 1.7578125 + ] + ] + } + }, + "single_transformer_blocks.19.proj_mlp": { + "inputs": [ + [ + [ + 15.5 + ] + ] + ], + "params": { + "weight": [ + [ + 0.87109375 + ] + ] + } + }, + "single_transformer_blocks.19.proj_out": { + "inputs": [ + [ + [ + 16.75 + ] + ] + ], + "params": { + "weight": [ + [ + 1.4296875 + ] + ] + } + }, + "single_transformer_blocks.19.attn.to_q": { + "inputs": [ + [ + [ + 15.5 + ] + ] + ], + "params": { + "weight": [ + [ + 0.55078125 + ] + ] + } + }, + "single_transformer_blocks.19.attn.to_k": { + "inputs": [ + [ + [ + 15.5 + ] + ] + ], + "params": { + "weight": [ + [ + 0.55078125 + ] + ] + } + }, + "single_transformer_blocks.19.attn.to_v": { + "inputs": [ + [ + [ + 15.5 + ] + ] + ], + "params": { + "weight": [ + [ + 0.54296875 + ] + ] + } + }, + "single_transformer_blocks.20.norm.linear": { + "inputs": [ + [ + [ + 6.4375 + ] + ] + ], + "params": { + "weight": [ + [ + 2.046875 + ] + ] + } + }, + "single_transformer_blocks.20.proj_mlp": { + "inputs": [ + [ + [ + 17.875 + ] + ] + ], + "params": { + "weight": [ + [ + 0.87890625 + ] + ] + } + }, + "single_transformer_blocks.20.proj_out": { + "inputs": [ + [ + [ + 12.0 + ] + ] + ], + "params": { + "weight": [ + [ + 1.1484375 + ] + ] + } + }, + "single_transformer_blocks.20.attn.to_q": { + "inputs": [ + [ + [ + 17.875 + ] + ] + ], + "params": { + "weight": [ + [ + 0.57421875 + ] + ] + } + }, + "single_transformer_blocks.20.attn.to_k": { + "inputs": [ + [ + [ + 17.875 + ] + ] + ], + "params": { + "weight": [ + [ + 0.50390625 + ] + ] + } + }, + "single_transformer_blocks.20.attn.to_v": { + "inputs": [ + [ + [ + 17.875 + ] + ] + ], + "params": 
{ + "weight": [ + [ + 0.55078125 + ] + ] + } + }, + "single_transformer_blocks.21.norm.linear": { + "inputs": [ + [ + [ + 6.4375 + ] + ] + ], + "params": { + "weight": [ + [ + 1.5234375 + ] + ] + } + }, + "single_transformer_blocks.21.proj_mlp": { + "inputs": [ + [ + [ + 19.875 + ] + ] + ], + "params": { + "weight": [ + [ + 0.9140625 + ] + ] + } + }, + "single_transformer_blocks.21.proj_out": { + "inputs": [ + [ + [ + 13.0625 + ] + ] + ], + "params": { + "weight": [ + [ + 1.4140625 + ] + ] + } + }, + "single_transformer_blocks.21.attn.to_q": { + "inputs": [ + [ + [ + 19.875 + ] + ] + ], + "params": { + "weight": [ + [ + 0.482421875 + ] + ] + } + }, + "single_transformer_blocks.21.attn.to_k": { + "inputs": [ + [ + [ + 19.875 + ] + ] + ], + "params": { + "weight": [ + [ + 0.46875 + ] + ] + } + }, + "single_transformer_blocks.21.attn.to_v": { + "inputs": [ + [ + [ + 19.875 + ] + ] + ], + "params": { + "weight": [ + [ + 0.42578125 + ] + ] + } + }, + "single_transformer_blocks.22.norm.linear": { + "inputs": [ + [ + [ + 6.4375 + ] + ] + ], + "params": { + "weight": [ + [ + 1.2265625 + ] + ] + } + }, + "single_transformer_blocks.22.proj_mlp": { + "inputs": [ + [ + [ + 14.625 + ] + ] + ], + "params": { + "weight": [ + [ + 0.703125 + ] + ] + } + }, + "single_transformer_blocks.22.proj_out": { + "inputs": [ + [ + [ + 12.9375 + ] + ] + ], + "params": { + "weight": [ + [ + 0.87109375 + ] + ] + } + }, + "single_transformer_blocks.22.attn.to_q": { + "inputs": [ + [ + [ + 14.625 + ] + ] + ], + "params": { + "weight": [ + [ + 0.455078125 + ] + ] + } + }, + "single_transformer_blocks.22.attn.to_k": { + "inputs": [ + [ + [ + 14.625 + ] + ] + ], + "params": { + "weight": [ + [ + 0.6015625 + ] + ] + } + }, + "single_transformer_blocks.22.attn.to_v": { + "inputs": [ + [ + [ + 14.625 + ] + ] + ], + "params": { + "weight": [ + [ + 0.609375 + ] + ] + } + }, + "single_transformer_blocks.23.norm.linear": { + "inputs": [ + [ + [ + 6.4375 + ] + ] + ], + "params": { + "weight": [ + [ + 1.6640625 + ] + ] + } + }, + "single_transformer_blocks.23.proj_mlp": { + "inputs": [ + [ + [ + 20.875 + ] + ] + ], + "params": { + "weight": [ + [ + 0.71484375 + ] + ] + } + }, + "single_transformer_blocks.23.proj_out": { + "inputs": [ + [ + [ + 11.5625 + ] + ] + ], + "params": { + "weight": [ + [ + 1.0546875 + ] + ] + } + }, + "single_transformer_blocks.23.attn.to_q": { + "inputs": [ + [ + [ + 20.875 + ] + ] + ], + "params": { + "weight": [ + [ + 0.498046875 + ] + ] + } + }, + "single_transformer_blocks.23.attn.to_k": { + "inputs": [ + [ + [ + 20.875 + ] + ] + ], + "params": { + "weight": [ + [ + 0.7578125 + ] + ] + } + }, + "single_transformer_blocks.23.attn.to_v": { + "inputs": [ + [ + [ + 20.875 + ] + ] + ], + "params": { + "weight": [ + [ + 0.57421875 + ] + ] + } + }, + "single_transformer_blocks.24.norm.linear": { + "inputs": [ + [ + [ + 6.4375 + ] + ] + ], + "params": { + "weight": [ + [ + 1.7890625 + ] + ] + } + }, + "single_transformer_blocks.24.proj_mlp": { + "inputs": [ + [ + [ + 16.125 + ] + ] + ], + "params": { + "weight": [ + [ + 1.09375 + ] + ] + } + }, + "single_transformer_blocks.24.proj_out": { + "inputs": [ + [ + [ + 13.0 + ] + ] + ], + "params": { + "weight": [ + [ + 0.95703125 + ] + ] + } + }, + "single_transformer_blocks.24.attn.to_q": { + "inputs": [ + [ + [ + 16.125 + ] + ] + ], + "params": { + "weight": [ + [ + 0.453125 + ] + ] + } + }, + "single_transformer_blocks.24.attn.to_k": { + "inputs": [ + [ + [ + 16.125 + ] + ] + ], + "params": { + "weight": [ + [ + 0.57421875 + ] + ] + } + }, + 
"single_transformer_blocks.24.attn.to_v": { + "inputs": [ + [ + [ + 16.125 + ] + ] + ], + "params": { + "weight": [ + [ + 0.345703125 + ] + ] + } + }, + "single_transformer_blocks.25.norm.linear": { + "inputs": [ + [ + [ + 6.4375 + ] + ] + ], + "params": { + "weight": [ + [ + 1.078125 + ] + ] + } + }, + "single_transformer_blocks.25.proj_mlp": { + "inputs": [ + [ + [ + 15.625 + ] + ] + ], + "params": { + "weight": [ + [ + 1.15625 + ] + ] + } + }, + "single_transformer_blocks.25.proj_out": { + "inputs": [ + [ + [ + 13.0 + ] + ] + ], + "params": { + "weight": [ + [ + 1.3359375 + ] + ] + } + }, + "single_transformer_blocks.25.attn.to_q": { + "inputs": [ + [ + [ + 15.625 + ] + ] + ], + "params": { + "weight": [ + [ + 0.50390625 + ] + ] + } + }, + "single_transformer_blocks.25.attn.to_k": { + "inputs": [ + [ + [ + 15.625 + ] + ] + ], + "params": { + "weight": [ + [ + 0.515625 + ] + ] + } + }, + "single_transformer_blocks.25.attn.to_v": { + "inputs": [ + [ + [ + 15.625 + ] + ] + ], + "params": { + "weight": [ + [ + 0.3984375 + ] + ] + } + }, + "single_transformer_blocks.26.norm.linear": { + "inputs": [ + [ + [ + 6.4375 + ] + ] + ], + "params": { + "weight": [ + [ + 1.28125 + ] + ] + } + }, + "single_transformer_blocks.26.proj_mlp": { + "inputs": [ + [ + [ + 15.875 + ] + ] + ], + "params": { + "weight": [ + [ + 0.90234375 + ] + ] + } + }, + "single_transformer_blocks.26.proj_out": { + "inputs": [ + [ + [ + 13.0 + ] + ] + ], + "params": { + "weight": [ + [ + 1.453125 + ] + ] + } + }, + "single_transformer_blocks.26.attn.to_q": { + "inputs": [ + [ + [ + 15.875 + ] + ] + ], + "params": { + "weight": [ + [ + 0.490234375 + ] + ] + } + }, + "single_transformer_blocks.26.attn.to_k": { + "inputs": [ + [ + [ + 15.875 + ] + ] + ], + "params": { + "weight": [ + [ + 0.455078125 + ] + ] + } + }, + "single_transformer_blocks.26.attn.to_v": { + "inputs": [ + [ + [ + 15.875 + ] + ] + ], + "params": { + "weight": [ + [ + 0.51171875 + ] + ] + } + }, + "single_transformer_blocks.27.norm.linear": { + "inputs": [ + [ + [ + 6.4375 + ] + ] + ], + "params": { + "weight": [ + [ + 1.109375 + ] + ] + } + }, + "single_transformer_blocks.27.proj_mlp": { + "inputs": [ + [ + [ + 17.0 + ] + ] + ], + "params": { + "weight": [ + [ + 0.75 + ] + ] + } + }, + "single_transformer_blocks.27.proj_out": { + "inputs": [ + [ + [ + 13.875 + ] + ] + ], + "params": { + "weight": [ + [ + 0.80859375 + ] + ] + } + }, + "single_transformer_blocks.27.attn.to_q": { + "inputs": [ + [ + [ + 17.0 + ] + ] + ], + "params": { + "weight": [ + [ + 0.57421875 + ] + ] + } + }, + "single_transformer_blocks.27.attn.to_k": { + "inputs": [ + [ + [ + 17.0 + ] + ] + ], + "params": { + "weight": [ + [ + 0.6953125 + ] + ] + } + }, + "single_transformer_blocks.27.attn.to_v": { + "inputs": [ + [ + [ + 17.0 + ] + ] + ], + "params": { + "weight": [ + [ + 0.59765625 + ] + ] + } + }, + "single_transformer_blocks.28.norm.linear": { + "inputs": [ + [ + [ + 6.4375 + ] + ] + ], + "params": { + "weight": [ + [ + 1.1953125 + ] + ] + } + }, + "single_transformer_blocks.28.proj_mlp": { + "inputs": [ + [ + [ + 24.625 + ] + ] + ], + "params": { + "weight": [ + [ + 0.87109375 + ] + ] + } + }, + "single_transformer_blocks.28.proj_out": { + "inputs": [ + [ + [ + 13.9375 + ] + ] + ], + "params": { + "weight": [ + [ + 0.8828125 + ] + ] + } + }, + "single_transformer_blocks.28.attn.to_q": { + "inputs": [ + [ + [ + 24.625 + ] + ] + ], + "params": { + "weight": [ + [ + 0.55859375 + ] + ] + } + }, + "single_transformer_blocks.28.attn.to_k": { + "inputs": [ + [ + [ + 24.625 + ] + ] + ], + 
"params": { + "weight": [ + [ + 1.875 + ] + ] + } + }, + "single_transformer_blocks.28.attn.to_v": { + "inputs": [ + [ + [ + 24.625 + ] + ] + ], + "params": { + "weight": [ + [ + 0.48828125 + ] + ] + } + }, + "single_transformer_blocks.29.norm.linear": { + "inputs": [ + [ + [ + 6.4375 + ] + ] + ], + "params": { + "weight": [ + [ + 1.171875 + ] + ] + } + }, + "single_transformer_blocks.29.proj_mlp": { + "inputs": [ + [ + [ + 18.25 + ] + ] + ], + "params": { + "weight": [ + [ + 0.6953125 + ] + ] + } + }, + "single_transformer_blocks.29.proj_out": { + "inputs": [ + [ + [ + 13.25 + ] + ] + ], + "params": { + "weight": [ + [ + 1.109375 + ] + ] + } + }, + "single_transformer_blocks.29.attn.to_q": { + "inputs": [ + [ + [ + 18.25 + ] + ] + ], + "params": { + "weight": [ + [ + 0.53515625 + ] + ] + } + }, + "single_transformer_blocks.29.attn.to_k": { + "inputs": [ + [ + [ + 18.25 + ] + ] + ], + "params": { + "weight": [ + [ + 0.48828125 + ] + ] + } + }, + "single_transformer_blocks.29.attn.to_v": { + "inputs": [ + [ + [ + 18.25 + ] + ] + ], + "params": { + "weight": [ + [ + 0.52734375 + ] + ] + } + }, + "single_transformer_blocks.30.norm.linear": { + "inputs": [ + [ + [ + 6.4375 + ] + ] + ], + "params": { + "weight": [ + [ + 1.546875 + ] + ] + } + }, + "single_transformer_blocks.30.proj_mlp": { + "inputs": [ + [ + [ + 18.0 + ] + ] + ], + "params": { + "weight": [ + [ + 0.6328125 + ] + ] + } + }, + "single_transformer_blocks.30.proj_out": { + "inputs": [ + [ + [ + 16.0 + ] + ] + ], + "params": { + "weight": [ + [ + 1.3984375 + ] + ] + } + }, + "single_transformer_blocks.30.attn.to_q": { + "inputs": [ + [ + [ + 18.0 + ] + ] + ], + "params": { + "weight": [ + [ + 0.515625 + ] + ] + } + }, + "single_transformer_blocks.30.attn.to_k": { + "inputs": [ + [ + [ + 18.0 + ] + ] + ], + "params": { + "weight": [ + [ + 1.5859375 + ] + ] + } + }, + "single_transformer_blocks.30.attn.to_v": { + "inputs": [ + [ + [ + 18.0 + ] + ] + ], + "params": { + "weight": [ + [ + 0.54296875 + ] + ] + } + }, + "single_transformer_blocks.31.norm.linear": { + "inputs": [ + [ + [ + 6.4375 + ] + ] + ], + "params": { + "weight": [ + [ + 1.5859375 + ] + ] + } + }, + "single_transformer_blocks.31.proj_mlp": { + "inputs": [ + [ + [ + 22.375 + ] + ] + ], + "params": { + "weight": [ + [ + 0.65625 + ] + ] + } + }, + "single_transformer_blocks.31.proj_out": { + "inputs": [ + [ + [ + 15.5625 + ] + ] + ], + "params": { + "weight": [ + [ + 1.390625 + ] + ] + } + }, + "single_transformer_blocks.31.attn.to_q": { + "inputs": [ + [ + [ + 22.375 + ] + ] + ], + "params": { + "weight": [ + [ + 0.640625 + ] + ] + } + }, + "single_transformer_blocks.31.attn.to_k": { + "inputs": [ + [ + [ + 22.375 + ] + ] + ], + "params": { + "weight": [ + [ + 0.47265625 + ] + ] + } + }, + "single_transformer_blocks.31.attn.to_v": { + "inputs": [ + [ + [ + 22.375 + ] + ] + ], + "params": { + "weight": [ + [ + 0.73828125 + ] + ] + } + }, + "single_transformer_blocks.32.norm.linear": { + "inputs": [ + [ + [ + 6.4375 + ] + ] + ], + "params": { + "weight": [ + [ + 1.5078125 + ] + ] + } + }, + "single_transformer_blocks.32.proj_mlp": { + "inputs": [ + [ + [ + 21.625 + ] + ] + ], + "params": { + "weight": [ + [ + 0.7578125 + ] + ] + } + }, + "single_transformer_blocks.32.proj_out": { + "inputs": [ + [ + [ + 19.0 + ] + ] + ], + "params": { + "weight": [ + [ + 1.171875 + ] + ] + } + }, + "single_transformer_blocks.32.attn.to_q": { + "inputs": [ + [ + [ + 21.625 + ] + ] + ], + "params": { + "weight": [ + [ + 0.55859375 + ] + ] + } + }, + "single_transformer_blocks.32.attn.to_k": 
{ + "inputs": [ + [ + [ + 21.625 + ] + ] + ], + "params": { + "weight": [ + [ + 1.421875 + ] + ] + } + }, + "single_transformer_blocks.32.attn.to_v": { + "inputs": [ + [ + [ + 21.625 + ] + ] + ], + "params": { + "weight": [ + [ + 0.52734375 + ] + ] + } + }, + "single_transformer_blocks.33.norm.linear": { + "inputs": [ + [ + [ + 6.4375 + ] + ] + ], + "params": { + "weight": [ + [ + 1.6796875 + ] + ] + } + }, + "single_transformer_blocks.33.proj_mlp": { + "inputs": [ + [ + [ + 20.625 + ] + ] + ], + "params": { + "weight": [ + [ + 0.90234375 + ] + ] + } + }, + "single_transformer_blocks.33.proj_out": { + "inputs": [ + [ + [ + 17.625 + ] + ] + ], + "params": { + "weight": [ + [ + 1.5 + ] + ] + } + }, + "single_transformer_blocks.33.attn.to_q": { + "inputs": [ + [ + [ + 20.625 + ] + ] + ], + "params": { + "weight": [ + [ + 0.5078125 + ] + ] + } + }, + "single_transformer_blocks.33.attn.to_k": { + "inputs": [ + [ + [ + 20.625 + ] + ] + ], + "params": { + "weight": [ + [ + 1.3828125 + ] + ] + } + }, + "single_transformer_blocks.33.attn.to_v": { + "inputs": [ + [ + [ + 20.625 + ] + ] + ], + "params": { + "weight": [ + [ + 0.4609375 + ] + ] + } + }, + "single_transformer_blocks.34.norm.linear": { + "inputs": [ + [ + [ + 6.4375 + ] + ] + ], + "params": { + "weight": [ + [ + 1.640625 + ] + ] + } + }, + "single_transformer_blocks.34.proj_mlp": { + "inputs": [ + [ + [ + 21.0 + ] + ] + ], + "params": { + "weight": [ + [ + 0.9765625 + ] + ] + } + }, + "single_transformer_blocks.34.proj_out": { + "inputs": [ + [ + [ + 36.25 + ] + ] + ], + "params": { + "weight": [ + [ + 3.109375 + ] + ] + } + }, + "single_transformer_blocks.34.attn.to_q": { + "inputs": [ + [ + [ + 21.0 + ] + ] + ], + "params": { + "weight": [ + [ + 0.498046875 + ] + ] + } + }, + "single_transformer_blocks.34.attn.to_k": { + "inputs": [ + [ + [ + 21.0 + ] + ] + ], + "params": { + "weight": [ + [ + 2.015625 + ] + ] + } + }, + "single_transformer_blocks.34.attn.to_v": { + "inputs": [ + [ + [ + 21.0 + ] + ] + ], + "params": { + "weight": [ + [ + 0.447265625 + ] + ] + } + }, + "single_transformer_blocks.35.norm.linear": { + "inputs": [ + [ + [ + 6.4375 + ] + ] + ], + "params": { + "weight": [ + [ + 1.8125 + ] + ] + } + }, + "single_transformer_blocks.35.proj_mlp": { + "inputs": [ + [ + [ + 25.5 + ] + ] + ], + "params": { + "weight": [ + [ + 0.8828125 + ] + ] + } + }, + "single_transformer_blocks.35.proj_out": { + "inputs": [ + [ + [ + 24.125 + ] + ] + ], + "params": { + "weight": [ + [ + 3.0625 + ] + ] + } + }, + "single_transformer_blocks.35.attn.to_q": { + "inputs": [ + [ + [ + 25.5 + ] + ] + ], + "params": { + "weight": [ + [ + 0.63671875 + ] + ] + } + }, + "single_transformer_blocks.35.attn.to_k": { + "inputs": [ + [ + [ + 25.5 + ] + ] + ], + "params": { + "weight": [ + [ + 0.62890625 + ] + ] + } + }, + "single_transformer_blocks.35.attn.to_v": { + "inputs": [ + [ + [ + 25.5 + ] + ] + ], + "params": { + "weight": [ + [ + 0.5078125 + ] + ] + } + }, + "single_transformer_blocks.36.norm.linear": { + "inputs": [ + [ + [ + 6.4375 + ] + ] + ], + "params": { + "weight": [ + [ + 1.171875 + ] + ] + } + }, + "single_transformer_blocks.36.proj_mlp": { + "inputs": [ + [ + [ + 24.75 + ] + ] + ], + "params": { + "weight": [ + [ + 0.796875 + ] + ] + } + }, + "single_transformer_blocks.36.proj_out": { + "inputs": [ + [ + [ + 31.625 + ] + ] + ], + "params": { + "weight": [ + [ + 1.1015625 + ] + ] + } + }, + "single_transformer_blocks.36.attn.to_q": { + "inputs": [ + [ + [ + 24.75 + ] + ] + ], + "params": { + "weight": [ + [ + 0.53125 + ] + ] + } + }, + 
"single_transformer_blocks.36.attn.to_k": { + "inputs": [ + [ + [ + 24.75 + ] + ] + ], + "params": { + "weight": [ + [ + 0.6328125 + ] + ] + } + }, + "single_transformer_blocks.36.attn.to_v": { + "inputs": [ + [ + [ + 24.75 + ] + ] + ], + "params": { + "weight": [ + [ + 0.59375 + ] + ] + } + }, + "single_transformer_blocks.37.norm.linear": { + "inputs": [ + [ + [ + 6.4375 + ] + ] + ], + "params": { + "weight": [ + [ + 0.546875 + ] + ] + } + }, + "single_transformer_blocks.37.proj_mlp": { + "inputs": [ + [ + [ + 34.5 + ] + ] + ], + "params": { + "weight": [ + [ + 0.46875 + ] + ] + } + }, + "single_transformer_blocks.37.proj_out": { + "inputs": [ + [ + [ + 33.75 + ] + ] + ], + "params": { + "weight": [ + [ + 0.88671875 + ] + ] + } + }, + "single_transformer_blocks.37.attn.to_q": { + "inputs": [ + [ + [ + 34.5 + ] + ] + ], + "params": { + "weight": [ + [ + 0.46484375 + ] + ] + } + }, + "single_transformer_blocks.37.attn.to_k": { + "inputs": [ + [ + [ + 34.5 + ] + ] + ], + "params": { + "weight": [ + [ + 0.6015625 + ] + ] + } + }, + "single_transformer_blocks.37.attn.to_v": { + "inputs": [ + [ + [ + 34.5 + ] + ] + ], + "params": { + "weight": [ + [ + 0.361328125 + ] + ] + } + }, + "norm_out.linear": { + "inputs": [ + [ + [ + 6.4375 + ] + ] + ], + "params": { + "weight": [ + [ + 0.345703125 + ] + ] + } + }, + "proj_out": { + "inputs": [ + [ + [ + 28.875 + ] + ] + ], + "params": { + "weight": [ + [ + 0.1376953125 + ] + ] + } + } + } +} \ No newline at end of file diff --git a/examples/stable-diffusion/quantize/measure_all_500/fp8_hooks_maxabs.npz b/examples/stable-diffusion/quantize/measure_all_500/fp8_hooks_maxabs.npz new file mode 100644 index 0000000000000000000000000000000000000000..ea86a90ea6c9515267bb5a59d70e23bb3605abf6 GIT binary patch literal 97750 zcmbrn54_EHcJA-bU}Ov`gPJiI8AQfCguzJ7IR}Hvq_{g5F*W|?5FyxK+ff<+DuYT> zREm+N7#UQms1!9-REnBWR4%SG7nR~l6_uh%Q-1H~v-W45V`LxO>(~3b&sxu{&v&i$ zwO3OR{z13E0tFIQLE=mCXY&5J=Lt$^nR_{G;1|_VvydbFUHO zCztTQPkv#Zl1*UYazmVARBXm0M>IJ)fC2spOG# z_2;S}u-|vmH5UBd{PY)7>6(v!zv^?6Jw5$M-@bjHzRUjw_KOy~j@+IbwWj#DsC_+I^p^$^Y`q#8KJ*p-$iDzQF(RTN9rzN!NX@7U}nA0-hQBQpvDP$?s%_mAo{ve$(fQ#M)@aP zFn;WWk`c+UC(~b8AH8t&~YO8@Zqd`_C>)eRZT&)%&}T_u`iV^XTO%h=c{rWz(`{y?|HtDmAg&-z zX{#yiQt2msO8e}TRB^gPCjIS^BVJ&|jxYFLo%BTKRQfyqM3?LnuNS4C%A|iI*Ir|$ zjD3E@gi$4*^UuHQjP!TIUWIY5B26hyrMvl*?%65)Q|ysRf6w;nS+-X%J<&Ur{!M?P zPxgru-%UTANq_&L_iCShW=8tA!d`vjUi~zse=7YyeM%}j<%62N%6H`;^-|;7=WuI79lzusr{*iX7%ReVxq{{xS<{wRZ`i%69 zFnwm6K1;o3r_#Ucz2;`~{vsr27^=Y84EKkb%&H*E(i3)!BqP1 z{fR@_CmI*0f00T5GOu06r4P?Y|3ld2NZjS9Iv-1=|Is@iFWcq4O!|M@E+@)%IjJX3 zrPBY$pE#X;;*p~C`c}r2j2!bTw{tP5rN@ z(*N%LZ)E#F-!1)dCjB3_(ao}rZt02Jsq}yP6Q7j*)A=-${;&KtDz%M1o00zSu+g2k z(Ovo{mAKxdlFDG5l2n2CYDraCCiwyw7EG$C8vkpWR2QR74Pq+!BIs>us$~DgPYRPq zV437gV18St!`4YH9+;7Q8Px7cZIbNzNgd^ku1icMzosME3rIyoE(WWyom3#;EHeFQ8{(~q?o`PkP-vBH83XC3IFrkEh_yzhmG3mUn@#}0Lvu54QlmZPd@KI=%Yvbe?H5735M|4 zjO2%4&@q%G>KLY+1%?w-$p{^ZI!3BwI$C#2M!_=4^PuS{QPp*f7NgA=Vk&t7^fqHv zvNop*lNVu`WE?0REpmSe?$Oac8P5|ll9#|BVFF2%Fi|-!hMoNn?OPt8dF01Pq~l0+Gclrw)ZF_rwGjzk$tRB{=w!!pSq zfhJ?AsxD)h7;TmlQ^`+2Z?i%rYg4Nzc>|V7{uq>u9Cx=T3;gbuyvflS$)A8>_mw1Z z_f^Unx|*0ueySsJ_cbcn&_C^#yamf7KLc&|wW|8=>%?fYo|sDh6!bP5RI)Z*ijucs zndHyFymlX#Y~<*S|EMET!Eu#rblvXBd$3IMzd=)QLRD9AQj9jIh^gfNfZpb` zO4jD@3X}I?ndG0q{1n*TGx>lAW+eX?3<}PWL;X*?Z^}9+;8*8yFN^C5Z~IDQEO` zVk-G}9f=BVsN@PhhGmlf08PP7Rb9a?G1}ZFrjq{zz0D^oS)0EtOg@EWlK%pwAjdtl z?3Ksvp`USlM)Kca*#8bm-2bj}23O)QKXC$}53T~q4zAoSsS3jhfVO`%Rek^JVzjA2 z#0h}jrlv}^|9{{FU^oHr!C!%L_t08AfD-_Ng4!feK^^6cu1mxT=t%qu)KkeuH^vFT 
zmtlqXwn5si2j#(+lVWRqn6Vm+?XIAL2VW7zSPkN^l1c<$S?s}Ak;17V%$N*Dr`1$& zr`1us76>l>=Uu#st;zR1DhMXA*GB5ZMli3Ts91=-E-O5^4bpUDD39Glimgpy#%VA% z+e`&-b^{dSG>AhBm5AL^?6F%(;nW&tMh2r(8x`DXBNUGXf{I<~CRUT*#5U$ro)ZL< z#O;wftOn*a6cr1JJF>!4+aUdRhVsN+q}bXOW{d`7pPQ)QeRfANMuRx?P>IBwi9K;o zDV%!2%*SAK+FS*9+5*KhfuIssx``>V&!x#tY%BKhsvwxe-v+6Zw}p8WMa3k3Z`+;~ z-rNRheg`OzzoQge`@+oAU~Iad3bE-Pwlj)(8pL52m59Hq*yHaeg;Rf+0U3-=161(% zd!TqP5LEn1H?e=jud%(@!pnkSl6)Ve4&4{#SriqMB{o_vTDTZh8T z(qQa&xC-9y{wQW?5QmW}k^BI$Cm$t+(`cAE8H`S2RdA1e5f$kUIEin8#66ETlh{ z6<*#3>HT;pPk(|GTTg_Utijmx$trlyr=XauK^#s~iS(z7J^dL{IL(Hcn!)IFmJ054 zHj3v1L8Y&B6YG%N#LnegUKj+E$mb(<+66H0qNrGid=V?WyA9ItB~TvuQYp61ftjqq z*y!adc%xUKn5;n@u2PA}SBpLJHBvaug_)PZ=yaV5?sPqhHv_@yB9F2g`Ict|!6fp{ zNS$^I%%dnO79!uq3a@R0G<*k?N4`^vt#`r9)L?A%9u>UNdr{2PAP)DdMC1p=9{E8j zoaVty%3ySQSOs@_1jTECV0Dov*<*amgMwfZ`3a;>dlKeZ6cr1RpJs*Uwm}+x7Rn<( zC&kw1VWw#?Hu|Cp-snpxrfCp|S5zYMt74D*niNj+VP<47I=!KSJH3hGtw2zbE8WE6 zFLrVhdz&3RDhMXQ-$m-o_h4Q`QLzyG16Fu-8>H=zpgj1;Qf&PMW|{_LyPv7x?S778 zng((BQYC_aCHCN7OX0KtW=aO5)3++P(|0J|2?VPQ9&chlu!9E$!6f)kNS*mJ%!?>0 z7J~oE3XgAtwEa7j2meEgt$)JI(_n13)=Fu+wNcE|AP#j@B6wY~2VYVOr=?(~WH34{ zqk;!t7R7UcV3on+O>8-K@U9@31YaJhGgp9l6-C8D@CK~#`Zh@0D?@qkRixP35N4hR zW4o)V;O(xCVx9(ZSW_i}uO;^2YfItO2xd+Oqtm)7xYK$lUJV2lywXi9|1*5yO{@vu z^SB_G#9kk%6Pv-jhoWL3c5_yEd>f?cmQWtMl@wcB!_3oQY_^RG-t0yw=4lXzja4Fc zJF&-ZFNISL%&-hbr;aMPQzsNp2ZB}nypx+)7k2Q*AeaR2hSZsxz&wkhVj=jZtnm6a zNZXr1dGMZ6Z0!XzR)ewK%~kMrw?HvggE(xZ62Z3?d+=?faM~7TOa`OV_A0nj9~5r~ zf{Xuo7jI%a@;y%sf=TS1kUFs+%-bj`7Gh_t@ccGN)4M`>?A@f;+8<_|24k}WRPbi^ zKrv2(IP9eovG*2x?0uwg+81U<2BXt{D!9`i6i)|&ie2d@)*-)%4dGKB7X*{U!;m^` zILzB9Di#utV168l`5+{C7^k2eOvB>urjojeuhi4+x+_`U5AR(OFMr1@!3 z9{(^YwoZqcr@`3t5h}!{d)N#V^E8OVOqGa#l-T3XlEUd|m;o7#PRFX?@sC6CdLXFy zm2P7Hh+ktTu!VO9!6f-fNF91I%p)l(Cdqr+sjTo6H%Q;7LwWKuq}VzeW|jtHzh|l7 z{hp0tmIiS+S0$34C-&s$OW||@%$y8Hr;Ai@r;Aa%9|$UWrJGpC;!W&QKIfG|FiCwG zQU_iR^F)e@h16HF!V}yeJzovwsjrb@>s**|8jQVOr-Ju-J&JJ}#NkGjNPUyoQ{OCw z(=9NgG8mn1Q^B2XNAZ3jsMM8iVllP<#>bo3oowT^K`=>wH&O@R1M^ghiiPy|vBEpt zAiX~T<>?=kV(UDZ$r_A3KdgfH{0NH48pPo-l}P`%*wa5Dh0~KTQ!^Nyo>sw~o< z1#k3C6q7ZG!`mtm`5m!Gepd>o_h9B_Fgkspf;)YP;srsly2zvKW4`5)K`@E@DN?6> z2J=LUiiOBuu)^EhAPs*7<&nRZV(S8!nHr3Zeyf5v`W=dy8pPoTm5BVK*dzZWh11V4 zlQI~cepSJpenatgAXr`GN%jZd^1L9JME(n@(`v1((`tjo$V;%oeI zDVS**jEyd%f;YM>ifJ0eVL6qETwm;wmzTn61(+Ebj7|+yaHo|}yc`HBa;2MC{KZag zVym!&CkDYJ_^L>qxf;y-C@L0y)@!iB8{8mmuLb47*Op>yBbaF#jP0(gg15UKifJ0e zp@~WaZz}fS>r3I(3}#9Oqf>Jg+^GeMR|CN+gU6d#D|Ya_AeaQ-5UDfUz`T#5Vj*~2 zR(OgVr0sT49=yF2TWetEX)w0iQ3Y?e6N-5n#G#8y1n(;L;N7Hf+5~1w2BXuaD!5Y* z6psgjRR)hYv7YQGTQ4vP-W#biH-~v6Ma4q!Em`3$ZjiROhVtOsNU?QWn0XqE?QXAv zx7!EBJPqQoqe=wtEB4?!N#WEFW=;m9Q>KDD?SkSBK~TXf-Nf=g!x!GfcH?`V8U&Nr zyCZet0GQWNR4l~alNFxg25EY4D38656kGR&nWw?n?0zbEvx88~(;yB*R3i3JvBw@J zh0}1DVHu20BUEswktiM#1grRYCpWQC?BK;gFbO^esWZpIJd&bfA^3P!c#9jPZG-aQ z6QtN0V8&`NwmV4$Z}&hHV>O7w6qN{mkl2GCEQQllm@yfQPKT)APKTm+Nf2E8&%1aN zJB;sna1cylACAYrJGpC{3dn=pYqfom?S2jF)7>rI= zs^Csnp?E?NRN_iEF(vl7G`Wdg!#-Xd1e5sJB6ae0Fb}4vScrcEE4;@I()>+O9{*-3 zw%!6WPlK`P+f?wTZ$~jtgE-u&67la6d;GhlaJmO(KnA1JeJZ%q{V3iN1Qoy1P3#}> zYwSU`@aiC#B!390Lm!5DGDXEg@<&or*WH*Z7<_2f-xu>qs5=2F!yg zDi%_|#R?B{gY^6kl&5}IimmU#jMHH3^#c{W*AG#Q(;yBXt3>Kg#Gd+7DV#oo8I{54 z^o0uU^d*Yd1VN>)bQ6oI{Wm_|#J*-5Zx4b=`frdr_*O{c+bC~n5;n@{!oeZe~LZ*Us5>LT18H^!RWLE|1@{1gW_pHQ0Xh( z#5yH6u_gJI*9XBQ^3q71whYXhDJm8s*JFh@xj`DP59N`UmtyM*Fq1VH8*QM1M_viV zWDVl5ib_OoDE7#!O5wB`%)AUnr!`b?r!`T$CkR#-d6ccqw>&urCXv@c>a=xX9!ycO z5V7%>@Zcbr z1m6y+Gq;C%Ek(sb@Eus;MQ)I``$Bo}out^>4`!MMW4oCO-tI0arfCp|-BcoYf3XMO zT?(fGFjF!Zo%U40o%TZUh9FpF@OTs3haEgN2qwV?B6a3|Ft4SkSO`9t6&~gWX?rM? 
z2OlQI*5NSoG#J|*p@O$N62&|X;xI}jf{zw^@G(+2jfI(#!RRzz1$Qc;cuEkgGI+d+ zO<)Ia4uVPWiAbF}3FgHV6$`;9v%<^VAZ;H6<-re@V(V0xc^ZuE9-@M`dnk%|8pPo+ zl?Xmv?7F3)DiM3O*khk5h0|Fu!!j71 z&QZag&PDO8AXvrEJGqIS&ko)n1e4$wB6a3PFi)naSO|UzE4<7N()JuE4}O^xTQ7$h ztHIdrl`4348^m8pki0LiFL|vVo&fX4-bM#;-`>0>}i;HQ&cP@ewGy; zf(f)ra{gc+m3*yqbCc%QGJ7^6WPUQ>z0^TnR{bt#)asCe*)$4Kb2zZXE5_L z7@Ph=1#kLG6!SEQ!`CVie}UNJeKe2^3 z2*D)zFGwBwE6f8bDi)Ie&I-?TgY^9;lqdg7imkO8YHMvU_PYfCH1D?#idh=OVM&!p zzLeOLFD-@BGB9&87@g{=;K`Rm@wOnSor^cI<@uZ!2*D)vibx&U0Ot7=6$`0X zW`*auL3(Zo<*8SdV(V%!<1`q1T|))$bxjoGG>F66Dv`R8*i)|~h10q)qcRws8mr(= zO;Efo2r6}@n^;Wkzwz-Vwm#cO_Dv`cM?CCp5;nWdkY6hcIXBFJ33yOyZL8Y&B6YHGZ#Jce< zZxMn?HtZ^jBQbb~b93(6z+mSXGXFq1VH8{JX`Z*(gZlQoFLHYyQ$ zTd_ypP70^(VdiBpI_;o>JMD<#bwRMY$fIm0zU2WzFp0b~Qm17w&!?zZh`cK+yvq&J zaDOO|yt@=z2f)nKU~F_x6}-{CP|VaI4*RG?;CWu@LzHR(O~jq~Xy}9(jxuTgSpo(_n0Lyb9iE3B@!G z;xIuaA_uWYo+yRWB$yc)j82nPaHlCKUKIotxzbH6{$eLLv4h#c^Mha#yo}VDhrqm@ zqGBQVG*)<@8>H>&P#*knDYhN~GfjiB-6K`-c4wlPra>HLsYLLj#UA_^DV&alnUcZi zbi4}gbOMSO1;HwV$D7zm?BMA^FbRGNQfHnD^LC1gh2W>N!ZY0fq z8jS6pqk^}4E{b^?#Nm9E2!4UsgI_3x(?u{-G8mmMQNf)qMe(p8SY_~d6T6HZyg&#h z!LLB-%qwBuPf@WD{AyNsryHd0xlkVbS}C?(2QyEDvE3U~@OE!RF;9ay+^iD8ZxMU& zTcvQi4Q5UTqthKKxYL~|-WLQFywXi9|1*5yP3&&I=NUpUiG43pC*BA1a*B$D*blJ6 zGuIP|g0hGu7M(Y1n_7_mLRaduwi@ODPcXvOydk7NT^`IN50Kwfo zSO^v%IE3I90t9#W0KtLhuF;$skPZpZiU0!R9xypZwGP zd&5l7Njxl>hYg2qQAx3rcmyX_bVL1)gqp-7N4Yu*Hb%qJ=V&R^K1ZjF(U8NKDM>t5 zxF#MuiqkmQd<=`zxG7Yp@hIyBVG{q`Z(>PY=hD`1ViUM$8zF3c;1iK~^2FFOl@vSi zCv{?1H`M&(sEI#Cl&e!>^E50?Pn|+-dK$_+4LMAglK9hyYy26aIL(L+$gnugoI-V) zg|cT5CjQU;CiefupSL%gD>f3sR`NN>JakTMol1(WBZRHg%aVEEa@aDJ6kDm6?5*I$vTms7l~9v<uL73D(_nTNrUH=zfeiK{YHG2tREByv!9=sv8 zR3*hu`i-5~*bVi*DQePh7Uk;Z*klb$&s(NYd)|sNSwjxnq$K^e;hKKCC{EjBQ!^}1 zJEl;b22<7!!leJX-^3Pb{U)}ve`_-#Y(?Ic%+q$m_Nk=UiQIEyUpLh7o~VhuSCp%L z*klb$qkE@N8{LO8SwjxLO-bba!Zq^#QJfCI=4Du%4oaaq9ZcCZ2>Ta#aPLt6);dDi zihMYkryYSUQ%SKC`6ws0bVCgvgPO?4M!9+%HdDjW=m{y*Mo*;7)R4o;DT#baxJEuT ziqmP>qzsGG=_ypFGbmdIVgDlc_0IBdEh2=i$iFA^v~#d^Dk*j%pXYuvDm5VnHwr@mD@SCDsy&0RPVQKf)6l%M-QKo6g z;f|C9zcXBe|22x!UD%Wii__gHRHu6=`vqY?gO}gL{_ciFgs>I-elpK|0NbRJVkh`R zPAu()+I|Ez!5@us^)YOohNaynQmE}dNtvf1ho@5#{F!hK{%jPd=ddXm7N-|ds7^0Z z)(paa1~0#fz3hg4gs>I-RWi?f4O^y?Vkh|PPHgRl+I|x?!T%ZM>RZ@64NJT4q)^*^ zmoiU74*yC?@b|+t_yJ!}-G z;jm#D7N-$Xs7@nNRt~~`eBQ0!#71_*ZbH}!J}Q}Kj)v`0NwE`r3@5gBLv4?Rn&4wc zxjGIuR>RWnxGB_j$D@qZki!Hi2|i)C2A?R3)5O@A42#pGDO9J)C_4w?kU#J4Z(>vU z_f`|aR_v+BJaKAlpGu0I*wZ?(wi{}Cdep?8AJHaiPtoQ51`OG)h6 z!!`CCQJm()W@K2L=1!qH%|lr^2ow9~eiK`$`Auv-|I|`K*h;(rnTIWity4*{lXzh# z7Is7ZE{dAOi$%G*I5tMZ(&rK>)IOJ_jM0$8(kV&2Ot>aqHj2}7*nA9&(+Vk6rxhv7 z24NEa+;3t@T<6l(Z(=LEXEz~i#b1@olUKu*s-)P7zlIZ=yP@XSLQVW%M!C8+Hc!LS z^g1cjrq`v+(~!gZDT)8&jn^3k6!o>f%-^BjE`1AHQb7lYD z=CGA~3o;Mg65FbhVk`O5y{(;C;SKe@Eozc)7v<{q*enf8zdNQ-`W@68Oqr!2hn-WB ze3x)dzH1bx-LN?s7N=ecHTfQty@N2xf9^N2g}c9r?d6}_P6%76e?#Vhdt)nAQf#GO zvbV1jtGl6|_d`wU{i9qx02`-a>Ghx#YU+b2<22-OXi8Ea7OtrekK%L$HY&s7bW{q} z>1fK{L73D(_nTNrUH=zfeiJ*^HCqZ{EB*0g9()3}StZ3z`jed4-wpMC3To1y8s+L~ z*klb$&!?wQKdCb)lQraUR!Y*J9j@trAI0e$Y-)zZ>D&~m(|MF7gfQuU?l-Z8Tfd22 z;NRL)2wRc=Nakr5VLMe)>_ooAiS6A`!{ab4ZVJq_EWS;f}wooO-PUNSYSlSIW{0wR$KO5!hbJ#QuOQSEOP#b-bGEGAc zFQ+8(E8!aX)hJG{VKXu;POqm>o!+1<9E6GdbH9m||6#X&6Z@wdHWR{D@VCi4^BruJ zN{XG}?>Vu%8*2M~)CB(^%GD3CX&RPxKTe^x`)|rL4LN+8lHi|(Yw*vbIDLUl$*?$m zl|ps;nzCyU_A_|-P3&7YEG2}k;Qt}>%a5ICi0J)72Z*<4u{Rt zu(Ufu3box4Df2YsFmg(Qj}orIM~&h%8a5@v;xtAI)oD!1#zEN6;N>^5vE8tm5VnH< zg3L3=#g?h0*a<$q6Fa=2wkJeQ@QI>aofw;^VQF{L6l%MZQRZpLVTzOlpE6v7PZh;! 
zYHUu1#cA3Us?&6oy@N2pf9^N2<{kdu-^6C{@2w|dE(627L^n`v1fH+g*Vjn z?5K%7N0h5`V)HaC&CZ=dZFU~YJPkR_my+1?himKwqBt#x4a=}NEu2DiT7ZT9!dCD>WS+SMwo4_&PVl9i*x?Pey$ouCFB|3Ra@bf6OS>ziP}^ORGFC$l zE2kv*D&ZP@)hJG@VPi5ZPHUu4oz|r69)v^wyt}`N{nEd;q7b%X|BB2L*TMFwq}YkQ zo)b&Fp{9S0n%Emexw;`XPQ%jd#wpZhH=&Hvki%vviM@Ha#@-@|)0Wtb42#p&DO9Iz zD4PdiV*lK4VhcCFiEZbf+D!;siFY9LupO~=Dk-)SFWuY8iS^x3zq_C&@vc#>?uL!g zu=LqWq4c?AZx6~C4LR(UlEi)Cn)o+Soc6}%V_2N_O`$jq>iw3oau6o*&;2Hr#C0xh z{U)}*do~opR{R6WJozAOsY;5S_=h;L#~W(?Fx139Jj&H0uz4DmrjJUYHhnZ@o`xKb zO-cOY!ZrT!QJhY|24q;APD-JE;3rd755mO%x!=V8zxea^PIbkSLfA_FJ2DSF9owps zVkh~TPAv3>`aT;q$$uZ^>N(gf4NJf0rcjfgNA1}=A9A=LCCM)g*W`bU;&c%$R)hDL^S}F@=PbXF zT;`57g|HR<3NlZ<65FeiVk`QRy{nzr<_$G|Eo!3wIm*@Ru$dZ`hObYdHhcqRriL7D zN=fva!!`OXQJikY=4Du%Zcm{)-9gzu2ort!p+w)Ux9g6Z?Yz@&y}!D!fA22X%KbMo zkGvaOtde3U_q|Rm^M<;<4>h^(k8}owOQAabi?V(YCU@raA^lIi zS!?})8}<~!R`8F=Jo960vr3Ac;Ga0L$s20>Gt>nCJj&HCu<06>cE3uYeoS9erfbOI z+mrXAIZiGevQl8Jn76ahf%S>NFc=_aIF0A-~oCyDRd4Z}mCcv8E8VqR&O< zsdHnyRZ{FkpVx`)-caN7qbB+SQLZkCP1vwByl@J&;YBDDHsr8aN}?|wuF(fYaasbK zn_+QUDuwE_G-U@N9QxaBo;mAVeOVXmDuk`v%aeKJ3fOj)6g#nR)b%Q;$-Qcn ztE*uHHZ0w)kwWcuP0D}`Is7suxz`TY+`o$Av<@~n!{W4F3e{TN@rupx)-Qj&Z7 zaLv6#6sH}r@fjAUol>YyJ5#n1!lD0+H|y1Q@9pY_wS}-1e0MU>>|xtgQtSlZ(}~^Q zP}_Z|3I3ZXSNFySZCKjfH--8!{gyIlLk|0=B=`a08vMX0P6uIwGb~Prq)?p>rEDXF z0|`F3ceoqY7Q$BWBgs7TC~U_{ik;xcII-OuYWq0U1V28?)f2E08;ymG ziS6D{+ZUoH_#dNOy$GAIVQKf06l%MFqDug=_S`MRB?t8=PTrx;KUD^moceLYU}7p4I1V-s<}m9hYQ!>!$)x%0h^m)aT+Ow>NGNCF(DjC@WH)N-LSzBwt|mN=9yz)yH--{1Ru+Z z72i}X)|ii-sZ61x${=v z!Uc;9VJr7mWFEOSwrwTFPVQ};*!K-}y*+Aj?-1qcj@X0^OSe0vP`llkGGRjwyQU=f zZsD4H_b5(1Y;K0dY0ng@(_WOVgmCCTq2cRbSfl;m=giYA6w0lSjwcSH06E@^=~&8c zLO77%gL}ujVTmDZ1wWC@Gf%>Ht)$oqeu@+OzM-~HLrw7CMY(!9Hetik?wKjncF&?r z*pS2TQxg1~a1H*4C{E{Mb2BVX=ciDeE}(2EgaZlQ*ZZRzRvE%p@QcYj^Ac?1N{XG} zmpZZU8*2M<)C9jG%GE2e2^*GnuTG)1dktm6h8+H!lHk{cYw*8Bak?IxnqhIeF@@@M z6J)c9SfiT<}JSMSCq zY*-q;H-*yhpx)mp6E@^CF_X(?2OI3Sqx<=dJ#>3sxDzR_=GnJn}tk`$~$P-0wTF^Bd~= zL)7H{D9Y83u?ZWNZa+z(cKa!1!iF3^Pf6}C!Zr7oQJlWQ=4M!&zDc1veM{L<2#5YN z-ux$Icklhj4a*E+EBFs&p7|rTeI>V$=kmB+AuEu?ZWNb|+7vwmSu7!iF5CN=fjk!!`IcQJkj5=4M!&rca?d z%|O{w2nQ0puQ!t$))~T9@L9+_b5?BsN{XG}vpcc#8)|z_)C8X^%GJ5C2^*Gn=S`uu zJ0E4jh8z}1N$>^3HTXhNoEFBWW>}mSO`$q1Mp;$}6MV>T_5c1?Cja+VALNd`hOiZV zNit7e3fsVvVki1CPAvY08ea}I(U*^Mbp>p~hNa<^QYZ}%>a9$fupx(4Qxbi(aE-ot z6sI+?!5J2(wNj`~zoaZGgo!@nxB3FjTm4rqSZD}axz{E0$n~%lEGc$!|JsQ^O+#I8 zh??9RMY*~$Hetik?WQTz4{9^Ygbg`tk&@h7hHLJvqBw1h4bHGQZJR=M+K#fO5cWHF z-s(HJV4WdsP4K@&xq3Y|VZ+kyjVaW2Z=y`tki#u034Uw12EQ$e)9u*Q42#p9 zDO9JwQhWC9f(bt4xB7qoi&g)7tN+a%s|{f*`aNWxdM~ztCB;tk`<&PU4mJJ&YN9_F zkEBo<9@Kl3GGRjwkEbO16X6>D$tX@wVS_U)PS2!Jot~xaErf|a7wk8Lt=un?dE`sj8kQ70xnFVOPt#D>uc0RQKcZZH9hGsVO>Id~t%7hI$ zyq%KV?}TgaccVDHhYilKIK7`jb^3s^wh;C^ci!qBxnR8^Y~}tpnMZzt?P5u>llwC# zwtz!je}S6ZUq-q56*ghR((N}X)KBSK%7hI${3j*3zYo{kKSXi*5u2M~aT;c_w$reb zt%Y#tKjY1R+Gh9O@NQUd2wTBNB=gLXu#GG!c7l)M#1?Rdw!0W*!iF3Mr6l+g;Tn9&C{9aZQ!^}1%cM}9mZj`1gb6<6xB7qo>lgoft1s`4 z6^F1DeMK@)T?yOCl42+NDo(5dhZZ zh8zw|N$!KfHTS_$oDRX}W>}mKOQAX)PFYg7;Mwzf7hbvMN{K{|*epM8wtFgHm7N=`ds7`;TEH8uu3EtQHiyO8a!dCDb$UO5# zY%NQQo!~b+u@M|<`&QHhzb(qu+p!57mUizU2M4Zy`+ZA-~oC`%T*az11Id$BIMPivBQ}r#^!1Wl6CU{V^vtgF}r!ftu(~ zM!EVFHetik@G~ith6nYYrA*k6!}BSL{zAA$e=&;FOW5EHi_=3qce@*6*-(WjgQtagZ z&WXL?P}kq1Cif3fuKtKk*sydv%;ZhCgL=bKCTz%I_>|-xAzX8h7{zHMY;K0dX_OS| z=RGQAlOY`X&v^5dyWM-EyJ6)aYy}^a%rnQrHnXJI2|kV!d%>Z$$3;!>@uFNEADggY zX?MaD>c=z@Wx|FWCP_)~Ny9bxWKo;B>3RoG;UaV2wTCY zBlFDZvHdJ5c7o67#Aa})?U_*%e3mFzXT>ILSlXREh1%{MlnEPhm@6g0=MLB4^F(o) z7n_@5ahgAc>a+l5lOY^P@V?$cZdiE;TfrA0^UOuDEiEZ_f-mmGW^kzOB~TN5$tYKs 
z!X|83+Fd4v+U~NH2^(@)J|)3d2-n~%MsZpRo0?&9S|x?*v?^tjAx!WgzttD{zyB)4 z|K94WyJP7gY(-y_%v0CG_OztfiN3ZI+rgp6*FjD6b)#He51X)IY53PEl!gcOHlR${ zki$kPiN0~TM&Bfg)27(q42#p|DO9H|DBBESq7V74zDV;{-^vAB4`D0!He?>TEw-j5 z#ZK<+o%qu<)b)<2$vrsA)t#^j8$YJ-C6U)J&t`9;@?t`OTJp`MuVd?g; z6zZpRIAy|y9F9y$?xVsr_t8pzBZq8rv8!dCE; z$vpEEY)?yyo#3ZAu^Sv}`*hRL2eqI!( z^Rc-Z7N-kSs7`;REH#7!2|l=Yu^YA@!dCDIzu>+;C;Q@+_3i$ zwu0Y5=9zb5n_5!r1i#CP&EQbmccUiwJyEXSi%r2b<7Lzv(*5Ah3k^K`#l`lL%XAHr7lr^!6_8Ej`uikih-NWPdTr)t9gl80LK0KZLE||046u_pxm)DRzQ?=)@nVp|(FpP4It5x%vqT}AB4LN+7lHgy3Yw)k5IDLbS&agOrmqK;=4`rPp?0@jQ*ne=t-b2_5{u7yJ4l_lb zIV|i3AI^z&;ZWNnpeFc;QLc`J4cV}?J4y<*-BBq+HsmmRN`j9OuEEEQ;xraEI>X{L zP72lO7nG%jFv0U3@}K{DsZYH5e(89wSbqpx$tNK5&+)FhuL%GG(XF&mbC=TD*by8va(h8z}3N%Dom zHTfb@oEF7~XIPvTPoX*uqO3TC14`c4ThbNF5Me9%(qtaG47Rf+#ZL0&oY)u+^}PaW zlCK!$>Ppz04NJePq)_`^l`>~T4y&gm`5NJxe9b6MYhkl9EKX~uP@R57*>MPyd?@es z?pI9Lb;&+N*vh^>naBPbTi%jlC;Ns@tPO`c-xxL7H;Hm}Q*6+NrQ^+0C>;;#Z9y5d zA&0F}l6~uN&Av?(r){zM85XDQQ>acmP}Us6WFPVu$>(W($8@k8_94Pn@SVv#a~Eud zONyP~yE*YEYN+iVYJ%?(Nk{08*bDK-u{;98Ln7~2wTa| zBJY(W*h+p4nTKABEpbV)ll(d-c85cKUyqvPH$=I5 zBQ|Wq((lbF)P8TF4BL>yZ7E59d$=aQBZ||V*a!`a(_JZ4r@v8_9l`-6@9W*;inWNa zmHh8y9(o_P$0fy1@&}yQ9S-&V5NeV?9OddG*t88xzmKI*`+b};Z9@)ErX=}O;hOyE zC{E8{12im7&!teEo~JB5gh@V>cYF7XrZ2i=K_YBrf0@i<*#F>pw~y$C&4{oSd}K1u90l9y zl42+LXilsUhuR(kHNnS>a&;_h+J>dwaZ;$ke?ghHA&2o&5`6q{4L(5>rwOqU8WyLC zQ>adpP<9@|e*B!bziT>~E4CxTR`MyxJakHIolA5&qE}lZ|cMxUTh8&hmN%Ez_HTlv}oR+~x zXjq(OzTik;-EIk8b3>U#~;BwsVi)wQr` z8dw15&6T)q#|08*(@}CBY8~*WianaXJi}pr?asU8WyK>Qm9UUpsYcJ{rEX=f8+E#SN89n4_nDEAoI`*u^lfd zc9LJ@#9ncz?@LgV{7+G?UW!fIu=IO*3bo%WDAP9Na8*i@UmdQ=uZiMxEjB{K;&fdK z)#)#kJ&15X$p`mtaK*+%*h+pAnTOttEqO_?ll)dER*OS@-;SE(cSN~*CpK-v((hd< z)PDa)nYJN^ds348-f&I+_b5*HVIwpwP7kC|ogSpDLWBcK-q(BB6-yIgEBT{j9{L!z zV@+X{FF%I?p6l#(`9p&mX*t88xzt5#m`+c4=Z9@((rX=}G;hOyAC{C|n12im7 zucc6({z2{8dmSeEP~Pp`FP*;OlD&zrmHnS&9{U!y=q1HY_II4vFAjD79%{1xE6UaP zv1uEYjz3JHbUdi{5oOwj9R8h>?4N{d_D`cYeTL1@usD5@LUsC*vJep_`%vEPt#6%v z?S{>Xuoe7UGSB=DTlJD+C;0bH{KXn-`$yCS|0&AVVf+W~P1~@vJDmUXf9)>T8=f+4 zLk=URB=|_-8hqp^PNQHmG%QY|rBFZg(J6ZoVgG~Y-9Dxp7AV42@Uh7}a~y2dONyP~ z<2tcn9BO-f)C8X(%GC+6X&aVyCr+WZI|*glh8!kKN$|uE|%9;5&q zZkR&tcO%NQ4LNL*lH{9)Yx2#aIBkv%(6BgdnL>5iin0+ACizg_?cJ}QZsU^eiLjM@ zJ2H>m9$Wd6Vki5KPAnaVI^PL3*>{d|br)>fhNa`(QYak{>g`UMwjqZ-Qj&eoaLv9~ z6sJCHhK9vy?-Z)jK9tpnFxiLlZf||}^tW!S_b2nr1F)?xDRzP%>}3DgiS^@9=bxY^`=?Q^euhokuyp)I z3Z>&gy)P-#HstViO0s_wuGznh;`ALhL&M_qeG1j-2g>Do))A zJ}jAM4u>s*NwE`r1SkGt4YfTIYJ!g(pw@=`Py^65)iBCl4nG<7+U{dS^pVW!{<51g^qbB$i zQLavjP1~@vJ9P@R-DxP(Hsml}N`g-xuEA%B;xr>RLc`)Ta|+dI7Rtgz*pHv{_IFTc zbH!>!*h)SJnTO7a?Se_MlYDL`_K!n-&x@Mm^F_HjKQ?W{((i&P)P5JDOxuvdA}L9} zXt*X{EQ-_O*a!`a(-J9ErzI(i6XAf85AH4PiWQ5nm3&z;4_yvh29shZ`3g?#Acy*1 z2{p-Aj&gMsY}$sU-_=s6{jN@#wjqZ#Q<8kGa83TpC{Al*BQz{d>!eVf)}`!Bgab<6 z*IVBe+ZACe`37Vjx*@g?CdE$jjh)y(4)wh$YLagjA@CntZz`PTOMxG%QX#rcj**Q+6i8B%gVhA^yJ#>wXP&XP4|(gstqml6mZI*k+g% zJK1|qEFy3HuHO2>nG`%tEB$lX5T-G(*f8F4U5x3 zDO9I}DVq~vvd=KgP_ozmH;{+AVZ|bB1wWk3GmpS_!lc*ZZ^ou=So*y+h1&0JlxZ7sxFaRW?+n-Ee~sdF7dAq};&gWk)#)C}21Pia* zOt>b0Hj2}8*a!`a(+eq7rxz)U6ybo9_w`wX#aQhH*kYIzJK4W-VmUd~`8TM^{%w@2-(k}>EFFKJLg{!= z?+41X4LSUjlI+7wgKPF-qc{zR&CswojgUfh8j-S05hnW#!we;ReYcP7hOLXR6?{}O z&m0X~43lCf_!v(7#TsgREYt)aJId8@uxT5XcE?SjepKU8rftY!f|LZGFkFLA6vb&` zY=(x#Y0?y`(`1xoim?B|^KPHQ4OJ_oAptzF3s2i(}I^Ed4H#LhW}+%CrqRES-|%%Y%`ho;lCR;!hH|LywNR7% zmr<^+jZNFI^t(<9wcm9q(>CO=eoB)6I$V=)5XEUjY=nlzY2y^C(%;cgGZJzk?~$Hsr8#N|Nsq zuE}?e;0w_j1V^M%c>!8#0gG8(R;PVki5)PV6a% zI^Pd9+4qlf^#E+zhNa_!QYak{>K#m(wjqZ@QI}-X4LO{ZlHg~DYw+JkaXJT^pCOAO-h1a8?M3s9L4E6Y=nlz 
z>G~9^(+!lRim)F)=k0H$-sFn)i?Ega7BUaL726JzVki0SPV6g(`o0r2$^RPV>Rs5h z4NJdwr%*qrdnnU30Qliwf3=>cqnhQ;Zj6spt1l(mX*K*IBJ zYuE@4i__~VRHrv6ixuI3lK1ug>53JMu$BC6G7o(R+YpmtC;59$>@A1-ejhc-KZtVm zLu}fHrQeTJsQvz%GHpW+pQa@FXW^Rs^C(VVU;{KPPG6-^oxY}QR)k4Dyj;uu$BEkWFGrHwj?ISPWB(2SX~ZvKFqZKakIKS%GKepX&aV~M@XS`Jg7G!W!i=u zMovlgQNlI*s8O6o!)9n$oW@9@I*mzLt_YKT$nSR9>)bxJ8@4dQR`6etdFHs-l9&`b z!N+&vFV;}o6QU;gL{Y9zj7{6Hv^!}E^`n}MGHpW+Q=}yLl;IkDswhrVV>2`?PSd7P zou;EKSA_i!o_G5UZrH*ITft`{^URsCB{3;>g3s#2@^Yx{*-;aGjwn~>#HMXn+MPRv z+U`7*X&Z8wFD1d}57*!eL~&XW8=+xwS~!L3v5&qu8>0QcSXvy4LPiwlH{v|Yw}g2IIV__ z(6BhIkwSG^ld@b94k-EH-Y;FTh7q=s|BB2**TJ^Lq}WNmo)Zhqp}v2On&cZqxw;`X zZNt*<#wpZ(H=#`1ki%vvNxpfwCf_28)0Wr>4U5y(DO9IzDC-sBfRgw1wsXZIM%YTe z1DS{Ji0z3x*Ila!_sdrh1%~PlxZ7s*efN;`@%K(Z=yKu zjSbMSIPIH4b^0x3zamWXp}gC>-%H)!CA%15EBk?D9(xeBDkjBF_CuW5Vh(kF7;3T~ z9_8v0*t88x$48}5Iv&(Jnlf!e4#%b>`*Go#{rD(OCtx!)EKVn-P@PVutXPD}K9qNR z>wBrEx?vY1Yz6-vnP;Akt%^yp6Z}jk{$dTaeKu->|31pqbFgU}mUhogp?*~7QKoIk z;ewO|zc5^b|1pZwMc51ti_;}3RHr{tRxHB)2hY3xGB@mEgstFLka^~n*s7QmJHfAZ zVud->_O+-9{^uxHufwKoSlYclh59+&K$*57hnrFo{N`{CeoGXmTd@%u7N^@&s7`lK z7A(Sk{G7MHm-<&%tYU<%Lk|B;N%FVCHTm07oZi7kXjq)y zOQAabi?U=94k&qF?*mt?V}z~bACYTyA@Af&| zu#pkAg3m?fnR835A3YQJkzrftaKmnliUcDN@0RTQUnun`&-r}a{(PU}+^Ey4jM zAKcr(6)PEGEBQub9=b8MF($=M@=cvsW)AheIckz`5#{QZ*t88xzgwqJ``v~zZ9@** zr6l?G;hKDhC{8QcO!lG7?XB;n{@x9H8DT57`yV{-_CLE}FC%OP{|lLCUXQJfNwE|BMkm&pLv7!Tn&7uYxq2%$ zZNt*;?J3lD@1RWEki%b768x@k4gR+%PIqG?G%QZ{rcj;!PFc1H`|)$${$A?+u2{ELQV3w zqg;Ilo3>%;_q`NqzyG34+mOQtDM|iexF-K7iqpr~2n~zVCn;2?Pbmu*;ee9&^*(pS zYDU;f{w0}*euZt0NwJgs8z**}Lw$dTn&kh9a`k&`+J>dyA5*CP{zRF!r~f~PVNsF~ z7p}>NkK!}}HbBGTG*SxHX=KXAMVRD6dAE1JmpZCTwll(3_R+~ab_{HJOp2ZCV>z+b z9LhcpYO?<#%GGhPX&aV~$4{YjJg7GTW!i=uCQ3>6iNiJfBvG6u#b#(&oF-49I!!@Y zx(JhfDDU>x_fn^F!*)j43O)^)XHJVPk4do;e0nGTVhy!DBWi-r6y@s7*t88xyR)WH zKdRX%(>CNVM@oXv8Lq+SisCdkHbcYWG;a#kX+Fx*McDt~dABd%hV6{76?`Ew&s-Q= z9+P4x_@YiMHHX?>95ulQMY*~JHf_Vw?ouh#c9*71+mOSuDG9z@xCUQ7iqi_%2n~zV zN-0#Ql_@J1VLyJ(+uuuF)fLMbVJrFSWFEQ(wmT-pPV%*!SZfaTy*6r+|0>GWb+Bn0 zmVVbuq4v8zW!i=uHb_bG4Z}70Mp2wL#ztsZoHk9NI&DT-x(El9d~k0ISFC4*t>jyg zdFa;I_Lvkq$+vZ4u{qTD_NYm|LzJsKV$(J({qB@P?RRI&v<*4znv&$Zg=_NNqd4`j z5gHb!JyWPodr{Ud!T}}k>;1+R3mRc7`95SGx-YgrCdE$j{hZir4)uKiYLXuq55Hn*7KpPDfz_G%QZXq)?rXrR-gVNj{W!d-r>($Gc=l zBWz_qk<4RH!dA$n*vWp16U)t^&QC*4_TNRhdO9|3!_x7YDb$Y7qDwBqxbi%ukoU!pi&kIm4qINg{+b-IbNdJ*lYz4oK%rkGtR>-8-34W&&tIeUd??O%RzeTxvH#Tj<((b(}lydyPg1D; zeoC3PA&1XXlKhKsP5xyRr?0RP8WyK-Qm9VfQkE~m0VVJ2{l^t+8euE>4`d$tBeq2* z#ZK~JyhiLdhx#54HOYsMa&-i3+J>dyky5Drj!c=hA%{^@l6(?*vUSj6D!W4&L>7q_DP~#ofMn4 zVd;4C6iUZ~dQ(uQZOCD&lw_YeT(eIT#c5h>hK9vy`V^|u43q_oFxk-%{%hXzw!W7- zlYeScBWwkqh0HT&#TLn=*a<$n6MwOW+MW|N!RLx{b#83hhNazkQ>Y)+e3WS$a#$cG z!50kI;0r}@S{R$5VR2eCh3d2zWdS4XfAGB92f1NWBWwj+c6VQF`z6zb=+GG*F^99B(9@YTXK`07!d*1$$+Se({Mp*sDNvVIZv zCO=MM{!y8Lr8})$W!i=u{+N>N7lmu~i=#MQg3Zvd zI9-}Tb-IkQh7l$^8q&MHwIB5g|J1%l*b06XnP*;&t&>Ty6Z~2y{$dTaeI06o|0T-R z>#=DYmUeGUp|*PyW!i=uZb?b-Tf;T@ZBd+V$7X0)obF7aI{lThh7tBZc;4-QbHlzy z*b06RnP=XMt&>Ty6Z}3W)|^9aKY*Ix4@SBA5H@YY((WTE)OH`GOxuvd<0%RLM7Rcj zGK$kv*a!`a(=#blr)Mck7-2tt&fDKhecl!88euE>i)0@961Gex#ZK~9oLF=Y_5B)Z zlK&&h)z`6U8?HrpiB;!N-(R36`Ik|yeuYiju=M*)3bo&FDbqIO@Sl_< z|2|xk{}9FLM{I^g_~ z9t}0gM~`xK3~btlrQfkqsL98sOxuvdFH(|x+;B}kUKFSCu>l$urwLQ2P7_f!F~TGt z%DcV$z0^ruvb7PmvQI|lv6Ev9Wm4>9pVEnS=TPTUqbB<_QLavlP1~?^Jbena;~6N^ zHsml?GgZiFM~t-&>+4`BqV`ZjDXbu=Kla3N`t5lxZ7s*dZm!cMR9$gQGa@ zgpJU!IPH=`b=sA(j1dkf`QYB}u2|a$Tgmqz^Uyu9Z89l#lJ_~W@Eq!UZ`35;C(6}* zv1uEYe)mhE^gF1xKV{m691cuL@`J)P`N2_~4#7rfSey<^p*kH-S;q(ml)SHZq$?IT z!dCL5$vpHJY@JMso#e+kvGE-0`vlY^KQYSHldx$UmVQr3p?*@QQl@Rl;dd!XetNhj 
zKO>6Mnb-ggi__UDRHxrl_A$aFAIiJE`@PgZxMX)DY-K->%wx~T7Rsd9$$p^|OV6Rs zFG5ZBi=$k<1e>;D>G;wVYR8vRrftaKij-u(GF-D?6~*alY=(x#>Dm;k)1N6T8DX*y z<=x);Ug}@mu)7hqg5N;qnKxqlWK!$|zuAeuSVL{!ikjfJMY(!AHf_Vw?wu*rcK=G5 zwjqbVr6l;>;Trs&C{FicGc+tt_oYys?x(C|g#8bmcl(2G*xd+Q!5=2`%tx?wGAVX~ zKjy^BbExenP!s&gC|94trfpc-eI|w4?z5C>8*+F)CBa_^*WfQkae4_GpBenTNiKEt5&Hll(0wmYzd>zk{0O??$=$9yV>m((n5z z)P6spOxuvdM=44EakwV`cNC{jun`&-r_WNTPM=d2GQt5RAKd%W6{{OzEBV)C9{LTo zO(w-o^6#8jdk*#eJ!+Ew5asHR*t88xzr)Pb?sr(qv<*27pOWMwglqBgsL4J{l&iC1(>5#}&z?f*cu;Q+%CrqR%$1VtbBAm8 zd7?PYi_OrmIL)6zby|S3lMyESkl*dH*YBk+OKZ$svx+hWUP zQtTw(-ig)cP~SVECi&nfS9ijuZCLullDrqL$@hrjv?n$~!{XGJ zLUsBLWg#ORQ1ZdOeO$4+5w?>5mdr!L&eqI!(^RWRM z7N-kSs7`;RY-EHPDYsQLwUEizL$EN z8Jo8R$pG=CK;CDIk7i*~PyHOMTo+wxE#ingo+PyD@`cd6anYJN^2U8OK zp>PfUa1^IUuo)T_r^iyLPLESoGQ$1`&%6CeH|%bNt>90SdFC_NI++wZ!Jl(t`#IG1 z3#bYHVw9^dVbeA&?Y@#iZTD5mv<*4@BPGFK57*#tL~(i(8=+xwdMkzM^fqN5Bkae| zdHZ{*@48}hBWxxA7nz5?k1dl)v6K8mC)S@seSeIa7N_r0s80W(EM$ZONDis7}A2 zY-EH3O5WES&lS5HVJrCrWF9&pwoWF+&SyTc6Z_Ahz9&UZ^2wrHogACCVd;0u6l%Xy zQKoIkVVaaApEg{RPZz~$dTfA(#c9SAs?$uAjf^nK(GdRTJYV;Fsk8Xkwl~67_Swih zc6Mx`Op2ZCb2_mB9qN2;)MTG0%GG(XX&aV~=TD(@JgBz-W!i=u7D`F>g~K)bB2kaLtdmKx6MRD_wx2_7Z;YDYn?$*~DK>4x((dLd)ONR^OxuvdRw)U- zb+`uKCW_Ox*a!`a)AlJ;ryVH!7-2tt&fDKh9qfwDjj)w`XEG1n1zRSQVkh}-POLwN z`tG47`5sZO?ukv?u=LxPLhbiAlxZ7s*e4~)_YK$Nzm4LwA2ve6;&ea?)#*UWLPj{C zG!x4O2318$5Wt5!fRgw1&Tz%b(LZ-lMv*N}Pawb(+L6g%0kb7BKJ)cN(O$$mqWt2biPHY^?A zoI?GqZlO%uki%^$$$opdX1^nf)1BB14U5xVDO9JwQFb!IWFN}Az4g7+d)%ddLJ91VR8B}h3fPXWgjE#$Ip5Dd#V3+ z#pXuXO8zOChkk}FlS#3Y{0k@6pF@3rg_`7FN4fe9Hf_Vw?{_KG59&XZX&Z9*AtlLw z4Aua&&FwZ%0fmspyY#lBf4UBBWxufnao2+!M4ey*hxN`6Z_Ah zzQ;gK@-d@a9SfVbVd;0A6zV7S3(B+&IgFQ*d$#Z#!C)ga2W4LK~ClI%-`Yxbq1I4y(C(6Bfy zmqK+~p0bk>Ci_s{?XB;nuIPs4jj$DbWiroP1=}Z+Vkh`&PW;6hYI_aT1Ya}C)wQr` z8acGQC2d-{s+&yeG@nAZiKDi zn~{0u=GZ!!6g$DUbYlBC)b`e>3BFB~tJ`AJHZ1LKpF;hdcA!k#ki+1V1m7uKgYO*0 zX%}pShQ(>O6sptilzoh_A3x{q@1^eHip`C%m3%KU5ADO2$)wmxzPA(W&!N8eMNRVG zM!C8lHf_Vw?*S>)59&b5v<*2NoRZ{+glqCcqc|OgjnJ?-9g#wHI+C)G5e_K%;NH=$ zSltL)$&V%T(BrUeGAVYFpWwv)bExl=P?P-RC|6IxrfpdIJuQX$N&Sv8Z9@)cq$K&7 z;hOxcC{AZ%BQz{d=cG`b{y^Et2nUqBuXmm+b~nOS@(aj3^g?W%Op2Z47df&29P0ZL z)Fl5?l&hCw(>5&qUYoX5 zdFXf8GMN-R$-j4E{W;Y4kEluhQ{?GsQYyCN8+J+oPOiA*Q!ZrEG zQJhA>Mrc@^MoXc7};hKDkC{9yiBQz{dQ>Rd!rlD+Pgab<6*PG52 zyBlFE`3z(pIwQ7DCdE$jnVr~w4)r}NYLd?uBAI zntZ+}PV-{}G%QXFrcj+0qHJV@Nj{W!d-r>(i@0QaBWz_~jLc&f#}>+@*vY z&X+<>_NAj-T?U)BVd;3e6iUZ~ddpL$ZOCE8lw@BiT(hqn#c366hK9vywG^t;>Xe;~ zFxiLlZf|`rbxk)cZ-lMjza;a_wXuCNDRzRdUjRZNt*;hAGsK zY9q?D4LNL*lHi+$Yw*pYIBky2(6BgdnL>5iin5Xs_CI*u?c2CvcOz^C-;T^Px5w7W zq}U0*qZ8ZDp|*EIP4JzgT-^nmwqa>^w-jo-yHloZ$YGC^1m81UgYOl^sSg{WVR70! 
zh3d2qWgjE#$Ip5Dd#S&5#pXuXO1?jthaP|}lS#3Y{2(XRpF@2gf|}%qM!9+zHf_Vw z?-41~evhO~+mOT2DM@}zxF$a~iqmn}2n~zV2`N;k6DbQB;ee74?w#z4)s3)~{8Taz zJq_C?lVT_N=}zoFhx$GfHObG4a`kL%+J>dyb5f}N{(&-WLk{PqB>DN_n*4$&P8VV$ zG%QXRrBIzNrfg({14`c4`;#knH^Nr(%g8+Ra%`PUik;+FIicTcB)=xg)oZb7 z80vXK!c`B2{N-S4H|;gao* zu$BF<&nmX04tp?3TrW!i=u9!^R2N5VDx zqfwk5!)9n$oSsObIz36*$q18uDDU>x_fnsB!}3Ph3jQpaXFiARlS#1?`~@feVhy$Z z5^93K9OddO*t88xyRW5C+x-V++J+q7NJ;QF!!`Inqd2{V&Cswoy^}(9dY7`25%xcL z-tGT#!|q1d3jP6^XMTvSlS#1?{9`A!pF?ebf|}r;M!EVKHf_Vw?iVT4cE6-d+mOT8 zDGB~fxCZ|=iqm)42n~zV_bF7TA1M16VLyJ(+uuw5$rYO$XX_*%mdrzk!JamVa${yA1hpwj~&Hn9BhP!#cA9Ws?&Is zg^X}O$p`l)aK-9I*!s*TBJNE>wBO@G8^1j||uGrlOTgm4j^Uyi5buuY- zlF#kL{&T4Bc~O&mz9?7c$EIyq`du)E+V4V?X&Z7_Bqhlg4cFw0MR8gj8=zrvS|Ww& zv?OICBTVwK{k?scU3)uk<|ld1op#t|n=N|1rPVBHgsq6nl5t-S+Z&T&C*lfDEINl8 zTnRN1SB`RZ6>O-6rLomgsEw^o8LACmR{69Qs*w7Ze-e$hLu-iUstvcdJIT+UecOm?Se?H7G F{||AAV_yIO literal 0 HcmV?d00001 diff --git a/examples/stable-diffusion/quantize/measure_all_500/fp8_hooks_maxabs_mod_list.json b/examples/stable-diffusion/quantize/measure_all_500/fp8_hooks_maxabs_mod_list.json new file mode 100644 index 0000000000..4cd94ad59e --- /dev/null +++ b/examples/stable-diffusion/quantize/measure_all_500/fp8_hooks_maxabs_mod_list.json @@ -0,0 +1,506 @@ +[ + "time_text_embed.timestep_embedder.linear_1", + "time_text_embed.timestep_embedder.linear_2", + "time_text_embed.guidance_embedder.linear_1", + "time_text_embed.guidance_embedder.linear_2", + "time_text_embed.text_embedder.linear_1", + "time_text_embed.text_embedder.linear_2", + "context_embedder", + "x_embedder", + "transformer_blocks.0.norm1.linear", + "transformer_blocks.0.norm1_context.linear", + "transformer_blocks.0.attn.to_q", + "transformer_blocks.0.attn.to_k", + "transformer_blocks.0.attn.to_v", + "transformer_blocks.0.attn.add_k_proj", + "transformer_blocks.0.attn.add_v_proj", + "transformer_blocks.0.attn.add_q_proj", + "transformer_blocks.0.attn.to_out.0", + "transformer_blocks.0.attn.to_add_out", + "transformer_blocks.0.ff.net.0.proj", + "transformer_blocks.0.ff.net.2", + "transformer_blocks.0.ff_context.net.0.proj", + "transformer_blocks.0.ff_context.net.2", + "transformer_blocks.1.norm1.linear", + "transformer_blocks.1.norm1_context.linear", + "transformer_blocks.1.attn.to_q", + "transformer_blocks.1.attn.to_k", + "transformer_blocks.1.attn.to_v", + "transformer_blocks.1.attn.add_k_proj", + "transformer_blocks.1.attn.add_v_proj", + "transformer_blocks.1.attn.add_q_proj", + "transformer_blocks.1.attn.to_out.0", + "transformer_blocks.1.attn.to_add_out", + "transformer_blocks.1.ff.net.0.proj", + "transformer_blocks.1.ff.net.2", + "transformer_blocks.1.ff_context.net.0.proj", + "transformer_blocks.1.ff_context.net.2", + "transformer_blocks.2.norm1.linear", + "transformer_blocks.2.norm1_context.linear", + "transformer_blocks.2.attn.to_q", + "transformer_blocks.2.attn.to_k", + "transformer_blocks.2.attn.to_v", + "transformer_blocks.2.attn.add_k_proj", + "transformer_blocks.2.attn.add_v_proj", + "transformer_blocks.2.attn.add_q_proj", + "transformer_blocks.2.attn.to_out.0", + "transformer_blocks.2.attn.to_add_out", + "transformer_blocks.2.ff.net.0.proj", + "transformer_blocks.2.ff.net.2", + "transformer_blocks.2.ff_context.net.0.proj", + "transformer_blocks.2.ff_context.net.2", + "transformer_blocks.3.norm1.linear", + "transformer_blocks.3.norm1_context.linear", + "transformer_blocks.3.attn.to_q", + "transformer_blocks.3.attn.to_k", + "transformer_blocks.3.attn.to_v", + "transformer_blocks.3.attn.add_k_proj", + 
"transformer_blocks.3.attn.add_v_proj", + "transformer_blocks.3.attn.add_q_proj", + "transformer_blocks.3.attn.to_out.0", + "transformer_blocks.3.attn.to_add_out", + "transformer_blocks.3.ff.net.0.proj", + "transformer_blocks.3.ff.net.2", + "transformer_blocks.3.ff_context.net.0.proj", + "transformer_blocks.3.ff_context.net.2", + "transformer_blocks.4.norm1.linear", + "transformer_blocks.4.norm1_context.linear", + "transformer_blocks.4.attn.to_q", + "transformer_blocks.4.attn.to_k", + "transformer_blocks.4.attn.to_v", + "transformer_blocks.4.attn.add_k_proj", + "transformer_blocks.4.attn.add_v_proj", + "transformer_blocks.4.attn.add_q_proj", + "transformer_blocks.4.attn.to_out.0", + "transformer_blocks.4.attn.to_add_out", + "transformer_blocks.4.ff.net.0.proj", + "transformer_blocks.4.ff.net.2", + "transformer_blocks.4.ff_context.net.0.proj", + "transformer_blocks.4.ff_context.net.2", + "transformer_blocks.5.norm1.linear", + "transformer_blocks.5.norm1_context.linear", + "transformer_blocks.5.attn.to_q", + "transformer_blocks.5.attn.to_k", + "transformer_blocks.5.attn.to_v", + "transformer_blocks.5.attn.add_k_proj", + "transformer_blocks.5.attn.add_v_proj", + "transformer_blocks.5.attn.add_q_proj", + "transformer_blocks.5.attn.to_out.0", + "transformer_blocks.5.attn.to_add_out", + "transformer_blocks.5.ff.net.0.proj", + "transformer_blocks.5.ff.net.2", + "transformer_blocks.5.ff_context.net.0.proj", + "transformer_blocks.5.ff_context.net.2", + "transformer_blocks.6.norm1.linear", + "transformer_blocks.6.norm1_context.linear", + "transformer_blocks.6.attn.to_q", + "transformer_blocks.6.attn.to_k", + "transformer_blocks.6.attn.to_v", + "transformer_blocks.6.attn.add_k_proj", + "transformer_blocks.6.attn.add_v_proj", + "transformer_blocks.6.attn.add_q_proj", + "transformer_blocks.6.attn.to_out.0", + "transformer_blocks.6.attn.to_add_out", + "transformer_blocks.6.ff.net.0.proj", + "transformer_blocks.6.ff.net.2", + "transformer_blocks.6.ff_context.net.0.proj", + "transformer_blocks.6.ff_context.net.2", + "transformer_blocks.7.norm1.linear", + "transformer_blocks.7.norm1_context.linear", + "transformer_blocks.7.attn.to_q", + "transformer_blocks.7.attn.to_k", + "transformer_blocks.7.attn.to_v", + "transformer_blocks.7.attn.add_k_proj", + "transformer_blocks.7.attn.add_v_proj", + "transformer_blocks.7.attn.add_q_proj", + "transformer_blocks.7.attn.to_out.0", + "transformer_blocks.7.attn.to_add_out", + "transformer_blocks.7.ff.net.0.proj", + "transformer_blocks.7.ff.net.2", + "transformer_blocks.7.ff_context.net.0.proj", + "transformer_blocks.7.ff_context.net.2", + "transformer_blocks.8.norm1.linear", + "transformer_blocks.8.norm1_context.linear", + "transformer_blocks.8.attn.to_q", + "transformer_blocks.8.attn.to_k", + "transformer_blocks.8.attn.to_v", + "transformer_blocks.8.attn.add_k_proj", + "transformer_blocks.8.attn.add_v_proj", + "transformer_blocks.8.attn.add_q_proj", + "transformer_blocks.8.attn.to_out.0", + "transformer_blocks.8.attn.to_add_out", + "transformer_blocks.8.ff.net.0.proj", + "transformer_blocks.8.ff.net.2", + "transformer_blocks.8.ff_context.net.0.proj", + "transformer_blocks.8.ff_context.net.2", + "transformer_blocks.9.norm1.linear", + "transformer_blocks.9.norm1_context.linear", + "transformer_blocks.9.attn.to_q", + "transformer_blocks.9.attn.to_k", + "transformer_blocks.9.attn.to_v", + "transformer_blocks.9.attn.add_k_proj", + "transformer_blocks.9.attn.add_v_proj", + "transformer_blocks.9.attn.add_q_proj", + "transformer_blocks.9.attn.to_out.0", + 
"transformer_blocks.9.attn.to_add_out", + "transformer_blocks.9.ff.net.0.proj", + "transformer_blocks.9.ff.net.2", + "transformer_blocks.9.ff_context.net.0.proj", + "transformer_blocks.9.ff_context.net.2", + "transformer_blocks.10.norm1.linear", + "transformer_blocks.10.norm1_context.linear", + "transformer_blocks.10.attn.to_q", + "transformer_blocks.10.attn.to_k", + "transformer_blocks.10.attn.to_v", + "transformer_blocks.10.attn.add_k_proj", + "transformer_blocks.10.attn.add_v_proj", + "transformer_blocks.10.attn.add_q_proj", + "transformer_blocks.10.attn.to_out.0", + "transformer_blocks.10.attn.to_add_out", + "transformer_blocks.10.ff.net.0.proj", + "transformer_blocks.10.ff.net.2", + "transformer_blocks.10.ff_context.net.0.proj", + "transformer_blocks.10.ff_context.net.2", + "transformer_blocks.11.norm1.linear", + "transformer_blocks.11.norm1_context.linear", + "transformer_blocks.11.attn.to_q", + "transformer_blocks.11.attn.to_k", + "transformer_blocks.11.attn.to_v", + "transformer_blocks.11.attn.add_k_proj", + "transformer_blocks.11.attn.add_v_proj", + "transformer_blocks.11.attn.add_q_proj", + "transformer_blocks.11.attn.to_out.0", + "transformer_blocks.11.attn.to_add_out", + "transformer_blocks.11.ff.net.0.proj", + "transformer_blocks.11.ff.net.2", + "transformer_blocks.11.ff_context.net.0.proj", + "transformer_blocks.11.ff_context.net.2", + "transformer_blocks.12.norm1.linear", + "transformer_blocks.12.norm1_context.linear", + "transformer_blocks.12.attn.to_q", + "transformer_blocks.12.attn.to_k", + "transformer_blocks.12.attn.to_v", + "transformer_blocks.12.attn.add_k_proj", + "transformer_blocks.12.attn.add_v_proj", + "transformer_blocks.12.attn.add_q_proj", + "transformer_blocks.12.attn.to_out.0", + "transformer_blocks.12.attn.to_add_out", + "transformer_blocks.12.ff.net.0.proj", + "transformer_blocks.12.ff.net.2", + "transformer_blocks.12.ff_context.net.0.proj", + "transformer_blocks.12.ff_context.net.2", + "transformer_blocks.13.norm1.linear", + "transformer_blocks.13.norm1_context.linear", + "transformer_blocks.13.attn.to_q", + "transformer_blocks.13.attn.to_k", + "transformer_blocks.13.attn.to_v", + "transformer_blocks.13.attn.add_k_proj", + "transformer_blocks.13.attn.add_v_proj", + "transformer_blocks.13.attn.add_q_proj", + "transformer_blocks.13.attn.to_out.0", + "transformer_blocks.13.attn.to_add_out", + "transformer_blocks.13.ff.net.0.proj", + "transformer_blocks.13.ff.net.2", + "transformer_blocks.13.ff_context.net.0.proj", + "transformer_blocks.13.ff_context.net.2", + "transformer_blocks.14.norm1.linear", + "transformer_blocks.14.norm1_context.linear", + "transformer_blocks.14.attn.to_q", + "transformer_blocks.14.attn.to_k", + "transformer_blocks.14.attn.to_v", + "transformer_blocks.14.attn.add_k_proj", + "transformer_blocks.14.attn.add_v_proj", + "transformer_blocks.14.attn.add_q_proj", + "transformer_blocks.14.attn.to_out.0", + "transformer_blocks.14.attn.to_add_out", + "transformer_blocks.14.ff.net.0.proj", + "transformer_blocks.14.ff.net.2", + "transformer_blocks.14.ff_context.net.0.proj", + "transformer_blocks.14.ff_context.net.2", + "transformer_blocks.15.norm1.linear", + "transformer_blocks.15.norm1_context.linear", + "transformer_blocks.15.attn.to_q", + "transformer_blocks.15.attn.to_k", + "transformer_blocks.15.attn.to_v", + "transformer_blocks.15.attn.add_k_proj", + "transformer_blocks.15.attn.add_v_proj", + "transformer_blocks.15.attn.add_q_proj", + "transformer_blocks.15.attn.to_out.0", + "transformer_blocks.15.attn.to_add_out", + 
"transformer_blocks.15.ff.net.0.proj", + "transformer_blocks.15.ff.net.2", + "transformer_blocks.15.ff_context.net.0.proj", + "transformer_blocks.15.ff_context.net.2", + "transformer_blocks.16.norm1.linear", + "transformer_blocks.16.norm1_context.linear", + "transformer_blocks.16.attn.to_q", + "transformer_blocks.16.attn.to_k", + "transformer_blocks.16.attn.to_v", + "transformer_blocks.16.attn.add_k_proj", + "transformer_blocks.16.attn.add_v_proj", + "transformer_blocks.16.attn.add_q_proj", + "transformer_blocks.16.attn.to_out.0", + "transformer_blocks.16.attn.to_add_out", + "transformer_blocks.16.ff.net.0.proj", + "transformer_blocks.16.ff.net.2", + "transformer_blocks.16.ff_context.net.0.proj", + "transformer_blocks.16.ff_context.net.2", + "transformer_blocks.17.norm1.linear", + "transformer_blocks.17.norm1_context.linear", + "transformer_blocks.17.attn.to_q", + "transformer_blocks.17.attn.to_k", + "transformer_blocks.17.attn.to_v", + "transformer_blocks.17.attn.add_k_proj", + "transformer_blocks.17.attn.add_v_proj", + "transformer_blocks.17.attn.add_q_proj", + "transformer_blocks.17.attn.to_out.0", + "transformer_blocks.17.attn.to_add_out", + "transformer_blocks.17.ff.net.0.proj", + "transformer_blocks.17.ff.net.2", + "transformer_blocks.17.ff_context.net.0.proj", + "transformer_blocks.17.ff_context.net.2", + "transformer_blocks.18.norm1.linear", + "transformer_blocks.18.norm1_context.linear", + "transformer_blocks.18.attn.to_q", + "transformer_blocks.18.attn.to_k", + "transformer_blocks.18.attn.to_v", + "transformer_blocks.18.attn.add_k_proj", + "transformer_blocks.18.attn.add_v_proj", + "transformer_blocks.18.attn.add_q_proj", + "transformer_blocks.18.attn.to_out.0", + "transformer_blocks.18.attn.to_add_out", + "transformer_blocks.18.ff.net.0.proj", + "transformer_blocks.18.ff.net.2", + "transformer_blocks.18.ff_context.net.0.proj", + "transformer_blocks.18.ff_context.net.2", + "single_transformer_blocks.0.norm.linear", + "single_transformer_blocks.0.proj_mlp", + "single_transformer_blocks.0.proj_out", + "single_transformer_blocks.0.attn.to_q", + "single_transformer_blocks.0.attn.to_k", + "single_transformer_blocks.0.attn.to_v", + "single_transformer_blocks.1.norm.linear", + "single_transformer_blocks.1.proj_mlp", + "single_transformer_blocks.1.proj_out", + "single_transformer_blocks.1.attn.to_q", + "single_transformer_blocks.1.attn.to_k", + "single_transformer_blocks.1.attn.to_v", + "single_transformer_blocks.2.norm.linear", + "single_transformer_blocks.2.proj_mlp", + "single_transformer_blocks.2.proj_out", + "single_transformer_blocks.2.attn.to_q", + "single_transformer_blocks.2.attn.to_k", + "single_transformer_blocks.2.attn.to_v", + "single_transformer_blocks.3.norm.linear", + "single_transformer_blocks.3.proj_mlp", + "single_transformer_blocks.3.proj_out", + "single_transformer_blocks.3.attn.to_q", + "single_transformer_blocks.3.attn.to_k", + "single_transformer_blocks.3.attn.to_v", + "single_transformer_blocks.4.norm.linear", + "single_transformer_blocks.4.proj_mlp", + "single_transformer_blocks.4.proj_out", + "single_transformer_blocks.4.attn.to_q", + "single_transformer_blocks.4.attn.to_k", + "single_transformer_blocks.4.attn.to_v", + "single_transformer_blocks.5.norm.linear", + "single_transformer_blocks.5.proj_mlp", + "single_transformer_blocks.5.proj_out", + "single_transformer_blocks.5.attn.to_q", + "single_transformer_blocks.5.attn.to_k", + "single_transformer_blocks.5.attn.to_v", + "single_transformer_blocks.6.norm.linear", + "single_transformer_blocks.6.proj_mlp", + 
"single_transformer_blocks.6.proj_out", + "single_transformer_blocks.6.attn.to_q", + "single_transformer_blocks.6.attn.to_k", + "single_transformer_blocks.6.attn.to_v", + "single_transformer_blocks.7.norm.linear", + "single_transformer_blocks.7.proj_mlp", + "single_transformer_blocks.7.proj_out", + "single_transformer_blocks.7.attn.to_q", + "single_transformer_blocks.7.attn.to_k", + "single_transformer_blocks.7.attn.to_v", + "single_transformer_blocks.8.norm.linear", + "single_transformer_blocks.8.proj_mlp", + "single_transformer_blocks.8.proj_out", + "single_transformer_blocks.8.attn.to_q", + "single_transformer_blocks.8.attn.to_k", + "single_transformer_blocks.8.attn.to_v", + "single_transformer_blocks.9.norm.linear", + "single_transformer_blocks.9.proj_mlp", + "single_transformer_blocks.9.proj_out", + "single_transformer_blocks.9.attn.to_q", + "single_transformer_blocks.9.attn.to_k", + "single_transformer_blocks.9.attn.to_v", + "single_transformer_blocks.10.norm.linear", + "single_transformer_blocks.10.proj_mlp", + "single_transformer_blocks.10.proj_out", + "single_transformer_blocks.10.attn.to_q", + "single_transformer_blocks.10.attn.to_k", + "single_transformer_blocks.10.attn.to_v", + "single_transformer_blocks.11.norm.linear", + "single_transformer_blocks.11.proj_mlp", + "single_transformer_blocks.11.proj_out", + "single_transformer_blocks.11.attn.to_q", + "single_transformer_blocks.11.attn.to_k", + "single_transformer_blocks.11.attn.to_v", + "single_transformer_blocks.12.norm.linear", + "single_transformer_blocks.12.proj_mlp", + "single_transformer_blocks.12.proj_out", + "single_transformer_blocks.12.attn.to_q", + "single_transformer_blocks.12.attn.to_k", + "single_transformer_blocks.12.attn.to_v", + "single_transformer_blocks.13.norm.linear", + "single_transformer_blocks.13.proj_mlp", + "single_transformer_blocks.13.proj_out", + "single_transformer_blocks.13.attn.to_q", + "single_transformer_blocks.13.attn.to_k", + "single_transformer_blocks.13.attn.to_v", + "single_transformer_blocks.14.norm.linear", + "single_transformer_blocks.14.proj_mlp", + "single_transformer_blocks.14.proj_out", + "single_transformer_blocks.14.attn.to_q", + "single_transformer_blocks.14.attn.to_k", + "single_transformer_blocks.14.attn.to_v", + "single_transformer_blocks.15.norm.linear", + "single_transformer_blocks.15.proj_mlp", + "single_transformer_blocks.15.proj_out", + "single_transformer_blocks.15.attn.to_q", + "single_transformer_blocks.15.attn.to_k", + "single_transformer_blocks.15.attn.to_v", + "single_transformer_blocks.16.norm.linear", + "single_transformer_blocks.16.proj_mlp", + "single_transformer_blocks.16.proj_out", + "single_transformer_blocks.16.attn.to_q", + "single_transformer_blocks.16.attn.to_k", + "single_transformer_blocks.16.attn.to_v", + "single_transformer_blocks.17.norm.linear", + "single_transformer_blocks.17.proj_mlp", + "single_transformer_blocks.17.proj_out", + "single_transformer_blocks.17.attn.to_q", + "single_transformer_blocks.17.attn.to_k", + "single_transformer_blocks.17.attn.to_v", + "single_transformer_blocks.18.norm.linear", + "single_transformer_blocks.18.proj_mlp", + "single_transformer_blocks.18.proj_out", + "single_transformer_blocks.18.attn.to_q", + "single_transformer_blocks.18.attn.to_k", + "single_transformer_blocks.18.attn.to_v", + "single_transformer_blocks.19.norm.linear", + "single_transformer_blocks.19.proj_mlp", + "single_transformer_blocks.19.proj_out", + "single_transformer_blocks.19.attn.to_q", + "single_transformer_blocks.19.attn.to_k", + 
"single_transformer_blocks.19.attn.to_v", + "single_transformer_blocks.20.norm.linear", + "single_transformer_blocks.20.proj_mlp", + "single_transformer_blocks.20.proj_out", + "single_transformer_blocks.20.attn.to_q", + "single_transformer_blocks.20.attn.to_k", + "single_transformer_blocks.20.attn.to_v", + "single_transformer_blocks.21.norm.linear", + "single_transformer_blocks.21.proj_mlp", + "single_transformer_blocks.21.proj_out", + "single_transformer_blocks.21.attn.to_q", + "single_transformer_blocks.21.attn.to_k", + "single_transformer_blocks.21.attn.to_v", + "single_transformer_blocks.22.norm.linear", + "single_transformer_blocks.22.proj_mlp", + "single_transformer_blocks.22.proj_out", + "single_transformer_blocks.22.attn.to_q", + "single_transformer_blocks.22.attn.to_k", + "single_transformer_blocks.22.attn.to_v", + "single_transformer_blocks.23.norm.linear", + "single_transformer_blocks.23.proj_mlp", + "single_transformer_blocks.23.proj_out", + "single_transformer_blocks.23.attn.to_q", + "single_transformer_blocks.23.attn.to_k", + "single_transformer_blocks.23.attn.to_v", + "single_transformer_blocks.24.norm.linear", + "single_transformer_blocks.24.proj_mlp", + "single_transformer_blocks.24.proj_out", + "single_transformer_blocks.24.attn.to_q", + "single_transformer_blocks.24.attn.to_k", + "single_transformer_blocks.24.attn.to_v", + "single_transformer_blocks.25.norm.linear", + "single_transformer_blocks.25.proj_mlp", + "single_transformer_blocks.25.proj_out", + "single_transformer_blocks.25.attn.to_q", + "single_transformer_blocks.25.attn.to_k", + "single_transformer_blocks.25.attn.to_v", + "single_transformer_blocks.26.norm.linear", + "single_transformer_blocks.26.proj_mlp", + "single_transformer_blocks.26.proj_out", + "single_transformer_blocks.26.attn.to_q", + "single_transformer_blocks.26.attn.to_k", + "single_transformer_blocks.26.attn.to_v", + "single_transformer_blocks.27.norm.linear", + "single_transformer_blocks.27.proj_mlp", + "single_transformer_blocks.27.proj_out", + "single_transformer_blocks.27.attn.to_q", + "single_transformer_blocks.27.attn.to_k", + "single_transformer_blocks.27.attn.to_v", + "single_transformer_blocks.28.norm.linear", + "single_transformer_blocks.28.proj_mlp", + "single_transformer_blocks.28.proj_out", + "single_transformer_blocks.28.attn.to_q", + "single_transformer_blocks.28.attn.to_k", + "single_transformer_blocks.28.attn.to_v", + "single_transformer_blocks.29.norm.linear", + "single_transformer_blocks.29.proj_mlp", + "single_transformer_blocks.29.proj_out", + "single_transformer_blocks.29.attn.to_q", + "single_transformer_blocks.29.attn.to_k", + "single_transformer_blocks.29.attn.to_v", + "single_transformer_blocks.30.norm.linear", + "single_transformer_blocks.30.proj_mlp", + "single_transformer_blocks.30.proj_out", + "single_transformer_blocks.30.attn.to_q", + "single_transformer_blocks.30.attn.to_k", + "single_transformer_blocks.30.attn.to_v", + "single_transformer_blocks.31.norm.linear", + "single_transformer_blocks.31.proj_mlp", + "single_transformer_blocks.31.proj_out", + "single_transformer_blocks.31.attn.to_q", + "single_transformer_blocks.31.attn.to_k", + "single_transformer_blocks.31.attn.to_v", + "single_transformer_blocks.32.norm.linear", + "single_transformer_blocks.32.proj_mlp", + "single_transformer_blocks.32.proj_out", + "single_transformer_blocks.32.attn.to_q", + "single_transformer_blocks.32.attn.to_k", + "single_transformer_blocks.32.attn.to_v", + "single_transformer_blocks.33.norm.linear", + 
"single_transformer_blocks.33.proj_mlp", + "single_transformer_blocks.33.proj_out", + "single_transformer_blocks.33.attn.to_q", + "single_transformer_blocks.33.attn.to_k", + "single_transformer_blocks.33.attn.to_v", + "single_transformer_blocks.34.norm.linear", + "single_transformer_blocks.34.proj_mlp", + "single_transformer_blocks.34.proj_out", + "single_transformer_blocks.34.attn.to_q", + "single_transformer_blocks.34.attn.to_k", + "single_transformer_blocks.34.attn.to_v", + "single_transformer_blocks.35.norm.linear", + "single_transformer_blocks.35.proj_mlp", + "single_transformer_blocks.35.proj_out", + "single_transformer_blocks.35.attn.to_q", + "single_transformer_blocks.35.attn.to_k", + "single_transformer_blocks.35.attn.to_v", + "single_transformer_blocks.36.norm.linear", + "single_transformer_blocks.36.proj_mlp", + "single_transformer_blocks.36.proj_out", + "single_transformer_blocks.36.attn.to_q", + "single_transformer_blocks.36.attn.to_k", + "single_transformer_blocks.36.attn.to_v", + "single_transformer_blocks.37.norm.linear", + "single_transformer_blocks.37.proj_mlp", + "single_transformer_blocks.37.proj_out", + "single_transformer_blocks.37.attn.to_q", + "single_transformer_blocks.37.attn.to_k", + "single_transformer_blocks.37.attn.to_v", + "norm_out.linear", + "proj_out" +] \ No newline at end of file diff --git a/examples/stable-diffusion/quantize/measure_config.json b/examples/stable-diffusion/quantize/measure_config.json new file mode 100755 index 0000000000..19d5e988d4 --- /dev/null +++ b/examples/stable-diffusion/quantize/measure_config.json @@ -0,0 +1,5 @@ +{ + "method": "HOOKS", + "mode": "MEASURE", + "dump_stats_path": "quantize/measure_all/fp8" +} diff --git a/examples/stable-diffusion/quantize/quant_config.json b/examples/stable-diffusion/quantize/quant_config.json new file mode 100755 index 0000000000..eab3011a5e --- /dev/null +++ b/examples/stable-diffusion/quantize/quant_config.json @@ -0,0 +1,6 @@ +{ + "method": "HOOKS", + "mode": "QUANTIZE", + "scale_method": "maxabs_hw_opt_weight", + "dump_stats_path": "quantize/measure_all/fp8" +} diff --git a/examples/stable-diffusion/quantize/quant_config_500.json b/examples/stable-diffusion/quantize/quant_config_500.json new file mode 100755 index 0000000000..173f93772c --- /dev/null +++ b/examples/stable-diffusion/quantize/quant_config_500.json @@ -0,0 +1,6 @@ +{ + "method": "HOOKS", + "mode": "QUANTIZE", + "scale_method": "maxabs_hw_opt_weight", + "dump_stats_path": "quantize/measure_all_500/fp8" +} diff --git a/examples/stable-diffusion/quantize/quant_config_bmm.json b/examples/stable-diffusion/quantize/quant_config_bmm.json new file mode 100755 index 0000000000..5cbc2bac9f --- /dev/null +++ b/examples/stable-diffusion/quantize/quant_config_bmm.json @@ -0,0 +1,7 @@ +{ + "method": "HOOKS", + "mode": "QUANTIZE", + "scale_method": "maxabs_hw_opt_weight", + "dump_stats_path": "quantize/measure_all/fp8", + "blocklist": {"types": ["Linear", "Conv2d", "LoRACompatibleLinear", "LoRACompatibleConv"]} +} diff --git a/examples/stable-diffusion/readme.txt b/examples/stable-diffusion/readme.txt new file mode 100644 index 0000000000..d57f39a7f1 --- /dev/null +++ b/examples/stable-diffusion/readme.txt @@ -0,0 +1,12 @@ +This is experimental PR for Fal.ai ask based on current FLUX PR https://github.com/huggingface/optimum-habana/pull/1331 +* PR is fixed with timing (HPU device sync and include VAD into timing measure) +* Added FP8 quantization support + +To run sample with 1 image 1 batch in BF16 precision: +./run_bf16.sh + +To run 
sample with 1 image 1 batch in FP8 precision (quant weights were tuned with 1 prompt): +./run_fp8.sh + +To run sample with 1 image 1 batch in FP8 precision (quant weights were tuned with 500 prompts): +./run_fp8_500.sh diff --git a/examples/stable-diffusion/run_bf16.sh b/examples/stable-diffusion/run_bf16.sh new file mode 100755 index 0000000000..e8b74d3312 --- /dev/null +++ b/examples/stable-diffusion/run_bf16.sh @@ -0,0 +1,13 @@ +#!/bin/bash +python text_to_image_generation.py \ + --model_name_or_path black-forest-labs/FLUX.1-dev \ + --prompts "A cat holding a sign that says hello world" \ + --num_images_per_prompt 1 \ + --batch_size 1 \ + --num_inference_steps 30 \ + --image_save_dir /tmp/flux_1_images \ + --scheduler flow_match_euler_discrete \ + --use_habana \ + --use_hpu_graphs \ + --gaudi_config Habana/stable-diffusion \ + --bf16 diff --git a/examples/stable-diffusion/run_fp8.sh b/examples/stable-diffusion/run_fp8.sh new file mode 100755 index 0000000000..e914a3421d --- /dev/null +++ b/examples/stable-diffusion/run_fp8.sh @@ -0,0 +1,16 @@ +#!/bin/bash +QUANT_CONFIG=quantize/quant_config.json \ +PT_HPU_WEIGHT_SHARING=0 \ +python text_to_image_generation.py \ + --model_name_or_path black-forest-labs/FLUX.1-dev \ + --prompts "A cat holding a sign that says hello world" \ + --num_images_per_prompt 1 \ + --batch_size 1 \ + --num_inference_steps 30 \ + --image_save_dir /tmp/flux_1_images_fp8 \ + --scheduler flow_match_euler_discrete \ + --use_habana \ + --use_hpu_graphs \ + --gaudi_config Habana/stable-diffusion \ + --bf16 \ + --quant_mode quantize diff --git a/examples/stable-diffusion/run_fp8_500.sh b/examples/stable-diffusion/run_fp8_500.sh new file mode 100755 index 0000000000..50ffc36b66 --- /dev/null +++ b/examples/stable-diffusion/run_fp8_500.sh @@ -0,0 +1,16 @@ +#!/bin/bash +QUANT_CONFIG=quantize/quant_config_500.json \ +PT_HPU_WEIGHT_SHARING=0 \ +python text_to_image_generation.py \ + --model_name_or_path black-forest-labs/FLUX.1-dev \ + --prompts "A cat holding a sign that says hello world" \ + --num_images_per_prompt 1 \ + --batch_size 1 \ + --num_inference_steps 30 \ + --image_save_dir /tmp/flux_1_images_fp8_500 \ + --scheduler flow_match_euler_discrete \ + --use_habana \ + --use_hpu_graphs \ + --gaudi_config Habana/stable-diffusion \ + --bf16 \ + --quant_mode quantize diff --git a/examples/stable-diffusion/text_to_image_generation.py b/examples/stable-diffusion/text_to_image_generation.py index 8425389b4b..70be07ee3b 100755 --- a/examples/stable-diffusion/text_to_image_generation.py +++ b/examples/stable-diffusion/text_to_image_generation.py @@ -271,6 +271,12 @@ def main(): action="store_true", help="Enable deterministic generation using CPU Generator", ) + parser.add_argument( + "--quant_mode", + default="disable", + type=str, + help="Quantization mode 'measure', 'quantize' or 'disable'", + ) args = parser.parse_args() # Select stable diffuson pipeline based on input @@ -398,7 +404,10 @@ def main(): control_image = Image.fromarray(image) kwargs_call["image"] = control_image + kwargs_call["quant_mode"] = args.quant_mode + # Instantiate a Stable Diffusion pipeline class + import habana_frameworks.torch.core as htcore if sdxl: # SDXL pipelines if controlnet: diff --git a/examples/stable-diffusion/unconditional_image_generation.py b/examples/stable-diffusion/unconditional_image_generation.py old mode 100644 new mode 100755 diff --git a/optimum/habana/diffusers/pipelines/flux/pipeline_flux.py b/optimum/habana/diffusers/pipelines/flux/pipeline_flux.py index e2f432c6ac..78aff5d4ff 
100644 --- a/optimum/habana/diffusers/pipelines/flux/pipeline_flux.py +++ b/optimum/habana/diffusers/pipelines/flux/pipeline_flux.py @@ -244,9 +244,24 @@ def __call__( is True, otherwise a `tuple`. When returning a tuple, the first element is a list with the generated images. """ - + import habana_frameworks.torch as ht import habana_frameworks.torch.core as htcore + quant_mode=kwargs["quant_mode"] + if quant_mode == "measure" or quant_mode == "quantize": + import os + quant_config_path = os.getenv('QUANT_CONFIG') + + htcore.hpu_set_env() + + from neural_compressor.torch.quantization import FP8Config, convert, prepare + config = FP8Config.from_json_file(quant_config_path) + if config.measure: + self.transformer = prepare(self.transformer, config) + elif config.quantize: + self.transformer = convert(self.transformer, config) + htcore.hpu_initialize(self.transformer, mark_only_scales_as_const=True) + height = height or self.default_sample_size * self.vae_scale_factor width = width or self.default_sample_size * self.vae_scale_factor @@ -276,6 +291,7 @@ def __call__( device = self._execution_device + # 3. Run text encoder ( prompt_embeds, pooled_prompt_embeds, @@ -348,6 +364,7 @@ def __call__( throughput_warmup_steps = kwargs.get("throughput_warmup_steps", 3) + ht.hpu.synchronize() t0 = time.time() t1 = t0 @@ -363,6 +380,7 @@ def __call__( for i, t in enumerate(timesteps): # because compilation occurs in the first two iterations if i == throughput_warmup_steps: + ht.hpu.synchronize() t1 = time.time() if self.interrupt: continue @@ -402,16 +420,10 @@ def __call__( htcore.mark_step(sync=True) hb_profiler.stop() - t1 = warmup_inference_steps_time_adjustment(t1, t1, num_inference_steps, throughput_warmup_steps) - speed_metrics_prefix = "generation" - speed_measures = speed_metrics( - split=speed_metrics_prefix, - start_time=t0, - num_samples=num_batches * batch_size, - num_steps=num_batches * batch_size * num_inference_steps, - start_time_after_warmup=t1, - ) - logger.info(f"Speed metrics: {speed_measures}") + + if quant_mode == "measure": + from neural_compressor.torch.quantization import finalize_calibration + finalize_calibration(self.transformer) if output_type == "latent": image = latents @@ -422,6 +434,19 @@ def __call__( image = self.vae.decode(latents, return_dict=False)[0] image = self.image_processor.postprocess(image, output_type=output_type) + # Synchronize and measure performance + ht.hpu.synchronize() + t1 = warmup_inference_steps_time_adjustment(t1, t1, num_inference_steps, throughput_warmup_steps) + speed_metrics_prefix = "generation" + speed_measures = speed_metrics( + split=speed_metrics_prefix, + start_time=t0, + num_samples=num_batches * batch_size, + num_steps=num_batches * batch_size * num_inference_steps, + start_time_after_warmup=t1, + ) + logger.info(f"Speed metrics: {speed_measures}") + # Offload all models self.maybe_free_model_hooks() From 66098ff2e911dcad039258797123e22afd83bfce Mon Sep 17 00:00:00 2001 From: Daniel Socek Date: Thu, 26 Sep 2024 22:27:30 +0000 Subject: [PATCH 12/30] Enable batching for flux inference Signed-off-by: Daniel Socek Co-authored-by: Deepak Narayana --- examples/stable-diffusion/prompts_100.txt | 100 ++++++ examples/stable-diffusion/prompts_5.txt | 5 + examples/stable-diffusion/readme.txt | 9 + .../stable-diffusion/run_bf16_prompts_100.sh | 13 + .../stable-diffusion/run_bf16_prompts_5.sh | 13 + .../text_to_image_generation.py | 14 + .../diffusers/pipelines/flux/pipeline_flux.py | 294 +++++++++++++++--- 7 files changed, 401 insertions(+), 47 
deletions(-) create mode 100644 examples/stable-diffusion/prompts_100.txt create mode 100644 examples/stable-diffusion/prompts_5.txt create mode 100755 examples/stable-diffusion/run_bf16_prompts_100.sh create mode 100755 examples/stable-diffusion/run_bf16_prompts_5.sh diff --git a/examples/stable-diffusion/prompts_100.txt b/examples/stable-diffusion/prompts_100.txt new file mode 100644 index 0000000000..09a94f1b9a --- /dev/null +++ b/examples/stable-diffusion/prompts_100.txt @@ -0,0 +1,100 @@ +A women playing tennis on a blue tennis court. +Two surfers in wetsuits carrying surfboards along the beach. +People are flying their kites in a large field. +A statue that is in front of a building. +A man attempting to do a skateboard trick on an outdoor halfpipe. +A cream bathroom with red accents and open window. +A woman in a white dress and a man in gray stand near a cake on a white table under a white canopy. +A tour bus downtown with yoga ads all over it. +Three people riding horses on a beach next to the ocean.. +A bear lying in its den on a pile of wood. +A herd of cattle is feeding at the river's edge. +there is a male snowboarder that is in the air +A statue of an elephant with tattoos and a target drawing +a woman is standing over a white cake +A grey motorcycle parked in a tropical setting. +Two dogs playing in the grass with a frisbee. +A group of people standing around a table full of food. +A person holding a piece of broccoli with an insect on it. +A man in a gray suit and a red tie. +Someone with skis on his back walking up a snow covered mountain. +a woman in a blue shirt holding a pair of large scissors +A fluffy white cat has a frowning look on it's face. +A bus that is sitting on the street. +Sinks in the washroom that is public and white. +this lady is walking along the shore on a beach +A woman helping a man to do his tie. +A tray with coffee and a pastry on it. +A man with a bald head and a bear wearing a bow tie. +A cat eating a birthday cake on top of the table. +A man at a party talking on a cell phone. +Three commuter buses sitting outside of a building. +Food truck with customers ordering them with friends. +A couple of bikes in front of a small stone wall. +Someone is enjoying a small slice of pie. +there is a small tv and coffee table in the living room +A plate with two sandwiches, cup and knife on the table +A group of people venturing out on a horseback ride. +A horse walking through a grassy field while two cows eat hay. +A bathroom scene with focus on the toilet. +A man on a field swinging a baseball bat. +Some people are standing on a crowd crowded sidewalk +A pregnant women taking a picture of herself in the mirror. +A line of police offices riding horses down a street. +A buffet styled restaurant without self service but a server. +A tview of a living room with fold out bed. +A surfboard advertising offerings as people check them out. +a bath room with a sink a mirror and towel racks +A desk with a computer on in and a key board +A large clock mounted on the wall of a stone building +Three men sitting around a table with wine on it. +Man with glasses and a mustache standing in front of a door. +A woman in a black dress holding a racquet. +This hotel room has a king size bed. +A severely injured man hooked up to machines in the hospital +A black and white picture of an old store. +Fingers keep a meatball sub from falling apart. +A plate of fish covered in marinara, cheese, carrots, a fork, next to bread. 
+A anal filled with boats and the street above it filled with people under umbrellas. +A woman standing on a tennis court holding a racquet. +An elephant is standing next to a tree and a fence. +People in a street with birds all over. +THERE ARE PEOPLE THAT ARE STANDING IN THE GRASS +a man standing by a fence while throwing a frisbee +A pot that is on the stove with some food in it. +A stuffed animal is inside of a microwave. +Group of parents watching small children on a baseball field. +The kite is flying high in the air +This cat is playing on a fuzzy white blanket. +A lone giraffe at a zoo with trees behind it. +A train engine carrying carts down a track. +a young man brushes his teeth in the bathroom +Two children playing baseball in red uniforms and hats. +Men lined up an a runway in a desert greet an arriving jet plane. +The American flag flies next to the clock tower on a snowy day. +A man takes a selfie of himself in the mirror. +A baseball game where a player is running to 3rd base. +A man and a woman standing in front of a bus. +A plate of dessert sitting beside a drink in a cafe. +A man on the couch is petting the dog +A cat is sleeping with a remote control on a couch. +A European fighter jet flying above the tree tops. +Snowman's head has a carrot for a nose and lemon slices for eyes. +A man standing on a tennis court holding a tennis racquet. +Two large green and white jumbo jet planes on the tarmac. +The bedroom is is decorated in various zebra prints. +Dog displaying skills near disc in open grassy area. +A toilet facility in a stone cell on a plank floor. +A picture done by Independent Expression Photography of a girl posing in an empty road sitting on her suit cases. +Pair of colorful stuffed bears hanging on line in backyard. +a large train is on the track going by the ocean +There is a pizza with olives, peppers, meat, and cheese on the table. +A man standing in front of microphones. +A woman that is holding a book sitting on a bed. +A man showing a ring at a formal event. +A concrete building with towers, a steep in the middle and a clock underneath. +A white dog standing on top of a wooden bench. +A woman holds a plate with rainbow cake. +A women who is taking a picture of her food. +A man with long hair and in a towel holding a toothbrush. +a woman holding a tennis racket in the air diff --git a/examples/stable-diffusion/prompts_5.txt b/examples/stable-diffusion/prompts_5.txt new file mode 100644 index 0000000000..6254d2714e --- /dev/null +++ b/examples/stable-diffusion/prompts_5.txt @@ -0,0 +1,5 @@ +A women playing tennis on a blue tennis court. +Two surfers in wetsuits carrying surfboards along the beach. +People are flying their kites in a large field. +A statue that is in front of a building. +A man attempting to do a skateboard trick on an outdoor halfpipe. 
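For reference, the batched flow exercised by the shell scripts below can also be driven directly from Python. The following is a minimal sketch: it assumes the GaudiFluxPipeline loading flags used by the scripts in this series and the `batch_size` argument added to `__call__` later in this patch; the prompt file name comes from this patch, while the output file names are illustrative only.

```py
import torch
from optimum.habana.diffusers import GaudiFluxPipeline

# Read one prompt per line, mirroring the --prompts_file handling added in this patch
with open("prompts_5.txt", "r") as f:
    prompts = [line.strip() for line in f if line.strip()]

pipe = GaudiFluxPipeline.from_pretrained(
    "black-forest-labs/FLUX.1-dev",
    torch_dtype=torch.bfloat16,
    use_habana=True,
    use_hpu_graphs=True,
    gaudi_config="Habana/stable-diffusion",
)

# Prompts are split into ceil(len(prompts) / batch_size) batches; the last
# batch is padded with dummy samples internally and trimmed from the output.
outputs = pipe(prompt=prompts, num_inference_steps=30, batch_size=1)

for i, image in enumerate(outputs.images):
    image.save(f"flux_image_{i}.png")  # illustrative output path
```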
diff --git a/examples/stable-diffusion/readme.txt b/examples/stable-diffusion/readme.txt index d57f39a7f1..17d9b726cb 100644 --- a/examples/stable-diffusion/readme.txt +++ b/examples/stable-diffusion/readme.txt @@ -10,3 +10,12 @@ To run sample with 1 image 1 batch in FP8 precision (quant weights were tuned wi To run sample with 1 image 1 batch in FP8 precision (quant weights were tuned with 500 prompts): ./run_fp8_500.sh + +* Added batching +* Added --prompt_file option for large number of input prompts + +To run sample with 5 prompts (batch size 1) in BF16 precision: +./run_bf16_prompts_5.sh + +To run sample with 100 prompts (batch size 1) in BF16 precision: +./run_bf16_prompts_100.sh diff --git a/examples/stable-diffusion/run_bf16_prompts_100.sh b/examples/stable-diffusion/run_bf16_prompts_100.sh new file mode 100755 index 0000000000..d229d5fe68 --- /dev/null +++ b/examples/stable-diffusion/run_bf16_prompts_100.sh @@ -0,0 +1,13 @@ +#!/bin/bash +python text_to_image_generation.py \ + --model_name_or_path black-forest-labs/FLUX.1-dev \ + --prompts_file prompts_100.txt \ + --num_images_per_prompt 1 \ + --batch_size 1 \ + --num_inference_steps 30 \ + --image_save_dir /tmp/flux_1_images \ + --scheduler flow_match_euler_discrete \ + --use_habana \ + --use_hpu_graphs \ + --gaudi_config Habana/stable-diffusion \ + --bf16 diff --git a/examples/stable-diffusion/run_bf16_prompts_5.sh b/examples/stable-diffusion/run_bf16_prompts_5.sh new file mode 100755 index 0000000000..22cc40dc0c --- /dev/null +++ b/examples/stable-diffusion/run_bf16_prompts_5.sh @@ -0,0 +1,13 @@ +#!/bin/bash +python text_to_image_generation.py \ + --model_name_or_path black-forest-labs/FLUX.1-dev \ + --prompts_file prompts_5.txt \ + --num_images_per_prompt 1 \ + --batch_size 1 \ + --num_inference_steps 30 \ + --image_save_dir /tmp/flux_1_images \ + --scheduler flow_match_euler_discrete \ + --use_habana \ + --use_hpu_graphs \ + --gaudi_config Habana/stable-diffusion \ + --bf16 diff --git a/examples/stable-diffusion/text_to_image_generation.py b/examples/stable-diffusion/text_to_image_generation.py index 70be07ee3b..74816f0d10 100755 --- a/examples/stable-diffusion/text_to_image_generation.py +++ b/examples/stable-diffusion/text_to_image_generation.py @@ -277,6 +277,12 @@ def main(): type=str, help="Quantization mode 'measure', 'quantize' or 'disable'", ) + parser.add_argument( + "--prompts_file", + type=str, + default=None, + help="The file with prompts (for large number of images generation).", + ) args = parser.parse_args() # Select stable diffuson pipeline based on input @@ -534,6 +540,14 @@ def main(): # Set RNG seed set_seed(args.seed) + # If prompts file is specified override prompts from the file + if args.prompts_file is not None: + lines = [] + with open(args.prompts_file, "r") as file: + lines = file.readlines() + lines = [line.strip() for line in lines] + args.prompts = lines + # Generate Images using a Stable Diffusion pipeline if args.distributed: with distributed_state.split_between_processes(args.prompts) as prompt: diff --git a/optimum/habana/diffusers/pipelines/flux/pipeline_flux.py b/optimum/habana/diffusers/pipelines/flux/pipeline_flux.py index 78aff5d4ff..cbbcfcd185 100644 --- a/optimum/habana/diffusers/pipelines/flux/pipeline_flux.py +++ b/optimum/habana/diffusers/pipelines/flux/pipeline_flux.py @@ -51,6 +51,7 @@ class GaudiFluxPipelineOutput(BaseOutput): """ images: Union[List[PIL.Image.Image], np.ndarray] + throughput: float EXAMPLE_DOC_STRING = """ @@ -144,6 +145,86 @@ def __init__( from 
habana_frameworks.torch.hpu import wrap_in_hpu_graph transformer = wrap_in_hpu_graph(transformer) + + @classmethod + def _split_inputs_into_batches( + cls, + batch_size, + latents, + prompt_embeds, + pooled_prompt_embeds, + text_ids, + latent_image_ids, + guidance + ): + # Use torch.split to generate num_batches batches of size batch_size + latents_batches = list(torch.split(latents, batch_size)) + prompt_embeds_batches = list(torch.split(prompt_embeds, batch_size)) + if pooled_prompt_embeds is not None: + pooled_prompt_embeds_batches = list(torch.split(pooled_prompt_embeds, batch_size)) + if text_ids is not None: + text_ids_batches = list(torch.split(text_ids, batch_size)) + if latent_image_ids is not None: + latent_image_ids_batches = list(torch.split(latent_image_ids, batch_size)) + if guidance is not None: + guidance_batches = list(torch.split(guidance, batch_size)) + + # If the last batch has less samples than batch_size, pad it with dummy samples + num_dummy_samples = 0 + if latents_batches[-1].shape[0] < batch_size: + num_dummy_samples = batch_size - latents_batches[-1].shape[0] + + # Pad latents_batches + sequence_to_stack = (latents_batches[-1],) + tuple( + torch.zeros_like(latents_batches[-1][0][None, :]) for _ in range(num_dummy_samples) + ) + latents_batches[-1] = torch.vstack(sequence_to_stack) + + # Pad prompt_embeds_batches + sequence_to_stack = (prompt_embeds_batches[-1],) + tuple( + torch.zeros_like(prompt_embeds_batches[-1][0][None, :]) for _ in range(num_dummy_samples) + ) + prompt_embeds_batches[-1] = torch.vstack(sequence_to_stack) + + # Pad pooled_prompt_embeds if necessary + if pooled_prompt_embeds is not None: + sequence_to_stack = (pooled_prompt_embeds_batches[-1],) + tuple( + torch.zeros_like(pooled_prompt_embeds_batches[-1][0][None, :]) for _ in range(num_dummy_samples) + ) + pooled_prompt_embeds_batches[-1] = torch.vstack(sequence_to_stack) + + # Pad text_ids_batches if necessary + if text_ids is not None: + sequence_to_stack = (text_ids_batches[-1],) + tuple( + torch.zeros_like(text_ids_batches[-1][0][None, :]) for _ in range(num_dummy_samples) + ) + text_ids_batches[-1] = torch.vstack(sequence_to_stack) + + # Pad latent_image_ids if necessary + if latent_image_ids is not None: + sequence_to_stack = (latent_image_ids_batches[-1],) + tuple( + torch.zeros_like(latent_image_ids_batches[-1][0][None, :]) for _ in range(num_dummy_samples) + ) + latent_image_ids_batches[-1] = torch.vstack(sequence_to_stack) + + # Pad guidance if necessary + if guidance is not None: + sequence_to_stack = (guidance_batches[-1],) + tuple( + torch.zeros_like(guidance_batches[-1][0][None, :]) for _ in range(num_dummy_samples) + ) + guidance_batches[-1] = torch.vstack(sequence_to_stack) + + # Stack batches in the same tensor + latents_batches = torch.stack(latents_batches) + prompt_embeds_batches = torch.stack(prompt_embeds_batches) + pooled_prompt_embeds_batches = torch.stack(pooled_prompt_embeds_batches) + text_ids_batches = torch.stack(text_ids_batches) + latent_image_ids_batches = torch.stack(latent_image_ids_batches) + guidance_batches = torch.stack(guidance_batches) + + return latents_batches, prompt_embeds_batches, pooled_prompt_embeds_batches, text_ids_batches, latent_image_ids_batches, guidance_batches, num_dummy_samples + + @torch.no_grad() @replace_example_docstring(EXAMPLE_DOC_STRING) def __call__( @@ -155,6 +236,7 @@ def __call__( num_inference_steps: int = 28, timesteps: List[int] = None, guidance_scale: float = 3.5, + batch_size: int = 1, num_images_per_prompt: Optional[int] 
= 1, generator: Optional[Union[torch.Generator, List[torch.Generator]]] = None, latents: Optional[torch.FloatTensor] = None, @@ -244,6 +326,23 @@ def __call__( is True, otherwise a `tuple`. When returning a tuple, the first element is a list with the generated images. """ + + callback = kwargs.pop("callback", None) + callback_steps = kwargs.pop("callback_steps", None) + + if callback is not None: + deprecate( + "callback", + "1.0.0", + "Passing `callback` as an input argument to `__call__` is deprecated, consider use `callback_on_step_end`", + ) + if callback_steps is not None: + deprecate( + "callback_steps", + "1.0.0", + "Passing `callback_steps` as an input argument to `__call__` is deprecated, consider use `callback_on_step_end`", + ) + import habana_frameworks.torch as ht import habana_frameworks.torch.core as htcore @@ -283,11 +382,12 @@ def __call__( # 2. Define call parameters if prompt is not None and isinstance(prompt, str): - batch_size = 1 + num_prompts = 1 elif prompt is not None and isinstance(prompt, list): - batch_size = len(prompt) + num_prompts = len(prompt) else: - batch_size = prompt_embeds.shape[0] + num_prompts = prompt_embeds.shape[0] + num_batches = math.ceil((num_images_per_prompt * num_prompts) / batch_size) device = self._execution_device @@ -309,7 +409,7 @@ def __call__( # 4. Prepare latent variables num_channels_latents = self.transformer.config.in_channels // 4 latents, latent_image_ids = self.prepare_latents( - batch_size * num_images_per_prompt, + num_prompts * num_images_per_prompt, num_channels_latents, height, width, @@ -347,14 +447,6 @@ def __call__( else: guidance = None - # 5-1. Define call parameters - if prompt is not None and isinstance(prompt, str): - num_prompts = 1 - elif prompt is not None and isinstance(prompt, list): - num_prompts = len(prompt) - else: - num_prompts = prompt_embeds.shape[0] - num_batches = math.ceil((num_images_per_prompt * num_prompts) / batch_size) logger.info( f"{num_prompts} prompt(s) received, {num_images_per_prompt} generation(s) per prompt," f" {batch_size} sample(s) per batch, {num_batches} total batch(es)." @@ -363,6 +455,9 @@ def __call__( logger.warning("The first two iterations are slower so it is recommended to feed more batches.") throughput_warmup_steps = kwargs.get("throughput_warmup_steps", 3) + use_warmup_inference_steps = ( + num_batches <= throughput_warmup_steps and num_inference_steps > throughput_warmup_steps + ) ht.hpu.synchronize() t0 = time.time() @@ -375,82 +470,187 @@ def __call__( ) hb_profiler.start() + # 5.1. Split Input data to batches (HPU-specific step) + ( + latents_batches, + text_embeddings_batches, + pooled_prompt_embeddings_batches, + text_ids_batches, + latent_image_ids_batches, + guidance_batches, + num_dummy_samples + ) = self._split_inputs_into_batches( + batch_size, + latents, + prompt_embeds, + pooled_prompt_embeds, + text_ids, + latent_image_ids, + guidance + ) + + outputs = { + "images": [], + } + # 6. 
Denoising loop - with self.progress_bar(total=num_inference_steps) as progress_bar: - for i, t in enumerate(timesteps): - # because compilation occurs in the first two iterations - if i == throughput_warmup_steps: + for j in range(num_batches): + + # The throughput is calculated from the 4th iteration + # because compilation occurs in the first 2-3 iterations + if j == throughput_warmup_steps: + ht.hpu.synchronize() + t1 = time.time() + if use_warmup_inference_steps: + ht.hpu.synchronize() + t0_inf = time.time() + + latents_batch = latents_batches[0] + latents_batches = torch.roll(latents_batches, shifts=-1, dims=0) + text_embeddings_batch = text_embeddings_batches[0] + text_embeddings_batches = torch.roll(text_embeddings_batches, shifts=-1, dims=0) + pooled_prompt_embeddings_batch = pooled_prompt_embeddings_batches[0] + pooled_prompt_embeddings_batches = torch.roll(pooled_prompt_embeddings_batches, shifts=-1, dims=0) + text_ids_batch = text_ids_batches[0] + text_ids_batches = torch.roll(text_ids_batches, shifts=-1, dims=0) + latent_image_ids_batch = latent_image_ids_batches[0] + latent_image_ids_batches = torch.roll(latent_image_ids_batches, shifts=-1, dims=0) + guidance_batch = guidance_batches[0] + guidance_batches = torch.roll(guidance_batches, shifts=-1, dims=0) + + if hasattr(self.scheduler, "_init_step_index"): + # Reset scheduler step index for next batch + self.scheduler.timesteps = timesteps + self.scheduler._init_step_index(timesteps[0]) + + for i in self.progress_bar(range(len(timesteps))): + if use_warmup_inference_steps and i == throughput_warmup_steps: ht.hpu.synchronize() - t1 = time.time() + t1_inf = time.time() + t1 += t1_inf - t0_inf + if self.interrupt: - continue + continue + timestep = timesteps[0] + timesteps = torch.roll(timesteps, shifts=-1, dims=0) # broadcast to batch dimension in a way that's compatible with ONNX/Core ML - timestep = t.expand(latents.shape[0]).to(latents.dtype) + timestep = timestep.expand(latents_batch.shape[0]).to(latents_batch.dtype) noise_pred = self.transformer( - hidden_states=latents, + hidden_states=latents_batch, timestep=timestep / 1000, - guidance=guidance, - pooled_projections=pooled_prompt_embeds, - encoder_hidden_states=prompt_embeds, - txt_ids=text_ids, - img_ids=latent_image_ids, + guidance=guidance_batch, + pooled_projections=pooled_prompt_embeddings_batch, + encoder_hidden_states=text_embeddings_batch, + txt_ids=text_ids_batch, + img_ids=latent_image_ids_batch, joint_attention_kwargs=self.joint_attention_kwargs, return_dict=False, )[0] # compute the previous noisy sample x_t -> x_t-1 - latents = self.scheduler.step(noise_pred, t, latents, return_dict=False)[0] + latents_dtype = latents_batch.dtype + latents_batch = self.scheduler.step(noise_pred, timestep, latents_batch, return_dict=False)[0] + + if latents_batch.dtype != latents_dtype: + if torch.backends.mps.is_available(): + # some platforms (eg. 
apple mps) misbehave due to a pytorch bug: https://github.com/pytorch/pytorch/pull/99272 + latents_batch = latents_batch.to(latents_dtype) if callback_on_step_end is not None: callback_kwargs = {} for k in callback_on_step_end_tensor_inputs: callback_kwargs[k] = locals()[k] - callback_outputs = callback_on_step_end(self, i, t, callback_kwargs) + callback_outputs = callback_on_step_end(self, i, timestep, callback_kwargs) + + latents_batch = callback_outputs.pop("latents", latents_batch) + + _prompt_embeds = callback_outputs.pop("prompt_embeds", None) + _negative_prompt_embeds = callback_outputs.pop("negative_prompt_embeds", None) + if _prompt_embeds is not None and _negative_prompt_embeds is not None: + text_embeddings_batch = torch.cat([_negative_prompt_embeds, _prompt_embeds]) + _pooled_prompt_embeds = callback_outputs.pop("pooled_prompt_embeds", None) + _negative_pooled_prompt_embeds = callback_outputs.pop("negative_pooled_prompt_embeds", None) + if _pooled_prompt_embeds is not None and _negative_pooled_prompt_embeds is not None: + pooled_prompt_embeddings_batch = torch.cat([_negative_pooled_prompt_embeds, _pooled_prompt_embeds]) - latents = callback_outputs.pop("latents", latents) - prompt_embeds = callback_outputs.pop("prompt_embeds", prompt_embeds) # call the callback, if provided if i == len(timesteps) - 1 or ((i + 1) > num_warmup_steps and (i + 1) % self.scheduler.order == 0): - progress_bar.update() + if callback is not None and i % callback_steps == 0: + step_idx = i // getattr(self.scheduler, "order", 1) + callback(step_idx, timestep, latents) hb_profiler.step() - htcore.mark_step(sync=True) + #htcore.mark_step(sync=True) - hb_profiler.stop() + if use_warmup_inference_steps: + t1 = warmup_inference_steps_time_adjustment( + t1, t1_inf, num_inference_steps, throughput_warmup_steps + ) - if quant_mode == "measure": - from neural_compressor.torch.quantization import finalize_calibration - finalize_calibration(self.transformer) + if not output_type == "latent": + latents_batch = self._unpack_latents(latents_batch, height, width, self.vae_scale_factor) + latents_batch = (latents_batch / self.vae.config.scaling_factor) + self.vae.config.shift_factor + image = self.vae.decode(latents_batch, return_dict=False)[0] + image = self.image_processor.postprocess(image, output_type=output_type) + else: + image = latents_batch - if output_type == "latent": - image = latents + outputs["images"].append(image) + #htcore.mark_step(sync=True) - else: - latents = self._unpack_latents(latents, height, width, self.vae_scale_factor) - latents = (latents / self.vae.config.scaling_factor) + self.vae.config.shift_factor - image = self.vae.decode(latents, return_dict=False)[0] - image = self.image_processor.postprocess(image, output_type=output_type) + # Stage after denoising + hb_profiler.stop() + + if quant_mode == "measure": + from neural_compressor.torch.quantization import finalize_calibration + finalize_calibration(self.transformer) - # Synchronize and measure performance ht.hpu.synchronize() - t1 = warmup_inference_steps_time_adjustment(t1, t1, num_inference_steps, throughput_warmup_steps) speed_metrics_prefix = "generation" speed_measures = speed_metrics( split=speed_metrics_prefix, start_time=t0, - num_samples=num_batches * batch_size, + num_samples=num_batches * batch_size + if t1 == t0 or use_warmup_inference_steps + else (num_batches - throughput_warmup_steps) * batch_size, num_steps=num_batches * batch_size * num_inference_steps, start_time_after_warmup=t1, ) logger.info(f"Speed metrics: 
{speed_measures}") + # 8 Output Images + # Remove dummy generations if needed + if num_dummy_samples > 0: + outputs["images"][-1] = outputs["images"][-1][:-num_dummy_samples] + + # Process generated images + for i, image in enumerate(outputs["images"][:]): + if i == 0: + outputs["images"].clear() + + if output_type == "pil" and isinstance(image, list): + outputs["images"] += image + elif output_type in ["np", "numpy"] and isinstance(image, np.ndarray): + if len(outputs["images"]) == 0: + outputs["images"] = image + else: + outputs["images"] = np.concatenate((outputs["images"], image), axis=0) + else: + if len(outputs["images"]) == 0: + outputs["images"] = image + else: + outputs["images"] = torch.cat((outputs["images"], image), 0) + # Offload all models self.maybe_free_model_hooks() if not return_dict: - return (image,) + return outputs["images"] - return GaudiFluxPipelineOutput(images=image) + return GaudiFluxPipelineOutput( + images=outputs["images"], + throughput=speed_measures[f"{speed_metrics_prefix}_samples_per_second"], + ) From 0615ce154c654be434d08f8eb6c48f008b935e22 Mon Sep 17 00:00:00 2001 From: baocheny Date: Fri, 27 Sep 2024 13:16:36 +0800 Subject: [PATCH 13/30] update diffusers to adopt rope changes --- .../habana/diffusers/pipelines/flux/pipeline_flux.py | 11 +++++------ optimum/habana/utils.py | 3 ++- setup.py | 2 +- 3 files changed, 8 insertions(+), 8 deletions(-) diff --git a/optimum/habana/diffusers/pipelines/flux/pipeline_flux.py b/optimum/habana/diffusers/pipelines/flux/pipeline_flux.py index e2f432c6ac..0819f76a3d 100644 --- a/optimum/habana/diffusers/pipelines/flux/pipeline_flux.py +++ b/optimum/habana/diffusers/pipelines/flux/pipeline_flux.py @@ -141,8 +141,7 @@ def __init__( ) self.to(self._device) if use_hpu_graphs: - from habana_frameworks.torch.hpu import wrap_in_hpu_graph - transformer = wrap_in_hpu_graph(transformer) + transformer = self.ht.wrap_in_hpu_graph(transformer) @torch.no_grad() @replace_example_docstring(EXAMPLE_DOC_STRING) @@ -245,8 +244,6 @@ def __call__( images. 
""" - import habana_frameworks.torch.core as htcore - height = height or self.default_sample_size * self.vae_scale_factor width = width or self.default_sample_size * self.vae_scale_factor @@ -355,6 +352,7 @@ def __call__( warmup=profiling_warmup_steps, active=profiling_steps, record_shapes=False, + with_stack=True ) hb_profiler.start() @@ -397,9 +395,10 @@ def __call__( # call the callback, if provided if i == len(timesteps) - 1 or ((i + 1) > num_warmup_steps and (i + 1) % self.scheduler.order == 0): progress_bar.update() - + if profiling_warmup_steps and profiling_steps: + self.ht.hpu.synchronize() hb_profiler.step() - htcore.mark_step(sync=True) + self.ht.core.mark_step(sync=True) hb_profiler.stop() t1 = warmup_inference_steps_time_adjustment(t1, t1, num_inference_steps, throughput_warmup_steps) diff --git a/optimum/habana/utils.py b/optimum/habana/utils.py index 3da3f11872..5434344246 100755 --- a/optimum/habana/utils.py +++ b/optimum/habana/utils.py @@ -285,6 +285,7 @@ def __init__( warmup: int = 0, active: int = 0, record_shapes: bool = True, + with_stack: bool = False, output_dir: str = "./hpu_profile", wait: int = 0, ): @@ -306,7 +307,7 @@ def noop(): activities=activities, on_trace_ready=torch.profiler.tensorboard_trace_handler(output_dir), record_shapes=record_shapes, - with_stack=False, + with_stack=with_stack ) self.start = profiler.start self.stop = profiler.stop diff --git a/setup.py b/setup.py index c77a241717..55510eb4c3 100644 --- a/setup.py +++ b/setup.py @@ -33,7 +33,7 @@ "optimum", "torch", "accelerate >= 0.33.0, < 0.34.0", - "diffusers >= 0.30.2", + "diffusers @ git+https://github.com/huggingface/diffusers.git@main", "huggingface_hub >= 0.23.2", "sentence-transformers[train] == 3.0.1", ] From 97a6dd549fbae0dd517c44855f26acda502a8329 Mon Sep 17 00:00:00 2001 From: baocheny Date: Fri, 27 Sep 2024 13:48:07 +0800 Subject: [PATCH 14/30] fix import error --- optimum/habana/diffusers/pipelines/flux/pipeline_flux.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/optimum/habana/diffusers/pipelines/flux/pipeline_flux.py b/optimum/habana/diffusers/pipelines/flux/pipeline_flux.py index 0819f76a3d..34a70cb069 100644 --- a/optimum/habana/diffusers/pipelines/flux/pipeline_flux.py +++ b/optimum/habana/diffusers/pipelines/flux/pipeline_flux.py @@ -141,7 +141,7 @@ def __init__( ) self.to(self._device) if use_hpu_graphs: - transformer = self.ht.wrap_in_hpu_graph(transformer) + transformer = self.ht.hpu.wrap_in_hpu_graph(transformer) @torch.no_grad() @replace_example_docstring(EXAMPLE_DOC_STRING) From 9aefc5fd51b650872205f881c86f297bd277c19a Mon Sep 17 00:00:00 2001 From: baocheny Date: Fri, 27 Sep 2024 14:31:34 +0800 Subject: [PATCH 15/30] fix time clac drift --- optimum/habana/diffusers/pipelines/flux/pipeline_flux.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/optimum/habana/diffusers/pipelines/flux/pipeline_flux.py b/optimum/habana/diffusers/pipelines/flux/pipeline_flux.py index 34a70cb069..cf6d758ccf 100644 --- a/optimum/habana/diffusers/pipelines/flux/pipeline_flux.py +++ b/optimum/habana/diffusers/pipelines/flux/pipeline_flux.py @@ -361,6 +361,8 @@ def __call__( for i, t in enumerate(timesteps): # because compilation occurs in the first two iterations if i == throughput_warmup_steps: + # clac acc time `end - t1` + self.ht.hpu.synchronize() t1 = time.time() if self.interrupt: continue @@ -401,6 +403,8 @@ def __call__( self.ht.core.mark_step(sync=True) hb_profiler.stop() + # clac acc time `end - t1` + self.ht.hpu.synchronize() t1 = 
warmup_inference_steps_time_adjustment(t1, t1, num_inference_steps, throughput_warmup_steps) speed_metrics_prefix = "generation" speed_measures = speed_metrics( From 1bc593a8c8b1bfcb4ff5a0b8708c293a1f7507d2 Mon Sep 17 00:00:00 2001 From: baocheny Date: Fri, 27 Sep 2024 15:08:20 +0800 Subject: [PATCH 16/30] fix import error in lazy mode --- optimum/habana/diffusers/pipelines/flux/pipeline_flux.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/optimum/habana/diffusers/pipelines/flux/pipeline_flux.py b/optimum/habana/diffusers/pipelines/flux/pipeline_flux.py index cf6d758ccf..fe7380dbeb 100644 --- a/optimum/habana/diffusers/pipelines/flux/pipeline_flux.py +++ b/optimum/habana/diffusers/pipelines/flux/pipeline_flux.py @@ -362,7 +362,7 @@ def __call__( # because compilation occurs in the first two iterations if i == throughput_warmup_steps: # clac acc time `end - t1` - self.ht.hpu.synchronize() + torch.hpu.synchronize() t1 = time.time() if self.interrupt: continue @@ -398,13 +398,13 @@ def __call__( if i == len(timesteps) - 1 or ((i + 1) > num_warmup_steps and (i + 1) % self.scheduler.order == 0): progress_bar.update() if profiling_warmup_steps and profiling_steps: - self.ht.hpu.synchronize() + torch.hpu.synchronize() hb_profiler.step() - self.ht.core.mark_step(sync=True) + self.htcore.mark_step(sync=True) hb_profiler.stop() # clac acc time `end - t1` - self.ht.hpu.synchronize() + torch.hpu.synchronize() t1 = warmup_inference_steps_time_adjustment(t1, t1, num_inference_steps, throughput_warmup_steps) speed_metrics_prefix = "generation" speed_measures = speed_metrics( From 3a53c95fab843e4e7f72841fd17b429a37db165d Mon Sep 17 00:00:00 2001 From: Daniel Socek Date: Fri, 27 Sep 2024 18:22:39 +0000 Subject: [PATCH 17/30] Add hybrid fp8 and bf16 denoising to flux Signed-off-by: Daniel Socek --- examples/stable-diffusion/readme.txt | 8 +++ .../stable-diffusion/run_fp8_500_hybrid.sh | 16 +++++ .../run_fp8_500_hybrid_prompts_5.sh | 16 +++++ .../text_to_image_generation.py | 2 +- .../diffusers/pipelines/flux/pipeline_flux.py | 59 +++++++++++++------ 5 files changed, 83 insertions(+), 18 deletions(-) create mode 100755 examples/stable-diffusion/run_fp8_500_hybrid.sh create mode 100755 examples/stable-diffusion/run_fp8_500_hybrid_prompts_5.sh diff --git a/examples/stable-diffusion/readme.txt b/examples/stable-diffusion/readme.txt index 17d9b726cb..e72ebcd497 100644 --- a/examples/stable-diffusion/readme.txt +++ b/examples/stable-diffusion/readme.txt @@ -19,3 +19,11 @@ To run sample with 5 prompts (batch size 1) in BF16 precision: ./run_bf16_prompts_5.sh To run sample with 100 prompts (batch size 1) in BF16 precision: ./run_bf16_prompts_100.sh + +* Added hybrid (mixed fp8 and bf16) precision denoising + +To run sample with 1 image 1 batch in hybrid precision: +./run_fp8_500_hybrid.sh + +To run sample with 5 prompts (batch size 1) in hybrid precision: +./run_fp8_500_hybrid_prompts_5.sh diff --git a/examples/stable-diffusion/run_fp8_500_hybrid.sh b/examples/stable-diffusion/run_fp8_500_hybrid.sh new file mode 100755 index 0000000000..6de1397574 --- /dev/null +++ b/examples/stable-diffusion/run_fp8_500_hybrid.sh @@ -0,0 +1,16 @@ +#!/bin/bash +QUANT_CONFIG=quantize/quant_config_500.json \ +PT_HPU_WEIGHT_SHARING=0 \ +python text_to_image_generation.py \ + --model_name_or_path black-forest-labs/FLUX.1-dev \ + --prompts "A cat holding a sign that says hello world" \ + --num_images_per_prompt 1 \ + --batch_size 1 \ + --num_inference_steps 30 \ + --image_save_dir /tmp/flux_1_images_fp8_500 \ + --scheduler
flow_match_euler_discrete \ + --use_habana \ + --use_hpu_graphs \ + --gaudi_config Habana/stable-diffusion \ + --bf16 \ + --quant_mode quantize-mixed diff --git a/examples/stable-diffusion/run_fp8_500_hybrid_prompts_5.sh b/examples/stable-diffusion/run_fp8_500_hybrid_prompts_5.sh new file mode 100755 index 0000000000..29d4ac1d22 --- /dev/null +++ b/examples/stable-diffusion/run_fp8_500_hybrid_prompts_5.sh @@ -0,0 +1,16 @@ +#!/bin/bash +QUANT_CONFIG=quantize/quant_config_500.json \ +PT_HPU_WEIGHT_SHARING=0 \ +python text_to_image_generation.py \ + --model_name_or_path black-forest-labs/FLUX.1-dev \ + --prompts_file prompts_5.txt \ + --num_images_per_prompt 1 \ + --batch_size 1 \ + --num_inference_steps 30 \ + --image_save_dir /tmp/flux_1_images \ + --scheduler flow_match_euler_discrete \ + --use_habana \ + --use_hpu_graphs \ + --gaudi_config Habana/stable-diffusion \ + --bf16 \ + --quant_mode quantize-mixed diff --git a/examples/stable-diffusion/text_to_image_generation.py b/examples/stable-diffusion/text_to_image_generation.py index 74816f0d10..7e862386f5 100755 --- a/examples/stable-diffusion/text_to_image_generation.py +++ b/examples/stable-diffusion/text_to_image_generation.py @@ -275,7 +275,7 @@ def main(): "--quant_mode", default="disable", type=str, - help="Quantization mode 'measure', 'quantize' or 'disable'", + help="Quantization mode 'measure', 'quantize', 'quantize-mixed' or 'disable'", ) parser.add_argument( "--prompts_file", diff --git a/optimum/habana/diffusers/pipelines/flux/pipeline_flux.py b/optimum/habana/diffusers/pipelines/flux/pipeline_flux.py index cbbcfcd185..999a15bdcc 100644 --- a/optimum/habana/diffusers/pipelines/flux/pipeline_flux.py +++ b/optimum/habana/diffusers/pipelines/flux/pipeline_flux.py @@ -346,8 +346,13 @@ def __call__( import habana_frameworks.torch as ht import habana_frameworks.torch.core as htcore - quant_mode=kwargs["quant_mode"] - if quant_mode == "measure" or quant_mode == "quantize": + quant_mode = kwargs["quant_mode"] + + if quant_mode == "quantize-mixed": + import copy + transformer_bf16 = copy.deepcopy(self.transformer).to(self._execution_device) + + if quant_mode == "measure" or quant_mode.startswith("quantize"): import os quant_config_path = os.getenv('QUANT_CONFIG') @@ -523,6 +528,14 @@ def __call__( self.scheduler.timesteps = timesteps self.scheduler._init_step_index(timesteps[0]) + # Mixed quantization + quant_mixed_step = len(timesteps) + if quant_mode == "quantize-mixed": + # 10% of steps use higher precision in mixed quant mode + quant_mixed_step = quant_mixed_step - (quant_mixed_step // 10) + print(f"Use FP8 Transformer at steps 0 to {quant_mixed_step - 1}") + print(f"Use BF16 Transformer at steps {quant_mixed_step} to {len(timesteps) - 1}") + for i in self.progress_bar(range(len(timesteps))): if use_warmup_inference_steps and i == throughput_warmup_steps: ht.hpu.synchronize() @@ -537,17 +550,31 @@ def __call__( # broadcast to batch dimension in a way that's compatible with ONNX/Core ML timestep = timestep.expand(latents_batch.shape[0]).to(latents_batch.dtype) - noise_pred = self.transformer( - hidden_states=latents_batch, - timestep=timestep / 1000, - guidance=guidance_batch, - pooled_projections=pooled_prompt_embeddings_batch, - encoder_hidden_states=text_embeddings_batch, - txt_ids=text_ids_batch, - img_ids=latent_image_ids_batch, - joint_attention_kwargs=self.joint_attention_kwargs, - return_dict=False, - )[0] + if i >= quant_mixed_step: + # Mixed quantization + noise_pred = transformer_bf16( + hidden_states=latents_batch, + 
timestep=timestep / 1000, + guidance=guidance_batch, + pooled_projections=pooled_prompt_embeddings_batch, + encoder_hidden_states=text_embeddings_batch, + txt_ids=text_ids_batch, + img_ids=latent_image_ids_batch, + joint_attention_kwargs=self.joint_attention_kwargs, + return_dict=False, + )[0] + else: + noise_pred = self.transformer( + hidden_states=latents_batch, + timestep=timestep / 1000, + guidance=guidance_batch, + pooled_projections=pooled_prompt_embeddings_batch, + encoder_hidden_states=text_embeddings_batch, + txt_ids=text_ids_batch, + img_ids=latent_image_ids_batch, + joint_attention_kwargs=self.joint_attention_kwargs, + return_dict=False, + )[0] # compute the previous noisy sample x_t -> x_t-1 latents_dtype = latents_batch.dtype @@ -567,11 +594,9 @@ def __call__( latents_batch = callback_outputs.pop("latents", latents_batch) _prompt_embeds = callback_outputs.pop("prompt_embeds", None) - _negative_prompt_embeds = callback_outputs.pop("negative_prompt_embeds", None) if _prompt_embeds is not None and _negative_prompt_embeds is not None: text_embeddings_batch = torch.cat([_negative_prompt_embeds, _prompt_embeds]) _pooled_prompt_embeds = callback_outputs.pop("pooled_prompt_embeds", None) - _negative_pooled_prompt_embeds = callback_outputs.pop("negative_pooled_prompt_embeds", None) if _pooled_prompt_embeds is not None and _negative_pooled_prompt_embeds is not None: pooled_prompt_embeddings_batch = torch.cat([_negative_pooled_prompt_embeds, _pooled_prompt_embeds]) @@ -601,7 +626,7 @@ def __call__( outputs["images"].append(image) #htcore.mark_step(sync=True) - # Stage after denoising + # 7. Stage after denoising hb_profiler.stop() if quant_mode == "measure": @@ -622,8 +647,8 @@ def __call__( logger.info(f"Speed metrics: {speed_measures}") # 8 Output Images - # Remove dummy generations if needed if num_dummy_samples > 0: + # Remove dummy generations if needed outputs["images"][-1] = outputs["images"][-1][:-num_dummy_samples] # Process generated images From 0b80a6a7e1fe15d33367ea059dd3ba20747f07dc Mon Sep 17 00:00:00 2001 From: baocheny Date: Sun, 29 Sep 2024 10:40:32 +0800 Subject: [PATCH 18/30] use default scheduler from upstream diffusers --- .../text_to_image_generation.py | 4 --- optimum/habana/diffusers/__init__.py | 2 +- .../diffusers/pipelines/flux/pipeline_flux.py | 10 +++++--- .../diffusers/pipelines/pipeline_utils.py | 1 - .../habana/diffusers/schedulers/__init__.py | 1 - .../scheduling_flow_mactch_euler_discrete.py | 25 ------------------- 6 files changed, 7 insertions(+), 36 deletions(-) delete mode 100644 optimum/habana/diffusers/schedulers/scheduling_flow_mactch_euler_discrete.py diff --git a/examples/stable-diffusion/text_to_image_generation.py b/examples/stable-diffusion/text_to_image_generation.py index 8425389b4b..67360aec3c 100755 --- a/examples/stable-diffusion/text_to_image_generation.py +++ b/examples/stable-diffusion/text_to_image_generation.py @@ -296,10 +296,6 @@ def main(): ) elif args.scheduler == "ddim": scheduler = GaudiDDIMScheduler.from_pretrained(args.model_name_or_path, subfolder="scheduler", **kwargs) - elif args.scheduler == "flow_match_euler_discrete": - scheduler = GaudiFlowMatchEulerDiscreteScheduler.from_pretrained( - args.model_name_or_path, subfolder="scheduler", **kwargs - ) else: scheduler = None diff --git a/optimum/habana/diffusers/__init__.py b/optimum/habana/diffusers/__init__.py index 3be8537f45..4ca2933e91 100644 --- a/optimum/habana/diffusers/__init__.py +++ b/optimum/habana/diffusers/__init__.py @@ -21,4 +21,4 @@ from 
.pipelines.stable_diffusion_xl.pipeline_stable_diffusion_xl_inpaint import GaudiStableDiffusionXLInpaintPipeline from .pipelines.stable_video_diffusion.pipeline_stable_video_diffusion import GaudiStableVideoDiffusionPipeline from .pipelines.text_to_video_synthesis.pipeline_text_to_video_synth import GaudiTextToVideoSDPipeline -from .schedulers import GaudiDDIMScheduler, GaudiEulerAncestralDiscreteScheduler, GaudiEulerDiscreteScheduler, GaudiFlowMatchEulerDiscreteScheduler +from .schedulers import GaudiDDIMScheduler, GaudiEulerAncestralDiscreteScheduler, GaudiEulerDiscreteScheduler diff --git a/optimum/habana/diffusers/pipelines/flux/pipeline_flux.py b/optimum/habana/diffusers/pipelines/flux/pipeline_flux.py index fe7380dbeb..9d9caece02 100644 --- a/optimum/habana/diffusers/pipelines/flux/pipeline_flux.py +++ b/optimum/habana/diffusers/pipelines/flux/pipeline_flux.py @@ -21,12 +21,12 @@ import PIL.Image import torch - from transformers import CLIPTextModel, CLIPTokenizer, T5EncoderModel, T5TokenizerFast from diffusers.utils import BaseOutput, replace_example_docstring from diffusers.models.autoencoders import AutoencoderKL from diffusers.models.transformers import FluxTransformer2DModel +from diffusers.schedulers import FlowMatchEulerDiscreteScheduler from diffusers.pipelines.flux.pipeline_flux import FluxPipeline, calculate_shift, retrieve_timesteps from optimum.utils import logging @@ -34,7 +34,6 @@ from ....transformers.gaudi_configuration import GaudiConfig from ....utils import HabanaProfile, speed_metrics, warmup_inference_steps_time_adjustment from ..pipeline_utils import GaudiDiffusionPipeline -from ...schedulers import GaudiFlowMatchEulerDiscreteScheduler logger = logging.get_logger(__name__) # pylint: disable=invalid-name @@ -110,7 +109,7 @@ class GaudiFluxPipeline(GaudiDiffusionPipeline, FluxPipeline): def __init__( self, - scheduler: GaudiFlowMatchEulerDiscreteScheduler, + scheduler: FlowMatchEulerDiscreteScheduler, vae: AutoencoderKL, text_encoder: CLIPTextModel, tokenizer: CLIPTokenizer, @@ -140,6 +139,9 @@ def __init__( transformer=transformer, ) self.to(self._device) + if use_habana: + import habana_frameworks.torch as ht + self.ht = ht if use_hpu_graphs: transformer = self.ht.hpu.wrap_in_hpu_graph(transformer) @@ -400,7 +402,7 @@ def __call__( if profiling_warmup_steps and profiling_steps: torch.hpu.synchronize() hb_profiler.step() - self.htcore.mark_step(sync=True) + self.ht.core.mark_step(sync=True) hb_profiler.stop() # clac acc time `end - t1` diff --git a/optimum/habana/diffusers/pipelines/pipeline_utils.py b/optimum/habana/diffusers/pipelines/pipeline_utils.py index 6e659edff4..7f36b90ae4 100644 --- a/optimum/habana/diffusers/pipelines/pipeline_utils.py +++ b/optimum/habana/diffusers/pipelines/pipeline_utils.py @@ -55,7 +55,6 @@ "optimum.habana.diffusers.schedulers": { "GaudiDDIMScheduler": ["save_pretrained", "from_pretrained"], "GaudiEulerDiscreteScheduler": ["save_pretrained", "from_pretrained"], - "GaudiFlowMatchEulerDiscreteScheduler": ["save_pretrained", "from_pretrained"], "GaudiEulerAncestralDiscreteScheduler": ["save_pretrained", "from_pretrained"], }, } diff --git a/optimum/habana/diffusers/schedulers/__init__.py b/optimum/habana/diffusers/schedulers/__init__.py index 48bf0bd8e9..37eb80b1a6 100644 --- a/optimum/habana/diffusers/schedulers/__init__.py +++ b/optimum/habana/diffusers/schedulers/__init__.py @@ -1,4 +1,3 @@ from .scheduling_ddim import GaudiDDIMScheduler from .scheduling_euler_ancestral_discrete import GaudiEulerAncestralDiscreteScheduler from 
.scheduling_euler_discrete import GaudiEulerDiscreteScheduler -from .scheduling_flow_mactch_euler_discrete import GaudiFlowMatchEulerDiscreteScheduler diff --git a/optimum/habana/diffusers/schedulers/scheduling_flow_mactch_euler_discrete.py b/optimum/habana/diffusers/schedulers/scheduling_flow_mactch_euler_discrete.py deleted file mode 100644 index ccc597fa07..0000000000 --- a/optimum/habana/diffusers/schedulers/scheduling_flow_mactch_euler_discrete.py +++ /dev/null @@ -1,25 +0,0 @@ -from diffusers.schedulers import FlowMatchEulerDiscreteScheduler - - -class GaudiFlowMatchEulerDiscreteScheduler(FlowMatchEulerDiscreteScheduler): - # TODO: overwrite orginal func with following one to fix dyn error in gaudi lazy mode - def index_for_timestep(self, timestep, schedule_timesteps=None): - if schedule_timesteps is None: - schedule_timesteps = self.timesteps - - # indices = (schedule_timesteps == timestep).nonzero() - - # The sigma index that is taken for the **very** first `step` - # is always the second index (or the last index if there is only 1) - # This way we can ensure we don't accidentally skip a sigma in - # case we start in the middle of the denoising schedule (e.g. for image-to-image) - # pos = 1 if len(indices) > 1 else 0 - - # return indices[pos].item() - - masked = (schedule_timesteps == timestep) - tmp = masked.cumsum(dim=0) - pos = (tmp == 0).sum().item() - if masked.sum() > 1: - pos += (tmp == 1).sum().item() - return pos From f26795889b64166b3e613e79cb327c9830d39ee9 Mon Sep 17 00:00:00 2001 From: baocheny Date: Sun, 29 Sep 2024 10:49:28 +0800 Subject: [PATCH 19/30] fix import error --- examples/stable-diffusion/text_to_image_generation.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/examples/stable-diffusion/text_to_image_generation.py b/examples/stable-diffusion/text_to_image_generation.py index 67360aec3c..69b7f3a897 100755 --- a/examples/stable-diffusion/text_to_image_generation.py +++ b/examples/stable-diffusion/text_to_image_generation.py @@ -25,8 +25,7 @@ from optimum.habana.diffusers import ( GaudiDDIMScheduler, GaudiEulerAncestralDiscreteScheduler, - GaudiEulerDiscreteScheduler, - GaudiFlowMatchEulerDiscreteScheduler + GaudiEulerDiscreteScheduler ) from optimum.habana.utils import set_seed From 9bcc65f1070b51173060bca2914b3a14512bcf00 Mon Sep 17 00:00:00 2001 From: Daniel Socek Date: Tue, 1 Oct 2024 14:57:52 +0000 Subject: [PATCH 20/30] Fix timing issue with batching Signed-off-by: Daniel Socek --- .../diffusers/pipelines/flux/pipeline_flux.py | 120 +++++------------- 1 file changed, 35 insertions(+), 85 deletions(-) diff --git a/optimum/habana/diffusers/pipelines/flux/pipeline_flux.py b/optimum/habana/diffusers/pipelines/flux/pipeline_flux.py index 999a15bdcc..dd180da8b1 100644 --- a/optimum/habana/diffusers/pipelines/flux/pipeline_flux.py +++ b/optimum/habana/diffusers/pipelines/flux/pipeline_flux.py @@ -12,29 +12,27 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-import time import math +import time from dataclasses import dataclass from typing import Any, Callable, Dict, List, Optional, Union import numpy as np import PIL.Image - import torch - -from transformers import CLIPTextModel, CLIPTokenizer, T5EncoderModel, T5TokenizerFast - -from diffusers.utils import BaseOutput, replace_example_docstring from diffusers.models.autoencoders import AutoencoderKL from diffusers.models.transformers import FluxTransformer2DModel from diffusers.pipelines.flux.pipeline_flux import FluxPipeline, calculate_shift, retrieve_timesteps +from diffusers.utils import BaseOutput, replace_example_docstring +from transformers import CLIPTextModel, CLIPTokenizer, T5EncoderModel, T5TokenizerFast from optimum.utils import logging from ....transformers.gaudi_configuration import GaudiConfig from ....utils import HabanaProfile, speed_metrics, warmup_inference_steps_time_adjustment -from ..pipeline_utils import GaudiDiffusionPipeline from ...schedulers import GaudiFlowMatchEulerDiscreteScheduler +from ..pipeline_utils import GaudiDiffusionPipeline + logger = logging.get_logger(__name__) # pylint: disable=invalid-name @@ -143,19 +141,12 @@ def __init__( self.to(self._device) if use_hpu_graphs: from habana_frameworks.torch.hpu import wrap_in_hpu_graph - transformer = wrap_in_hpu_graph(transformer) + transformer = wrap_in_hpu_graph(transformer) @classmethod def _split_inputs_into_batches( - cls, - batch_size, - latents, - prompt_embeds, - pooled_prompt_embeds, - text_ids, - latent_image_ids, - guidance + cls, batch_size, latents, prompt_embeds, pooled_prompt_embeds, text_ids, latent_image_ids, guidance ): # Use torch.split to generate num_batches batches of size batch_size latents_batches = list(torch.split(latents, batch_size)) @@ -222,8 +213,15 @@ def _split_inputs_into_batches( latent_image_ids_batches = torch.stack(latent_image_ids_batches) guidance_batches = torch.stack(guidance_batches) - return latents_batches, prompt_embeds_batches, pooled_prompt_embeds_batches, text_ids_batches, latent_image_ids_batches, guidance_batches, num_dummy_samples - + return ( + latents_batches, + prompt_embeds_batches, + pooled_prompt_embeds_batches, + text_ids_batches, + latent_image_ids_batches, + guidance_batches, + num_dummy_samples, + ) @torch.no_grad() @replace_example_docstring(EXAMPLE_DOC_STRING) @@ -250,7 +248,7 @@ def __call__( max_sequence_length: int = 512, profiling_warmup_steps: Optional[int] = 0, profiling_steps: Optional[int] = 0, - **kwargs + **kwargs, ): r""" Adapted from https://github.com/huggingface/diffusers/blob/main/src/diffusers/pipelines/flux/pipeline_flux.py#L531 @@ -326,23 +324,6 @@ def __call__( is True, otherwise a `tuple`. When returning a tuple, the first element is a list with the generated images. 
""" - - callback = kwargs.pop("callback", None) - callback_steps = kwargs.pop("callback_steps", None) - - if callback is not None: - deprecate( - "callback", - "1.0.0", - "Passing `callback` as an input argument to `__call__` is deprecated, consider use `callback_on_step_end`", - ) - if callback_steps is not None: - deprecate( - "callback_steps", - "1.0.0", - "Passing `callback_steps` as an input argument to `__call__` is deprecated, consider use `callback_on_step_end`", - ) - import habana_frameworks.torch as ht import habana_frameworks.torch.core as htcore @@ -350,15 +331,18 @@ def __call__( if quant_mode == "quantize-mixed": import copy + transformer_bf16 = copy.deepcopy(self.transformer).to(self._execution_device) if quant_mode == "measure" or quant_mode.startswith("quantize"): import os - quant_config_path = os.getenv('QUANT_CONFIG') + + quant_config_path = os.getenv("QUANT_CONFIG") htcore.hpu_set_env() from neural_compressor.torch.quantization import FP8Config, convert, prepare + config = FP8Config.from_json_file(quant_config_path) if config.measure: self.transformer = prepare(self.transformer, config) @@ -442,7 +426,6 @@ def __call__( sigmas, mu=mu, ) - num_warmup_steps = max(len(timesteps) - num_inference_steps * self.scheduler.order, 0) self._num_timesteps = len(timesteps) # handle guidance @@ -483,15 +466,9 @@ def __call__( text_ids_batches, latent_image_ids_batches, guidance_batches, - num_dummy_samples + num_dummy_samples, ) = self._split_inputs_into_batches( - batch_size, - latents, - prompt_embeds, - pooled_prompt_embeds, - text_ids, - latent_image_ids, - guidance + batch_size, latents, prompt_embeds, pooled_prompt_embeds, text_ids, latent_image_ids, guidance ) outputs = { @@ -500,15 +477,11 @@ def __call__( # 6. Denoising loop for j in range(num_batches): - # The throughput is calculated from the 4th iteration # because compilation occurs in the first 2-3 iterations if j == throughput_warmup_steps: ht.hpu.synchronize() t1 = time.time() - if use_warmup_inference_steps: - ht.hpu.synchronize() - t0_inf = time.time() latents_batch = latents_batches[0] latents_batches = torch.roll(latents_batches, shifts=-1, dims=0) @@ -537,13 +510,12 @@ def __call__( print(f"Use BF16 Transformer at steps {quant_mixed_step} to {len(timesteps) - 1}") for i in self.progress_bar(range(len(timesteps))): - if use_warmup_inference_steps and i == throughput_warmup_steps: + if use_warmup_inference_steps and i == throughput_warmup_steps and j == num_batches - 1: ht.hpu.synchronize() - t1_inf = time.time() - t1 += t1_inf - t0_inf + t1 = time.time() if self.interrupt: - continue + continue timestep = timesteps[0] timesteps = torch.roll(timesteps, shifts=-1, dims=0) @@ -585,35 +557,8 @@ def __call__( # some platforms (eg. 
apple mps) misbehave due to a pytorch bug: https://github.com/pytorch/pytorch/pull/99272 latents_batch = latents_batch.to(latents_dtype) - if callback_on_step_end is not None: - callback_kwargs = {} - for k in callback_on_step_end_tensor_inputs: - callback_kwargs[k] = locals()[k] - callback_outputs = callback_on_step_end(self, i, timestep, callback_kwargs) - - latents_batch = callback_outputs.pop("latents", latents_batch) - - _prompt_embeds = callback_outputs.pop("prompt_embeds", None) - if _prompt_embeds is not None and _negative_prompt_embeds is not None: - text_embeddings_batch = torch.cat([_negative_prompt_embeds, _prompt_embeds]) - _pooled_prompt_embeds = callback_outputs.pop("pooled_prompt_embeds", None) - if _pooled_prompt_embeds is not None and _negative_pooled_prompt_embeds is not None: - pooled_prompt_embeddings_batch = torch.cat([_negative_pooled_prompt_embeds, _pooled_prompt_embeds]) - - - # call the callback, if provided - if i == len(timesteps) - 1 or ((i + 1) > num_warmup_steps and (i + 1) % self.scheduler.order == 0): - if callback is not None and i % callback_steps == 0: - step_idx = i // getattr(self.scheduler, "order", 1) - callback(step_idx, timestep, latents) - hb_profiler.step() - #htcore.mark_step(sync=True) - - if use_warmup_inference_steps: - t1 = warmup_inference_steps_time_adjustment( - t1, t1_inf, num_inference_steps, throughput_warmup_steps - ) + # htcore.mark_step(sync=True) if not output_type == "latent": latents_batch = self._unpack_latents(latents_batch, height, width, self.vae_scale_factor) @@ -624,24 +569,29 @@ def __call__( image = latents_batch outputs["images"].append(image) - #htcore.mark_step(sync=True) + # htcore.mark_step(sync=True) # 7. Stage after denoising hb_profiler.stop() if quant_mode == "measure": from neural_compressor.torch.quantization import finalize_calibration + finalize_calibration(self.transformer) ht.hpu.synchronize() speed_metrics_prefix = "generation" + if use_warmup_inference_steps: + t1 = warmup_inference_steps_time_adjustment(t1, t1, num_inference_steps, throughput_warmup_steps) speed_measures = speed_metrics( split=speed_metrics_prefix, start_time=t0, - num_samples=num_batches * batch_size + num_samples=batch_size if t1 == t0 or use_warmup_inference_steps else (num_batches - throughput_warmup_steps) * batch_size, - num_steps=num_batches * batch_size * num_inference_steps, + num_steps=batch_size * num_inference_steps + if use_warmup_inference_steps + else (num_batches - throughput_warmup_steps) * batch_size * num_inference_steps, start_time_after_warmup=t1, ) logger.info(f"Speed metrics: {speed_measures}") From 1144815b12047292c8185b0c8ba62dbf3d2ab40a Mon Sep 17 00:00:00 2001 From: Sergey Plotnikov Date: Wed, 9 Oct 2024 14:29:12 -0700 Subject: [PATCH 21/30] Add FusedSDPA --- .../diffusers/pipelines/flux/pipeline_flux.py | 186 ++++++++++++++++++ 1 file changed, 186 insertions(+) diff --git a/optimum/habana/diffusers/pipelines/flux/pipeline_flux.py b/optimum/habana/diffusers/pipelines/flux/pipeline_flux.py index dd180da8b1..571a435f4e 100644 --- a/optimum/habana/diffusers/pipelines/flux/pipeline_flux.py +++ b/optimum/habana/diffusers/pipelines/flux/pipeline_flux.py @@ -73,6 +73,186 @@ class GaudiFluxPipelineOutput(BaseOutput): ``` """ +#GaudiFluxSingleAttnProcessor2_0 and GaudiFluxAttnProcessor2_0 are based on FluxSingleAttnProcessor2_0 and FluxAttnProcessor2_0 +#from //github.com/huggingface/diffusers/blob/v0.30.3/src/diffusers/models/attention_processor.py and have been +#modified to support FusedSDPA +import 
torch.nn.functional as F +from diffusers.models.attention_processor import Attention + +def apply_rope(xq, xk, freqs_cis): + xq_ = xq.float().reshape(*xq.shape[:-1], -1, 1, 2) + xk_ = xk.float().reshape(*xk.shape[:-1], -1, 1, 2) + xq_out = freqs_cis[..., 0] * xq_[..., 0] + freqs_cis[..., 1] * xq_[..., 1] + xk_out = freqs_cis[..., 0] * xk_[..., 0] + freqs_cis[..., 1] * xk_[..., 1] + return xq_out.reshape(*xq.shape).type_as(xq), xk_out.reshape(*xk.shape).type_as(xk) + +class GaudiFluxSingleAttnProcessor2_0: + r""" + Processor for implementing scaled dot-product attention (enabled by default if you're using PyTorch 2.0). + """ + + def __init__(self): + if not hasattr(F, "scaled_dot_product_attention"): + raise ImportError("AttnProcessor2_0 requires PyTorch 2.0, to use it, please upgrade PyTorch to 2.0.") + + def __call__( + self, + attn: Attention, + hidden_states: torch.Tensor, + encoder_hidden_states: Optional[torch.Tensor] = None, + attention_mask: Optional[torch.FloatTensor] = None, + image_rotary_emb: Optional[torch.Tensor] = None, + ) -> torch.Tensor: + input_ndim = hidden_states.ndim + + if input_ndim == 4: + batch_size, channel, height, width = hidden_states.shape + hidden_states = hidden_states.view(batch_size, channel, height * width).transpose(1, 2) + + batch_size, _, _ = hidden_states.shape if encoder_hidden_states is None else encoder_hidden_states.shape + + query = attn.to_q(hidden_states) + if encoder_hidden_states is None: + encoder_hidden_states = hidden_states + + key = attn.to_k(encoder_hidden_states) + value = attn.to_v(encoder_hidden_states) + + inner_dim = key.shape[-1] + head_dim = inner_dim // attn.heads + + query = query.view(batch_size, -1, attn.heads, head_dim).transpose(1, 2) + + key = key.view(batch_size, -1, attn.heads, head_dim).transpose(1, 2) + value = value.view(batch_size, -1, attn.heads, head_dim).transpose(1, 2) + + if attn.norm_q is not None: + query = attn.norm_q(query) + if attn.norm_k is not None: + key = attn.norm_k(key) + + # Apply RoPE if needed + if image_rotary_emb is not None: + # YiYi to-do: update uising apply_rotary_emb + # from ..embeddings import apply_rotary_emb + # query = apply_rotary_emb(query, image_rotary_emb) + # key = apply_rotary_emb(key, image_rotary_emb) + query, key = apply_rope(query, key, image_rotary_emb) + + # the output of sdp = (batch, num_heads, seq_len, head_dim) + # TODO: add support for attn.scale when we move to Torch 2.1 + from habana_frameworks.torch.hpex.kernels import FusedSDPA + import habana_frameworks.torch.hpu as ht + hidden_states = FusedSDPA.apply(query, key, value, None, 0.0, False, None, 'fast', None) + + hidden_states = hidden_states.transpose(1, 2).reshape(batch_size, -1, attn.heads * head_dim) + hidden_states = hidden_states.to(query.dtype) + + if input_ndim == 4: + hidden_states = hidden_states.transpose(-1, -2).reshape(batch_size, channel, height, width) + + return hidden_states + + +class GaudiFluxAttnProcessor2_0: + """Attention processor used typically in processing the SD3-like self-attention projections.""" + + def __init__(self): + if not hasattr(F, "scaled_dot_product_attention"): + raise ImportError("FluxAttnProcessor2_0 requires PyTorch 2.0, to use it, please upgrade PyTorch to 2.0.") + + def __call__( + self, + attn: Attention, + hidden_states: torch.FloatTensor, + encoder_hidden_states: torch.FloatTensor = None, + attention_mask: Optional[torch.FloatTensor] = None, + image_rotary_emb: Optional[torch.Tensor] = None, + ) -> torch.FloatTensor: + input_ndim = hidden_states.ndim + if input_ndim 
== 4: + batch_size, channel, height, width = hidden_states.shape + hidden_states = hidden_states.view(batch_size, channel, height * width).transpose(1, 2) + context_input_ndim = encoder_hidden_states.ndim + if context_input_ndim == 4: + batch_size, channel, height, width = encoder_hidden_states.shape + encoder_hidden_states = encoder_hidden_states.view(batch_size, channel, height * width).transpose(1, 2) + + batch_size = encoder_hidden_states.shape[0] + + # `sample` projections. + query = attn.to_q(hidden_states) + key = attn.to_k(hidden_states) + value = attn.to_v(hidden_states) + + inner_dim = key.shape[-1] + head_dim = inner_dim // attn.heads + + query = query.view(batch_size, -1, attn.heads, head_dim).transpose(1, 2) + key = key.view(batch_size, -1, attn.heads, head_dim).transpose(1, 2) + value = value.view(batch_size, -1, attn.heads, head_dim).transpose(1, 2) + + if attn.norm_q is not None: + query = attn.norm_q(query) + if attn.norm_k is not None: + key = attn.norm_k(key) + + # `context` projections. + encoder_hidden_states_query_proj = attn.add_q_proj(encoder_hidden_states) + encoder_hidden_states_key_proj = attn.add_k_proj(encoder_hidden_states) + encoder_hidden_states_value_proj = attn.add_v_proj(encoder_hidden_states) + + encoder_hidden_states_query_proj = encoder_hidden_states_query_proj.view( + batch_size, -1, attn.heads, head_dim + ).transpose(1, 2) + encoder_hidden_states_key_proj = encoder_hidden_states_key_proj.view( + batch_size, -1, attn.heads, head_dim + ).transpose(1, 2) + encoder_hidden_states_value_proj = encoder_hidden_states_value_proj.view( + batch_size, -1, attn.heads, head_dim + ).transpose(1, 2) + + if attn.norm_added_q is not None: + encoder_hidden_states_query_proj = attn.norm_added_q(encoder_hidden_states_query_proj) + if attn.norm_added_k is not None: + encoder_hidden_states_key_proj = attn.norm_added_k(encoder_hidden_states_key_proj) + + # attention + query = torch.cat([encoder_hidden_states_query_proj, query], dim=2) + key = torch.cat([encoder_hidden_states_key_proj, key], dim=2) + value = torch.cat([encoder_hidden_states_value_proj, value], dim=2) + + if image_rotary_emb is not None: + # YiYi to-do: update uising apply_rotary_emb + # from ..embeddings import apply_rotary_emb + # query = apply_rotary_emb(query, image_rotary_emb) + # key = apply_rotary_emb(key, image_rotary_emb) + query, key = apply_rope(query, key, image_rotary_emb) + + from habana_frameworks.torch.hpex.kernels import FusedSDPA + import habana_frameworks.torch.hpu as ht + hidden_states = FusedSDPA.apply(query, key, value, None, 0.0, False, None, 'fast', None) + + hidden_states = hidden_states.transpose(1, 2).reshape(batch_size, -1, attn.heads * head_dim) + hidden_states = hidden_states.to(query.dtype) + + encoder_hidden_states, hidden_states = ( + hidden_states[:, : encoder_hidden_states.shape[1]], + hidden_states[:, encoder_hidden_states.shape[1] :], + ) + + # linear proj + hidden_states = attn.to_out[0](hidden_states) + # dropout + hidden_states = attn.to_out[1](hidden_states) + encoder_hidden_states = attn.to_add_out(encoder_hidden_states) + + if input_ndim == 4: + hidden_states = hidden_states.transpose(-1, -2).reshape(batch_size, channel, height, width) + if context_input_ndim == 4: + encoder_hidden_states = encoder_hidden_states.transpose(-1, -2).reshape(batch_size, channel, height, width) + + return hidden_states, encoder_hidden_states class GaudiFluxPipeline(GaudiDiffusionPipeline, FluxPipeline): r""" @@ -138,6 +318,12 @@ def __init__( tokenizer_2=tokenizer_2, 
transformer=transformer, ) + + for block in self.transformer.single_transformer_blocks: + block.attn.processor = GaudiFluxSingleAttnProcessor2_0() + for block in self.transformer.transformer_blocks: + block.attn.processor = GaudiFluxAttnProcessor2_0() + self.to(self._device) if use_hpu_graphs: from habana_frameworks.torch.hpu import wrap_in_hpu_graph From 479dc96c022774e4454cb7fcfc23d1aea7748ce8 Mon Sep 17 00:00:00 2001 From: baocheny Date: Thu, 10 Oct 2024 12:39:18 +0800 Subject: [PATCH 22/30] use latest attn rope --- .../diffusers/models/attention_processor.py | 190 +++++++++++++++++- .../diffusers/pipelines/flux/pipeline_flux.py | 189 +---------------- 2 files changed, 191 insertions(+), 188 deletions(-) diff --git a/optimum/habana/diffusers/models/attention_processor.py b/optimum/habana/diffusers/models/attention_processor.py index b0461a272b..2202f517ec 100755 --- a/optimum/habana/diffusers/models/attention_processor.py +++ b/optimum/habana/diffusers/models/attention_processor.py @@ -19,6 +19,7 @@ import torch import torch.nn.functional as F from diffusers.models.attention_processor import Attention +from diffusers.models.embeddings import apply_rotary_emb from diffusers.utils import USE_PEFT_BACKEND, logging from diffusers.utils.import_utils import is_xformers_available from torch import nn @@ -186,4 +187,191 @@ def __call__( return hidden_states -AttentionProcessor = Union[AttnProcessor2_0,] +class GaudiFluxAttnProcessor2_0: + """Attention processor used typically in processing the SD3-like self-attention projections.""" + + def __init__(self): + if not hasattr(F, "scaled_dot_product_attention"): + raise ImportError("FluxAttnProcessor2_0 requires PyTorch 2.0, to use it, please upgrade PyTorch to 2.0.") + + def __call__( + self, + attn: Attention, + hidden_states: torch.FloatTensor, + encoder_hidden_states: torch.FloatTensor = None, + attention_mask: Optional[torch.FloatTensor] = None, + image_rotary_emb: Optional[torch.Tensor] = None, + ) -> torch.FloatTensor: + batch_size, _, _ = hidden_states.shape if encoder_hidden_states is None else encoder_hidden_states.shape + + # `sample` projections. + query = attn.to_q(hidden_states) + key = attn.to_k(hidden_states) + value = attn.to_v(hidden_states) + + inner_dim = key.shape[-1] + head_dim = inner_dim // attn.heads + + query = query.view(batch_size, -1, attn.heads, head_dim).transpose(1, 2) + key = key.view(batch_size, -1, attn.heads, head_dim).transpose(1, 2) + value = value.view(batch_size, -1, attn.heads, head_dim).transpose(1, 2) + + if attn.norm_q is not None: + query = attn.norm_q(query) + if attn.norm_k is not None: + key = attn.norm_k(key) + + # the attention in FluxSingleTransformerBlock does not use `encoder_hidden_states` + if encoder_hidden_states is not None: + # `context` projections. 
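+            # The `context` projections below follow the upstream diffusers
+            # FluxAttnProcessor2_0 layout unchanged; the Gaudi-specific part of this
+            # processor is the attention call further down, where the eager
+            #     F.scaled_dot_product_attention(query, key, value, dropout_p=0.0, is_causal=False)
+            # is swapped for the HPU fused kernel
+            #     FusedSDPA.apply(query, key, value, None, 0.0, False, None, 'fast', None)
+            # (the positional arguments are assumed to map to attn_mask=None, dropout_p=0.0,
+            # is_causal=False, scale=None and softmax_mode='fast').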
+ encoder_hidden_states_query_proj = attn.add_q_proj(encoder_hidden_states) + encoder_hidden_states_key_proj = attn.add_k_proj(encoder_hidden_states) + encoder_hidden_states_value_proj = attn.add_v_proj(encoder_hidden_states) + + encoder_hidden_states_query_proj = encoder_hidden_states_query_proj.view( + batch_size, -1, attn.heads, head_dim + ).transpose(1, 2) + encoder_hidden_states_key_proj = encoder_hidden_states_key_proj.view( + batch_size, -1, attn.heads, head_dim + ).transpose(1, 2) + encoder_hidden_states_value_proj = encoder_hidden_states_value_proj.view( + batch_size, -1, attn.heads, head_dim + ).transpose(1, 2) + + if attn.norm_added_q is not None: + encoder_hidden_states_query_proj = attn.norm_added_q(encoder_hidden_states_query_proj) + if attn.norm_added_k is not None: + encoder_hidden_states_key_proj = attn.norm_added_k(encoder_hidden_states_key_proj) + + # attention + query = torch.cat([encoder_hidden_states_query_proj, query], dim=2) + key = torch.cat([encoder_hidden_states_key_proj, key], dim=2) + value = torch.cat([encoder_hidden_states_value_proj, value], dim=2) + + if image_rotary_emb is not None: + + query = apply_rotary_emb(query, image_rotary_emb) + key = apply_rotary_emb(key, image_rotary_emb) + + # hidden_states = F.scaled_dot_product_attention(query, key, value, dropout_p=0.0, is_causal=False) + from habana_frameworks.torch.hpex.kernels import FusedSDPA + import habana_frameworks.torch.hpu as ht + hidden_states = FusedSDPA.apply(query, key, value, None, 0.0, False, None, 'fast', None) + + hidden_states = hidden_states.transpose(1, 2).reshape(batch_size, -1, attn.heads * head_dim) + hidden_states = hidden_states.to(query.dtype) + + if encoder_hidden_states is not None: + encoder_hidden_states, hidden_states = ( + hidden_states[:, : encoder_hidden_states.shape[1]], + hidden_states[:, encoder_hidden_states.shape[1]:], + ) + + # linear proj + hidden_states = attn.to_out[0](hidden_states) + # dropout + hidden_states = attn.to_out[1](hidden_states) + encoder_hidden_states = attn.to_add_out(encoder_hidden_states) + + return hidden_states, encoder_hidden_states + else: + return hidden_states + + +class GaudiFusedFluxAttnProcessor2_0: + """Attention processor used typically in processing the SD3-like self-attention projections.""" + + def __init__(self): + if not hasattr(F, "scaled_dot_product_attention"): + raise ImportError( + "FusedFluxAttnProcessor2_0 requires PyTorch 2.0, to use it, please upgrade PyTorch to 2.0." + ) + + def __call__( + self, + attn: Attention, + hidden_states: torch.FloatTensor, + encoder_hidden_states: torch.FloatTensor = None, + attention_mask: Optional[torch.FloatTensor] = None, + image_rotary_emb: Optional[torch.Tensor] = None, + ) -> torch.FloatTensor: + batch_size, _, _ = hidden_states.shape if encoder_hidden_states is None else encoder_hidden_states.shape + + # `sample` projections. + qkv = attn.to_qkv(hidden_states) + split_size = qkv.shape[-1] // 3 + query, key, value = torch.split(qkv, split_size, dim=-1) + + inner_dim = key.shape[-1] + head_dim = inner_dim // attn.heads + + query = query.view(batch_size, -1, attn.heads, head_dim).transpose(1, 2) + key = key.view(batch_size, -1, attn.heads, head_dim).transpose(1, 2) + value = value.view(batch_size, -1, attn.heads, head_dim).transpose(1, 2) + + if attn.norm_q is not None: + query = attn.norm_q(query) + if attn.norm_k is not None: + key = attn.norm_k(key) + + # the attention in FluxSingleTransformerBlock does not use `encoder_hidden_states` + # `context` projections. 
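+        # As with the sample stream above (attn.to_qkv), the context stream below reads a
+        # packed QKV projection (attn.to_added_qkv); both are assumed to exist only after the
+        # projections have been fused (typically via diffusers' Attention.fuse_projections()).
+        # Apart from that, the flow matches GaudiFluxAttnProcessor2_0, including the
+        # FusedSDPA-based attention call instead of F.scaled_dot_product_attention.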
+ if encoder_hidden_states is not None: + encoder_qkv = attn.to_added_qkv(encoder_hidden_states) + split_size = encoder_qkv.shape[-1] // 3 + ( + encoder_hidden_states_query_proj, + encoder_hidden_states_key_proj, + encoder_hidden_states_value_proj, + ) = torch.split(encoder_qkv, split_size, dim=-1) + + encoder_hidden_states_query_proj = encoder_hidden_states_query_proj.view( + batch_size, -1, attn.heads, head_dim + ).transpose(1, 2) + encoder_hidden_states_key_proj = encoder_hidden_states_key_proj.view( + batch_size, -1, attn.heads, head_dim + ).transpose(1, 2) + encoder_hidden_states_value_proj = encoder_hidden_states_value_proj.view( + batch_size, -1, attn.heads, head_dim + ).transpose(1, 2) + + if attn.norm_added_q is not None: + encoder_hidden_states_query_proj = attn.norm_added_q(encoder_hidden_states_query_proj) + if attn.norm_added_k is not None: + encoder_hidden_states_key_proj = attn.norm_added_k(encoder_hidden_states_key_proj) + + # attention + query = torch.cat([encoder_hidden_states_query_proj, query], dim=2) + key = torch.cat([encoder_hidden_states_key_proj, key], dim=2) + value = torch.cat([encoder_hidden_states_value_proj, value], dim=2) + + if image_rotary_emb is not None: + + query = apply_rotary_emb(query, image_rotary_emb) + key = apply_rotary_emb(key, image_rotary_emb) + + # hidden_states = F.scaled_dot_product_attention(query, key, value, dropout_p=0.0, is_causal=False) + from habana_frameworks.torch.hpex.kernels import FusedSDPA + import habana_frameworks.torch.hpu as ht + hidden_states = FusedSDPA.apply(query, key, value, None, 0.0, False, None, 'fast', None) + hidden_states = hidden_states.transpose(1, 2).reshape(batch_size, -1, attn.heads * head_dim) + hidden_states = hidden_states.to(query.dtype) + + if encoder_hidden_states is not None: + encoder_hidden_states, hidden_states = ( + hidden_states[:, : encoder_hidden_states.shape[1]], + hidden_states[:, encoder_hidden_states.shape[1]:], + ) + + # linear proj + hidden_states = attn.to_out[0](hidden_states) + # dropout + hidden_states = attn.to_out[1](hidden_states) + encoder_hidden_states = attn.to_add_out(encoder_hidden_states) + + return hidden_states, encoder_hidden_states + else: + return hidden_states + + +AttentionProcessor = Union[AttnProcessor2_0, GaudiFluxAttnProcessor2_0, GaudiFusedFluxAttnProcessor2_0] diff --git a/optimum/habana/diffusers/pipelines/flux/pipeline_flux.py b/optimum/habana/diffusers/pipelines/flux/pipeline_flux.py index 07abd518fb..d4e4968cd0 100644 --- a/optimum/habana/diffusers/pipelines/flux/pipeline_flux.py +++ b/optimum/habana/diffusers/pipelines/flux/pipeline_flux.py @@ -36,6 +36,7 @@ from ....transformers.gaudi_configuration import GaudiConfig from ....utils import HabanaProfile, speed_metrics, warmup_inference_steps_time_adjustment from ..pipeline_utils import GaudiDiffusionPipeline +from ...models.attention_processor import GaudiFluxAttnProcessor2_0, GaudiFusedFluxAttnProcessor2_0 logger = logging.get_logger(__name__) # pylint: disable=invalid-name @@ -76,188 +77,6 @@ class GaudiFluxPipelineOutput(BaseOutput): ``` """ -# GaudiFluxSingleAttnProcessor2_0 and GaudiFluxAttnProcessor2_0 are based on FluxSingleAttnProcessor2_0 and FluxAttnProcessor2_0 -# from //github.com/huggingface/diffusers/blob/v0.30.3/src/diffusers/models/attention_processor.py and have been -# modified to support FusedSDPA - - -def apply_rope(xq, xk, freqs_cis): - xq_ = xq.float().reshape(*xq.shape[:-1], -1, 1, 2) - xk_ = xk.float().reshape(*xk.shape[:-1], -1, 1, 2) - xq_out = freqs_cis[..., 0] * xq_[..., 0] + 
freqs_cis[..., 1] * xq_[..., 1] - xk_out = freqs_cis[..., 0] * xk_[..., 0] + freqs_cis[..., 1] * xk_[..., 1] - return xq_out.reshape(*xq.shape).type_as(xq), xk_out.reshape(*xk.shape).type_as(xk) - - -class GaudiFluxSingleAttnProcessor2_0: - r""" - Processor for implementing scaled dot-product attention (enabled by default if you're using PyTorch 2.0). - """ - - def __init__(self): - if not hasattr(F, "scaled_dot_product_attention"): - raise ImportError("AttnProcessor2_0 requires PyTorch 2.0, to use it, please upgrade PyTorch to 2.0.") - - def __call__( - self, - attn: Attention, - hidden_states: torch.Tensor, - encoder_hidden_states: Optional[torch.Tensor] = None, - attention_mask: Optional[torch.FloatTensor] = None, - image_rotary_emb: Optional[torch.Tensor] = None, - ) -> torch.Tensor: - input_ndim = hidden_states.ndim - - if input_ndim == 4: - batch_size, channel, height, width = hidden_states.shape - hidden_states = hidden_states.view(batch_size, channel, height * width).transpose(1, 2) - - batch_size, _, _ = hidden_states.shape if encoder_hidden_states is None else encoder_hidden_states.shape - - query = attn.to_q(hidden_states) - if encoder_hidden_states is None: - encoder_hidden_states = hidden_states - - key = attn.to_k(encoder_hidden_states) - value = attn.to_v(encoder_hidden_states) - - inner_dim = key.shape[-1] - head_dim = inner_dim // attn.heads - - query = query.view(batch_size, -1, attn.heads, head_dim).transpose(1, 2) - - key = key.view(batch_size, -1, attn.heads, head_dim).transpose(1, 2) - value = value.view(batch_size, -1, attn.heads, head_dim).transpose(1, 2) - - if attn.norm_q is not None: - query = attn.norm_q(query) - if attn.norm_k is not None: - key = attn.norm_k(key) - - # Apply RoPE if needed - if image_rotary_emb is not None: - # YiYi to-do: update uising apply_rotary_emb - # from ..embeddings import apply_rotary_emb - # query = apply_rotary_emb(query, image_rotary_emb) - # key = apply_rotary_emb(key, image_rotary_emb) - query, key = apply_rope(query, key, image_rotary_emb) - - # the output of sdp = (batch, num_heads, seq_len, head_dim) - # TODO: add support for attn.scale when we move to Torch 2.1 - from habana_frameworks.torch.hpex.kernels import FusedSDPA - import habana_frameworks.torch.hpu as ht - hidden_states = FusedSDPA.apply(query, key, value, None, 0.0, False, None, 'fast', None) - - hidden_states = hidden_states.transpose(1, 2).reshape(batch_size, -1, attn.heads * head_dim) - hidden_states = hidden_states.to(query.dtype) - - if input_ndim == 4: - hidden_states = hidden_states.transpose(-1, -2).reshape(batch_size, channel, height, width) - - return hidden_states - - -class GaudiFluxAttnProcessor2_0: - """Attention processor used typically in processing the SD3-like self-attention projections.""" - - def __init__(self): - if not hasattr(F, "scaled_dot_product_attention"): - raise ImportError("FluxAttnProcessor2_0 requires PyTorch 2.0, to use it, please upgrade PyTorch to 2.0.") - - def __call__( - self, - attn: Attention, - hidden_states: torch.FloatTensor, - encoder_hidden_states: torch.FloatTensor = None, - attention_mask: Optional[torch.FloatTensor] = None, - image_rotary_emb: Optional[torch.Tensor] = None, - ) -> torch.FloatTensor: - input_ndim = hidden_states.ndim - if input_ndim == 4: - batch_size, channel, height, width = hidden_states.shape - hidden_states = hidden_states.view(batch_size, channel, height * width).transpose(1, 2) - context_input_ndim = encoder_hidden_states.ndim - if context_input_ndim == 4: - batch_size, channel, height, width = 
encoder_hidden_states.shape - encoder_hidden_states = encoder_hidden_states.view(batch_size, channel, height * width).transpose(1, 2) - - batch_size = encoder_hidden_states.shape[0] - - # `sample` projections. - query = attn.to_q(hidden_states) - key = attn.to_k(hidden_states) - value = attn.to_v(hidden_states) - - inner_dim = key.shape[-1] - head_dim = inner_dim // attn.heads - - query = query.view(batch_size, -1, attn.heads, head_dim).transpose(1, 2) - key = key.view(batch_size, -1, attn.heads, head_dim).transpose(1, 2) - value = value.view(batch_size, -1, attn.heads, head_dim).transpose(1, 2) - - if attn.norm_q is not None: - query = attn.norm_q(query) - if attn.norm_k is not None: - key = attn.norm_k(key) - - # `context` projections. - encoder_hidden_states_query_proj = attn.add_q_proj(encoder_hidden_states) - encoder_hidden_states_key_proj = attn.add_k_proj(encoder_hidden_states) - encoder_hidden_states_value_proj = attn.add_v_proj(encoder_hidden_states) - - encoder_hidden_states_query_proj = encoder_hidden_states_query_proj.view( - batch_size, -1, attn.heads, head_dim - ).transpose(1, 2) - encoder_hidden_states_key_proj = encoder_hidden_states_key_proj.view( - batch_size, -1, attn.heads, head_dim - ).transpose(1, 2) - encoder_hidden_states_value_proj = encoder_hidden_states_value_proj.view( - batch_size, -1, attn.heads, head_dim - ).transpose(1, 2) - - if attn.norm_added_q is not None: - encoder_hidden_states_query_proj = attn.norm_added_q(encoder_hidden_states_query_proj) - if attn.norm_added_k is not None: - encoder_hidden_states_key_proj = attn.norm_added_k(encoder_hidden_states_key_proj) - - # attention - query = torch.cat([encoder_hidden_states_query_proj, query], dim=2) - key = torch.cat([encoder_hidden_states_key_proj, key], dim=2) - value = torch.cat([encoder_hidden_states_value_proj, value], dim=2) - - if image_rotary_emb is not None: - # YiYi to-do: update uising apply_rotary_emb - # from ..embeddings import apply_rotary_emb - # query = apply_rotary_emb(query, image_rotary_emb) - # key = apply_rotary_emb(key, image_rotary_emb) - query, key = apply_rope(query, key, image_rotary_emb) - - from habana_frameworks.torch.hpex.kernels import FusedSDPA - import habana_frameworks.torch.hpu as ht - hidden_states = FusedSDPA.apply(query, key, value, None, 0.0, False, None, 'fast', None) - - hidden_states = hidden_states.transpose(1, 2).reshape(batch_size, -1, attn.heads * head_dim) - hidden_states = hidden_states.to(query.dtype) - - encoder_hidden_states, hidden_states = ( - hidden_states[:, : encoder_hidden_states.shape[1]], - hidden_states[:, encoder_hidden_states.shape[1]:], - ) - - # linear proj - hidden_states = attn.to_out[0](hidden_states) - # dropout - hidden_states = attn.to_out[1](hidden_states) - encoder_hidden_states = attn.to_add_out(encoder_hidden_states) - - if input_ndim == 4: - hidden_states = hidden_states.transpose(-1, -2).reshape(batch_size, channel, height, width) - if context_input_ndim == 4: - encoder_hidden_states = encoder_hidden_states.transpose(-1, -2).reshape(batch_size, channel, height, width) - - return hidden_states, encoder_hidden_states - - class GaudiFluxPipeline(GaudiDiffusionPipeline, FluxPipeline): r""" Adapted from https://github.com/huggingface/diffusers/blob/main/src/diffusers/pipelines/flux/pipeline_flux.py#L140 @@ -324,7 +143,7 @@ def __init__( ) for block in self.transformer.single_transformer_blocks: - block.attn.processor = GaudiFluxSingleAttnProcessor2_0() + block.attn.processor = GaudiFluxAttnProcessor2_0() for block in 
self.transformer.transformer_blocks: block.attn.processor = GaudiFluxAttnProcessor2_0() @@ -742,10 +561,6 @@ def __call__( latents_dtype = latents_batch.dtype latents_batch = self.scheduler.step(noise_pred, timestep, latents_batch, return_dict=False)[0] - if latents_batch.dtype != latents_dtype: - if torch.backends.mps.is_available(): - # some platforms (eg. apple mps) misbehave due to a pytorch bug: https://github.com/pytorch/pytorch/pull/99272 - latents_batch = latents_batch.to(latents_dtype) hb_profiler.step() # htcore.mark_step(sync=True) From 18c59609e9c910b4fdcd603c66ea06d17f295118 Mon Sep 17 00:00:00 2001 From: baocheny Date: Thu, 10 Oct 2024 15:53:52 +0800 Subject: [PATCH 23/30] fix scheduler --- examples/stable-diffusion/text_to_image_generation.py | 4 ---- 1 file changed, 4 deletions(-) diff --git a/examples/stable-diffusion/text_to_image_generation.py b/examples/stable-diffusion/text_to_image_generation.py index d14548b534..de796d177a 100755 --- a/examples/stable-diffusion/text_to_image_generation.py +++ b/examples/stable-diffusion/text_to_image_generation.py @@ -322,10 +322,6 @@ def main(): ) elif args.scheduler == "ddim": scheduler = GaudiDDIMScheduler.from_pretrained(args.model_name_or_path, subfolder="scheduler", **kwargs) - elif args.scheduler == "flow_match_euler_discrete": - scheduler = GaudiFlowMatchEulerDiscreteScheduler.from_pretrained( - args.model_name_or_path, subfolder="scheduler", **kwargs - ) else: scheduler = None From eee48c7d4d6a809e659e78c51343b0965d2b616b Mon Sep 17 00:00:00 2001 From: baocheny Date: Fri, 11 Oct 2024 11:05:42 +0800 Subject: [PATCH 24/30] add OenFLUX.1 --- examples/stable-diffusion/text_to_image_generation.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/examples/stable-diffusion/text_to_image_generation.py b/examples/stable-diffusion/text_to_image_generation.py index de796d177a..fed390fc12 100755 --- a/examples/stable-diffusion/text_to_image_generation.py +++ b/examples/stable-diffusion/text_to_image_generation.py @@ -303,7 +303,7 @@ def main(): # Select stable diffuson pipeline based on input sdxl_models = ["stable-diffusion-xl", "sdxl"] sd3_models = ["stable-diffusion-3"] - flux_models = ["FLUX.1-dev", "FLUX.1-schnell"] + flux_models = ["FLUX.1-dev", "FLUX.1-schnell","OpenFLUX.1"] sdxl = True if any(model in args.model_name_or_path for model in sdxl_models) else False sd3 = True if any(model in args.model_name_or_path for model in sd3_models) else False flux = True if any(model in args.model_name_or_path for model in flux_models) else False From c299caa4c0bd16a3e882b388cb27e32871bcd18b Mon Sep 17 00:00:00 2001 From: baocheny Date: Mon, 14 Oct 2024 11:34:58 +0800 Subject: [PATCH 25/30] fix errors --- .../diffusers/pipelines/flux/pipeline_flux.py | 46 ++++--------------- 1 file changed, 10 insertions(+), 36 deletions(-) diff --git a/optimum/habana/diffusers/pipelines/flux/pipeline_flux.py b/optimum/habana/diffusers/pipelines/flux/pipeline_flux.py index d4e4968cd0..7606b6a2b5 100644 --- a/optimum/habana/diffusers/pipelines/flux/pipeline_flux.py +++ b/optimum/habana/diffusers/pipelines/flux/pipeline_flux.py @@ -77,6 +77,7 @@ class GaudiFluxPipelineOutput(BaseOutput): ``` """ + class GaudiFluxPipeline(GaudiDiffusionPipeline, FluxPipeline): r""" Adapted from https://github.com/huggingface/diffusers/blob/main/src/diffusers/pipelines/flux/pipeline_flux.py#L140 @@ -154,20 +155,16 @@ def __init__( transformer = wrap_in_hpu_graph(transformer) @classmethod - def _split_inputs_into_batches( - cls, batch_size, latents, prompt_embeds, 
pooled_prompt_embeds, text_ids, latent_image_ids, guidance - ): + def _split_inputs_into_batches(cls, batch_size, latents, prompt_embeds, pooled_prompt_embeds, guidance): # Use torch.split to generate num_batches batches of size batch_size latents_batches = list(torch.split(latents, batch_size)) prompt_embeds_batches = list(torch.split(prompt_embeds, batch_size)) if pooled_prompt_embeds is not None: pooled_prompt_embeds_batches = list(torch.split(pooled_prompt_embeds, batch_size)) - if text_ids is not None: - text_ids_batches = list(torch.split(text_ids, batch_size)) - if latent_image_ids is not None: - latent_image_ids_batches = list(torch.split(latent_image_ids, batch_size)) if guidance is not None: guidance_batches = list(torch.split(guidance, batch_size)) + else: + guidance_batches = [torch.tensor(float('nan')),] * len(latents_batches) # If the last batch has less samples than batch_size, pad it with dummy samples num_dummy_samples = 0 @@ -193,20 +190,6 @@ def _split_inputs_into_batches( ) pooled_prompt_embeds_batches[-1] = torch.vstack(sequence_to_stack) - # Pad text_ids_batches if necessary - if text_ids is not None: - sequence_to_stack = (text_ids_batches[-1],) + tuple( - torch.zeros_like(text_ids_batches[-1][0][None, :]) for _ in range(num_dummy_samples) - ) - text_ids_batches[-1] = torch.vstack(sequence_to_stack) - - # Pad latent_image_ids if necessary - if latent_image_ids is not None: - sequence_to_stack = (latent_image_ids_batches[-1],) + tuple( - torch.zeros_like(latent_image_ids_batches[-1][0][None, :]) for _ in range(num_dummy_samples) - ) - latent_image_ids_batches[-1] = torch.vstack(sequence_to_stack) - # Pad guidance if necessary if guidance is not None: sequence_to_stack = (guidance_batches[-1],) + tuple( @@ -226,8 +209,6 @@ def _split_inputs_into_batches( latents_batches, prompt_embeds_batches, pooled_prompt_embeds_batches, - text_ids_batches, - latent_image_ids_batches, guidance_batches, num_dummy_samples, ) @@ -472,12 +453,10 @@ def __call__( latents_batches, text_embeddings_batches, pooled_prompt_embeddings_batches, - text_ids_batches, - latent_image_ids_batches, guidance_batches, num_dummy_samples, ) = self._split_inputs_into_batches( - batch_size, latents, prompt_embeds, pooled_prompt_embeds, text_ids, latent_image_ids, guidance + batch_size, latents, prompt_embeds, pooled_prompt_embeds, guidance ) outputs = { @@ -498,11 +477,7 @@ def __call__( text_embeddings_batches = torch.roll(text_embeddings_batches, shifts=-1, dims=0) pooled_prompt_embeddings_batch = pooled_prompt_embeddings_batches[0] pooled_prompt_embeddings_batches = torch.roll(pooled_prompt_embeddings_batches, shifts=-1, dims=0) - text_ids_batch = text_ids_batches[0] - text_ids_batches = torch.roll(text_ids_batches, shifts=-1, dims=0) - latent_image_ids_batch = latent_image_ids_batches[0] - latent_image_ids_batches = torch.roll(latent_image_ids_batches, shifts=-1, dims=0) - guidance_batch = guidance_batches[0] + guidance_batch = None if guidance_batches[0].isnan() else guidance_batches[0] guidance_batches = torch.roll(guidance_batches, shifts=-1, dims=0) if hasattr(self.scheduler, "_init_step_index"): @@ -539,8 +514,8 @@ def __call__( guidance=guidance_batch, pooled_projections=pooled_prompt_embeddings_batch, encoder_hidden_states=text_embeddings_batch, - txt_ids=text_ids_batch, - img_ids=latent_image_ids_batch, + txt_ids=text_ids, + img_ids=latent_image_ids, joint_attention_kwargs=self.joint_attention_kwargs, return_dict=False, )[0] @@ -551,8 +526,8 @@ def __call__( guidance=guidance_batch, 
pooled_projections=pooled_prompt_embeddings_batch, encoder_hidden_states=text_embeddings_batch, - txt_ids=text_ids_batch, - img_ids=latent_image_ids_batch, + txt_ids=text_ids, + img_ids=latent_image_ids, joint_attention_kwargs=self.joint_attention_kwargs, return_dict=False, )[0] @@ -561,7 +536,6 @@ def __call__( latents_dtype = latents_batch.dtype latents_batch = self.scheduler.step(noise_pred, timestep, latents_batch, return_dict=False)[0] - hb_profiler.step() # htcore.mark_step(sync=True) From c0d391e50ee93510d4fe811835e75b387c2964da Mon Sep 17 00:00:00 2001 From: baocheny Date: Mon, 14 Oct 2024 13:05:45 +0800 Subject: [PATCH 26/30] rem quant files --- .../measure_all/fp8_hooks_maxabs.json | 8071 ----------------- .../quantize/measure_all/fp8_hooks_maxabs.npz | Bin 97750 -> 0 bytes ...fp8_hooks_maxabs_MAXABS_HW_OPT_WEIGHT.json | 7567 ---------------- .../fp8_hooks_maxabs_MAXABS_HW_OPT_WEIGHT.npz | Bin 124974 -> 0 bytes .../fp8_hooks_maxabs_mod_list.json | 506 -- .../measure_all_500/fp8_hooks_maxabs.json | 8071 ----------------- .../measure_all_500/fp8_hooks_maxabs.npz | Bin 97750 -> 0 bytes ...fp8_hooks_maxabs_MAXABS_HW_OPT_WEIGHT.json | 7567 ---------------- .../fp8_hooks_maxabs_MAXABS_HW_OPT_WEIGHT.npz | Bin 124974 -> 0 bytes .../fp8_hooks_maxabs_mod_list.json | 506 -- 10 files changed, 32288 deletions(-) delete mode 100644 examples/stable-diffusion/quantize/measure_all/fp8_hooks_maxabs.json delete mode 100644 examples/stable-diffusion/quantize/measure_all/fp8_hooks_maxabs.npz delete mode 100644 examples/stable-diffusion/quantize/measure_all/fp8_hooks_maxabs_MAXABS_HW_OPT_WEIGHT.json delete mode 100644 examples/stable-diffusion/quantize/measure_all/fp8_hooks_maxabs_MAXABS_HW_OPT_WEIGHT.npz delete mode 100644 examples/stable-diffusion/quantize/measure_all/fp8_hooks_maxabs_mod_list.json delete mode 100644 examples/stable-diffusion/quantize/measure_all_500/fp8_hooks_maxabs.json delete mode 100644 examples/stable-diffusion/quantize/measure_all_500/fp8_hooks_maxabs.npz delete mode 100644 examples/stable-diffusion/quantize/measure_all_500/fp8_hooks_maxabs_MAXABS_HW_OPT_WEIGHT.json delete mode 100644 examples/stable-diffusion/quantize/measure_all_500/fp8_hooks_maxabs_MAXABS_HW_OPT_WEIGHT.npz delete mode 100644 examples/stable-diffusion/quantize/measure_all_500/fp8_hooks_maxabs_mod_list.json diff --git a/examples/stable-diffusion/quantize/measure_all/fp8_hooks_maxabs.json b/examples/stable-diffusion/quantize/measure_all/fp8_hooks_maxabs.json deleted file mode 100644 index 8e4c0fb98e..0000000000 --- a/examples/stable-diffusion/quantize/measure_all/fp8_hooks_maxabs.json +++ /dev/null @@ -1,8071 +0,0 @@ -{ - "GlobalRank": null, - "LocalRank": null, - "Mode": "DynamicRange", - "Nodes": { - "time_text_embed.timestep_embedder.linear_1": { - "inputs": [ - [ - [ - 1.0 - ] - ] - ], - "params": { - "weight": [ - [ - 0.28515625 - ] - ] - } - }, - "time_text_embed.timestep_embedder.linear_2": { - "inputs": [ - [ - [ - 3.28125 - ] - ] - ], - "params": { - "weight": [ - [ - 1.1484375 - ] - ] - } - }, - "time_text_embed.guidance_embedder.linear_1": { - "inputs": [ - [ - [ - 1.0 - ] - ] - ], - "params": { - "weight": [ - [ - 0.10400390625 - ] - ] - } - }, - "time_text_embed.guidance_embedder.linear_2": { - "inputs": [ - [ - [ - 0.60546875 - ] - ] - ], - "params": { - "weight": [ - [ - 0.1201171875 - ] - ] - } - }, - "time_text_embed.text_embedder.linear_1": { - "inputs": [ - [ - [ - 4.0625 - ] - ] - ], - "params": { - "weight": [ - [ - 0.263671875 - ] - ] - } - }, - "time_text_embed.text_embedder.linear_2": { 
- "inputs": [ - [ - [ - 0.373046875 - ] - ] - ], - "params": { - "weight": [ - [ - 1.171875 - ] - ] - } - }, - "context_embedder": { - "inputs": [ - [ - [ - 6.4375 - ] - ] - ], - "params": { - "weight": [ - [ - 0.72265625 - ] - ] - } - }, - "x_embedder": { - "inputs": [ - [ - [ - 5.0 - ] - ] - ], - "params": { - "weight": [ - [ - 0.55859375 - ] - ] - } - }, - "transformer_blocks.0.norm1.linear": { - "inputs": [ - [ - [ - 6.4375 - ] - ] - ], - "params": { - "weight": [ - [ - 0.396484375 - ] - ] - } - }, - "transformer_blocks.0.norm1_context.linear": { - "inputs": [ - [ - [ - 6.4375 - ] - ] - ], - "params": { - "weight": [ - [ - 0.546875 - ] - ] - } - }, - "transformer_blocks.0.attn.to_q": { - "inputs": [ - [ - [ - 15.5 - ] - ] - ], - "params": { - "weight": [ - [ - 2.4375 - ] - ] - } - }, - "transformer_blocks.0.attn.to_k": { - "inputs": [ - [ - [ - 15.5 - ] - ] - ], - "params": { - "weight": [ - [ - 0.5390625 - ] - ] - } - }, - "transformer_blocks.0.attn.to_v": { - "inputs": [ - [ - [ - 15.5 - ] - ] - ], - "params": { - "weight": [ - [ - 0.38671875 - ] - ] - } - }, - "transformer_blocks.0.attn.add_k_proj": { - "inputs": [ - [ - [ - 14.25 - ] - ] - ], - "params": { - "weight": [ - [ - 0.5390625 - ] - ] - } - }, - "transformer_blocks.0.attn.add_v_proj": { - "inputs": [ - [ - [ - 14.25 - ] - ] - ], - "params": { - "weight": [ - [ - 0.236328125 - ] - ] - } - }, - "transformer_blocks.0.attn.add_q_proj": { - "inputs": [ - [ - [ - 14.25 - ] - ] - ], - "params": { - "weight": [ - [ - 0.37890625 - ] - ] - } - }, - "transformer_blocks.0.attn.to_out.0": { - "inputs": [ - [ - [ - 1.578125 - ] - ] - ], - "params": { - "weight": [ - [ - 0.408203125 - ] - ] - } - }, - "transformer_blocks.0.attn.to_add_out": { - "inputs": [ - [ - [ - 7.46875 - ] - ] - ], - "params": { - "weight": [ - [ - 0.40625 - ] - ] - } - }, - "transformer_blocks.0.ff.net.0.proj": { - "inputs": [ - [ - [ - 4.4375 - ] - ] - ], - "params": { - "weight": [ - [ - 0.439453125 - ] - ] - } - }, - "transformer_blocks.0.ff.net.2": { - "inputs": [ - [ - [ - 7.71875 - ] - ] - ], - "params": { - "weight": [ - [ - 0.60546875 - ] - ] - } - }, - "transformer_blocks.0.ff_context.net.0.proj": { - "inputs": [ - [ - [ - 10.5 - ] - ] - ], - "params": { - "weight": [ - [ - 0.419921875 - ] - ] - } - }, - "transformer_blocks.0.ff_context.net.2": { - "inputs": [ - [ - [ - 39.75 - ] - ] - ], - "params": { - "weight": [ - [ - 0.50390625 - ] - ] - } - }, - "transformer_blocks.1.norm1.linear": { - "inputs": [ - [ - [ - 6.4375 - ] - ] - ], - "params": { - "weight": [ - [ - 0.71484375 - ] - ] - } - }, - "transformer_blocks.1.norm1_context.linear": { - "inputs": [ - [ - [ - 6.4375 - ] - ] - ], - "params": { - "weight": [ - [ - 0.546875 - ] - ] - } - }, - "transformer_blocks.1.attn.to_q": { - "inputs": [ - [ - [ - 18.0 - ] - ] - ], - "params": { - "weight": [ - [ - 0.72265625 - ] - ] - } - }, - "transformer_blocks.1.attn.to_k": { - "inputs": [ - [ - [ - 18.0 - ] - ] - ], - "params": { - "weight": [ - [ - 1.0 - ] - ] - } - }, - "transformer_blocks.1.attn.to_v": { - "inputs": [ - [ - [ - 18.0 - ] - ] - ], - "params": { - "weight": [ - [ - 0.546875 - ] - ] - } - }, - "transformer_blocks.1.attn.add_k_proj": { - "inputs": [ - [ - [ - 34.75 - ] - ] - ], - "params": { - "weight": [ - [ - 0.3828125 - ] - ] - } - }, - "transformer_blocks.1.attn.add_v_proj": { - "inputs": [ - [ - [ - 34.75 - ] - ] - ], - "params": { - "weight": [ - [ - 0.28125 - ] - ] - } - }, - "transformer_blocks.1.attn.add_q_proj": { - "inputs": [ - [ - [ - 34.75 - ] - ] - ], - "params": { - "weight": [ - 
[ - 0.376953125 - ] - ] - } - }, - "transformer_blocks.1.attn.to_out.0": { - "inputs": [ - [ - [ - 7.125 - ] - ] - ], - "params": { - "weight": [ - [ - 0.4765625 - ] - ] - } - }, - "transformer_blocks.1.attn.to_add_out": { - "inputs": [ - [ - [ - 9.75 - ] - ] - ], - "params": { - "weight": [ - [ - 0.296875 - ] - ] - } - }, - "transformer_blocks.1.ff.net.0.proj": { - "inputs": [ - [ - [ - 10.25 - ] - ] - ], - "params": { - "weight": [ - [ - 0.47265625 - ] - ] - } - }, - "transformer_blocks.1.ff.net.2": { - "inputs": [ - [ - [ - 11.0 - ] - ] - ], - "params": { - "weight": [ - [ - 0.59765625 - ] - ] - } - }, - "transformer_blocks.1.ff_context.net.0.proj": { - "inputs": [ - [ - [ - 66.5 - ] - ] - ], - "params": { - "weight": [ - [ - 0.40234375 - ] - ] - } - }, - "transformer_blocks.1.ff_context.net.2": { - "inputs": [ - [ - [ - 82.5 - ] - ] - ], - "params": { - "weight": [ - [ - 0.765625 - ] - ] - } - }, - "transformer_blocks.2.norm1.linear": { - "inputs": [ - [ - [ - 6.4375 - ] - ] - ], - "params": { - "weight": [ - [ - 0.82421875 - ] - ] - } - }, - "transformer_blocks.2.norm1_context.linear": { - "inputs": [ - [ - [ - 6.4375 - ] - ] - ], - "params": { - "weight": [ - [ - 0.71484375 - ] - ] - } - }, - "transformer_blocks.2.attn.to_q": { - "inputs": [ - [ - [ - 11.0 - ] - ] - ], - "params": { - "weight": [ - [ - 0.97265625 - ] - ] - } - }, - "transformer_blocks.2.attn.to_k": { - "inputs": [ - [ - [ - 11.0 - ] - ] - ], - "params": { - "weight": [ - [ - 0.7421875 - ] - ] - } - }, - "transformer_blocks.2.attn.to_v": { - "inputs": [ - [ - [ - 11.0 - ] - ] - ], - "params": { - "weight": [ - [ - 0.44140625 - ] - ] - } - }, - "transformer_blocks.2.attn.add_k_proj": { - "inputs": [ - [ - [ - 33.0 - ] - ] - ], - "params": { - "weight": [ - [ - 0.6796875 - ] - ] - } - }, - "transformer_blocks.2.attn.add_v_proj": { - "inputs": [ - [ - [ - 33.0 - ] - ] - ], - "params": { - "weight": [ - [ - 0.26171875 - ] - ] - } - }, - "transformer_blocks.2.attn.add_q_proj": { - "inputs": [ - [ - [ - 33.0 - ] - ] - ], - "params": { - "weight": [ - [ - 0.423828125 - ] - ] - } - }, - "transformer_blocks.2.attn.to_out.0": { - "inputs": [ - [ - [ - 9.5 - ] - ] - ], - "params": { - "weight": [ - [ - 0.59765625 - ] - ] - } - }, - "transformer_blocks.2.attn.to_add_out": { - "inputs": [ - [ - [ - 4.6875 - ] - ] - ], - "params": { - "weight": [ - [ - 0.361328125 - ] - ] - } - }, - "transformer_blocks.2.ff.net.0.proj": { - "inputs": [ - [ - [ - 4.71875 - ] - ] - ], - "params": { - "weight": [ - [ - 0.76171875 - ] - ] - } - }, - "transformer_blocks.2.ff.net.2": { - "inputs": [ - [ - [ - 6.4375 - ] - ] - ], - "params": { - "weight": [ - [ - 0.59765625 - ] - ] - } - }, - "transformer_blocks.2.ff_context.net.0.proj": { - "inputs": [ - [ - [ - 65.0 - ] - ] - ], - "params": { - "weight": [ - [ - 0.482421875 - ] - ] - } - }, - "transformer_blocks.2.ff_context.net.2": { - "inputs": [ - [ - [ - 29.0 - ] - ] - ], - "params": { - "weight": [ - [ - 0.609375 - ] - ] - } - }, - "transformer_blocks.3.norm1.linear": { - "inputs": [ - [ - [ - 6.4375 - ] - ] - ], - "params": { - "weight": [ - [ - 0.68359375 - ] - ] - } - }, - "transformer_blocks.3.norm1_context.linear": { - "inputs": [ - [ - [ - 6.4375 - ] - ] - ], - "params": { - "weight": [ - [ - 0.412109375 - ] - ] - } - }, - "transformer_blocks.3.attn.to_q": { - "inputs": [ - [ - [ - 11.4375 - ] - ] - ], - "params": { - "weight": [ - [ - 1.1640625 - ] - ] - } - }, - "transformer_blocks.3.attn.to_k": { - "inputs": [ - [ - [ - 11.4375 - ] - ] - ], - "params": { - "weight": [ - [ - 0.6953125 - ] - 
] - } - }, - "transformer_blocks.3.attn.to_v": { - "inputs": [ - [ - [ - 11.4375 - ] - ] - ], - "params": { - "weight": [ - [ - 0.484375 - ] - ] - } - }, - "transformer_blocks.3.attn.add_k_proj": { - "inputs": [ - [ - [ - 25.625 - ] - ] - ], - "params": { - "weight": [ - [ - 0.474609375 - ] - ] - } - }, - "transformer_blocks.3.attn.add_v_proj": { - "inputs": [ - [ - [ - 25.625 - ] - ] - ], - "params": { - "weight": [ - [ - 0.255859375 - ] - ] - } - }, - "transformer_blocks.3.attn.add_q_proj": { - "inputs": [ - [ - [ - 25.625 - ] - ] - ], - "params": { - "weight": [ - [ - 0.41796875 - ] - ] - } - }, - "transformer_blocks.3.attn.to_out.0": { - "inputs": [ - [ - [ - 10.125 - ] - ] - ], - "params": { - "weight": [ - [ - 0.35546875 - ] - ] - } - }, - "transformer_blocks.3.attn.to_add_out": { - "inputs": [ - [ - [ - 3.359375 - ] - ] - ], - "params": { - "weight": [ - [ - 0.390625 - ] - ] - } - }, - "transformer_blocks.3.ff.net.0.proj": { - "inputs": [ - [ - [ - 11.6875 - ] - ] - ], - "params": { - "weight": [ - [ - 0.578125 - ] - ] - } - }, - "transformer_blocks.3.ff.net.2": { - "inputs": [ - [ - [ - 14.25 - ] - ] - ], - "params": { - "weight": [ - [ - 0.765625 - ] - ] - } - }, - "transformer_blocks.3.ff_context.net.0.proj": { - "inputs": [ - [ - [ - 9.375 - ] - ] - ], - "params": { - "weight": [ - [ - 0.5234375 - ] - ] - } - }, - "transformer_blocks.3.ff_context.net.2": { - "inputs": [ - [ - [ - 19.0 - ] - ] - ], - "params": { - "weight": [ - [ - 0.53515625 - ] - ] - } - }, - "transformer_blocks.4.norm1.linear": { - "inputs": [ - [ - [ - 6.4375 - ] - ] - ], - "params": { - "weight": [ - [ - 0.8515625 - ] - ] - } - }, - "transformer_blocks.4.norm1_context.linear": { - "inputs": [ - [ - [ - 6.4375 - ] - ] - ], - "params": { - "weight": [ - [ - 0.40625 - ] - ] - } - }, - "transformer_blocks.4.attn.to_q": { - "inputs": [ - [ - [ - 18.0 - ] - ] - ], - "params": { - "weight": [ - [ - 1.1796875 - ] - ] - } - }, - "transformer_blocks.4.attn.to_k": { - "inputs": [ - [ - [ - 18.0 - ] - ] - ], - "params": { - "weight": [ - [ - 0.75390625 - ] - ] - } - }, - "transformer_blocks.4.attn.to_v": { - "inputs": [ - [ - [ - 18.0 - ] - ] - ], - "params": { - "weight": [ - [ - 0.494140625 - ] - ] - } - }, - "transformer_blocks.4.attn.add_k_proj": { - "inputs": [ - [ - [ - 18.0 - ] - ] - ], - "params": { - "weight": [ - [ - 0.4765625 - ] - ] - } - }, - "transformer_blocks.4.attn.add_v_proj": { - "inputs": [ - [ - [ - 18.0 - ] - ] - ], - "params": { - "weight": [ - [ - 0.236328125 - ] - ] - } - }, - "transformer_blocks.4.attn.add_q_proj": { - "inputs": [ - [ - [ - 18.0 - ] - ] - ], - "params": { - "weight": [ - [ - 0.3671875 - ] - ] - } - }, - "transformer_blocks.4.attn.to_out.0": { - "inputs": [ - [ - [ - 12.375 - ] - ] - ], - "params": { - "weight": [ - [ - 0.345703125 - ] - ] - } - }, - "transformer_blocks.4.attn.to_add_out": { - "inputs": [ - [ - [ - 6.75 - ] - ] - ], - "params": { - "weight": [ - [ - 0.390625 - ] - ] - } - }, - "transformer_blocks.4.ff.net.0.proj": { - "inputs": [ - [ - [ - 21.25 - ] - ] - ], - "params": { - "weight": [ - [ - 0.55859375 - ] - ] - } - }, - "transformer_blocks.4.ff.net.2": { - "inputs": [ - [ - [ - 18.375 - ] - ] - ], - "params": { - "weight": [ - [ - 1.09375 - ] - ] - } - }, - "transformer_blocks.4.ff_context.net.0.proj": { - "inputs": [ - [ - [ - 7.5 - ] - ] - ], - "params": { - "weight": [ - [ - 0.6484375 - ] - ] - } - }, - "transformer_blocks.4.ff_context.net.2": { - "inputs": [ - [ - [ - 16.25 - ] - ] - ], - "params": { - "weight": [ - [ - 0.5390625 - ] - ] - } - }, - 
"transformer_blocks.5.norm1.linear": { - "inputs": [ - [ - [ - 6.4375 - ] - ] - ], - "params": { - "weight": [ - [ - 0.8125 - ] - ] - } - }, - "transformer_blocks.5.norm1_context.linear": { - "inputs": [ - [ - [ - 6.4375 - ] - ] - ], - "params": { - "weight": [ - [ - 0.380859375 - ] - ] - } - }, - "transformer_blocks.5.attn.to_q": { - "inputs": [ - [ - [ - 14.0625 - ] - ] - ], - "params": { - "weight": [ - [ - 0.84765625 - ] - ] - } - }, - "transformer_blocks.5.attn.to_k": { - "inputs": [ - [ - [ - 14.0625 - ] - ] - ], - "params": { - "weight": [ - [ - 0.5078125 - ] - ] - } - }, - "transformer_blocks.5.attn.to_v": { - "inputs": [ - [ - [ - 14.0625 - ] - ] - ], - "params": { - "weight": [ - [ - 0.380859375 - ] - ] - } - }, - "transformer_blocks.5.attn.add_k_proj": { - "inputs": [ - [ - [ - 19.625 - ] - ] - ], - "params": { - "weight": [ - [ - 0.380859375 - ] - ] - } - }, - "transformer_blocks.5.attn.add_v_proj": { - "inputs": [ - [ - [ - 19.625 - ] - ] - ], - "params": { - "weight": [ - [ - 0.298828125 - ] - ] - } - }, - "transformer_blocks.5.attn.add_q_proj": { - "inputs": [ - [ - [ - 19.625 - ] - ] - ], - "params": { - "weight": [ - [ - 0.287109375 - ] - ] - } - }, - "transformer_blocks.5.attn.to_out.0": { - "inputs": [ - [ - [ - 8.0 - ] - ] - ], - "params": { - "weight": [ - [ - 0.28125 - ] - ] - } - }, - "transformer_blocks.5.attn.to_add_out": { - "inputs": [ - [ - [ - 9.4375 - ] - ] - ], - "params": { - "weight": [ - [ - 0.306640625 - ] - ] - } - }, - "transformer_blocks.5.ff.net.0.proj": { - "inputs": [ - [ - [ - 18.5 - ] - ] - ], - "params": { - "weight": [ - [ - 0.6328125 - ] - ] - } - }, - "transformer_blocks.5.ff.net.2": { - "inputs": [ - [ - [ - 27.125 - ] - ] - ], - "params": { - "weight": [ - [ - 1.46875 - ] - ] - } - }, - "transformer_blocks.5.ff_context.net.0.proj": { - "inputs": [ - [ - [ - 11.375 - ] - ] - ], - "params": { - "weight": [ - [ - 0.54296875 - ] - ] - } - }, - "transformer_blocks.5.ff_context.net.2": { - "inputs": [ - [ - [ - 17.625 - ] - ] - ], - "params": { - "weight": [ - [ - 0.55859375 - ] - ] - } - }, - "transformer_blocks.6.norm1.linear": { - "inputs": [ - [ - [ - 6.4375 - ] - ] - ], - "params": { - "weight": [ - [ - 0.78515625 - ] - ] - } - }, - "transformer_blocks.6.norm1_context.linear": { - "inputs": [ - [ - [ - 6.4375 - ] - ] - ], - "params": { - "weight": [ - [ - 0.39453125 - ] - ] - } - }, - "transformer_blocks.6.attn.to_q": { - "inputs": [ - [ - [ - 14.625 - ] - ] - ], - "params": { - "weight": [ - [ - 0.56640625 - ] - ] - } - }, - "transformer_blocks.6.attn.to_k": { - "inputs": [ - [ - [ - 14.625 - ] - ] - ], - "params": { - "weight": [ - [ - 0.53515625 - ] - ] - } - }, - "transformer_blocks.6.attn.to_v": { - "inputs": [ - [ - [ - 14.625 - ] - ] - ], - "params": { - "weight": [ - [ - 0.333984375 - ] - ] - } - }, - "transformer_blocks.6.attn.add_k_proj": { - "inputs": [ - [ - [ - 16.375 - ] - ] - ], - "params": { - "weight": [ - [ - 0.515625 - ] - ] - } - }, - "transformer_blocks.6.attn.add_v_proj": { - "inputs": [ - [ - [ - 16.375 - ] - ] - ], - "params": { - "weight": [ - [ - 0.328125 - ] - ] - } - }, - "transformer_blocks.6.attn.add_q_proj": { - "inputs": [ - [ - [ - 16.375 - ] - ] - ], - "params": { - "weight": [ - [ - 0.3515625 - ] - ] - } - }, - "transformer_blocks.6.attn.to_out.0": { - "inputs": [ - [ - [ - 7.6875 - ] - ] - ], - "params": { - "weight": [ - [ - 0.47265625 - ] - ] - } - }, - "transformer_blocks.6.attn.to_add_out": { - "inputs": [ - [ - [ - 11.875 - ] - ] - ], - "params": { - "weight": [ - [ - 0.259765625 - ] - ] - } - }, - 
"transformer_blocks.6.ff.net.0.proj": { - "inputs": [ - [ - [ - 8.5625 - ] - ] - ], - "params": { - "weight": [ - [ - 0.515625 - ] - ] - } - }, - "transformer_blocks.6.ff.net.2": { - "inputs": [ - [ - [ - 27.0 - ] - ] - ], - "params": { - "weight": [ - [ - 0.80859375 - ] - ] - } - }, - "transformer_blocks.6.ff_context.net.0.proj": { - "inputs": [ - [ - [ - 10.0625 - ] - ] - ], - "params": { - "weight": [ - [ - 0.62109375 - ] - ] - } - }, - "transformer_blocks.6.ff_context.net.2": { - "inputs": [ - [ - [ - 19.5 - ] - ] - ], - "params": { - "weight": [ - [ - 0.52734375 - ] - ] - } - }, - "transformer_blocks.7.norm1.linear": { - "inputs": [ - [ - [ - 6.4375 - ] - ] - ], - "params": { - "weight": [ - [ - 0.73046875 - ] - ] - } - }, - "transformer_blocks.7.norm1_context.linear": { - "inputs": [ - [ - [ - 6.4375 - ] - ] - ], - "params": { - "weight": [ - [ - 0.41015625 - ] - ] - } - }, - "transformer_blocks.7.attn.to_q": { - "inputs": [ - [ - [ - 14.75 - ] - ] - ], - "params": { - "weight": [ - [ - 0.5859375 - ] - ] - } - }, - "transformer_blocks.7.attn.to_k": { - "inputs": [ - [ - [ - 14.75 - ] - ] - ], - "params": { - "weight": [ - [ - 0.84375 - ] - ] - } - }, - "transformer_blocks.7.attn.to_v": { - "inputs": [ - [ - [ - 14.75 - ] - ] - ], - "params": { - "weight": [ - [ - 0.36328125 - ] - ] - } - }, - "transformer_blocks.7.attn.add_k_proj": { - "inputs": [ - [ - [ - 23.125 - ] - ] - ], - "params": { - "weight": [ - [ - 0.443359375 - ] - ] - } - }, - "transformer_blocks.7.attn.add_v_proj": { - "inputs": [ - [ - [ - 23.125 - ] - ] - ], - "params": { - "weight": [ - [ - 0.365234375 - ] - ] - } - }, - "transformer_blocks.7.attn.add_q_proj": { - "inputs": [ - [ - [ - 23.125 - ] - ] - ], - "params": { - "weight": [ - [ - 0.39453125 - ] - ] - } - }, - "transformer_blocks.7.attn.to_out.0": { - "inputs": [ - [ - [ - 9.875 - ] - ] - ], - "params": { - "weight": [ - [ - 0.37109375 - ] - ] - } - }, - "transformer_blocks.7.attn.to_add_out": { - "inputs": [ - [ - [ - 7.90625 - ] - ] - ], - "params": { - "weight": [ - [ - 0.375 - ] - ] - } - }, - "transformer_blocks.7.ff.net.0.proj": { - "inputs": [ - [ - [ - 13.8125 - ] - ] - ], - "params": { - "weight": [ - [ - 0.435546875 - ] - ] - } - }, - "transformer_blocks.7.ff.net.2": { - "inputs": [ - [ - [ - 47.25 - ] - ] - ], - "params": { - "weight": [ - [ - 0.94140625 - ] - ] - } - }, - "transformer_blocks.7.ff_context.net.0.proj": { - "inputs": [ - [ - [ - 8.75 - ] - ] - ], - "params": { - "weight": [ - [ - 0.5390625 - ] - ] - } - }, - "transformer_blocks.7.ff_context.net.2": { - "inputs": [ - [ - [ - 16.0 - ] - ] - ], - "params": { - "weight": [ - [ - 0.5859375 - ] - ] - } - }, - "transformer_blocks.8.norm1.linear": { - "inputs": [ - [ - [ - 6.4375 - ] - ] - ], - "params": { - "weight": [ - [ - 0.546875 - ] - ] - } - }, - "transformer_blocks.8.norm1_context.linear": { - "inputs": [ - [ - [ - 6.4375 - ] - ] - ], - "params": { - "weight": [ - [ - 0.51953125 - ] - ] - } - }, - "transformer_blocks.8.attn.to_q": { - "inputs": [ - [ - [ - 13.75 - ] - ] - ], - "params": { - "weight": [ - [ - 0.43359375 - ] - ] - } - }, - "transformer_blocks.8.attn.to_k": { - "inputs": [ - [ - [ - 13.75 - ] - ] - ], - "params": { - "weight": [ - [ - 0.640625 - ] - ] - } - }, - "transformer_blocks.8.attn.to_v": { - "inputs": [ - [ - [ - 13.75 - ] - ] - ], - "params": { - "weight": [ - [ - 0.39453125 - ] - ] - } - }, - "transformer_blocks.8.attn.add_k_proj": { - "inputs": [ - [ - [ - 15.625 - ] - ] - ], - "params": { - "weight": [ - [ - 0.3359375 - ] - ] - } - }, - 
"transformer_blocks.8.attn.add_v_proj": { - "inputs": [ - [ - [ - 15.625 - ] - ] - ], - "params": { - "weight": [ - [ - 0.41015625 - ] - ] - } - }, - "transformer_blocks.8.attn.add_q_proj": { - "inputs": [ - [ - [ - 15.625 - ] - ] - ], - "params": { - "weight": [ - [ - 0.326171875 - ] - ] - } - }, - "transformer_blocks.8.attn.to_out.0": { - "inputs": [ - [ - [ - 10.25 - ] - ] - ], - "params": { - "weight": [ - [ - 0.3203125 - ] - ] - } - }, - "transformer_blocks.8.attn.to_add_out": { - "inputs": [ - [ - [ - 14.4375 - ] - ] - ], - "params": { - "weight": [ - [ - 0.2890625 - ] - ] - } - }, - "transformer_blocks.8.ff.net.0.proj": { - "inputs": [ - [ - [ - 8.0 - ] - ] - ], - "params": { - "weight": [ - [ - 0.51171875 - ] - ] - } - }, - "transformer_blocks.8.ff.net.2": { - "inputs": [ - [ - [ - 19.375 - ] - ] - ], - "params": { - "weight": [ - [ - 0.55078125 - ] - ] - } - }, - "transformer_blocks.8.ff_context.net.0.proj": { - "inputs": [ - [ - [ - 15.0625 - ] - ] - ], - "params": { - "weight": [ - [ - 0.494140625 - ] - ] - } - }, - "transformer_blocks.8.ff_context.net.2": { - "inputs": [ - [ - [ - 15.6875 - ] - ] - ], - "params": { - "weight": [ - [ - 0.44140625 - ] - ] - } - }, - "transformer_blocks.9.norm1.linear": { - "inputs": [ - [ - [ - 6.4375 - ] - ] - ], - "params": { - "weight": [ - [ - 0.59765625 - ] - ] - } - }, - "transformer_blocks.9.norm1_context.linear": { - "inputs": [ - [ - [ - 6.4375 - ] - ] - ], - "params": { - "weight": [ - [ - 0.470703125 - ] - ] - } - }, - "transformer_blocks.9.attn.to_q": { - "inputs": [ - [ - [ - 14.125 - ] - ] - ], - "params": { - "weight": [ - [ - 0.462890625 - ] - ] - } - }, - "transformer_blocks.9.attn.to_k": { - "inputs": [ - [ - [ - 14.125 - ] - ] - ], - "params": { - "weight": [ - [ - 0.427734375 - ] - ] - } - }, - "transformer_blocks.9.attn.to_v": { - "inputs": [ - [ - [ - 14.125 - ] - ] - ], - "params": { - "weight": [ - [ - 0.32421875 - ] - ] - } - }, - "transformer_blocks.9.attn.add_k_proj": { - "inputs": [ - [ - [ - 17.125 - ] - ] - ], - "params": { - "weight": [ - [ - 0.44140625 - ] - ] - } - }, - "transformer_blocks.9.attn.add_v_proj": { - "inputs": [ - [ - [ - 17.125 - ] - ] - ], - "params": { - "weight": [ - [ - 0.310546875 - ] - ] - } - }, - "transformer_blocks.9.attn.add_q_proj": { - "inputs": [ - [ - [ - 17.125 - ] - ] - ], - "params": { - "weight": [ - [ - 0.271484375 - ] - ] - } - }, - "transformer_blocks.9.attn.to_out.0": { - "inputs": [ - [ - [ - 14.5 - ] - ] - ], - "params": { - "weight": [ - [ - 0.31640625 - ] - ] - } - }, - "transformer_blocks.9.attn.to_add_out": { - "inputs": [ - [ - [ - 7.0625 - ] - ] - ], - "params": { - "weight": [ - [ - 0.302734375 - ] - ] - } - }, - "transformer_blocks.9.ff.net.0.proj": { - "inputs": [ - [ - [ - 10.375 - ] - ] - ], - "params": { - "weight": [ - [ - 0.5859375 - ] - ] - } - }, - "transformer_blocks.9.ff.net.2": { - "inputs": [ - [ - [ - 12.4375 - ] - ] - ], - "params": { - "weight": [ - [ - 0.640625 - ] - ] - } - }, - "transformer_blocks.9.ff_context.net.0.proj": { - "inputs": [ - [ - [ - 12.5625 - ] - ] - ], - "params": { - "weight": [ - [ - 0.5078125 - ] - ] - } - }, - "transformer_blocks.9.ff_context.net.2": { - "inputs": [ - [ - [ - 20.375 - ] - ] - ], - "params": { - "weight": [ - [ - 0.55078125 - ] - ] - } - }, - "transformer_blocks.10.norm1.linear": { - "inputs": [ - [ - [ - 6.4375 - ] - ] - ], - "params": { - "weight": [ - [ - 0.58984375 - ] - ] - } - }, - "transformer_blocks.10.norm1_context.linear": { - "inputs": [ - [ - [ - 6.4375 - ] - ] - ], - "params": { - "weight": [ - [ - 
0.3828125 - ] - ] - } - }, - "transformer_blocks.10.attn.to_q": { - "inputs": [ - [ - [ - 13.1875 - ] - ] - ], - "params": { - "weight": [ - [ - 0.380859375 - ] - ] - } - }, - "transformer_blocks.10.attn.to_k": { - "inputs": [ - [ - [ - 13.1875 - ] - ] - ], - "params": { - "weight": [ - [ - 0.478515625 - ] - ] - } - }, - "transformer_blocks.10.attn.to_v": { - "inputs": [ - [ - [ - 13.1875 - ] - ] - ], - "params": { - "weight": [ - [ - 0.310546875 - ] - ] - } - }, - "transformer_blocks.10.attn.add_k_proj": { - "inputs": [ - [ - [ - 14.0625 - ] - ] - ], - "params": { - "weight": [ - [ - 0.333984375 - ] - ] - } - }, - "transformer_blocks.10.attn.add_v_proj": { - "inputs": [ - [ - [ - 14.0625 - ] - ] - ], - "params": { - "weight": [ - [ - 0.24609375 - ] - ] - } - }, - "transformer_blocks.10.attn.add_q_proj": { - "inputs": [ - [ - [ - 14.0625 - ] - ] - ], - "params": { - "weight": [ - [ - 0.376953125 - ] - ] - } - }, - "transformer_blocks.10.attn.to_out.0": { - "inputs": [ - [ - [ - 12.25 - ] - ] - ], - "params": { - "weight": [ - [ - 0.263671875 - ] - ] - } - }, - "transformer_blocks.10.attn.to_add_out": { - "inputs": [ - [ - [ - 7.9375 - ] - ] - ], - "params": { - "weight": [ - [ - 0.25390625 - ] - ] - } - }, - "transformer_blocks.10.ff.net.0.proj": { - "inputs": [ - [ - [ - 12.375 - ] - ] - ], - "params": { - "weight": [ - [ - 0.48828125 - ] - ] - } - }, - "transformer_blocks.10.ff.net.2": { - "inputs": [ - [ - [ - 15.75 - ] - ] - ], - "params": { - "weight": [ - [ - 0.8203125 - ] - ] - } - }, - "transformer_blocks.10.ff_context.net.0.proj": { - "inputs": [ - [ - [ - 62.75 - ] - ] - ], - "params": { - "weight": [ - [ - 0.451171875 - ] - ] - } - }, - "transformer_blocks.10.ff_context.net.2": { - "inputs": [ - [ - [ - 34.5 - ] - ] - ], - "params": { - "weight": [ - [ - 0.71875 - ] - ] - } - }, - "transformer_blocks.11.norm1.linear": { - "inputs": [ - [ - [ - 6.4375 - ] - ] - ], - "params": { - "weight": [ - [ - 0.640625 - ] - ] - } - }, - "transformer_blocks.11.norm1_context.linear": { - "inputs": [ - [ - [ - 6.4375 - ] - ] - ], - "params": { - "weight": [ - [ - 0.455078125 - ] - ] - } - }, - "transformer_blocks.11.attn.to_q": { - "inputs": [ - [ - [ - 15.5625 - ] - ] - ], - "params": { - "weight": [ - [ - 0.52734375 - ] - ] - } - }, - "transformer_blocks.11.attn.to_k": { - "inputs": [ - [ - [ - 15.5625 - ] - ] - ], - "params": { - "weight": [ - [ - 0.427734375 - ] - ] - } - }, - "transformer_blocks.11.attn.to_v": { - "inputs": [ - [ - [ - 15.5625 - ] - ] - ], - "params": { - "weight": [ - [ - 0.33203125 - ] - ] - } - }, - "transformer_blocks.11.attn.add_k_proj": { - "inputs": [ - [ - [ - 28.5 - ] - ] - ], - "params": { - "weight": [ - [ - 0.7109375 - ] - ] - } - }, - "transformer_blocks.11.attn.add_v_proj": { - "inputs": [ - [ - [ - 28.5 - ] - ] - ], - "params": { - "weight": [ - [ - 0.2294921875 - ] - ] - } - }, - "transformer_blocks.11.attn.add_q_proj": { - "inputs": [ - [ - [ - 28.5 - ] - ] - ], - "params": { - "weight": [ - [ - 0.3125 - ] - ] - } - }, - "transformer_blocks.11.attn.to_out.0": { - "inputs": [ - [ - [ - 13.375 - ] - ] - ], - "params": { - "weight": [ - [ - 0.37109375 - ] - ] - } - }, - "transformer_blocks.11.attn.to_add_out": { - "inputs": [ - [ - [ - 8.4375 - ] - ] - ], - "params": { - "weight": [ - [ - 0.318359375 - ] - ] - } - }, - "transformer_blocks.11.ff.net.0.proj": { - "inputs": [ - [ - [ - 8.6875 - ] - ] - ], - "params": { - "weight": [ - [ - 0.498046875 - ] - ] - } - }, - "transformer_blocks.11.ff.net.2": { - "inputs": [ - [ - [ - 12.25 - ] - ] - ], - "params": { - 
"weight": [ - [ - 0.6953125 - ] - ] - } - }, - "transformer_blocks.11.ff_context.net.0.proj": { - "inputs": [ - [ - [ - 39.75 - ] - ] - ], - "params": { - "weight": [ - [ - 0.58203125 - ] - ] - } - }, - "transformer_blocks.11.ff_context.net.2": { - "inputs": [ - [ - [ - 33.0 - ] - ] - ], - "params": { - "weight": [ - [ - 0.470703125 - ] - ] - } - }, - "transformer_blocks.12.norm1.linear": { - "inputs": [ - [ - [ - 6.4375 - ] - ] - ], - "params": { - "weight": [ - [ - 0.68359375 - ] - ] - } - }, - "transformer_blocks.12.norm1_context.linear": { - "inputs": [ - [ - [ - 6.4375 - ] - ] - ], - "params": { - "weight": [ - [ - 0.41796875 - ] - ] - } - }, - "transformer_blocks.12.attn.to_q": { - "inputs": [ - [ - [ - 11.125 - ] - ] - ], - "params": { - "weight": [ - [ - 0.341796875 - ] - ] - } - }, - "transformer_blocks.12.attn.to_k": { - "inputs": [ - [ - [ - 11.125 - ] - ] - ], - "params": { - "weight": [ - [ - 0.4609375 - ] - ] - } - }, - "transformer_blocks.12.attn.to_v": { - "inputs": [ - [ - [ - 11.125 - ] - ] - ], - "params": { - "weight": [ - [ - 0.380859375 - ] - ] - } - }, - "transformer_blocks.12.attn.add_k_proj": { - "inputs": [ - [ - [ - 28.75 - ] - ] - ], - "params": { - "weight": [ - [ - 0.5390625 - ] - ] - } - }, - "transformer_blocks.12.attn.add_v_proj": { - "inputs": [ - [ - [ - 28.75 - ] - ] - ], - "params": { - "weight": [ - [ - 0.322265625 - ] - ] - } - }, - "transformer_blocks.12.attn.add_q_proj": { - "inputs": [ - [ - [ - 28.75 - ] - ] - ], - "params": { - "weight": [ - [ - 0.3359375 - ] - ] - } - }, - "transformer_blocks.12.attn.to_out.0": { - "inputs": [ - [ - [ - 28.125 - ] - ] - ], - "params": { - "weight": [ - [ - 0.322265625 - ] - ] - } - }, - "transformer_blocks.12.attn.to_add_out": { - "inputs": [ - [ - [ - 13.9375 - ] - ] - ], - "params": { - "weight": [ - [ - 0.29296875 - ] - ] - } - }, - "transformer_blocks.12.ff.net.0.proj": { - "inputs": [ - [ - [ - 6.59375 - ] - ] - ], - "params": { - "weight": [ - [ - 0.59375 - ] - ] - } - }, - "transformer_blocks.12.ff.net.2": { - "inputs": [ - [ - [ - 17.125 - ] - ] - ], - "params": { - "weight": [ - [ - 0.8359375 - ] - ] - } - }, - "transformer_blocks.12.ff_context.net.0.proj": { - "inputs": [ - [ - [ - 90.0 - ] - ] - ], - "params": { - "weight": [ - [ - 0.41015625 - ] - ] - } - }, - "transformer_blocks.12.ff_context.net.2": { - "inputs": [ - [ - [ - 25.0 - ] - ] - ], - "params": { - "weight": [ - [ - 0.5234375 - ] - ] - } - }, - "transformer_blocks.13.norm1.linear": { - "inputs": [ - [ - [ - 6.4375 - ] - ] - ], - "params": { - "weight": [ - [ - 0.65625 - ] - ] - } - }, - "transformer_blocks.13.norm1_context.linear": { - "inputs": [ - [ - [ - 6.4375 - ] - ] - ], - "params": { - "weight": [ - [ - 0.375 - ] - ] - } - }, - "transformer_blocks.13.attn.to_q": { - "inputs": [ - [ - [ - 13.0 - ] - ] - ], - "params": { - "weight": [ - [ - 0.447265625 - ] - ] - } - }, - "transformer_blocks.13.attn.to_k": { - "inputs": [ - [ - [ - 13.0 - ] - ] - ], - "params": { - "weight": [ - [ - 0.83203125 - ] - ] - } - }, - "transformer_blocks.13.attn.to_v": { - "inputs": [ - [ - [ - 13.0 - ] - ] - ], - "params": { - "weight": [ - [ - 0.5078125 - ] - ] - } - }, - "transformer_blocks.13.attn.add_k_proj": { - "inputs": [ - [ - [ - 29.75 - ] - ] - ], - "params": { - "weight": [ - [ - 0.578125 - ] - ] - } - }, - "transformer_blocks.13.attn.add_v_proj": { - "inputs": [ - [ - [ - 29.75 - ] - ] - ], - "params": { - "weight": [ - [ - 0.25 - ] - ] - } - }, - "transformer_blocks.13.attn.add_q_proj": { - "inputs": [ - [ - [ - 29.75 - ] - ] - ], - "params": 
{ - "weight": [ - [ - 0.33203125 - ] - ] - } - }, - "transformer_blocks.13.attn.to_out.0": { - "inputs": [ - [ - [ - 12.0625 - ] - ] - ], - "params": { - "weight": [ - [ - 0.314453125 - ] - ] - } - }, - "transformer_blocks.13.attn.to_add_out": { - "inputs": [ - [ - [ - 13.0625 - ] - ] - ], - "params": { - "weight": [ - [ - 0.27734375 - ] - ] - } - }, - "transformer_blocks.13.ff.net.0.proj": { - "inputs": [ - [ - [ - 7.03125 - ] - ] - ], - "params": { - "weight": [ - [ - 0.5859375 - ] - ] - } - }, - "transformer_blocks.13.ff.net.2": { - "inputs": [ - [ - [ - 18.25 - ] - ] - ], - "params": { - "weight": [ - [ - 1.015625 - ] - ] - } - }, - "transformer_blocks.13.ff_context.net.0.proj": { - "inputs": [ - [ - [ - 138.0 - ] - ] - ], - "params": { - "weight": [ - [ - 0.369140625 - ] - ] - } - }, - "transformer_blocks.13.ff_context.net.2": { - "inputs": [ - [ - [ - 20.125 - ] - ] - ], - "params": { - "weight": [ - [ - 0.46875 - ] - ] - } - }, - "transformer_blocks.14.norm1.linear": { - "inputs": [ - [ - [ - 6.4375 - ] - ] - ], - "params": { - "weight": [ - [ - 0.68359375 - ] - ] - } - }, - "transformer_blocks.14.norm1_context.linear": { - "inputs": [ - [ - [ - 6.4375 - ] - ] - ], - "params": { - "weight": [ - [ - 0.3984375 - ] - ] - } - }, - "transformer_blocks.14.attn.to_q": { - "inputs": [ - [ - [ - 21.875 - ] - ] - ], - "params": { - "weight": [ - [ - 0.46484375 - ] - ] - } - }, - "transformer_blocks.14.attn.to_k": { - "inputs": [ - [ - [ - 21.875 - ] - ] - ], - "params": { - "weight": [ - [ - 0.578125 - ] - ] - } - }, - "transformer_blocks.14.attn.to_v": { - "inputs": [ - [ - [ - 21.875 - ] - ] - ], - "params": { - "weight": [ - [ - 0.73046875 - ] - ] - } - }, - "transformer_blocks.14.attn.add_k_proj": { - "inputs": [ - [ - [ - 16.5 - ] - ] - ], - "params": { - "weight": [ - [ - 0.49609375 - ] - ] - } - }, - "transformer_blocks.14.attn.add_v_proj": { - "inputs": [ - [ - [ - 16.5 - ] - ] - ], - "params": { - "weight": [ - [ - 0.279296875 - ] - ] - } - }, - "transformer_blocks.14.attn.add_q_proj": { - "inputs": [ - [ - [ - 16.5 - ] - ] - ], - "params": { - "weight": [ - [ - 0.392578125 - ] - ] - } - }, - "transformer_blocks.14.attn.to_out.0": { - "inputs": [ - [ - [ - 14.8125 - ] - ] - ], - "params": { - "weight": [ - [ - 0.41015625 - ] - ] - } - }, - "transformer_blocks.14.attn.to_add_out": { - "inputs": [ - [ - [ - 10.25 - ] - ] - ], - "params": { - "weight": [ - [ - 0.318359375 - ] - ] - } - }, - "transformer_blocks.14.ff.net.0.proj": { - "inputs": [ - [ - [ - 5.0625 - ] - ] - ], - "params": { - "weight": [ - [ - 0.53125 - ] - ] - } - }, - "transformer_blocks.14.ff.net.2": { - "inputs": [ - [ - [ - 17.5 - ] - ] - ], - "params": { - "weight": [ - [ - 0.73046875 - ] - ] - } - }, - "transformer_blocks.14.ff_context.net.0.proj": { - "inputs": [ - [ - [ - 51.25 - ] - ] - ], - "params": { - "weight": [ - [ - 0.44140625 - ] - ] - } - }, - "transformer_blocks.14.ff_context.net.2": { - "inputs": [ - [ - [ - 30.625 - ] - ] - ], - "params": { - "weight": [ - [ - 0.7265625 - ] - ] - } - }, - "transformer_blocks.15.norm1.linear": { - "inputs": [ - [ - [ - 6.4375 - ] - ] - ], - "params": { - "weight": [ - [ - 0.78125 - ] - ] - } - }, - "transformer_blocks.15.norm1_context.linear": { - "inputs": [ - [ - [ - 6.4375 - ] - ] - ], - "params": { - "weight": [ - [ - 0.435546875 - ] - ] - } - }, - "transformer_blocks.15.attn.to_q": { - "inputs": [ - [ - [ - 14.25 - ] - ] - ], - "params": { - "weight": [ - [ - 0.56640625 - ] - ] - } - }, - "transformer_blocks.15.attn.to_k": { - "inputs": [ - [ - [ - 14.25 - ] - ] - 
], - "params": { - "weight": [ - [ - 0.625 - ] - ] - } - }, - "transformer_blocks.15.attn.to_v": { - "inputs": [ - [ - [ - 14.25 - ] - ] - ], - "params": { - "weight": [ - [ - 0.515625 - ] - ] - } - }, - "transformer_blocks.15.attn.add_k_proj": { - "inputs": [ - [ - [ - 17.75 - ] - ] - ], - "params": { - "weight": [ - [ - 0.5625 - ] - ] - } - }, - "transformer_blocks.15.attn.add_v_proj": { - "inputs": [ - [ - [ - 17.75 - ] - ] - ], - "params": { - "weight": [ - [ - 0.34765625 - ] - ] - } - }, - "transformer_blocks.15.attn.add_q_proj": { - "inputs": [ - [ - [ - 17.75 - ] - ] - ], - "params": { - "weight": [ - [ - 0.2890625 - ] - ] - } - }, - "transformer_blocks.15.attn.to_out.0": { - "inputs": [ - [ - [ - 13.5 - ] - ] - ], - "params": { - "weight": [ - [ - 0.365234375 - ] - ] - } - }, - "transformer_blocks.15.attn.to_add_out": { - "inputs": [ - [ - [ - 7.21875 - ] - ] - ], - "params": { - "weight": [ - [ - 0.275390625 - ] - ] - } - }, - "transformer_blocks.15.ff.net.0.proj": { - "inputs": [ - [ - [ - 4.9375 - ] - ] - ], - "params": { - "weight": [ - [ - 0.45703125 - ] - ] - } - }, - "transformer_blocks.15.ff.net.2": { - "inputs": [ - [ - [ - 21.625 - ] - ] - ], - "params": { - "weight": [ - [ - 0.8671875 - ] - ] - } - }, - "transformer_blocks.15.ff_context.net.0.proj": { - "inputs": [ - [ - [ - 44.75 - ] - ] - ], - "params": { - "weight": [ - [ - 0.5390625 - ] - ] - } - }, - "transformer_blocks.15.ff_context.net.2": { - "inputs": [ - [ - [ - 24.875 - ] - ] - ], - "params": { - "weight": [ - [ - 0.46875 - ] - ] - } - }, - "transformer_blocks.16.norm1.linear": { - "inputs": [ - [ - [ - 6.4375 - ] - ] - ], - "params": { - "weight": [ - [ - 0.890625 - ] - ] - } - }, - "transformer_blocks.16.norm1_context.linear": { - "inputs": [ - [ - [ - 6.4375 - ] - ] - ], - "params": { - "weight": [ - [ - 0.52734375 - ] - ] - } - }, - "transformer_blocks.16.attn.to_q": { - "inputs": [ - [ - [ - 19.375 - ] - ] - ], - "params": { - "weight": [ - [ - 0.42578125 - ] - ] - } - }, - "transformer_blocks.16.attn.to_k": { - "inputs": [ - [ - [ - 19.375 - ] - ] - ], - "params": { - "weight": [ - [ - 0.68359375 - ] - ] - } - }, - "transformer_blocks.16.attn.to_v": { - "inputs": [ - [ - [ - 19.375 - ] - ] - ], - "params": { - "weight": [ - [ - 0.396484375 - ] - ] - } - }, - "transformer_blocks.16.attn.add_k_proj": { - "inputs": [ - [ - [ - 24.375 - ] - ] - ], - "params": { - "weight": [ - [ - 0.5859375 - ] - ] - } - }, - "transformer_blocks.16.attn.add_v_proj": { - "inputs": [ - [ - [ - 24.375 - ] - ] - ], - "params": { - "weight": [ - [ - 0.330078125 - ] - ] - } - }, - "transformer_blocks.16.attn.add_q_proj": { - "inputs": [ - [ - [ - 24.375 - ] - ] - ], - "params": { - "weight": [ - [ - 0.30078125 - ] - ] - } - }, - "transformer_blocks.16.attn.to_out.0": { - "inputs": [ - [ - [ - 16.25 - ] - ] - ], - "params": { - "weight": [ - [ - 0.298828125 - ] - ] - } - }, - "transformer_blocks.16.attn.to_add_out": { - "inputs": [ - [ - [ - 12.1875 - ] - ] - ], - "params": { - "weight": [ - [ - 0.3125 - ] - ] - } - }, - "transformer_blocks.16.ff.net.0.proj": { - "inputs": [ - [ - [ - 5.5625 - ] - ] - ], - "params": { - "weight": [ - [ - 0.66796875 - ] - ] - } - }, - "transformer_blocks.16.ff.net.2": { - "inputs": [ - [ - [ - 24.25 - ] - ] - ], - "params": { - "weight": [ - [ - 1.0546875 - ] - ] - } - }, - "transformer_blocks.16.ff_context.net.0.proj": { - "inputs": [ - [ - [ - 34.25 - ] - ] - ], - "params": { - "weight": [ - [ - 1.0390625 - ] - ] - } - }, - "transformer_blocks.16.ff_context.net.2": { - "inputs": [ - [ - [ - 75.5 - 
] - ] - ], - "params": { - "weight": [ - [ - 0.71875 - ] - ] - } - }, - "transformer_blocks.17.norm1.linear": { - "inputs": [ - [ - [ - 6.4375 - ] - ] - ], - "params": { - "weight": [ - [ - 0.9296875 - ] - ] - } - }, - "transformer_blocks.17.norm1_context.linear": { - "inputs": [ - [ - [ - 6.4375 - ] - ] - ], - "params": { - "weight": [ - [ - 0.83203125 - ] - ] - } - }, - "transformer_blocks.17.attn.to_q": { - "inputs": [ - [ - [ - 16.375 - ] - ] - ], - "params": { - "weight": [ - [ - 0.55859375 - ] - ] - } - }, - "transformer_blocks.17.attn.to_k": { - "inputs": [ - [ - [ - 16.375 - ] - ] - ], - "params": { - "weight": [ - [ - 0.86328125 - ] - ] - } - }, - "transformer_blocks.17.attn.to_v": { - "inputs": [ - [ - [ - 16.375 - ] - ] - ], - "params": { - "weight": [ - [ - 0.423828125 - ] - ] - } - }, - "transformer_blocks.17.attn.add_k_proj": { - "inputs": [ - [ - [ - 33.5 - ] - ] - ], - "params": { - "weight": [ - [ - 0.6796875 - ] - ] - } - }, - "transformer_blocks.17.attn.add_v_proj": { - "inputs": [ - [ - [ - 33.5 - ] - ] - ], - "params": { - "weight": [ - [ - 0.47265625 - ] - ] - } - }, - "transformer_blocks.17.attn.add_q_proj": { - "inputs": [ - [ - [ - 33.5 - ] - ] - ], - "params": { - "weight": [ - [ - 0.328125 - ] - ] - } - }, - "transformer_blocks.17.attn.to_out.0": { - "inputs": [ - [ - [ - 15.9375 - ] - ] - ], - "params": { - "weight": [ - [ - 0.34765625 - ] - ] - } - }, - "transformer_blocks.17.attn.to_add_out": { - "inputs": [ - [ - [ - 19.0 - ] - ] - ], - "params": { - "weight": [ - [ - 0.24609375 - ] - ] - } - }, - "transformer_blocks.17.ff.net.0.proj": { - "inputs": [ - [ - [ - 7.03125 - ] - ] - ], - "params": { - "weight": [ - [ - 0.5234375 - ] - ] - } - }, - "transformer_blocks.17.ff.net.2": { - "inputs": [ - [ - [ - 53.25 - ] - ] - ], - "params": { - "weight": [ - [ - 1.15625 - ] - ] - } - }, - "transformer_blocks.17.ff_context.net.0.proj": { - "inputs": [ - [ - [ - 33.5 - ] - ] - ], - "params": { - "weight": [ - [ - 0.6328125 - ] - ] - } - }, - "transformer_blocks.17.ff_context.net.2": { - "inputs": [ - [ - [ - 68.0 - ] - ] - ], - "params": { - "weight": [ - [ - 0.75390625 - ] - ] - } - }, - "transformer_blocks.18.norm1.linear": { - "inputs": [ - [ - [ - 6.4375 - ] - ] - ], - "params": { - "weight": [ - [ - 1.5234375 - ] - ] - } - }, - "transformer_blocks.18.norm1_context.linear": { - "inputs": [ - [ - [ - 6.4375 - ] - ] - ], - "params": { - "weight": [ - [ - 0.7421875 - ] - ] - } - }, - "transformer_blocks.18.attn.to_q": { - "inputs": [ - [ - [ - 15.25 - ] - ] - ], - "params": { - "weight": [ - [ - 0.546875 - ] - ] - } - }, - "transformer_blocks.18.attn.to_k": { - "inputs": [ - [ - [ - 15.25 - ] - ] - ], - "params": { - "weight": [ - [ - 0.94921875 - ] - ] - } - }, - "transformer_blocks.18.attn.to_v": { - "inputs": [ - [ - [ - 15.25 - ] - ] - ], - "params": { - "weight": [ - [ - 0.375 - ] - ] - } - }, - "transformer_blocks.18.attn.add_k_proj": { - "inputs": [ - [ - [ - 22.375 - ] - ] - ], - "params": { - "weight": [ - [ - 0.4453125 - ] - ] - } - }, - "transformer_blocks.18.attn.add_v_proj": { - "inputs": [ - [ - [ - 22.375 - ] - ] - ], - "params": { - "weight": [ - [ - 0.369140625 - ] - ] - } - }, - "transformer_blocks.18.attn.add_q_proj": { - "inputs": [ - [ - [ - 22.375 - ] - ] - ], - "params": { - "weight": [ - [ - 0.384765625 - ] - ] - } - }, - "transformer_blocks.18.attn.to_out.0": { - "inputs": [ - [ - [ - 27.125 - ] - ] - ], - "params": { - "weight": [ - [ - 0.44140625 - ] - ] - } - }, - "transformer_blocks.18.attn.to_add_out": { - "inputs": [ - [ - [ - 15.75 - ] 
- ] - ], - "params": { - "weight": [ - [ - 0.36328125 - ] - ] - } - }, - "transformer_blocks.18.ff.net.0.proj": { - "inputs": [ - [ - [ - 10.0 - ] - ] - ], - "params": { - "weight": [ - [ - 0.69921875 - ] - ] - } - }, - "transformer_blocks.18.ff.net.2": { - "inputs": [ - [ - [ - 202.0 - ] - ] - ], - "params": { - "weight": [ - [ - 1.4609375 - ] - ] - } - }, - "transformer_blocks.18.ff_context.net.0.proj": { - "inputs": [ - [ - [ - 114.5 - ] - ] - ], - "params": { - "weight": [ - [ - 0.9375 - ] - ] - } - }, - "transformer_blocks.18.ff_context.net.2": { - "inputs": [ - [ - [ - 224.0 - ] - ] - ], - "params": { - "weight": [ - [ - 0.80859375 - ] - ] - } - }, - "single_transformer_blocks.0.norm.linear": { - "inputs": [ - [ - [ - 6.4375 - ] - ] - ], - "params": { - "weight": [ - [ - 0.59375 - ] - ] - } - }, - "single_transformer_blocks.0.proj_mlp": { - "inputs": [ - [ - [ - 43.75 - ] - ] - ], - "params": { - "weight": [ - [ - 0.5390625 - ] - ] - } - }, - "single_transformer_blocks.0.proj_out": { - "inputs": [ - [ - [ - 13.375 - ] - ] - ], - "params": { - "weight": [ - [ - 1.3671875 - ] - ] - } - }, - "single_transformer_blocks.0.attn.to_q": { - "inputs": [ - [ - [ - 43.75 - ] - ] - ], - "params": { - "weight": [ - [ - 0.384765625 - ] - ] - } - }, - "single_transformer_blocks.0.attn.to_k": { - "inputs": [ - [ - [ - 43.75 - ] - ] - ], - "params": { - "weight": [ - [ - 0.51953125 - ] - ] - } - }, - "single_transformer_blocks.0.attn.to_v": { - "inputs": [ - [ - [ - 43.75 - ] - ] - ], - "params": { - "weight": [ - [ - 0.279296875 - ] - ] - } - }, - "single_transformer_blocks.1.norm.linear": { - "inputs": [ - [ - [ - 6.4375 - ] - ] - ], - "params": { - "weight": [ - [ - 1.0390625 - ] - ] - } - }, - "single_transformer_blocks.1.proj_mlp": { - "inputs": [ - [ - [ - 35.5 - ] - ] - ], - "params": { - "weight": [ - [ - 0.51171875 - ] - ] - } - }, - "single_transformer_blocks.1.proj_out": { - "inputs": [ - [ - [ - 15.8125 - ] - ] - ], - "params": { - "weight": [ - [ - 1.5 - ] - ] - } - }, - "single_transformer_blocks.1.attn.to_q": { - "inputs": [ - [ - [ - 35.5 - ] - ] - ], - "params": { - "weight": [ - [ - 0.373046875 - ] - ] - } - }, - "single_transformer_blocks.1.attn.to_k": { - "inputs": [ - [ - [ - 35.5 - ] - ] - ], - "params": { - "weight": [ - [ - 0.484375 - ] - ] - } - }, - "single_transformer_blocks.1.attn.to_v": { - "inputs": [ - [ - [ - 35.5 - ] - ] - ], - "params": { - "weight": [ - [ - 0.318359375 - ] - ] - } - }, - "single_transformer_blocks.2.norm.linear": { - "inputs": [ - [ - [ - 6.4375 - ] - ] - ], - "params": { - "weight": [ - [ - 1.203125 - ] - ] - } - }, - "single_transformer_blocks.2.proj_mlp": { - "inputs": [ - [ - [ - 22.625 - ] - ] - ], - "params": { - "weight": [ - [ - 0.5234375 - ] - ] - } - }, - "single_transformer_blocks.2.proj_out": { - "inputs": [ - [ - [ - 16.625 - ] - ] - ], - "params": { - "weight": [ - [ - 1.4609375 - ] - ] - } - }, - "single_transformer_blocks.2.attn.to_q": { - "inputs": [ - [ - [ - 22.625 - ] - ] - ], - "params": { - "weight": [ - [ - 0.380859375 - ] - ] - } - }, - "single_transformer_blocks.2.attn.to_k": { - "inputs": [ - [ - [ - 22.625 - ] - ] - ], - "params": { - "weight": [ - [ - 0.478515625 - ] - ] - } - }, - "single_transformer_blocks.2.attn.to_v": { - "inputs": [ - [ - [ - 22.625 - ] - ] - ], - "params": { - "weight": [ - [ - 0.359375 - ] - ] - } - }, - "single_transformer_blocks.3.norm.linear": { - "inputs": [ - [ - [ - 6.4375 - ] - ] - ], - "params": { - "weight": [ - [ - 1.359375 - ] - ] - } - }, - "single_transformer_blocks.3.proj_mlp": { - 
"inputs": [ - [ - [ - 30.625 - ] - ] - ], - "params": { - "weight": [ - [ - 0.484375 - ] - ] - } - }, - "single_transformer_blocks.3.proj_out": { - "inputs": [ - [ - [ - 16.75 - ] - ] - ], - "params": { - "weight": [ - [ - 0.91796875 - ] - ] - } - }, - "single_transformer_blocks.3.attn.to_q": { - "inputs": [ - [ - [ - 30.625 - ] - ] - ], - "params": { - "weight": [ - [ - 0.40625 - ] - ] - } - }, - "single_transformer_blocks.3.attn.to_k": { - "inputs": [ - [ - [ - 30.625 - ] - ] - ], - "params": { - "weight": [ - [ - 0.58203125 - ] - ] - } - }, - "single_transformer_blocks.3.attn.to_v": { - "inputs": [ - [ - [ - 30.625 - ] - ] - ], - "params": { - "weight": [ - [ - 0.34375 - ] - ] - } - }, - "single_transformer_blocks.4.norm.linear": { - "inputs": [ - [ - [ - 6.4375 - ] - ] - ], - "params": { - "weight": [ - [ - 1.6015625 - ] - ] - } - }, - "single_transformer_blocks.4.proj_mlp": { - "inputs": [ - [ - [ - 32.75 - ] - ] - ], - "params": { - "weight": [ - [ - 0.458984375 - ] - ] - } - }, - "single_transformer_blocks.4.proj_out": { - "inputs": [ - [ - [ - 17.0 - ] - ] - ], - "params": { - "weight": [ - [ - 1.2421875 - ] - ] - } - }, - "single_transformer_blocks.4.attn.to_q": { - "inputs": [ - [ - [ - 32.75 - ] - ] - ], - "params": { - "weight": [ - [ - 0.333984375 - ] - ] - } - }, - "single_transformer_blocks.4.attn.to_k": { - "inputs": [ - [ - [ - 32.75 - ] - ] - ], - "params": { - "weight": [ - [ - 0.63671875 - ] - ] - } - }, - "single_transformer_blocks.4.attn.to_v": { - "inputs": [ - [ - [ - 32.75 - ] - ] - ], - "params": { - "weight": [ - [ - 0.234375 - ] - ] - } - }, - "single_transformer_blocks.5.norm.linear": { - "inputs": [ - [ - [ - 6.4375 - ] - ] - ], - "params": { - "weight": [ - [ - 1.6640625 - ] - ] - } - }, - "single_transformer_blocks.5.proj_mlp": { - "inputs": [ - [ - [ - 25.5 - ] - ] - ], - "params": { - "weight": [ - [ - 0.5390625 - ] - ] - } - }, - "single_transformer_blocks.5.proj_out": { - "inputs": [ - [ - [ - 13.8125 - ] - ] - ], - "params": { - "weight": [ - [ - 0.77734375 - ] - ] - } - }, - "single_transformer_blocks.5.attn.to_q": { - "inputs": [ - [ - [ - 25.5 - ] - ] - ], - "params": { - "weight": [ - [ - 0.3984375 - ] - ] - } - }, - "single_transformer_blocks.5.attn.to_k": { - "inputs": [ - [ - [ - 25.5 - ] - ] - ], - "params": { - "weight": [ - [ - 0.5078125 - ] - ] - } - }, - "single_transformer_blocks.5.attn.to_v": { - "inputs": [ - [ - [ - 25.5 - ] - ] - ], - "params": { - "weight": [ - [ - 0.23046875 - ] - ] - } - }, - "single_transformer_blocks.6.norm.linear": { - "inputs": [ - [ - [ - 6.4375 - ] - ] - ], - "params": { - "weight": [ - [ - 2.015625 - ] - ] - } - }, - "single_transformer_blocks.6.proj_mlp": { - "inputs": [ - [ - [ - 25.625 - ] - ] - ], - "params": { - "weight": [ - [ - 0.55078125 - ] - ] - } - }, - "single_transformer_blocks.6.proj_out": { - "inputs": [ - [ - [ - 15.1875 - ] - ] - ], - "params": { - "weight": [ - [ - 1.2578125 - ] - ] - } - }, - "single_transformer_blocks.6.attn.to_q": { - "inputs": [ - [ - [ - 25.625 - ] - ] - ], - "params": { - "weight": [ - [ - 0.32421875 - ] - ] - } - }, - "single_transformer_blocks.6.attn.to_k": { - "inputs": [ - [ - [ - 25.625 - ] - ] - ], - "params": { - "weight": [ - [ - 0.6015625 - ] - ] - } - }, - "single_transformer_blocks.6.attn.to_v": { - "inputs": [ - [ - [ - 25.625 - ] - ] - ], - "params": { - "weight": [ - [ - 0.2451171875 - ] - ] - } - }, - "single_transformer_blocks.7.norm.linear": { - "inputs": [ - [ - [ - 6.4375 - ] - ] - ], - "params": { - "weight": [ - [ - 1.796875 - ] - ] - } - }, - 
"single_transformer_blocks.7.proj_mlp": { - "inputs": [ - [ - [ - 25.625 - ] - ] - ], - "params": { - "weight": [ - [ - 0.51171875 - ] - ] - } - }, - "single_transformer_blocks.7.proj_out": { - "inputs": [ - [ - [ - 12.6875 - ] - ] - ], - "params": { - "weight": [ - [ - 0.75 - ] - ] - } - }, - "single_transformer_blocks.7.attn.to_q": { - "inputs": [ - [ - [ - 25.625 - ] - ] - ], - "params": { - "weight": [ - [ - 0.35546875 - ] - ] - } - }, - "single_transformer_blocks.7.attn.to_k": { - "inputs": [ - [ - [ - 25.625 - ] - ] - ], - "params": { - "weight": [ - [ - 0.5703125 - ] - ] - } - }, - "single_transformer_blocks.7.attn.to_v": { - "inputs": [ - [ - [ - 25.625 - ] - ] - ], - "params": { - "weight": [ - [ - 0.2578125 - ] - ] - } - }, - "single_transformer_blocks.8.norm.linear": { - "inputs": [ - [ - [ - 6.4375 - ] - ] - ], - "params": { - "weight": [ - [ - 2.078125 - ] - ] - } - }, - "single_transformer_blocks.8.proj_mlp": { - "inputs": [ - [ - [ - 19.5 - ] - ] - ], - "params": { - "weight": [ - [ - 0.57421875 - ] - ] - } - }, - "single_transformer_blocks.8.proj_out": { - "inputs": [ - [ - [ - 14.1875 - ] - ] - ], - "params": { - "weight": [ - [ - 1.5546875 - ] - ] - } - }, - "single_transformer_blocks.8.attn.to_q": { - "inputs": [ - [ - [ - 19.5 - ] - ] - ], - "params": { - "weight": [ - [ - 0.423828125 - ] - ] - } - }, - "single_transformer_blocks.8.attn.to_k": { - "inputs": [ - [ - [ - 19.5 - ] - ] - ], - "params": { - "weight": [ - [ - 0.65234375 - ] - ] - } - }, - "single_transformer_blocks.8.attn.to_v": { - "inputs": [ - [ - [ - 19.5 - ] - ] - ], - "params": { - "weight": [ - [ - 0.3203125 - ] - ] - } - }, - "single_transformer_blocks.9.norm.linear": { - "inputs": [ - [ - [ - 6.4375 - ] - ] - ], - "params": { - "weight": [ - [ - 2.34375 - ] - ] - } - }, - "single_transformer_blocks.9.proj_mlp": { - "inputs": [ - [ - [ - 20.0 - ] - ] - ], - "params": { - "weight": [ - [ - 0.890625 - ] - ] - } - }, - "single_transformer_blocks.9.proj_out": { - "inputs": [ - [ - [ - 17.875 - ] - ] - ], - "params": { - "weight": [ - [ - 1.234375 - ] - ] - } - }, - "single_transformer_blocks.9.attn.to_q": { - "inputs": [ - [ - [ - 20.0 - ] - ] - ], - "params": { - "weight": [ - [ - 0.53125 - ] - ] - } - }, - "single_transformer_blocks.9.attn.to_k": { - "inputs": [ - [ - [ - 20.0 - ] - ] - ], - "params": { - "weight": [ - [ - 0.5390625 - ] - ] - } - }, - "single_transformer_blocks.9.attn.to_v": { - "inputs": [ - [ - [ - 20.0 - ] - ] - ], - "params": { - "weight": [ - [ - 0.396484375 - ] - ] - } - }, - "single_transformer_blocks.10.norm.linear": { - "inputs": [ - [ - [ - 6.4375 - ] - ] - ], - "params": { - "weight": [ - [ - 2.203125 - ] - ] - } - }, - "single_transformer_blocks.10.proj_mlp": { - "inputs": [ - [ - [ - 14.9375 - ] - ] - ], - "params": { - "weight": [ - [ - 0.65234375 - ] - ] - } - }, - "single_transformer_blocks.10.proj_out": { - "inputs": [ - [ - [ - 13.0 - ] - ] - ], - "params": { - "weight": [ - [ - 1.109375 - ] - ] - } - }, - "single_transformer_blocks.10.attn.to_q": { - "inputs": [ - [ - [ - 14.9375 - ] - ] - ], - "params": { - "weight": [ - [ - 0.439453125 - ] - ] - } - }, - "single_transformer_blocks.10.attn.to_k": { - "inputs": [ - [ - [ - 14.9375 - ] - ] - ], - "params": { - "weight": [ - [ - 0.55859375 - ] - ] - } - }, - "single_transformer_blocks.10.attn.to_v": { - "inputs": [ - [ - [ - 14.9375 - ] - ] - ], - "params": { - "weight": [ - [ - 0.251953125 - ] - ] - } - }, - "single_transformer_blocks.11.norm.linear": { - "inputs": [ - [ - [ - 6.4375 - ] - ] - ], - "params": { - 
"weight": [ - [ - 1.8125 - ] - ] - } - }, - "single_transformer_blocks.11.proj_mlp": { - "inputs": [ - [ - [ - 21.875 - ] - ] - ], - "params": { - "weight": [ - [ - 0.6640625 - ] - ] - } - }, - "single_transformer_blocks.11.proj_out": { - "inputs": [ - [ - [ - 19.875 - ] - ] - ], - "params": { - "weight": [ - [ - 1.046875 - ] - ] - } - }, - "single_transformer_blocks.11.attn.to_q": { - "inputs": [ - [ - [ - 21.875 - ] - ] - ], - "params": { - "weight": [ - [ - 0.4296875 - ] - ] - } - }, - "single_transformer_blocks.11.attn.to_k": { - "inputs": [ - [ - [ - 21.875 - ] - ] - ], - "params": { - "weight": [ - [ - 0.53125 - ] - ] - } - }, - "single_transformer_blocks.11.attn.to_v": { - "inputs": [ - [ - [ - 21.875 - ] - ] - ], - "params": { - "weight": [ - [ - 0.4375 - ] - ] - } - }, - "single_transformer_blocks.12.norm.linear": { - "inputs": [ - [ - [ - 6.4375 - ] - ] - ], - "params": { - "weight": [ - [ - 1.7578125 - ] - ] - } - }, - "single_transformer_blocks.12.proj_mlp": { - "inputs": [ - [ - [ - 19.125 - ] - ] - ], - "params": { - "weight": [ - [ - 0.8125 - ] - ] - } - }, - "single_transformer_blocks.12.proj_out": { - "inputs": [ - [ - [ - 13.375 - ] - ] - ], - "params": { - "weight": [ - [ - 0.953125 - ] - ] - } - }, - "single_transformer_blocks.12.attn.to_q": { - "inputs": [ - [ - [ - 19.125 - ] - ] - ], - "params": { - "weight": [ - [ - 0.419921875 - ] - ] - } - }, - "single_transformer_blocks.12.attn.to_k": { - "inputs": [ - [ - [ - 19.125 - ] - ] - ], - "params": { - "weight": [ - [ - 0.486328125 - ] - ] - } - }, - "single_transformer_blocks.12.attn.to_v": { - "inputs": [ - [ - [ - 19.125 - ] - ] - ], - "params": { - "weight": [ - [ - 0.44921875 - ] - ] - } - }, - "single_transformer_blocks.13.norm.linear": { - "inputs": [ - [ - [ - 6.4375 - ] - ] - ], - "params": { - "weight": [ - [ - 1.609375 - ] - ] - } - }, - "single_transformer_blocks.13.proj_mlp": { - "inputs": [ - [ - [ - 20.875 - ] - ] - ], - "params": { - "weight": [ - [ - 0.85546875 - ] - ] - } - }, - "single_transformer_blocks.13.proj_out": { - "inputs": [ - [ - [ - 19.0 - ] - ] - ], - "params": { - "weight": [ - [ - 1.0078125 - ] - ] - } - }, - "single_transformer_blocks.13.attn.to_q": { - "inputs": [ - [ - [ - 20.875 - ] - ] - ], - "params": { - "weight": [ - [ - 0.5390625 - ] - ] - } - }, - "single_transformer_blocks.13.attn.to_k": { - "inputs": [ - [ - [ - 20.875 - ] - ] - ], - "params": { - "weight": [ - [ - 0.44140625 - ] - ] - } - }, - "single_transformer_blocks.13.attn.to_v": { - "inputs": [ - [ - [ - 20.875 - ] - ] - ], - "params": { - "weight": [ - [ - 0.45703125 - ] - ] - } - }, - "single_transformer_blocks.14.norm.linear": { - "inputs": [ - [ - [ - 6.4375 - ] - ] - ], - "params": { - "weight": [ - [ - 2.015625 - ] - ] - } - }, - "single_transformer_blocks.14.proj_mlp": { - "inputs": [ - [ - [ - 26.0 - ] - ] - ], - "params": { - "weight": [ - [ - 1.109375 - ] - ] - } - }, - "single_transformer_blocks.14.proj_out": { - "inputs": [ - [ - [ - 13.875 - ] - ] - ], - "params": { - "weight": [ - [ - 1.0390625 - ] - ] - } - }, - "single_transformer_blocks.14.attn.to_q": { - "inputs": [ - [ - [ - 26.0 - ] - ] - ], - "params": { - "weight": [ - [ - 0.392578125 - ] - ] - } - }, - "single_transformer_blocks.14.attn.to_k": { - "inputs": [ - [ - [ - 26.0 - ] - ] - ], - "params": { - "weight": [ - [ - 0.52734375 - ] - ] - } - }, - "single_transformer_blocks.14.attn.to_v": { - "inputs": [ - [ - [ - 26.0 - ] - ] - ], - "params": { - "weight": [ - [ - 0.5234375 - ] - ] - } - }, - "single_transformer_blocks.15.norm.linear": { - 
"inputs": [ - [ - [ - 6.4375 - ] - ] - ], - "params": { - "weight": [ - [ - 2.03125 - ] - ] - } - }, - "single_transformer_blocks.15.proj_mlp": { - "inputs": [ - [ - [ - 18.875 - ] - ] - ], - "params": { - "weight": [ - [ - 0.78125 - ] - ] - } - }, - "single_transformer_blocks.15.proj_out": { - "inputs": [ - [ - [ - 15.375 - ] - ] - ], - "params": { - "weight": [ - [ - 1.2578125 - ] - ] - } - }, - "single_transformer_blocks.15.attn.to_q": { - "inputs": [ - [ - [ - 18.875 - ] - ] - ], - "params": { - "weight": [ - [ - 0.49609375 - ] - ] - } - }, - "single_transformer_blocks.15.attn.to_k": { - "inputs": [ - [ - [ - 18.875 - ] - ] - ], - "params": { - "weight": [ - [ - 0.58203125 - ] - ] - } - }, - "single_transformer_blocks.15.attn.to_v": { - "inputs": [ - [ - [ - 18.875 - ] - ] - ], - "params": { - "weight": [ - [ - 0.57421875 - ] - ] - } - }, - "single_transformer_blocks.16.norm.linear": { - "inputs": [ - [ - [ - 6.4375 - ] - ] - ], - "params": { - "weight": [ - [ - 2.140625 - ] - ] - } - }, - "single_transformer_blocks.16.proj_mlp": { - "inputs": [ - [ - [ - 22.625 - ] - ] - ], - "params": { - "weight": [ - [ - 1.1796875 - ] - ] - } - }, - "single_transformer_blocks.16.proj_out": { - "inputs": [ - [ - [ - 13.25 - ] - ] - ], - "params": { - "weight": [ - [ - 1.140625 - ] - ] - } - }, - "single_transformer_blocks.16.attn.to_q": { - "inputs": [ - [ - [ - 22.625 - ] - ] - ], - "params": { - "weight": [ - [ - 0.625 - ] - ] - } - }, - "single_transformer_blocks.16.attn.to_k": { - "inputs": [ - [ - [ - 22.625 - ] - ] - ], - "params": { - "weight": [ - [ - 0.5546875 - ] - ] - } - }, - "single_transformer_blocks.16.attn.to_v": { - "inputs": [ - [ - [ - 22.625 - ] - ] - ], - "params": { - "weight": [ - [ - 0.546875 - ] - ] - } - }, - "single_transformer_blocks.17.norm.linear": { - "inputs": [ - [ - [ - 6.4375 - ] - ] - ], - "params": { - "weight": [ - [ - 1.6015625 - ] - ] - } - }, - "single_transformer_blocks.17.proj_mlp": { - "inputs": [ - [ - [ - 19.625 - ] - ] - ], - "params": { - "weight": [ - [ - 0.7265625 - ] - ] - } - }, - "single_transformer_blocks.17.proj_out": { - "inputs": [ - [ - [ - 20.75 - ] - ] - ], - "params": { - "weight": [ - [ - 1.140625 - ] - ] - } - }, - "single_transformer_blocks.17.attn.to_q": { - "inputs": [ - [ - [ - 19.625 - ] - ] - ], - "params": { - "weight": [ - [ - 0.609375 - ] - ] - } - }, - "single_transformer_blocks.17.attn.to_k": { - "inputs": [ - [ - [ - 19.625 - ] - ] - ], - "params": { - "weight": [ - [ - 0.65625 - ] - ] - } - }, - "single_transformer_blocks.17.attn.to_v": { - "inputs": [ - [ - [ - 19.625 - ] - ] - ], - "params": { - "weight": [ - [ - 0.81640625 - ] - ] - } - }, - "single_transformer_blocks.18.norm.linear": { - "inputs": [ - [ - [ - 6.4375 - ] - ] - ], - "params": { - "weight": [ - [ - 1.8203125 - ] - ] - } - }, - "single_transformer_blocks.18.proj_mlp": { - "inputs": [ - [ - [ - 24.875 - ] - ] - ], - "params": { - "weight": [ - [ - 1.125 - ] - ] - } - }, - "single_transformer_blocks.18.proj_out": { - "inputs": [ - [ - [ - 14.8125 - ] - ] - ], - "params": { - "weight": [ - [ - 1.09375 - ] - ] - } - }, - "single_transformer_blocks.18.attn.to_q": { - "inputs": [ - [ - [ - 24.875 - ] - ] - ], - "params": { - "weight": [ - [ - 0.5390625 - ] - ] - } - }, - "single_transformer_blocks.18.attn.to_k": { - "inputs": [ - [ - [ - 24.875 - ] - ] - ], - "params": { - "weight": [ - [ - 0.62109375 - ] - ] - } - }, - "single_transformer_blocks.18.attn.to_v": { - "inputs": [ - [ - [ - 24.875 - ] - ] - ], - "params": { - "weight": [ - [ - 0.51171875 - ] - ] - } - 
}, - "single_transformer_blocks.19.norm.linear": { - "inputs": [ - [ - [ - 6.4375 - ] - ] - ], - "params": { - "weight": [ - [ - 1.7578125 - ] - ] - } - }, - "single_transformer_blocks.19.proj_mlp": { - "inputs": [ - [ - [ - 15.0625 - ] - ] - ], - "params": { - "weight": [ - [ - 0.87109375 - ] - ] - } - }, - "single_transformer_blocks.19.proj_out": { - "inputs": [ - [ - [ - 13.75 - ] - ] - ], - "params": { - "weight": [ - [ - 1.4296875 - ] - ] - } - }, - "single_transformer_blocks.19.attn.to_q": { - "inputs": [ - [ - [ - 15.0625 - ] - ] - ], - "params": { - "weight": [ - [ - 0.55078125 - ] - ] - } - }, - "single_transformer_blocks.19.attn.to_k": { - "inputs": [ - [ - [ - 15.0625 - ] - ] - ], - "params": { - "weight": [ - [ - 0.55078125 - ] - ] - } - }, - "single_transformer_blocks.19.attn.to_v": { - "inputs": [ - [ - [ - 15.0625 - ] - ] - ], - "params": { - "weight": [ - [ - 0.54296875 - ] - ] - } - }, - "single_transformer_blocks.20.norm.linear": { - "inputs": [ - [ - [ - 6.4375 - ] - ] - ], - "params": { - "weight": [ - [ - 2.046875 - ] - ] - } - }, - "single_transformer_blocks.20.proj_mlp": { - "inputs": [ - [ - [ - 17.375 - ] - ] - ], - "params": { - "weight": [ - [ - 0.87890625 - ] - ] - } - }, - "single_transformer_blocks.20.proj_out": { - "inputs": [ - [ - [ - 10.0 - ] - ] - ], - "params": { - "weight": [ - [ - 1.1484375 - ] - ] - } - }, - "single_transformer_blocks.20.attn.to_q": { - "inputs": [ - [ - [ - 17.375 - ] - ] - ], - "params": { - "weight": [ - [ - 0.57421875 - ] - ] - } - }, - "single_transformer_blocks.20.attn.to_k": { - "inputs": [ - [ - [ - 17.375 - ] - ] - ], - "params": { - "weight": [ - [ - 0.50390625 - ] - ] - } - }, - "single_transformer_blocks.20.attn.to_v": { - "inputs": [ - [ - [ - 17.375 - ] - ] - ], - "params": { - "weight": [ - [ - 0.55078125 - ] - ] - } - }, - "single_transformer_blocks.21.norm.linear": { - "inputs": [ - [ - [ - 6.4375 - ] - ] - ], - "params": { - "weight": [ - [ - 1.5234375 - ] - ] - } - }, - "single_transformer_blocks.21.proj_mlp": { - "inputs": [ - [ - [ - 19.125 - ] - ] - ], - "params": { - "weight": [ - [ - 0.9140625 - ] - ] - } - }, - "single_transformer_blocks.21.proj_out": { - "inputs": [ - [ - [ - 10.875 - ] - ] - ], - "params": { - "weight": [ - [ - 1.4140625 - ] - ] - } - }, - "single_transformer_blocks.21.attn.to_q": { - "inputs": [ - [ - [ - 19.125 - ] - ] - ], - "params": { - "weight": [ - [ - 0.482421875 - ] - ] - } - }, - "single_transformer_blocks.21.attn.to_k": { - "inputs": [ - [ - [ - 19.125 - ] - ] - ], - "params": { - "weight": [ - [ - 0.46875 - ] - ] - } - }, - "single_transformer_blocks.21.attn.to_v": { - "inputs": [ - [ - [ - 19.125 - ] - ] - ], - "params": { - "weight": [ - [ - 0.42578125 - ] - ] - } - }, - "single_transformer_blocks.22.norm.linear": { - "inputs": [ - [ - [ - 6.4375 - ] - ] - ], - "params": { - "weight": [ - [ - 1.2265625 - ] - ] - } - }, - "single_transformer_blocks.22.proj_mlp": { - "inputs": [ - [ - [ - 13.875 - ] - ] - ], - "params": { - "weight": [ - [ - 0.703125 - ] - ] - } - }, - "single_transformer_blocks.22.proj_out": { - "inputs": [ - [ - [ - 9.6875 - ] - ] - ], - "params": { - "weight": [ - [ - 0.87109375 - ] - ] - } - }, - "single_transformer_blocks.22.attn.to_q": { - "inputs": [ - [ - [ - 13.875 - ] - ] - ], - "params": { - "weight": [ - [ - 0.455078125 - ] - ] - } - }, - "single_transformer_blocks.22.attn.to_k": { - "inputs": [ - [ - [ - 13.875 - ] - ] - ], - "params": { - "weight": [ - [ - 0.6015625 - ] - ] - } - }, - "single_transformer_blocks.22.attn.to_v": { - "inputs": [ - [ - [ 
- 13.875 - ] - ] - ], - "params": { - "weight": [ - [ - 0.609375 - ] - ] - } - }, - "single_transformer_blocks.23.norm.linear": { - "inputs": [ - [ - [ - 6.4375 - ] - ] - ], - "params": { - "weight": [ - [ - 1.6640625 - ] - ] - } - }, - "single_transformer_blocks.23.proj_mlp": { - "inputs": [ - [ - [ - 19.75 - ] - ] - ], - "params": { - "weight": [ - [ - 0.71484375 - ] - ] - } - }, - "single_transformer_blocks.23.proj_out": { - "inputs": [ - [ - [ - 8.8125 - ] - ] - ], - "params": { - "weight": [ - [ - 1.0546875 - ] - ] - } - }, - "single_transformer_blocks.23.attn.to_q": { - "inputs": [ - [ - [ - 19.75 - ] - ] - ], - "params": { - "weight": [ - [ - 0.498046875 - ] - ] - } - }, - "single_transformer_blocks.23.attn.to_k": { - "inputs": [ - [ - [ - 19.75 - ] - ] - ], - "params": { - "weight": [ - [ - 0.7578125 - ] - ] - } - }, - "single_transformer_blocks.23.attn.to_v": { - "inputs": [ - [ - [ - 19.75 - ] - ] - ], - "params": { - "weight": [ - [ - 0.57421875 - ] - ] - } - }, - "single_transformer_blocks.24.norm.linear": { - "inputs": [ - [ - [ - 6.4375 - ] - ] - ], - "params": { - "weight": [ - [ - 1.7890625 - ] - ] - } - }, - "single_transformer_blocks.24.proj_mlp": { - "inputs": [ - [ - [ - 15.5 - ] - ] - ], - "params": { - "weight": [ - [ - 1.09375 - ] - ] - } - }, - "single_transformer_blocks.24.proj_out": { - "inputs": [ - [ - [ - 10.0 - ] - ] - ], - "params": { - "weight": [ - [ - 0.95703125 - ] - ] - } - }, - "single_transformer_blocks.24.attn.to_q": { - "inputs": [ - [ - [ - 15.5 - ] - ] - ], - "params": { - "weight": [ - [ - 0.453125 - ] - ] - } - }, - "single_transformer_blocks.24.attn.to_k": { - "inputs": [ - [ - [ - 15.5 - ] - ] - ], - "params": { - "weight": [ - [ - 0.57421875 - ] - ] - } - }, - "single_transformer_blocks.24.attn.to_v": { - "inputs": [ - [ - [ - 15.5 - ] - ] - ], - "params": { - "weight": [ - [ - 0.345703125 - ] - ] - } - }, - "single_transformer_blocks.25.norm.linear": { - "inputs": [ - [ - [ - 6.4375 - ] - ] - ], - "params": { - "weight": [ - [ - 1.078125 - ] - ] - } - }, - "single_transformer_blocks.25.proj_mlp": { - "inputs": [ - [ - [ - 15.0625 - ] - ] - ], - "params": { - "weight": [ - [ - 1.15625 - ] - ] - } - }, - "single_transformer_blocks.25.proj_out": { - "inputs": [ - [ - [ - 11.9375 - ] - ] - ], - "params": { - "weight": [ - [ - 1.3359375 - ] - ] - } - }, - "single_transformer_blocks.25.attn.to_q": { - "inputs": [ - [ - [ - 15.0625 - ] - ] - ], - "params": { - "weight": [ - [ - 0.50390625 - ] - ] - } - }, - "single_transformer_blocks.25.attn.to_k": { - "inputs": [ - [ - [ - 15.0625 - ] - ] - ], - "params": { - "weight": [ - [ - 0.515625 - ] - ] - } - }, - "single_transformer_blocks.25.attn.to_v": { - "inputs": [ - [ - [ - 15.0625 - ] - ] - ], - "params": { - "weight": [ - [ - 0.3984375 - ] - ] - } - }, - "single_transformer_blocks.26.norm.linear": { - "inputs": [ - [ - [ - 6.4375 - ] - ] - ], - "params": { - "weight": [ - [ - 1.28125 - ] - ] - } - }, - "single_transformer_blocks.26.proj_mlp": { - "inputs": [ - [ - [ - 15.0625 - ] - ] - ], - "params": { - "weight": [ - [ - 0.90234375 - ] - ] - } - }, - "single_transformer_blocks.26.proj_out": { - "inputs": [ - [ - [ - 11.625 - ] - ] - ], - "params": { - "weight": [ - [ - 1.453125 - ] - ] - } - }, - "single_transformer_blocks.26.attn.to_q": { - "inputs": [ - [ - [ - 15.0625 - ] - ] - ], - "params": { - "weight": [ - [ - 0.490234375 - ] - ] - } - }, - "single_transformer_blocks.26.attn.to_k": { - "inputs": [ - [ - [ - 15.0625 - ] - ] - ], - "params": { - "weight": [ - [ - 0.455078125 - ] - ] - } - }, - 
"single_transformer_blocks.26.attn.to_v": { - "inputs": [ - [ - [ - 15.0625 - ] - ] - ], - "params": { - "weight": [ - [ - 0.51171875 - ] - ] - } - }, - "single_transformer_blocks.27.norm.linear": { - "inputs": [ - [ - [ - 6.4375 - ] - ] - ], - "params": { - "weight": [ - [ - 1.109375 - ] - ] - } - }, - "single_transformer_blocks.27.proj_mlp": { - "inputs": [ - [ - [ - 16.125 - ] - ] - ], - "params": { - "weight": [ - [ - 0.75 - ] - ] - } - }, - "single_transformer_blocks.27.proj_out": { - "inputs": [ - [ - [ - 9.0625 - ] - ] - ], - "params": { - "weight": [ - [ - 0.80859375 - ] - ] - } - }, - "single_transformer_blocks.27.attn.to_q": { - "inputs": [ - [ - [ - 16.125 - ] - ] - ], - "params": { - "weight": [ - [ - 0.57421875 - ] - ] - } - }, - "single_transformer_blocks.27.attn.to_k": { - "inputs": [ - [ - [ - 16.125 - ] - ] - ], - "params": { - "weight": [ - [ - 0.6953125 - ] - ] - } - }, - "single_transformer_blocks.27.attn.to_v": { - "inputs": [ - [ - [ - 16.125 - ] - ] - ], - "params": { - "weight": [ - [ - 0.59765625 - ] - ] - } - }, - "single_transformer_blocks.28.norm.linear": { - "inputs": [ - [ - [ - 6.4375 - ] - ] - ], - "params": { - "weight": [ - [ - 1.1953125 - ] - ] - } - }, - "single_transformer_blocks.28.proj_mlp": { - "inputs": [ - [ - [ - 23.25 - ] - ] - ], - "params": { - "weight": [ - [ - 0.87109375 - ] - ] - } - }, - "single_transformer_blocks.28.proj_out": { - "inputs": [ - [ - [ - 12.5 - ] - ] - ], - "params": { - "weight": [ - [ - 0.8828125 - ] - ] - } - }, - "single_transformer_blocks.28.attn.to_q": { - "inputs": [ - [ - [ - 23.25 - ] - ] - ], - "params": { - "weight": [ - [ - 0.55859375 - ] - ] - } - }, - "single_transformer_blocks.28.attn.to_k": { - "inputs": [ - [ - [ - 23.25 - ] - ] - ], - "params": { - "weight": [ - [ - 1.875 - ] - ] - } - }, - "single_transformer_blocks.28.attn.to_v": { - "inputs": [ - [ - [ - 23.25 - ] - ] - ], - "params": { - "weight": [ - [ - 0.48828125 - ] - ] - } - }, - "single_transformer_blocks.29.norm.linear": { - "inputs": [ - [ - [ - 6.4375 - ] - ] - ], - "params": { - "weight": [ - [ - 1.171875 - ] - ] - } - }, - "single_transformer_blocks.29.proj_mlp": { - "inputs": [ - [ - [ - 17.125 - ] - ] - ], - "params": { - "weight": [ - [ - 0.6953125 - ] - ] - } - }, - "single_transformer_blocks.29.proj_out": { - "inputs": [ - [ - [ - 10.25 - ] - ] - ], - "params": { - "weight": [ - [ - 1.109375 - ] - ] - } - }, - "single_transformer_blocks.29.attn.to_q": { - "inputs": [ - [ - [ - 17.125 - ] - ] - ], - "params": { - "weight": [ - [ - 0.53515625 - ] - ] - } - }, - "single_transformer_blocks.29.attn.to_k": { - "inputs": [ - [ - [ - 17.125 - ] - ] - ], - "params": { - "weight": [ - [ - 0.48828125 - ] - ] - } - }, - "single_transformer_blocks.29.attn.to_v": { - "inputs": [ - [ - [ - 17.125 - ] - ] - ], - "params": { - "weight": [ - [ - 0.52734375 - ] - ] - } - }, - "single_transformer_blocks.30.norm.linear": { - "inputs": [ - [ - [ - 6.4375 - ] - ] - ], - "params": { - "weight": [ - [ - 1.546875 - ] - ] - } - }, - "single_transformer_blocks.30.proj_mlp": { - "inputs": [ - [ - [ - 16.75 - ] - ] - ], - "params": { - "weight": [ - [ - 0.6328125 - ] - ] - } - }, - "single_transformer_blocks.30.proj_out": { - "inputs": [ - [ - [ - 11.375 - ] - ] - ], - "params": { - "weight": [ - [ - 1.3984375 - ] - ] - } - }, - "single_transformer_blocks.30.attn.to_q": { - "inputs": [ - [ - [ - 16.75 - ] - ] - ], - "params": { - "weight": [ - [ - 0.515625 - ] - ] - } - }, - "single_transformer_blocks.30.attn.to_k": { - "inputs": [ - [ - [ - 16.75 - ] - ] - ], - 
"params": { - "weight": [ - [ - 1.5859375 - ] - ] - } - }, - "single_transformer_blocks.30.attn.to_v": { - "inputs": [ - [ - [ - 16.75 - ] - ] - ], - "params": { - "weight": [ - [ - 0.54296875 - ] - ] - } - }, - "single_transformer_blocks.31.norm.linear": { - "inputs": [ - [ - [ - 6.4375 - ] - ] - ], - "params": { - "weight": [ - [ - 1.5859375 - ] - ] - } - }, - "single_transformer_blocks.31.proj_mlp": { - "inputs": [ - [ - [ - 20.75 - ] - ] - ], - "params": { - "weight": [ - [ - 0.65625 - ] - ] - } - }, - "single_transformer_blocks.31.proj_out": { - "inputs": [ - [ - [ - 13.3125 - ] - ] - ], - "params": { - "weight": [ - [ - 1.390625 - ] - ] - } - }, - "single_transformer_blocks.31.attn.to_q": { - "inputs": [ - [ - [ - 20.75 - ] - ] - ], - "params": { - "weight": [ - [ - 0.640625 - ] - ] - } - }, - "single_transformer_blocks.31.attn.to_k": { - "inputs": [ - [ - [ - 20.75 - ] - ] - ], - "params": { - "weight": [ - [ - 0.47265625 - ] - ] - } - }, - "single_transformer_blocks.31.attn.to_v": { - "inputs": [ - [ - [ - 20.75 - ] - ] - ], - "params": { - "weight": [ - [ - 0.73828125 - ] - ] - } - }, - "single_transformer_blocks.32.norm.linear": { - "inputs": [ - [ - [ - 6.4375 - ] - ] - ], - "params": { - "weight": [ - [ - 1.5078125 - ] - ] - } - }, - "single_transformer_blocks.32.proj_mlp": { - "inputs": [ - [ - [ - 20.5 - ] - ] - ], - "params": { - "weight": [ - [ - 0.7578125 - ] - ] - } - }, - "single_transformer_blocks.32.proj_out": { - "inputs": [ - [ - [ - 14.0625 - ] - ] - ], - "params": { - "weight": [ - [ - 1.171875 - ] - ] - } - }, - "single_transformer_blocks.32.attn.to_q": { - "inputs": [ - [ - [ - 20.5 - ] - ] - ], - "params": { - "weight": [ - [ - 0.55859375 - ] - ] - } - }, - "single_transformer_blocks.32.attn.to_k": { - "inputs": [ - [ - [ - 20.5 - ] - ] - ], - "params": { - "weight": [ - [ - 1.421875 - ] - ] - } - }, - "single_transformer_blocks.32.attn.to_v": { - "inputs": [ - [ - [ - 20.5 - ] - ] - ], - "params": { - "weight": [ - [ - 0.52734375 - ] - ] - } - }, - "single_transformer_blocks.33.norm.linear": { - "inputs": [ - [ - [ - 6.4375 - ] - ] - ], - "params": { - "weight": [ - [ - 1.6796875 - ] - ] - } - }, - "single_transformer_blocks.33.proj_mlp": { - "inputs": [ - [ - [ - 19.5 - ] - ] - ], - "params": { - "weight": [ - [ - 0.90234375 - ] - ] - } - }, - "single_transformer_blocks.33.proj_out": { - "inputs": [ - [ - [ - 11.125 - ] - ] - ], - "params": { - "weight": [ - [ - 1.5 - ] - ] - } - }, - "single_transformer_blocks.33.attn.to_q": { - "inputs": [ - [ - [ - 19.5 - ] - ] - ], - "params": { - "weight": [ - [ - 0.5078125 - ] - ] - } - }, - "single_transformer_blocks.33.attn.to_k": { - "inputs": [ - [ - [ - 19.5 - ] - ] - ], - "params": { - "weight": [ - [ - 1.3828125 - ] - ] - } - }, - "single_transformer_blocks.33.attn.to_v": { - "inputs": [ - [ - [ - 19.5 - ] - ] - ], - "params": { - "weight": [ - [ - 0.4609375 - ] - ] - } - }, - "single_transformer_blocks.34.norm.linear": { - "inputs": [ - [ - [ - 6.4375 - ] - ] - ], - "params": { - "weight": [ - [ - 1.640625 - ] - ] - } - }, - "single_transformer_blocks.34.proj_mlp": { - "inputs": [ - [ - [ - 19.75 - ] - ] - ], - "params": { - "weight": [ - [ - 0.9765625 - ] - ] - } - }, - "single_transformer_blocks.34.proj_out": { - "inputs": [ - [ - [ - 29.125 - ] - ] - ], - "params": { - "weight": [ - [ - 3.109375 - ] - ] - } - }, - "single_transformer_blocks.34.attn.to_q": { - "inputs": [ - [ - [ - 19.75 - ] - ] - ], - "params": { - "weight": [ - [ - 0.498046875 - ] - ] - } - }, - "single_transformer_blocks.34.attn.to_k": { - 
"inputs": [ - [ - [ - 19.75 - ] - ] - ], - "params": { - "weight": [ - [ - 2.015625 - ] - ] - } - }, - "single_transformer_blocks.34.attn.to_v": { - "inputs": [ - [ - [ - 19.75 - ] - ] - ], - "params": { - "weight": [ - [ - 0.447265625 - ] - ] - } - }, - "single_transformer_blocks.35.norm.linear": { - "inputs": [ - [ - [ - 6.4375 - ] - ] - ], - "params": { - "weight": [ - [ - 1.8125 - ] - ] - } - }, - "single_transformer_blocks.35.proj_mlp": { - "inputs": [ - [ - [ - 24.625 - ] - ] - ], - "params": { - "weight": [ - [ - 0.8828125 - ] - ] - } - }, - "single_transformer_blocks.35.proj_out": { - "inputs": [ - [ - [ - 16.625 - ] - ] - ], - "params": { - "weight": [ - [ - 3.0625 - ] - ] - } - }, - "single_transformer_blocks.35.attn.to_q": { - "inputs": [ - [ - [ - 24.625 - ] - ] - ], - "params": { - "weight": [ - [ - 0.63671875 - ] - ] - } - }, - "single_transformer_blocks.35.attn.to_k": { - "inputs": [ - [ - [ - 24.625 - ] - ] - ], - "params": { - "weight": [ - [ - 0.62890625 - ] - ] - } - }, - "single_transformer_blocks.35.attn.to_v": { - "inputs": [ - [ - [ - 24.625 - ] - ] - ], - "params": { - "weight": [ - [ - 0.5078125 - ] - ] - } - }, - "single_transformer_blocks.36.norm.linear": { - "inputs": [ - [ - [ - 6.4375 - ] - ] - ], - "params": { - "weight": [ - [ - 1.171875 - ] - ] - } - }, - "single_transformer_blocks.36.proj_mlp": { - "inputs": [ - [ - [ - 24.375 - ] - ] - ], - "params": { - "weight": [ - [ - 0.796875 - ] - ] - } - }, - "single_transformer_blocks.36.proj_out": { - "inputs": [ - [ - [ - 27.875 - ] - ] - ], - "params": { - "weight": [ - [ - 1.1015625 - ] - ] - } - }, - "single_transformer_blocks.36.attn.to_q": { - "inputs": [ - [ - [ - 24.375 - ] - ] - ], - "params": { - "weight": [ - [ - 0.53125 - ] - ] - } - }, - "single_transformer_blocks.36.attn.to_k": { - "inputs": [ - [ - [ - 24.375 - ] - ] - ], - "params": { - "weight": [ - [ - 0.6328125 - ] - ] - } - }, - "single_transformer_blocks.36.attn.to_v": { - "inputs": [ - [ - [ - 24.375 - ] - ] - ], - "params": { - "weight": [ - [ - 0.59375 - ] - ] - } - }, - "single_transformer_blocks.37.norm.linear": { - "inputs": [ - [ - [ - 6.4375 - ] - ] - ], - "params": { - "weight": [ - [ - 0.546875 - ] - ] - } - }, - "single_transformer_blocks.37.proj_mlp": { - "inputs": [ - [ - [ - 28.625 - ] - ] - ], - "params": { - "weight": [ - [ - 0.46875 - ] - ] - } - }, - "single_transformer_blocks.37.proj_out": { - "inputs": [ - [ - [ - 23.875 - ] - ] - ], - "params": { - "weight": [ - [ - 0.88671875 - ] - ] - } - }, - "single_transformer_blocks.37.attn.to_q": { - "inputs": [ - [ - [ - 28.625 - ] - ] - ], - "params": { - "weight": [ - [ - 0.46484375 - ] - ] - } - }, - "single_transformer_blocks.37.attn.to_k": { - "inputs": [ - [ - [ - 28.625 - ] - ] - ], - "params": { - "weight": [ - [ - 0.6015625 - ] - ] - } - }, - "single_transformer_blocks.37.attn.to_v": { - "inputs": [ - [ - [ - 28.625 - ] - ] - ], - "params": { - "weight": [ - [ - 0.361328125 - ] - ] - } - }, - "norm_out.linear": { - "inputs": [ - [ - [ - 6.4375 - ] - ] - ], - "params": { - "weight": [ - [ - 0.345703125 - ] - ] - } - }, - "proj_out": { - "inputs": [ - [ - [ - 24.375 - ] - ] - ], - "params": { - "weight": [ - [ - 0.1376953125 - ] - ] - } - } - } -} \ No newline at end of file diff --git a/examples/stable-diffusion/quantize/measure_all/fp8_hooks_maxabs.npz b/examples/stable-diffusion/quantize/measure_all/fp8_hooks_maxabs.npz deleted file mode 100644 index d08514bfc03a9d4adc6ccf4c73648b115f9c3b72..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 
literal 97750
[... base85-encoded binary payload of the deleted fp8_hooks_maxabs.npz (97750 bytes) elided ...]
zs~C1saSyaH?3!*d?3T)y-9_$!)|frgYGeH9+YeRT1D%S&oh3)u2Rbk;lyru@l3TvF z$Ufwe&af!0V%SH;KG4RnZ@R^>Un*l3i|hlfG5e?0#<(o=BNh8V!!YK(H1lHj0S<8x zbf7p;(kTu~Zv7IGgUBPD;^4H}dhew}R2&3t6o;l;6o;iU=5UdNpf%=*wAz@tE-k4z z2pWnp@1>EO-A6jXK+pl<|Y*2Z~!Ho#NKy*54*_5P76i+@4lj@4a+~ii4ny;?8u7;;vN2+%0kt zw8q?%RvY7oa(}7fAZRGYyq8Aqci-y-13?Fb`y?IV{^WK)ATkhnq$50-R@?m#?k=eq z2-*-HPPY&qNoCBVA_GBd%wuV_G5O1zRSX2393k(e$DQCG=z#Eqq$50;-0r7D{vnTa zgs0PLyYF&&NyR_VhVX2gvmjok9y!U+a~4hUOHI>J`T?cQ2sAo586 zAht=X?e^DqZmVJ-XhYa8-9p$tl`%Vr35%ONz(Fy*64hTC* zI>P+qcJD0m4|${`EJ&;E{(JYARQv;N2)m|R2)m^+W_OW)pfzTXwAz?!_y-mLKqvTm z7`^Acrz89W9T*l$I>TPcE#F(@AM!|NSd> znEZ&eihrO}F}T0v2=_n-h65#?;h^M}FA=$iJkl8sPOBIWQE?BnF&vt1F&vi4n8QWx zf!3HK(rRPo`~HK9d!SP>xWD8G`#=YVqa>Z-=;W3!71@V8(ix6Ps~C<|u@AH{9G7k} z9G}XV6GZlb)|eC1YGeEu)=yOI0}aEN_tMO(-X}T4LC}HXWJ#x3mfZSNL=Ga4bc$2c zYU{n1PE&CZv{9U%Zc&_(%9t}n4uaO0v(jo~+!Huk#X-35%OMo%L)F04hU~cI>I~2?S5C}AM!{?crUHC+g}{;zKVaK4Pkw{h44Wt zV?Gr52U=q`q}9gUz(1(?2Ri=i!Kd*3eeaJP;UDP0@Uf&be3IPqPeuMAk93C5(yEW) za~1zU8^agr7Q>gRjQL9BA83vFI;}Rwzi<76ihrO}F}T0v2=_n-hHoXEVPkU3XU)s< z*|e6=QGCy^iHdumjbYPti(#`=#%wNf546V2O{+1?<{nhs1D%S&{Ut}(2RblpDd`Mb zCAWNQk$uP`{d3qRt@<3cRk07WF>IG^F>Ig8m>op+f!3ILX|*x_`{4&F_JM|B%zJ6( zeeWF|;vnciv6G}z%ujCp&LRhqM>@rVwAy;_rCn4U1Z@<%rdt%dr7~uBk%OQ$W{G3LEAa^HJTCm0AiAS{%0guRm6y|>6fsgGwtJ!b zODg_>HiSddEri2T8FRSEKhPR;L|Sdk!~BDaf1ne5J&fM>KGG5Xfes8uNjk&P$t_6nBBb6~{irfROF=wUK#(e7jl8SqvQ!%)| z!j*Lmuf2E7K~5^Hl5uZ49f@Er#<`8FPWiKF}I-VOnj>Z{1%~ zu@5v1W8Op7$y>x|&gP@J#%5;n3s#L~Y zEpiaF#;i`Ojrj)-Ld8MQP>gvmjokOX#t8<34hU-`9pT#Kc3&ql5P75{T%T6k?efnJ zDh7f!gd5W>gqu@mPX%)jWD(-HhXT5U{z z*^-KTpi?opzvKw}KnI4GB%R^q zW8O@wjmh^PRO|x{!}oB@1@n&doR7O;vi_F zSf6fDe2~hR4@C}w)|d@xwK0ArlHBf3MFt{| zbcD~+YP)Z7e@Vqa(1!3ux`ps%Dr3G983KhbcU^xTfVi(Kje}AF>I4oeGJ>G_y^h;woA7dwohfu4kG_RYs|c~ z+L(>*FRAzkIu(QaOO9|4bYR#?(i!F_w|r-jd&nc5VL@8Ou#1X&pp9YIbc%Er_QE?EoQ5>3XQ5=@an8QU5g4UQL(rRPYa}X*Hf`($uduimp_mNI85OhE|O41RI zPHy*7k%7n~9pRX?+V21A{*sD;pbg=;bPM75RK}bjG7z-JoS0S{ zPI7{Opaa6ml8&$}x!tFT{6ik;2&bmicKi2Br>Xb{+7M1pw-C-qWz3l(|3GWZS!uN~ zt9V+P;rvv_ zTp;ofw8mVRRvYs>{z1h*(5V>QUvh+dpaa7tlFo2xa?39hxraQ`87@z&7_Lxp5415{ znQk#$mCBf_Mec#tnAK^uF}|jMuHqi(R1EGfIl?~BfnklLGhCb8^6NzQA&+#1>(eTR z8&vEAZ45W2TMRd)GG?vFKF}JoF0D4^pZxr^ihZDA81r75dEfhHhd2m2P~0Nv6t^a~ z{x*?=$RnNN_O#l1@1;9b90Y9?ccxnuccn7sZjpnaHRhhQ+L)hn5GoFWhGNWnY2?25 zy-qL?bU?UI(h=@YZubKs1Cd8M!h>nG-M%09kcxqz4dLN*3*nJe#yl!A5VXcTmR1|{ zKYas2#X!)>5%OMo+zI}H4hT<3I>M95?S4w+AM!{?csi}N`ylt1RQv;N2+yWl2+yT5 z=6R8Spf%=&wAz^e#XqR{2Rgym!{~kQ7aidr=)mxjq%*vn-11jM{vnTahF8<7kKs2e z{(&}z*U~M9*HanuhR8qA8uMmaZH#|s{96_OK&N7Gf5{QAx@tv2RN-+)jt5Oi{cyq9)#f`6a`!cLNoFh9B7JB$279_a`R(rUYZ(C_hJ}*Ouvc=+_ZInw zJkl8!rBxrpJ}Ul!HimuEEr$J48M9dAA83u)KdmiwAz^5UsACT zGz??jOEd3#pX3k+K?jPHC7ohfa_dhKIfy*cDNaqRt@mC!O~pabMsa$&MR7(dW6l&g z2wG##N~?|WL$7D6I0zbwG4G|3``+g`!9dUfVY#FutVnM6xgrCRM>@jFwA${Uxxb`h zAZSBam2M%NpURjEL$+yfmLZjp3`Ta#OUo5(%nkw z=CQQen0f4jihZDA81r75dEfhShd2m2P&^^&6i+6%{wa}z$RnNN>9pE<@1CZ_pjVvQZW#;A-tAuA-tZ-m^VZQg4UQf(`sXUGwZi127*qGkoVGCPVf(OKzLiy z5#C8|_q!thkViVgdug@Z|K9y275_jR!uoUz;e%Ahd?@k{w8m^mtBpC8e^Bucbb_yk z(fi&XIl@2Cf#G9GXZR$!<)4cDLmuf2pQTkF!{;jgfi{LO(k+HBQyKG>$Uo2;^L1Kn zjNkS62NnN7r($q_$r0{>4h-K)I>W}~me1NT%V*PCK1cCA!zL>3fi{Ls(=CS0QW>+k z$UV>+GdHcq@G1A8;vVQ!4DK&E!amS}VM|G8*ebc@TZ`;N9_gRMHfh!8u&s)Hpp9X> zbc}oB z3({)qy_a@TaS*go?3!*-?3T)y-9-+9)|frgYGZEVAXFR#4aJ!E(#U=9J)K}6=zy?L z(h>GbZuj0I1Cd8M!lJa=?$z!usTc^_5cW;C5cW%D%wmy&pfzUywAvW|CHEs013@Q8 z$b0DkC-?_CARH*^2nQv%dx^+Dq$8fBQf1r)wxO9u* z_*BN6Ao35i#+;Z|8#9-GQ1K6RDhBtL9N`}5z;LppGb~GP`6(jzkViVhsc99%X)5l4 zHipyFErv5v8FQw{JS7aaZNM~4? 
zRxzBXVjpN@Se0%uoS({=3q*WWFTmb zS(jEDvw(q6F%WceguIt-c7lJP1Hvtmj&N&oyKfWuhdj~|ZcnT2-p~Cd75_jR!ky_B z!dcy7#>Mw%%dXzKx@onX|*x_o$;?!`~#he!TlvixCc5gJR#`}PbRng zDUo}~Bc0*tw2I*w756|J!?WoY!*i*Od0ylmXpMOxtv2TWa1Sc(flkHX{*oi?105J% zl5~cblUx3Z$Ufwe&hTnl#qb*y`#>ARYv~rl>#2-+Lu4Oljd?SzHpb7P{8q(2&@ha7 zFU`E~{gy)<1RW^emUN1Dl3V|-$U)?hPVrt^ZN2x>`zj8CHj4G>7R3jtjQLRHAZU%* zkXC&XzgKY(G!$drOC$HaKXQVBpaa6kl8*36a=Skj8Hha65k5<+?e@d?pQ{)M+7P}- zw-CNeWz1J113_!d*J-seehB;zDh7g1j*$1#H%{;mbU^r4(h)W$w|myV&hFW?cF$3K zxBEEvmsI=%Z3vsDTL_z_GG=p;f1ovHZdwiDLHAx@tv2SbefvShKhUWd++T8p zd!PfuPLj?rKe^>Qi`+vV=?n|fDu!KD+yiY4yQW(VyQMN_caeLbHD-^r+L+G{T=_#4 z_dus&aDT}W_JIxz3niUlujH2REwT@Jq%$l^s~GlCu@AH{?3->e?3c=z#UlGaYs~&> zwK4yeeNeFvGz??jOEd3#AK(xNK?jNhC7t4+sXDw)>~XK&N7Gf5{QN`Fs-)R-)8xcih-aF;o)=(;gM9vJSs8}w8lJ^RvY7WFPWKXZS3wV)$Ie zKG4SSMY_fCWh!I764?h@W4=zSjrqv^B^CQX!!YK(H1odqHx6+SbfEZF(kV73w|>@6 zSwEZB`ZqLx5z)_k$+yfmL4wQ6;gOXdmMC2awNM|@W ztztMt#XZo*aA>;4a9Ap14i~uxT4Rn#tBvv7?tiS}9_UmI?k_pQKG1>TC`o5HI=SUb zMfM?&bcSQnDu!cK>;r8K$E8~g$EPyp1d)B9HRi;$+L-lzepcN~O@ zgP@@p^IjUc?|qIF34vyq$8|MtL=W){UsFxK^wxVbPM79RK{E& zG7z-JT$olHlQ1K76G2EDLG2E2On6)DRKx@ppwAz?m_y-mLK&N7Gf5{Q}oBPp8$^doMks;vi_FcsAXlcrKMO&x;%c ztuZg8)yDWY8^2a@5Hu8H-b*9*yKfuIAzOOlT8a&o(05gCX)(h*)wtL=Wq{UsFx zK^wwr=@!E4sf>9;WFTmbc{8mx=12xY#X!)>5%OMo%L)F04hU~cI>I~2?S5C}AM!{? zcrUHC`yKa}RQv;N2FKLtv2R=@DD2ffll!CFnZtnBS-iLIxu`J z=?tGFxBOF)f5;=9;j^^rWB6RfKhVbTMY_fCWh!I768Q&OW4=zSjrkMbeo*lbbSeh- zmmJ|9=)mx;q%&+xZuzYFSw5TA@;Qp{88%UI5416Cnr<;{mdcpTMec#tn7L^+h6UV% zihH0_F}T0v2>U<>hAkzXVXNepZ!NM9d8B_1+oV;W!?r5+fi{Ni(k+JVQyH^^$Ue{- zGcT<+<}cU>75hNLFy_59^S<|v4sj54px8;$Dds1)erJ(`$RnL%L0WCS_tGvZ4uUp{ zUDGX!-BKB|yU0P%8nZ`QZH(Vi{zDZ9K|?X-y)<&)drv1A2s$7vlyro>lH0wv$Ux+g zj<6`LwtKz%ODYC}HiUiCErk728M9bqAZU%*Kdm;#FU|Rpih-b$BjmkwfD`-!9S{zb zbcBPF+r32OAM!{?I5@4g`$_kgRQv;N2#2Oy2#2LI=5UdJpf%=*wAz@{eD^`cKhO!j z9!BqbAL$7HKnI4SB%R^t4h$ztI>WN$mY*VW4|$|BoSIfKoTlO)Xk$1%-C{T+l`&_E z+ykvKXQkD~_&q0QtGEX`6@&Xrj<64OU|25c3@egbey+$qGOc1bPsKja#;_{g zVmLpQF&BvJ1FbO^rq#wg=I5tX>;nzMR{l=;6)Hx7wmDC|is#L~YEiwwU#;i`OjqyXq zKUXmdG=JDX@rNC5BIV2T8vh|HK!-o&8cF|CUYp$b>qJ%{kMuv~^=Y;7{>kSC6)Qm7 zpYq0Z>rZ)8Dr44)tN^Vs>(Xjt{>+a;t5^Z*Pq}K2y2+C1J5iw^mXUv#$R?Imo z=A3iR|2L&ryLRir^Tyv}yg7T;+BIv`kj>QuU%_$t%F(&8$NNUwb-O_i)lNy zXw$gGi18L|@VG(aZP;)vZ#8(##Nn(@965NxaCFO7Z5o#w|Ns4?-t;;{$4weLVQSl< z<864`v6IGxQS>`_YP~@=eCYUb6N60}8tRp5E&Qt9xM2&crP_UK%`7dkdF>&kIQ2)JX{($ce!oK;#TD=piyZ=HdPJ*`x)U%!6a{l!22 zhQ2*RX}KY@_b$~hmzHliupXamJ!brn!DF()<3^WO=r?d#R<<8MG%l>zZ(v;(b{Icw zc&Wj_CHbd2QyN}c>F zT1vKdR%)~;*`}p+ipgq)(z-)tZ%)6{dz97-rN%>MmYNJ)nj^+f3V)x`bkP~>>x^bu zX#>vaTYF}y`N&2KH+Czv2&I;6Z1s0<69(Jhu@g(J%cTwf)&~q9wcp54YO}~^Bl)z= zN*hO?c8h$vm)eI?&5((c{%`-f)Ra1uOC4K|^nWor>445zsmtI0V#FqUl)8peH=5jJ zq$ldG4>!$9Jz^)D<(=>o@5RTxM-rpYNbh}f?bsqKZ5elLwP;6=($=B0O{L!Vn>1?J z;BiBT|I5#G+j42Ur1!o$VEe4JL$UXMp|m6I^v`?WNgrleY3JCV06|`|jGYM^@T1?$~S5j_#$sLusG?bMG~!LFLlEN$-Poz>usowAlNwP#R7Vc&(eXRbP%mf%lj_t!-KQZA+d|;c^8L<(hNR6Ono1o_dQcP zj>t;0;*KNp9kWB}sQ**nHKn7=rDKx5kCpjxS?Tz+?;fQSLg_?WIVtb^WPNx_Rys9y zaa!KR>7g`-kIzuwXXbsMr5$HyrE}tra~JLCQ93V_&R5?J_}801f%8AP_?NhJLAi8c z(&0t2xi~9bl6Kg$G&htk<QI`;$JeOCYx54T(~j%2 z(hYIPjroq7Lg{98s6VHt_`l^!x0Fk_CcWJzgWI#x9mU@645hm`?(V#|d-UPGS?Rub z(*5~K4}{WuK7LTWJ(TzMuy#C>l^%^d9?N$;9!gKBwHVbj59I%0R{E&e`p2R4 z3Eg~}xBi(v{5>5gYk3Z{({``kIfwQS0C4t$(K--)E&C;*KBl9Y2NA&uV?$O05qL zVO-lVeo(3OOS$xG((i9F{XHxFQSA56Q2J}gOsmC|;4NEisXDW42{^Ml;KEL=wMyL8JxYP2eU@L%Cg&S=v@@d%`fO$k) z38}+YhBI45MGX(}l%Cd*m0+ua^t&3ACth8Ot!u!UtqI0H*HR%mbhoup!5V=$tfLZ% z*A;u>^`vlW3}@B^j808eaHsWA!J2_pC7x^>@M+nagGu5RNFCM^&a9P+g~Y8{3AQ0f zzipsA@kUZ?Z3}0%F&O)7r-Jv{9u=$x#G!*qBI`Sr1&mHzRdA9b 
zO5AL*zrcfs4I4Ci&;%R5zjbFr*)|1}+&z#wax*xyo+=h{_hKbjZ;-D0KzZ)XrP#U! zoY|IO>~<>^yxXl&!L|W$*j6QSZzuNLeWh^P9?onBFgo>9!JT$Q1?vx1mwWQU+&i(M zY#Es3-WjPQcY!n8RmDQ?0jvbu4Ww&L{&{Y$Jb!Q91I}zuFm}6_3f}GBs9^hmI1EyW z-1~|>_h2cS7y@TD6pT*8RB)%^s9+<&>T;JB=H8DDWg7`5xkn*&A9>Yqo zu^?TKgYw+trPw+F&a4E+ZcPR6b|NZR0CAY461gXfJ@)}pI8A{wn+is!166RRgHXYy zfy&)MV3vqM!(QunYKtOPp@r02t-JoQW|wjKdz zHVcfs9;rg?wVTaG1v?7F;b@geeT>*sA1j5^ad2kGgVE^(6+HEcs9-07|NYY@wf+rG zayyv~Wjh5-a-WLSk*C3#ovvb%yQj@zCD<7tU7rc%xzCbf>)CK-=YX->b5-ze&qD<} zAH?ATmB@Xe*mGYbh111wW|x4`X|4+HbSWy>W#D3e+9O7^9XFgGblboGOuL-V%XS5r zq`nfV1FwQJyIRFU>UpdLy9T7^YoR>#by94-9?t9rF!p+*3f}8Ys9-mPINYKVsc#i~ z>f5Anx*g8!4lp|1se(J*g$i~zsMPCJ`W=s{YwRArEZe;V-Ef#

      _L!rAA<704@cRBuT)*Ugzr^e*R{UY1S_7a$+e;KKRUx71wRmDR31*`;n4W#$ip*;N? zQfz$_&g?BP_WZUA-t#-CVDExByr&ZB-xquO52SGV5YFr)Fgks#f;)YJ3ic_e^cz(E zaTn6p*k|l0+vi{s{0pSc{1VRWD-{dDzh))aHy~|)3+2JTlVa=laArS%vE3h4@OFPf z1^XGq;TM$%{;Sx7|0adg?{H>+fYIqs72N4BRIpn7Z8ZgN^6$ah7p^!~o3G2Z1ek=b zgVcF-;mnp)u@HJGR)Q@J((*D;9(q|Rw$_6gp~2W{eHFabmG1=3)=uLJFsrFrzXUom#8lP8*_v zwE-2r!ZoXXTfb)6MtoYfwqTNYW26pi2WQq^#X{m5R)Tc^>9-@4C+;N0*3K}~G#LBr zs)F~~4aGDK;?P|s5^pN@#66^N+6-n$2BT9i72K&eDp((|s>Gvhb3QHG7GRQiOQa6l z3eIe66$^>CVI|nMApLF!<%#=Bv2}ZxIU0<8_EW+8+!4hb4dSqqN+iz2o_J>|oOXd3 zkHP3PKm~W&4HZm(v%IRrlWliCE!!Sol6X&~4%-XPY;P3{iT7b8*dUO8_l5GrgQeIy z1ZIK;W1quR@IHs5n4m!%_EU+(BgLM0loU?;!wkk?bQ+_AJB>vJ8wVPLNP&uIFwZ) z_rYS%eTWoJ(_uzqFgndp!JQ661v?z9E_b|p&16H_jsTO~vyeLSNI0|EDi(4d#Y(WF zLApK$%5xtp#n$6sW@s>Wdx8qy?TIL6Xb^{!RU-E(V$Xf56i%nXjK*Menxld{oq-B= zCRknWc=tMs4P`qUOmd%t)RE`HnVqL%A@}*L1iJvF>kFYg_eD}{y%=VO24lB#Rq$>v zMKMEzI9#q0xvvm=?klBmx(a4E2BXtF72N3>RIqD7)Px{lAwc0HJ+ zz5%HNZ-g_uNyS3yn^_5V3rNqmLV4=jq}X~p%nS|2Uhh=Fd%X+A3=QINk4mJzSL~_p zlfvnKmXSr=8rr9%e(?9s!fwk0N#CV{m4Vt60eW1S`Rw1nK%I zD9`=06kDHx8KuG4?Q<%4x6h*(r9m8CREgX#i9PqrQaHT=GaG}^X@Lsv^cpJI>)>L4 z+Qqxq8+=~2H^C(JTSy)FHk{czDi%_|%Sy2KKze>3%2R(J#numDMrbhh`mqY$>nA8i zXb^|bR3i1~Vo&{r6i#2l48~w|`dS5d`UVy3TTrPh+`Zbj^}E-1d|9^d!6fhxNS*Z~ zoY_w*76SjwO0Zu*+Wi&E1OFz)*56@9XE3(;rwZQYUnoZBI)6LVh7@=Su?Marg;QOa zp%{!#OR3;aOQV7<11fNZyO#p{Fq+)GmStPn>VZl6<&ZkKKAhR|DkkZB+X}1%TM;CE z11L|wk`!B4hMA$k*mFY_V$VHnRTMKch{NhCk$w%ar(aVFr?p_FVlX;2Qo+-&g9^4T zsPq-?UjIm6W9zY_Y>mMrcoU?~Yzk+#zKVt5%~%Px0Z7};p*(mCDYmwRnV-SfZfg}h z_=YIvXAp;tR3dm=u?OE+3a553!!a10YE$GcZwc9m^=FbTf{Qs?%AGuu(cLiqly1ltLu z^$g0x?<~dEU0|kZFt$8E1#fvb6w@?_!|o~(eh;yS-%|>wy^m`}?#1WXbSMe49&aAw0*EF>PmO0fMv`W*@7iAPDXb$^&?8jO98QNjBh zi(;AvaTu=>i6@9XaY+g%gBg;+=oD0Nr%9+_lfkMIkG2E&v}{wrB=J>w2j ziKnp=tPIlc!BC#~5Gl4!hnb_n*yjutywAf>%+VkYGgTt-5n@j~OA4nWVa8)HIvu5g zI~|P*b_`fm;>mU_pO)=7FiCtoQiq)YXLh2Bg~TVZ!rxp5>Gu>UPkgErTTg?TpuyPZ z92LCJGf+&>AP#4#MB=l>p7#qpb}TOd&S@Cnor<_mxs?y$WW624lDLRPb)EK`}vtI9#U^xvv*{ z?i-|Vx)EkH2BXu>D!9`vs9?8()#Z+NuiMyAw%frZ_Z>(bc_*CNT`CrG-_1&}dqBFr z7s_+rC&kwLVPMlni*I4r9Yx$B8N_i|D=)rXml!RWMt3huNbDp&(>u|MtN z-D@R2FWbssl6n=S4r~Z#wyKJS)T^-)Y;};H*MRcWYf7F7<>|MS zV(V5gGc*``-bRJka}V1V#S9JN&{rkWZ!h-rJ4oTw4`wO`qf>tsJpE3nU>T_N74BaD zNMB<+v!iUgfJyLOkvek#oY`(FCc%4}{wvnOb_Z#D4=4}5rxaWFf|;Me*zP_mc)Np8 z%+DYWgHbx;< zW@A+>gdWFAu<;-*Pk{2!B`LNV%m@v}R)Y%O>Le5+G>F3iDiL~$*h5d1!s$Sm`525& z(^PP$GAh`?ph8!;d&SWH>mKi3hp?+`)4?SCp-7!O1J3L)6${}HXC>H7kk*fY^6;~y z*m@+)G!4d!s!HO$0xEokyH|(g z?sY1kmhCh!Nqjm|hs}XAJ43}n;xkzZb{0s#XG3}7bEMdMF3dCy#y-ziA@p=Rw9?BEnAjQ@jVdiKs_Ia}k{*$={#T*UdaGOdbzFq8z?~uajPMGl+j81o} z;7<3Tg53*Nm3Xq<$ERhxA50QIfYf30;mjUXv5@#7R)ReY((fZsp7>EIwmt?kL4&c+ zCsgo0pF}Z1gE%~`5{aJ?d*WxMaC#1AFb1R33o5wNi>P2PflA!$-)GYJTbnt zUICNbuOfBi0ywkRR4n9vot0p3fOP#Pl;?g+imh+MOweHL_FWac+xJjR&>#*Us6_4$ z#h&{kDV#or8I8f{^r;H&^cgDH=U{cYq!6f&RNFBKpoY~SU7IH7cO0Z=?x~>Q1xtEh-Ykim*8jRhppn~UK z5ycD*;;@oRLyZbZ3;6(gR$3UDtNCOpqQaS99pPE>Xu?p-AW3l z)-V$?7@gXv;7%K%g0%(z`=_1Uy*6e;+1i0g?)FF>Sp#R*LB%9@59`QEuudRdcZTxZ zU8LCB6=swMW4D{A5WDSW-BFCvAPzlLBKKxu&)riBr(Q6#F&LftsNlIbM+MsgTBQzL$?XQCOx)X{K z8pL5|l}Npd*i-K+h0_3-!5EBA`d=63PP?Om?Exxvg}YaW{O+|UUzTkzFbTXjQfKW0 zXEsR1Lg0N_;eW*f((Vu_4?I+gt;1kOXE3%oLIn@JABxc##9@?51m0ilfk#W>GzMlU z2BXtB72Ih&D%b>2fh*j-6xfH+00#g4Ka2`0g3BX#CcaArrVSO|U$E5VKhY5O=R4}QE9TTg(QpTXGfNh)}|C!?63 zK^#t1iQuP+J^1NTIL(0>j=|`3rV8$K7An};pn_Mpdvz?{z0Tq5vYiViq0d9=yz}AA zE>N)$`a)KMT?Eqd#ZVsl5-GOMg&Cp2*y?2}c&nGA7@`<`1{2k{sAeR=EIE2V03y&1$TNF73>jE;Vay| zIwp6oNBOjDkAX?z$B{bh2{^MSRV*Zaij`nbgY^3hlqY^ximlJVOw(ZO^92=RpFQkF z6w@?_!^_!Y4yepL#m1u#Q07@b~M!JXbf1$z^$D)DH0i%-k;Hkc%S2dTr}g)@6k 
z#X{ouSqb(5NWULKdE$?x*!nTd91X@kKUKkhGM}NCqd^?LP>IA}iaqgHQaF7LGaiG{ z>01@t={r=g@4>1PPqrWUv}`|uN#dW7I_zgSvtLv!B>t6^V84O%`#Y2;{zHnbf5J@A zVC=IN|MUC2&)O&^Xb^`wDv`LZ*b^@)h0{_ngE1JLmQleIFN+FR4^-l2|NaMf{H;#z zUdyqeZ1uq;_wq;`xdNQoiYgXzH(({$N+4aY4CT33kz#8@mgL`XSRWgh1|_q z3DyFn>y}WSyOk7MTf@xIVC=Sy3f}ETC}wC7hmBPtcRR7?ZZCyX4a{f^MyHM{xKk%o zu+Ct0x#Qid3meMT6-;t>L+Z#);LN(KSjfF8E5UkzbiEmr=k6)R)?P3(G#I<>qk?z4 zIf@w?#9>R7$i0==b8jt$(>5@}F&Le;Q^B43qJnJ?DtCpuSN^B>!n@ZFd|tMGV3K-A zqz>#4XSS1yh13}8y{i;k2f)nGVC+@@4=eIscSkWpgE;J|5~=qRd+NQV zaM}lELI$JLzAAX?!Kh$E!2kYfCwH%*Y$)3>Fv&d}sUt_gneC@yA@@jDf{g;{dVeU- zJz9#bV_-&UFm^jm1@Cq|icuQGp`;SIP3*ZRO5qe>W@9iqO;*934nPH)0xtHaUA%iu z<@2%~2qviyLh8V2aAsu{3#kugCDe*5_9R)KOgVE_272N4qRIuYfrLJ)I>X_fXj_1p=od70*Pekghli<)I6?M^TWeiu?_-VJAVkBWuh_p%b~K9IKWhw|VLNU?Q3%=`?-b{|r~+kF_t{0!po zs7eHXOzgoQm%`}@nBf?VPEV=ePEVtPJp(Fug}Ya$;@#_6zAoEyU=sRyq|SQ*&g?}M z3!z_PCD_X#Ex!WgpjIb&8jP*Ju7bDv28t0H#NjQK2>rI$L%$=1)4MS9F&LfR zSHYb=Kn42{ROkwKuNc~Y-Q(TsBX*VTV=xK-2~y{N3TO72iiPl>vl8qJkk-G1^6+0t zvGr@1X&Q_zf2)GG{2hvE8pPoTl?eZ%*u(!Mh11V4qcRwsepSJpenSQO9aQ)Vcdt&# z-RloNE!&@9lK3yA4y(ogpQ+4hgT=&4uoA2eNWXQVJn@oJY+VXwng(N^%c$UeE{kHC z260$UB@)*cd*bD#a9ROoNCu-*0~Oq9B~-AL!KxCEwpIAFYz@IA@v2B2wi=w->M9m~ z#%r(=Y)z1U*MjoIYfG`U5zHJ7#y;0o!TVef#T*Ud&_pE?Hx+y0^`&rX1~VRm(W$u# z?$iPmtR+}g;>p&EPs`RCOcHO1)M0Jl%r;W7khm=?!8Qizw;hxxZZE~w8kh+hjD2=g z!TaolVuA*7=%NydyNW$=Hz}MpffdA((^#YUJ zy^%Vy51iTNDi(5Y!Ah_#LAu@w%5!fm#nx?LCTK8ryPXQ&ZC?}F3vDv`UN*mLhF zg;Rf+(HM+QnF{W-Gb-3FV0F3U-D_7ilx+Z*T<`s z*Jw7BZ48*?9*fkG(NcdtYFylgYTB=uoP9e6mL*-RA+ zsgGbK*esBqkA(8nv!&R26wC|_#$Jz6!FxRx#S9JNaJ)*SK0)lMPn5#xB$x>qj83Ph z;7+Haf}IBb_fI>yd!5dPvdsaL+-D$l&H?HATqw_do)lZphZ&{8 z*zJWXc()g!7^OiRE>VfxbH$$fQYoA+gPD!N=yZh&?sO$8*j3n*VTMpws~NZ z`WmDTycW*vIu#44uV*FL4In+=2<547l49%4Fe5Y=d%aZ!@AWnmBQ%J^9V(IfPO+!H zOA4pEVFqI`I^C;+JKcv0c0Z`p74BZ0^1Igqd|9^nU=sL2q|SN>&g@|o3xOYDCD@}N z?LG$OfghJ*>k}}eGZ@=^N`=^F4|^KL=nUfUtV#raPV9l7m%`}5a{D<@biWwTj;Ukqu|FPK9e!JWQ91^W_I`U-cif26Omuh>zx zufZhvH%OiNEu7hRDi(r&&q}Z#K-&Hh%7gzT#nzu;=4UXr`>P7x?r$jOXAp-!R3iAF zVh{e86i&64kyC9jIxWFJ&7JC?g4G2Tyu#h9bMfxABwv?pDKH7WG*ag+1826ZiiObi zSP8ZqNXzx1JoNHXY+V6nga%`)4OH;ZE1?*nK^#_5iO>zj9(q+NoK}OGkHP4)h6?Vq zCMwukph8!;d&SWH>mKi3YqP6tjld-QI!K+nE}YqVDi*>wW+hk?kk*?*dHD6E*xC$c zng(Ob%~kN0TcDVxK^$7CMEKTX55J)lPHkXDWiUFmRl%J$Mg?mJDtv{zSLfvJ)t*nw zRs$x9J0NvfM>w-iDi#uVW+hk`kbb*DdE#zTY~2KAng(N^o2uY__CPUBgE;h5iNw9c zp18LZPJLj8WH35yp@KVYi3+wASXJWDwl$xYZ5uF2ye(3PZ3k!8SH(i&?O6%714zI9 zpgi%8Qf%!HGe?85&rAjHb7vHDG>F5lDv@}A*c0z2h0{Qo@feIwd#K<}d!mBv1y+@K zvhB^MW!nc#5)VS^uzlgo2CG;|JcN~CLqYl-2IYx|OR;qX%mfX_K1ZtHeU3sgL4!Dq zR*A%8#GZJp6i(w{24gTfO;Ev|N~mB4DshFoSNyF`?p_nwP__UjxhElY1){|gnXfSqr ziVEKCsVHV>5Qo!MBKI7z=RQLUr!!$jV=y|Mt%5t9g9>&oSY7UT_d1UaWjh~Ca$kVd zkr%?5U8G_m_r-022Xup2?;u5kCt|MXsX_qvJC%XTxEq`n2I18;>hyG_MH>f2cfb_Yn$ zcS3pUyQJ89H_QwT#$NAL!F#n*E@V(ws*lK^?OJi_&%K3 z2Pzg)f5=L(k3f3<7|K(BBE{BEVMb^$_WHRB-s=}AMraU+uT&!S*J4lojTBDb!VJb> zboyQeclrSp>_wEHKN2mVWn ztvqYN*4kifa|!-w-ew&Xqce!Zk}45+DX|A$S_-FSV1{BaI@MFb122aPRv%R03U@CB z_F*)+do9nlvaJ9n=~qPR;0ADJE2&sWzcMSqRsrd~A(W?IRf?^v!OYNL?0F3pyyrDh z%+MeXYpX>1Mq*FDjucMo!c4_rbZV@EJ2gQCYYHlTg}c{3(%0Df>?m6^FbTc^QfD@Y zGi#w@A$Utxg0%u^yET*t-%yIJZD8hSFt*!P1#fp_6!SBPLwl77UL*G49i(vT2s0dm z(W$cv?$iYptShMC74BYLig&MWd|kFpz$A2cq|VzE&a8)uh0vR^609dk%e|mHbZ;rP z_JJ9p!Px2+DtN0~q8On;9JW@8(A$VT^tMtsZ3i`)W~)Tvqr{&0XepeIff zNF8zObUG#L9lTLtg)9265Yh{Jg*k@$SEC%!-m zrwd^QV=y{htb#jTf(kYlRN@MEulQS?+`TSkL)k6^liZghb>tOrW>>0M$bA(n!LA1B zdLESLzDA0z*TPKDVC?pK6};OUP)yJu4mYVp?wiG)`xYsjZiN|*!RU0m3hs0VD%hQ1 zb-Cl+>n=8w?QSs1eGgJc-V0}TpNfUt_p=i00g$feLwW88rP%rq%nS|2ZXZ#>yL}YJ 
z3=QJ&xJu-HLhQMpl)~vLn9&%FPS2>|PS2u(JqK2oJKnvXXG7Uu0F&G=B6Z|TaAq&7 zSjhbfE5Tj`>3RW_=YCC!t*^t(&|vKLO%=S`w@}Q`AP(=SMDBORp8Gv1oZg2Sj=|{k zp$hKw5h~cnpmJBZd*y$6FT8tw!sliC6iiZohSY(d!cxg{(xeJ266aFB~t$^_SC;f;q)uagbYTf-&Js@KTyH`1poV|o!q_t zVnf+#)hpz#jnt7#z&vrHVj*{3R)Q@F()ChMo_lF2wk`uRN`tZ6dMbFg%b^&hK^&G> ziQFrQJ@<-II5mKojlt-&vI_3B3MyDbaIrt_;@xXiJ}=v9VDi&m9jOD?fO+Ob#X{<} zSP8Z^NY9O+JoP$KY+V;-ga%`;jaBeoo1hq>K^)dsiPX)+o_Yf*oSMT7#$a@6se(JT zLIrCLDs_dsSC{dL?1Ex+265=75`i}nd*JR;IBg0u6ob)eGZoyaCn{JkP=Q-lnn(Zr?$w)ZW$Obb z={HB};4NU@I#IEZek)djZ4J`dopwb98vrVOg}ax2+&+ry=2>IAv7>By8Y>CDJ5p!v0rS|2iiO~N zu@Y==khb@M^5BD{*t##w{0zo+hp6D~4n;9PgE$OViQpr|9(+G3oJPV7$6$2YUj=s> zjS4mfRPYLSudctvV_8jP(@ zQNdfCieiKYaX3gNLQfNW=&}?}2gA(AV04D5hx;hf7r=@nvF9e7O`(SHKL(V05}l1$Vj{ z6>J_@RpQZh4WE|nS};j`9a4u~5A)24iiN~CvJ&hjkbZB5^2E1DvGrD%IU0<8-mZf8 zc?XI)8pPo)l}LQI*c0C)h10z-<1rYW?pMK`9zX?~4_1|UvOUPBWqSxr5lTvJb3TA=^W1r8c;C()eVuA*7cwQwEzaaL+FG}I`63k!> zMyFR)aHm&M!4`l@T;c8&f2)(b*K2Gj+v{MG`wgUyd=ut{6BP@&-)1G)J0M-Z3+1`r zlVa=pFcUNwyZulF@Ae}U6Euj!Cn}NqQ?cj%ObVyZVMb#xI(?~vJAH);_BB{t?s)h5 zh7D!=7EE$~ht!eZ!#r@JVj=gBtOWZBr0budJohhBZ2c8xh6ZD|zpLQg{()kK266aH zC34qV4))x&rEppTW;6z)Q(YC@X-O2X|AE!zj(4x6*}yA-V3K=Tq>iiy^Tvsah1~U7 z3AQ{)*DFAI?iHoj+5l#T24lA?tKi+Pf?|dSaadI)a<3-#+^b9Bv^NDtCpuSN^B>!n@Zxe9lXOV3K-0qz-Hh^SFtMh15-13AR2+&&{Ab^#)RGZ4NU- zgR$3^DtNE0P|VOE4jZaO>Na9ey^$17ZDA&4FgmqU!JXQpcn1*t@1J&Z_v*j~-U|ei z+?|j*vNOyRCn^?lcV#75H;}G3f%4qlrP#VD%qR`UZZ}iGyX}c$lm>C=trEHWh&}h_ zQaEh^GaG}^X)6`nX=@bk0D_DCX&3Ka+wwWD1%gTHzDOOoJo#SABtjz25~q{CDI=*_VhEQa5@5J zDh8v|kt(>;Y!uG`f=XZE?xi2MkKzmOUPrTo_X5Er__0Wxc^u4RCn^?#pTJ766G7TO z3Ce??EXCGSVCH8qwtJci-tOrr=4TLxGgKn@nPLxqmK08B!wkn@bUIfBcRCNnJAj~q zSGaq1E8e{>;A08FTqUHVC?f16}-<^QB2by4zH<1;@8EV_zfwX-h>&F!RYk13hwj{ie~`9 zsuGX3_xO~D0>LEl2S^?EAGa@$`gMk#n#VZ=4deX`K1cp=T|7^ zXb^{QR3h=WVo&^?6i(m6jK^Sf`cVaU`U%DRe_&OKC)+Q4$_s&DlK3~I4*MPEof8!c ziT`9J*k2(1)~e6PO-I%mS-WqonO0khtxLd6&|vJdt_t4gk|-u<5Qn8zBJna}PrR%Y zPW500V=y|^SHYc@NAdU{sKgcSUh%g&xqGe11|A6nliVvIb>zx0FPx}Y$lZ_?UeN~W zdNnA|y}A@z*MOOz!PxCuDtNbRqnMyU9M(~Z-0O-x_j*z|HHI0D!RXXf1$SB>#k+rC zb-Cl+YXdg$J|LLnZh_R1EnyxwQL&J_H7mTO4bpWRD9^o-6kFTE%+O%$ww(&zZF>|m zG>Ah7mB`&u?72Hh;nW#sGzOznR~6i;8;aNe!0K|xyH|HM@Jb+<Xw@NTz8F++nmY^xHvw-bBrzEU`C4>KHt(W##b z?zAI{HvmE9u5kCt|MXsX_u7fic_|P~Qtyn^fxEywZlYo#^#E3QNgJf+fl!`$cPX~+ z0W(8`vDdv+@Lu;uF++nm3{r{I`-(mFU@4r2z)Z+sbQ-3DI}Jzi4j}m7Kkel1wI3UJ zFAz*}k3#Cm{b8OsQL&JF3@f~(4bt^ED9=4!imel1Mrkm1YbtoR6H$!PAP$pMBKKsm z=RQCRrztSAF&Lc=RKcANLh%kDxY(a|@$OaTb6yJslhlVGb>MWEXHHZsq@KYFFKL7H zd^nV+o+-uFBVa~oF!p++3f}8%6eBcLEksYso58q6yvDi#9IVTC8OLE1eN$^)Mz#n!W7MrSa#d9DiH z=6NVaXAp-AR3h+&Vh?q{nyZ34U5euQKTv@y+`SaohtcG}b9g!1cq|Z1 z(qD-by94-9%hCHW6w9L;62}jVul8BxJ4z>-zxU> zw@Kl2JIquJMyESfaHqRaJOcY<&b~eg&C4xUC_TW!T;q(m5a12JL=TvZ~=TW=^ z2rlN|-R;{IFJCY6Ij;qRN$Qu8I`9>kr%qHXq+Y-ZPicen{5q7UenX0_Z^F#bVC?m6 z6};DXP|VOE4)3W%>i5N-`U5GPK7<*N!RYj{3hwj?igy6P#r(1_O6|Y!@%Hr@+jun) zOwxaW)WKiEJb9vGA^q2^@SZkE@83ds`tPLJ`aR4%4aS~-RKa`x3B^1O;_!<~r2kdy z>3@^L>35i68H`SUs^Ct4p?DN<`Gx7%`}e=iLi$Aybe60(-|}uCm_)9F)M<5LUOQ2- z5P2z9cugCm;bova^0HEFtp_tugR#;2DtM#IqZp_`99C3`$PL6Ec_k^FR)(3A!RXXb z1$SB%#Y=#oe&GKdd9%aICXv@f>a?|Bo;p#n5V;X6yrB)!@VZbQc|9q%Hins{ z!Psb16}-{)QOwdH4jZUM-YBu@Je26&}$BX}BYlNA4uW*3K|vG#DH0s)9G#4aFD@;?P|sB5x}8$UUTR z+6-no2BT9i72K&eiWdMuMXvCt_@ZCzH=hp*;9tDYg!Q8KuG4?l2X+-Qg%k zX%L6~R3i9Du?HU|h132p6EYZ`#;D*h<%FKW1lL8(`hiHG8moasNha#pm-1vtm5aL+``Uc2QLSL zN$_)!I`dqZ$4*o%1V5h@-qZ$Z`$8xWevuSgFNPVa!PxFx6};U`Q4G}}4wtJ$@GHa~ z{7NaDu7a78!RRzk1$Vjz#fyO8;(y-7YuI&s&%=RW68i?EPP`H3#S;|^v2SLDN3}tk zz7@)2-zLS@+hL|@FgAOq3f}BpD5hx;hkH~a_Pt_{eV-Ig_ruJ{V04oYK8G#LAQ 
zP6hAtc@$$bh{KC2k@zLCCw^H9r&nO+V=y``P{EyEL-7Y;`g?9S>au6kmlcq^7tP}vGqfkc^ZsOf2=}mx`%y&Vx9(Z_)I0@ ze=hd;Ur6EfCCq>fMyIb;@c7@LcoPs*{N~lhud(mg!s~%xlKcmx4*e14=@S)`#8WmX%L6iRU-8oVo$xM6i#cwjLKkiYNUcYt%KrKKv1bG{B4ItssHyT zwjSGfLl8{TH$m#)rZ5kns8~qfj1^wj2I;*yl&5bY#nzTElQkH7Zmoj%ydjFo8pL5E zl}O)K?CCd_!l@n1)C@+a8Wr5B1BxdCL8Y&BEoq%vPCXshQ>a>0^51*)5h}@qQUepF@ID_)YJ4>;37nqqEjExRZ z!5iHT#Y_$2u)9h`-b3t>_mskEFPKRgj86Ng;7)^3ya)(Z7kRP`=35>O1e3@^kveS{ z%+n_-79x*eg(tN^8XgJdkw;0fb$^&?8jOvOQNbG>i(;AvaTu=>ktc{fa!Cp&gPD=R z=oD0Nr%5Q@0|XVh(oHP>VkbAT1K7dCfnXARDpF@22=nTRiiP0QSm9-DkhTwo^5BO^ zv2{AkG!4dfXQ<%q9)@C?2633F62XrUd+=FOI2{QyC47l4XcTV&f>j2OH?d>c z!J~m-68w0i&O8C;)e{v9!B1j^2ev`lJ_X8ypDM-H(_rRlFt$5K1#kBZ6!SEQ!&xd3 z{A{rYKSv6ub77`rFgl&Df;(M+;z>ZT%HZ)Pb`d*xI}l8QUxL(`b75XSQLzyGGFEtD z8>H^8pV0YNZ{eFst}-U;*OiHe2TceBC++aOKf3+1uzlVa=rF!MAR zo1L$MH~S!pc^bswVU>veh}dI4DuvTyFvBt!ot{v^ot{MTJRn%b&pWw^JGggDK-32OmyRV@bt3e#zP>JAgiaq#S zQaHU0GbV%4>0K4v={*$h1A>eHc^7YDAMicT2!cuMkB~a?W0?0(R4l~)log)X25I_p zD3ASx6kET98K=S6?AI!Iv)`Z?r$HRPQ;FE$i#_%aQaJqxGb4l1>1P$(=@%5w1A>ZO z=_b}bzlr_Er#v7CCW-$*>aagy-ak>Xkhs>0I;A#9ze_-Q;yO}ntqU_ogR#%0RPa8R zMlnW%I4r9YiR+0y@p4i))rXmn!RWMt3huNbiiZI~C9ZT6Q(~V>lmAF;CHC=-Aeh8o z1*wx8!aRebVj=!&tnkV!$;uY+Qq260$VCE_<0d;BI+ zI5mYCkiqEGOa*t^0L2S|pyF4$iTxvfjkRD4ZwZ1)@>WP4+8X8|6cr1}+pxlO+aP_n zh4SPZOR=>b%q$JYerr_lemkI;r9m7zsYLS5Vo%;h3a73xb21p6Hc`Qyx}$g_5LEI? zH?f-HO{@o>^O7K#r0$8-fxTd!K~b@gx(_QnvklVo7EqpgODVQ)1v5^AvDa->@Lsn? zF;0Ux^i_$}+lxK*4pKPvgBg{<=+s{YciIWX8-buwSGtMC)czYE|1+_j*~W{4V3K}U zqz)bc^Bjtbh4cei;jL|u-uHm=^m|INbuXC78jL;fqk{K52*qR#;xJex(hm`P`k_)d z4TG7Q!RRzX1$WvH#e;#M(pS2P)g(8uQGCmrf?yJPG*YLHfq4l<#X{t9tnkt{NW&AL zJaS2jtp+n$gR#+|f;Tz|#bgcQaDYlgo+9?hQ>Ab^5N2Kmqti4M+^LM>l|Zn%$fNBL zzU3i7Fo}F9Qm4&;c?LzrLgd3);eBn8hL3>q$g`x_dL+zD4aP=~Qo$QN8pTWv;&7}= zL_SXJk&l(Wb8H`RRso+j0qj(<>tS<6oJC$#FLJ&+MpN`aNb6_4qQLzyDOjdYc z8>Hd0p*-?AQfxgJW|{_LqvxyOjb4Cang(&WNF^d)EcVEkNZ~XWW=001(`72S)8!~$ z2Lu(l(oHP>VkbATE7`#_f?yK-YNXDb2lEDsiiP0UvcfytAZ=d{<-u=|V(X1C(=-^{ zy;%is_ZAe>G>F4(DiQp4u?N3H3a2|^rerWW-K~N<-Gky~K(NZ-@g{a3J9t76OoBgv z)S2^P-at{Y5d0xlcy1e{?MI+I_@h#6eGF!v24lNVsNn5BiDI4xad=uKf8T3`VCHRB)#kQ9KX`RvA3r#9n3xFA0K4@K=#Ka{`|B%AzPncmDj83&0q)xR_JQfI6@$*h@Vs+TTtAbz>d`YCvTngqP z6cr1>mtlqXwn5si2j#(+lVWRqn6Vm+?XIAL2VW7zSPkN^l1c<$S?s}Ak;17V%$N*D zr`1$&r`1us76>l>=Uu#st;zR1DhMXA*GB5ZMli3Ts91=-E-O5^4bpUDD39Glimgpy z#%VA%+e`&-b^{dSG>AhBm5AL^?6F%(;nW&tMh2r(8x`DXBNUGXf{I<~CRUT*#5U$r zo)ZL<#O;wftOn*a6cr1JJF>!4+aUdRhVsN+q}bXOW{d`7pPQ)QeRfANMuRx?P>IBw zi9K;oDV%!2%*SAK>Z5`?ZI0rZKv0P*-NcmG=hEaRwk7*`RS-<#Z;jN++rT`EqGA%i zw{6D?Z*GG$zde-4-$9D4{b1&4FgD#^h1hft+X=-y4dSq~O2pqq?D2P%!f61^fDA^b zfhu_X-BG+32r7Q1o7g|%*VvwH;blQENxnByhwcOOEQ*Rr@}9OYD?Gjp()SQ3Pd-$N zt;1kuX)yLXLIv-4KNPbxh{GtANWQ<=laH3dX$;Jq3`VDMD!9{l6fXvXN?z$E)}eS4 zEAct+3W7=MiAWt7U>-$Lv5NF96>%;P93 z7SbQX3NLSi^nM(ar$1hbttY@t)?n=UBo(~plTl38AP%RhMEcXjp8j+xoaVqx&0uso zQw4WA3&rz+pwd^miFHVBV(0KJFARc7w+i0qJt$^s5QqCzBJ%xW zkNkiXPV-?VWiUEDq=Gv=jN-LGu)4^T?NPqvK|wHy{5VplJpuD9ii(BEPqD&t+aL`; z1LcvQm1662Fw-;`8+}0qZ}deJ(=>>~%PJB16|qNtRSKsCFf%e3onBYLo!&t4Rv@Uz zm2P737dyF$y~Pe56$F#u?;v&NyD%@Js8|U8J}bPs4bt|9P#*jvDYkwLGfjiB-A`5U zc0WTgO@lamp%THr6npTmq;UEgW=aO5)3++P(|0J|2?VPQ9&chlu!9E$!6f)kNS*mJ z%!?>07J~oE3XgAtwEa7j2meEgt$)JI(_n13)=Fu+wNcE|AP#j@B6wY~2VYVOr=?(~ zWH34{qk;!t7R7UcV3on+O>8-K@U9@31YaJhGgp9l6-C8D@CK~#`Zh@0D?@qkRixP3 z5N4hRW4o)V;O(xCVx9(ZSW_i}uO;^2YfItO2xd+Oqtm)7xYK$lUJV2lywXi9|1*5y zO{@vu^SB_G#9kk%6Pv-jhoWL3c5_yEd>f?cmQWtMl@wcB!_3oQY_^RG-t0yw=4lXz zja4FcJF&-ZFNISL%&-hbr;aMPQzsNp2ZB}nypx+)7k2Q*AeaR2hSZsxz&wkhVj=jZ ztnm6aNZXr1dGMZ6Z0!XzR)ewKJ}P*-o1+-3K^(SJiQrp_J^0pAIBf$nCWFywI~Cli 
zFN(JV!Nvc)i#M?y_?{;Q!6f#MNS)Xp=4})e3$Zg+czzqC>0O{a_O4QF9RM>yY2XhVm(o3xY}F z;Yb}e0_JTL6$^<+vckjLApPzS<%vg2v2_g07!Afg$Eo0bjz=*@gE*8_BC&}*@kA+{ z0?d32MyJUtxYGeB9t{MQxYA8biG40jZeml}#~XuS68|8iPM!wyM2d<@{N8pjE4;uB z()@HNkAJ8XTW7$`(_n1+a1~H{kpgj5MQf!?AGfRW9 z-!oP4e$PTNOM^I^qY}x_6?^jYq;NVPW=;m9(}gOy(?uxW4+NFG(oL*m@g{Z&pYzHf zn54cGsRJ*Ac_Kx{Lh376;R$Y#p09%P)K^Qfbso$(4aQ!tRl$3`4#hYP;&6jXq`pz? zsc(|P>1LQw8H`T1s^Ctyp?E(KRO(7Mv6$L_24z}^yAef}T3#o(ehIuMQ#X|af zS>YXSklycy^7Ic#v2{MoWDUliA5y`4ei+4M4dU>qN~C{G?CBqu!s!W^sTqt;PpRNe zPosED5LEg~H?fY%P3&2|<-I{LiTpfLr@a94N{Whw$S<+NE8HLrzXIivUzK9(0+`7f zjE%mof;ajGipd(p;VqSj{I=L5zaxdyyD;-I7@gi%!JR%p@q!>&UF6aB5#RF2Aecn{ z1gXf$%kcPj6^2lFFvGr@1nHr3Zeyf5v`W=dy8pPoTm5BVK*dzZW zh11V4lQI~cepSJpenatgAXr`G$@T}|^1L9JME(n@(`v1((`tjo$V;%oeIDVS**jEyd%f;YM>ifJ0eVL6qETwm;wmzTn61(+Ebj7|+yaHo|}yc`HBa;2MC z{KZagVym!&CkDYJ_^L>qxf;y-C@L0y)@!iB8{8mmuLb47*Op>yBbaF#jP0(gg15UK zifJ0ep@~WaZz}fS>r3I(3}#9Oqf>Jg+^GeMR|CN+gU6d#D|Ya_AeaQ-5UDfUz`T#5 zVj*~2R(OgVr0sT49=yF2TWetEX)w0iQ3Y?e6N-5n#G#8y1n(;L;N7Hf+5~1w2BXua zD!5Y*6psgjRR)hYv7YQGTQ4vP-W#bi`@p=BqGBQV7Oe0VH%QxCL3!}4rP#U+%sdUo zcDGZ(+wF^Do(6H)K_!Cs6MOI-rEuyGGbe-5DO16nc1H1rAgJJ#ZesbL;R|nKyYf9x z4T4GR-H;C=?F~f>r#ylbhIRcJSgLm;@h-)S2U89!XKL5PSkFyu}UD zwn2IDiBfD0Fk>|s+nua}w|fAJu^Pl-s!9YuQ0&1ElEP^k%$N*Dr-M~+r$bP@BnU44 z=Uu#s9m@ASI0z=O4@2t2!(m=ZQLzyF2v&HE8>HzYp*;3%DYhO3GfsoC*<)1jW{*WN zPJ=icuM)9O5PR$srEoe4W=001(qkUIHVmSHKY!F z9p=Fl6$`1~WQ7O0L3(~0%2U51#nyLW#%VD2`o0R@>jx;tX%L5xR3i1qVo&{v6i%PQ zjLKki`dkHf`U1skf}m1Yx{1Zq{u>`}VqdY1w+F!_{WnM*{4LDGDJmAyf6of9a)b2# zBb2BANs6sM!%Ws-?Dw{ntd1<6hTL$LM6cr1R>#@R{+#n6thw{kFOR;qYn8_N9jW$rh zBd>&FvIcQjMI|CP6no@VrEppeW?lxP(;6zc)0!yW69lV^JlfXgTb>*QlgR5Jb=tZx z52mPCh}@VJUg8F6xG9uJUSEo>&0uC~FgDs;1#h$kikTY3p_NKRZY}o68%p8S24+$Q zqf=WI+-YMJFA0LxMV@T!`Ig59!6b4Aq)zJy^JI#Og~***;VEvAhPy&}jVQhIuhX#X|5Ytne~7NZSWOdGLdz*g6eno(5yP2dm)i9)efZ|<2aPdFy;!W%( zzUTQtFo}H&QYYRD^KOcYh1j>V!qeO!P2UOSvG0;%>)kNpG#Hz`R|RkOJ{03Lh{FRa z5qrMaV?QW`(?c*bG8mm6QNf)aMe(d4sMwWmVx97v*yDW4!-HUw_(`M=dkW^=6cr1J zpJ9bZxk37U4$2chFU8guV8&=L_W6PUrOxBmzKh58JIa4j864b@Z`&(cv}!u@=7S-AU!vP^3N%w!G5Mz>JG8{HDcWDVl5 zwMsD8BJaWq z?{b4QJOIig?Q7#rO~1#fgu6f-r5!`>TXqtg@>+-WL`R|P>uu5=TNzu3u5>>zgV{2-VFFC%s4 z!7y*9s8|R-ofY2a25EZ+lm|adimivkOw(X&_Xrid-B~E6X%L6mDiQoBu?Ig|3a4XW zrerWW9jAgj9gpHgL9oi;@g{a6J9v5!OoE?`)S0Kiyq%(AA^2&m@Ju&I+jF2i_!&}c zJric024lNttKjXPgJPZraX3#Uf}bz;;1@{YbRo=?3`VDmRdAvQ&cPjzls&!=>}pJM zo5ddd7Ac%=g_)DV=ybaZ?sNx=_XR-(uXGd3{|sMv6T6G=d4>>7V&8+*iTA?1oT6eO z_Wi8zOgBi=^PxQUgHmjL2xguJW3!K_;LSdYVx9(Zcw8l7KOy$mPfFqR6wI&;MyF>~ zaHnTcJTeGY@$*h@V$ZXK*9gHR_=`xL`4Y?nDk>I&zrqUdbc3|L0Lp{ECdJm*Va94O zw)>_E-tJo{#%d6UcT^(yyJ8Rio)k{+!;Hybbox*Qclrp$D}&(Tf8NEL*e86?V}xK5 z`!l3Y{2b;L6%`Ayzhs4ny21Zf*LjZT1VwI1M>`m6F(BhimL_qBwnv&B(AgeV;;g`hl`!5GMA|{U)|x^PAXD z{^^0eVJ7G#9+u3*hQqd~q}WM3f)gvcp?*g~P2!QGTpa})qhaZDv=nNeqf^Fc$YIQs zBpxeV6OSFmX&h`mhQ(>z6spsBl=XrziGS`lu_Uf@Y3nz!3EZ=d5Vk(>iO4*8Vr-d8 zikcTvWBJSEmNpHZ$+7`A%|^Jl78E8O}||fr|q$+ z85XAadZDQgE|(*N9VVhgo?6WiIpwV4pMBJWD(X}e+jR8s6j?m4lq8)|q@)I{DZ z%GEw>vWBJ6eNw26?n{}hA&31_68V5|jeKAfr-QJ085XBQQm9UcQg#i({zV?#JKVpu zju5sYA4%qEM`6oUQtU)N#)&Q6P{YTeCi3x7uAYF+)UY&qQVO-v-%w_1$l;WfL_RfK zBcB$<>2z#ThQ;a36sps2DO(0%|04JG&h~FDB808T=aPBadDuFY6g!d6cVbO9)bItU ziF{#{s~2I@G%Sr?l0t3t50q&da=0udkuML|$X7&hx)Pg_VR5=Th3fQ2%8o&p$Upa+ zSot4z>o>7KxnUV0Yz4oL%rmdYHmRi834Ws!+q$8)Z$?eHZX|(*u#%Wq=+6cd&UHmUiDw zp|<-TWuAr{{+W{CAB1c0e?@Wn5Sx=>ar!8Q>hvGVwn3QSKlhth^A7*-Z(^VL_m&dE zR_xEnJn?gEi%N=}*k3xav>R&rYt+R4Cd$=sv3VMnX1`CNHv0o*o`xKLN=fWtCd4)N zuu+_b!-i#8oJL5YI*mwKISBjldAEKO8`%xJ31KVvsAQfw8n#O%#ZK@soY>k8wLKPU zf{z{L>Nwa~4NJS@rcm1*k1|$64ils#_=Mpae4;2$6JujCEKZZAP@N{D>>Pwc{=B=t 
ziA~|(TTKXCv8N*Q#Hq1;Dk*kiPwT|mZm8+$Q4@QHC|761#%WlZojHZt>@1XV8giH| zC9!7@*VuDJahemGkzsL~JB8{r4`t;bOzfZgO>Cj&H?jHrQ%eb9EAawk9=0I1P9?=o z;)R`9*bVi&C~6Wf7Uk;V*cc5*h8)&UN&H`jYy1tOIBkdx$gntVoI-WlgtB!ICjQU;CiefupSQP} zD+l&AhppsWka_5q*jAMkTgjL1ZSBMgZ>aBWQImYTC|9?~W@%XZ-7$sI@1Wjb$}9~z z?3|M1yM$}lQk?opP54aq<%}8tRaW9Q6}%+qei_Nk=UiF~IM zJG-HV??z4Jd!k&u7n`YJY4rXSYNHQOW@^ac&nb!gP`E~ZIEvFF*rW`L(_<-Ar^hKf z2Vwsr_w}CiZ>=SSt;kQ4dD=7BLX{Ldk)Ly7X*bmH3#f_wVw9^dVbe4$jlP^hZS=2{ zX&Q3)TS_9o8m^K59>wW3Y(|E~>5UYs)0>oqgD{bQ?l-aWKkU|TVsE=)Ga+mR{|A|8 zzKg9=NwE|BeJ6HzLv4S6n&AJ6a`i)Onuev_k5Z`Z{)aM6Lk^#$B>1P{8vL^;PM>2_ zGAvGCrcj-}qU;)k{S01y6Z^&uO9^2s_;+NU`8~EtCB;tgADvj?4YfVYME-HJ!aK^< z;jnocmUc%-p|(3BWuAr{MovlaQNlI&s8O6o!=_|doW@9@I*m!$I0*Y0y!<9Mwi{Lx z!dCEKka^~~*fNzAJHf|yVuv@>_JpVjK2emb6JzrEb()T{cMvA{&;2IWyu<(do7fEgz4e5!6?-N!Pn;RsqLN}K_N-2< z@P?Y69W}A%h;nsKY@UXt*|}4w&CWxary+;=QWAUqaE-k{6sHBTVHp;ug;S_bi%@nC z!hU?-t>45JbHkEC*a|*~%rlq3cB!P;3BHsQJG`N`mqAVNWushO4jZdsX?KMbYP-Lr zjMb3CN+}7xa<~RxC5qFk*q989)9NWyr!^?M2jP%E@9uA6zw+;`D1@!pYm<56I@msy z6g#okb7F}%)by`W6MKUwS2x7QX;_-wIEC8mCX{g+a@Z^-u{RIb*jq$#+7g?QVR70z zh3d2oW%D3R?4SEhY~kiNvF-d*y9r?{@eX7jwj;JqCB;_arF%O$vA!GXcNf$o-ZjeA z-LNqlmOgtals=d2?Lir%A&0$ElDIEi6Ym|xX&-DphQ(>W6pGWJ-u{%8gD{DI?l-X{ zu5)SYH?afVv!M{S;vY=r$%kM|RZ{H4Kg@|e-ca*LpeFv2QLY|^&C{?neM}0q>0>GL zG~{r6O5&douJKQd;&c)=Aj9HxatieWKZUY-5GMZ5{U-MR#hOYJu48vV8?PPb$8GAvGarcj;kqU;}pi9Y>MqVLw*b;r$i-f6eqJuV#ByBD@{ z-$&+=_hXAyQtae@(1~Ts~likjSi zk8<@jY`TV}+c#1u-45!#Ntv!8hqqIb`<-yj{f{V4?_x7EEKcvIP@VosSw9GqJ9GMw z{-@rowf>hI_7uWa@PCte=116Ol@vR{KXzi1H`Mm0s0sdAl&ha((={yZewjl3n7*P+ z*O0?EDGB~SQKMWP4V$iEX?KhiYP(}nrfbMy?34r_CtQR7B8tI{*mji^JGobMV!1ce^~$Kp zy-Jj;t6~E-EZwf2LhW`9%76_y{3<26*9zC%Ye#Wf2OFGWaau2h>a;#(5h3h%?!47E zaKXYt*vh>TnMZDn?Nv#!lY3Jqc6&ozZ;qPWTSU3KB{pHh((TqMlx_$0wxLYeki&K< z$-RBJ=H4NS(~j8q42#oFDO9JODccC)(0|68^=iBKc6GzrLf8tvJDF$pu?LO25-#g0HeXv0rmUj0`p?*yJQwD9w;lPvxKPX&-9~{N$5NvRU#p$pVs?*_= zZG>+0*b06$nP(n@?N~{%6Z|+QwtGWupMaX+Cq}t?5;kJP((cJA)OJsy zjM$LFX(c z_ytaE_lDZO2sObkj&k)9Y{G`6-Ahxb?OsNiupx&lQWE^ia1DM{6sN1PsTmfhYf`9A zf1+$7gb6<6xB7oKF#YeXew{lO7s6Kb8^}EMMr_wgik;{;JF(##YW!BzM87S{)!VTN z87iGeR9PUX;^n1fK`h8KH?#BjaSezbAp*sDUvXKxb`jBV!d7HQT!!Fod z2wS-yCG*I~uvIH5c5*-A#GY@c>!(nY`{^iGpTQ<4y9*W;(VXiT&PC+ux!l_;*pReveJqu(bPQ3iV_9i85hN z@_!D)q68lS&(oEF=*l42+L^iFK|hT5JHHNj_!a&=~G!iJ^YSyQO( z&PJKAA%{6q5`4~Z4L(;Cr@67I85XB`Q>ae!Q8p671RwHS{lD8B{`Xd2z#YpAVJrGV zWS+V(wss}OPV_~c*zyfEzBp>44~lYi32efKrQxMgC=CzlElruQA%|sC5`DRFjlO&o zrxmcl85XA%Q>acWQPvW|L?7~7eZJUu5I zr*Cd$YFz&q6spr^)SkV~VZU?d zt-gf|78k-+?ybl?a%*hcN{XG_+d8rD8|r#{)a2eF%GDjQ2^*GfcS@mlyEA3Nh8%WH zN$%akHTUjOoO;;Y42#pADO9JuC|e2P(0|68|Lptjy}jMAz7V#8?@Q*H`(X=LQtSjj zz=?g|P}>KgCiuZot{#F-*s!#FSPHe>!zmLs%%qp4N;tK#HMCgoNi8` zI^9CqPzV!z$Zz%k{+FTu_g25n9m@=1EBYN|o_Z&?c_qb8^t+wd`3*IGFKVLS7v<{x z*n|yB!w;rV8Xna9GiAbt93DWG_Se%|np*lTD*-;1+eaLV1`J1=; z(=OO$2wSZI6&4NJR|r%>CSf-+%44pXHh_|)MVe3~du(_(WoEKbv> zP@QI=Y$=2T3EtP6$qnlaVJrA7WS%)IwtpqXPVm{C*!c~$Jtt~{&lTnB+}MN-OS|)? 
zP}`l4GGRjw3#26Yg5er`p(suZV^cFMPK%~cofe}kD})I?zG}Ee zUoDE$>e%26i_@AZRHt8278SxoAM#s$f#$8gwhI;-!dC8e$vkpBYz0e-o!q~6;!o31 z*BhcH_eN2!Zj4RXuyng=3iX59j51+E4qK!o_m<(Bd#fl;TVsPWEKb{|P@T4;tSN;3 z&YidV4lY<{2wS-alX>J$*dmq`JGpmpV(&N9^=_!iy?d0aJ#4~;rQ1DIsNL>GnXn;; zy;G8VpK#5+ZxpBfu(=r)rvp-`P6tvJ6~dwaj5q%|y4`yRyK!Le5ZDTSD4Ay-hOJ;p zu@n3VC$@h>Z6Af2;73QfdJHyU!_w|?Db#k4r%c$8!-**gep0vw|4kI9ld-uO7N=8F zs7|L*b`-*a1RvZx!wt&}VJrA=$vpEcYy(S*o#5v%}x`#jVH|6P=;=VKE#EbU&9 zLT&d#%7hI$T%3~NmxOEZKSXi56q}o2ak@N(>U0HVQz0Bk@V?$vZdhpuTfzTG=9$-E zOIT9u1i#ja9pF&g*P|x*4N28(328M1RPME#Oe&kDw;{ zqfxFthE3S8H2g#grQt!nCn*y)kq}a** zsS{hkp{_qiP3|wET>TQ8uwm);>lEsz^bKXgh8(_2N$&5%HTMruoPNaSW>}ntnXK(J zEM;pU9Qx0A^Pjfay*IoY)*HfB@Da&8b0lmdONyP~qd2hz9BO+s)C3~i6~nO;Xr~9?oHx`^@gw&d@?f6 zoE+Q8l42+LluqmchuWSRHNmHea&=m4!iJ^Y=~Jlf&On*4A%~e#5`5-x4L(a0r&+PN z85XD6Q>adJQ1%wWfdudC&Eg7OMwzf7he0U`zC^eNUowi*QrOfCi_|K94$ zyJN*6Y(@VinWwIZ?PN)@6MbbTR)Ir}uZo)Jt3|oGIyPa$((sxol!mPWr%c$8!`dl{ zzD~GCUpI==df4C$i_@=Ds7@PDb{E1#AM#s$q2{f=kqdSl!dC80$UJgWY$;2Mo!px{ z@uz91>n%}}d#fl{x5g%HSi0Rdh5A8lN13o8haFOqd&h9iJvfTfPT1fKi_Ko!os+tOAF+-Ul_g_l_)TP< zc{8?_CB;tgTby@xVkLk{<)B>4T|8vKDMP7h*J zGb~OIrBIz7rtB?*2|nbv`hUMk`@gsPqwZL72wTw~C-c-Nu)QoPcA`J!#Aa})@n=vI z{n;p2pTj0>SQ>sIh0^e#-iwq88*=zdN}|6UuF?M*#pxAnaE8U{)fB4J-zobGVWJQD zt-f&cR)5`v1AA}4R_-^+Jn}7UGfRq{-0wK?r)j9`cTto3y(m}T$0lr8y8R%9`a%7R zGGRjw|4vEnkHR(gf1)^jj1A7PIDMKzb^465#1Qs7ci!q>xM0~KY~}un%pZuzei2(AEI3S5u31K>2{dOn{Eg7hNVo{ki+mP$vr~2<{mMM(@5Cd42#n! zDb&wxGj)iSzNwE`r94GdILv4?Xn&9I_xjH^J zVZ+kygelaIX(Gym4LMAblHikuYw*dUI8Bbt&9FF4nL>4%irTX`H5^Fr!M$nRu<{VL zf=@^0nbTwYSyJo-pV5iU;85E$qbB$)QLfI4P1vxsJ9`SX-8m=|Hsml@N`lWFuEFPt z;xsQdH^bsIe+t!U0m>#rIFR6dy@lMc@({LyFGA*-i(*?^QtSj@+=;mhQ(>+6sprIlud>(!H4`-U*!M( zs}TQttFPvcrH8N;eGM{CT@%~Wl42+NT25>ShZJ-r#^D-$lPFG`VuLd*PMfDtowlHCGlYpgL*t=!v?dE~a( znwAthxwm)XPt#D>JEA7{;3!vj!X|83y4@v(`a$hVnXn;;-BXgg7p}SYh~l&-HaNrL z)R#ha+MBY>5cWHF-s=0hVCf-j<=&snBM-o~w4~U{eUKB&!J)1XK~3&Mqg*`_@PX7 zA{^@cWz=N3C4@@01Z6a(F!@+207)>~BVKdJCJJVR3pVh3fPV z%1%Q#^bcM3`aPug+_3x*wu1kY%rig0wzZ_#3I3rIf1HNe{s=X}{}biv$JmSwOS_+@ zP(P~AC^I(X@I^|3e;Ka9zl!4YH8wiK;`D6_)#*FRIz!n1;CZqC;D)`2uoe6#GS3`l ziac{z*bP3M6YIjEwnsos@DZb29SIwU&bu zB%dtG)yc6b8Su z3x{j+MWQ$@iVe@OI4zz+bs9ujaR>*Lysx*UE0!U`R`R9EJaidsXG@BmI{WZ3{CB;tm4V_pU4t2gUYO-$<PY z*b0{vJHdBz;!o62+db3--y_P^J+Vm}mUjD6s2|ndlt~+M*f%A?_Y2qH`$us)02`oT zaXKi4>U1!*XYUZ$|KNGIAL@pEh_Ds>a5B$40$bsdVkh`fPAm?G+CBy~!H8Rg_JKa6rii_x|XLortiN{7+;adM&oZCB;ti>z&vg4)uK_YLed+ zA1;n*6RPPIqG?G%QZ{rcj;kqbxgw14`c4d%zWI z5n(I&pUFJ*A#9IJik;+-II%k%>iaR&B!4{0)hDoN82L}x3Fm&mX6;^p>#Z`_Ycan4LQ7*lI-t?YxaLearyw8pTrB>0cv8vLgyPQ&=`Sec<=aT?D5**FbPS$GKhA3X2&5#6vE5w?PlOy-%R zU^`t>>;xapi522d+hd?6_?S_yj)hIzu(Uf)3N`pIDAP9NFkVW6j~}kVCy3%SAvQw8 z;xusz)oBvS&O_LbpY!&2O(%23c0|}pJ_VVFPKm8^NwJfBY9}^{Lw!$+n&i_(xjH>I zZNt*e)9ly?4U5y9DO9JqDEkiKfRYdH&EtyAh_ID> zJ~9uTA6w^=Vkh~6PAn9M`d%0{$rp)obx~~EhNa)dQ>gt8qDx`RHx-BYY*XolK1s~>52u3u$6ozG7nuDTj`QwC;6&QY!rw3UL7^b*NAd; zO>EkRrQfwusQs=@nYJN^byJdjy>LyweiWzt=jsg5usCg)LUr1Rvi%Sy`B2{N-LIQ& z;*w2?u$6r?GLPLHTj-KvC;OI8EER`3-x@X9w~2CfTWs2frQ_{WC>;;#?Le8fA&0>! 
z$-Yy#X5TrA(=ONy4U5xmDO9K3DQgg6vJd6m-uk}j9&Xr|2wTDTBJ<2XY^_U*o#6X8 z@#ku&?fp;_eE%p{55T5vSlT@(h5At)OqsSJheJ~m{IGBhes~n8Bd{477N?_9s7^;y z)*!O|*m9Q?JHbzKVy!sT_Q|LTeoB<9r()AKEbX42LT&d9 z%CrqR{5B=Q&kEPzXGd{52OFVbaXK%B>hwFx8bsKSpY!%NPJi#pfxQc0EBS?F9(obB z<0ZvT@=Ki9D-QL2DQc2m7Uk;Y*t88xzgMPE`@M=XZ9@)!OiA)5e_K%;NDHH*q8`g$!{U^&|9%3FDZ7C-|oa}aj5S*QIq_xC|B>srfpdI zy*Gv0?|qbM8*+FcCCMKQ*W`bW;`9(ULc`+pND9^IQOYVrIH2Txy~ka#G!eFvKS}1H zPhneLQtTvu#)%cCjVO$r&qB7 z8WyM5Qm9U^Q+xK_fJr`-cYF6sr*FDsZz61Ef1Au>-@z8Wq}a*+t`qykq0Zk&P4<6A zx%vS%ZNt*>hbfef2lf6PM@bxoxY$fM1;vcly`gU zTc=;SVRIsE1^_A32KCDA)`Qi_>T+)DL}h%3eg+|KNGIkLiX5im(-YY%g7uLYcN9hsjbBeDZJ&K1CF#DX|e67N@CG zs7}*RHX_1){G7MHcRHObb|=DC@)^iHbVh93ONyQ3Gdrk)FhuR%GKGiX&aV) z=S-pYI~Qfzh8*TeN%DEaHTir|oaV?B{uiCyDR-|L|!`T9|=W{-tw z85oOwj95zWw@=e1v`DRg^Hpd2NSe&*@p*n3v*@y^}d?@es?pIH@amn^X z*vh^gna6IAt$azblYK`gmX1T6?}VD{J4d;?3pQ=T((!I7l#U1WcBf3+ki#A+$-ZZ} zX5TA{Qy(@%!{W403e{;}%4$TI>_d6Cx4wJ2zZ-Ta!dCDD$vpEQZ0k#ko#2N!@fT~T z?ZZ$L{O~AOkHDsFSlT@*h5At)O_{bKhhtL`{J3xpetZcswWsO`y7 z6MTv&SEt0LZCKi!I)&QqG?Zx@a+oe9!KV+`;4?&Vnh_hJVR4!{h3Yg5Wnm)h$Ip5D zJE*g{VznY{C7*-LL+8YH!KBzpKDQJ5$DzLGMNRVgqFkLHo3>%;cfk~DzY9^OZOCDf zlq6p?T$3*r#c6SDgoedwi4>~Sl9a`Xa6rii_m+0WibdE;zATxCE{83HNwJfB1t)fp zLw&D^n&c}*xwguJMVYoC zhiy`leA{qMzFicj?XdwG7N;Fks7`|^I}>4&&pgZ!|6hf5zlOTAOZF?mR`y-VJa#v1 zGfaw|>^&zIkwcyDiJI(tMY-CCP1~?^yiW?H<3YWBDbqIOuzyOj9}uqD4~*h;5H>@@ z;&ey~)#*^m=0uq6GYm77?DhW*5?EoEsU?0@jQ+s}5x zc173adVq^wSa{rEX=e-rgju2`}NTgk5@^U&+DjW8*8lHcgWR&uEC zn^BYemMB+m#ingo`n^4c+V35dX&Z95D<#SA4%g)OL~*(o8=+xwx<7^L^Z;dpA{%;_n8!Gzt2*pZOGyI zlq7#4T$8^T#pxw%goefGI{O@EQ`Wm(sCdE$jH=Ni@ z4)y&OYLdSl(6BgtltOj-4`q`g zO!AqB8RGk``(@NmT(Wf$wz7Xl=CPk+i(yjiWdG8M<>XN3U!x}bH&L#Bi%r|Gbo_k^ zrQ<=pA1Ko{s~_YpCt9P!oLYC|Ae9rfpc-9XEygQH@8LwjqZJQWAW^a1B0D z6sL)?85$O+NmHm!lTnr_!u|)(yL}2bY+Zz{;8T%#=G53?m=rs~r*&dIIn?&_s0ltp zl&dph(>5&a&YVJRcNWUD4LQt~lHjw4Yw$UuIL(QT(6BhookDe*hq6u)_T%Tg{e9H= zT(Ni&wvsPE=AjE>dtp-SBwyHxHRVv>i=rm^Vo|Oxj!oOJ^t(g~wcjNv(>CO=bV`yh z6RydZjpDQ%HbTSVv_cBi>6es!if};52lrNT#pXrWO1=u2hpvikhDouLe03)_ltX>5 ziJIiUigI-=Y}$sU-*r-`{jN)ywjqc0Qdj6HleIkgab<6 z*W1h$%NJoQ`4(gzx+S(7CdE$jt)19V4)wh)YLagkv<*wYJEl#Z`cPM4rh8zx0N%kYcHT#iKoQ}d~Xjq(%NufF& zOWCUklYJ=f_SSb&k9WfYM%W5|BAI8Ngsq23u@n4cC;nm$wS6jTf}a-U>gm|D4NJRc zrcghs-%_S+$l>gi1V1NSgP$A4={#(PhQ;ap6sptjsXcoa!2So%yZu5p>|cbf;1`p5 z<|WvAm=rs~FLh#3In?&$s0n^Wl&e=_(>5&aUY$byoc>6ewjqZOaXZlWwzg#GwAZ+|QG7FVoagstSak$LFt*mjr{JIU{KVqZDb_uZ&TeovIE z_hQpFEdAb}Lj9l~piJA4!=F=<{Go77{%{nhN3anZ7N^Hjs7{Yl)+)jQB_G^-(iICB zVJrF5WFGnqwjUhvaMu_7E$^1j~Nu2{hcTgm@H=ArLm8)8!IB!AzDz2#8fAD|}r zzoK0I5SzAP>Gz`)YQO)XOxuvdCn-t(X}Bi;EQ-_T*Z>WS)0Zh!r>`iR6=9MO`Q1Km z_j{?|xMT|>Y-RtB%wxaDmc*pk$^N4gtIMIzhndztZdR8^xjGy+ZNt*>2q~0~2lYmz zOxuvd$SKJ_O1NepHHy<{*bEJe(-d+nt9pZ9@+8r6l7NM+GghT&1_lY;(OI^$rOBi7* z`5-b6T>{$?lVT_NQci3yhx%RyHOZHaa&dy6;i1E{*p3nLk=sYB>BqWntYWg zPOD-gG%QZ5r%;{Npe$E}14=%)_bXSdVT7&ZYm<5CI@p$&6g$b+b7Fxx)c3DZlYE0H zS2x6_ZCLu+7cU~VR70zh3d2oWxXOCQ1ZUscCJ{& z2wTZ_AoI{2u{|*WS(|##br~N7W6=9MO<=x)>Uh07^*~JK3*$*c3*h8>YF)4PkALhgsbExwp zP?P=0C|8fdrfpa{J|>0I@u1$ZlxZ7sI6fuWPYBoSCq{8P37er|aXLAL>U0Wa#Uf1h zp}gB$-%CBs4Z9d&EBF~?o_Qv=DkjBF@Uxuwi#62tIj9MKZj`I%VbeA&?Vg`P{iuFV znYJN^3sVyOqHqm9OYdFIvFs+bfz z!LM;*g*nvrwWtYxU6iZWW79S)?cSI|{hV&1OxuvdEh!0pYq$o#EsE3a*a!`a)14_) zr@JT%7GXbr&fDKhy~h=+7-1{Lb{+4NJd| zrBFYp$0^e`($ zjO~g^v6K82C)SuleZPvDhu$3%i?tZO+J)&d-r>(!@6W6 zBWz_Kp3GxMz!t`&*vUSU6RXUj&PPE__EDo;9SxhdVd;2`6l%v~Ql@RlVeFJ-A17S1 z|00UhxY!I0i_`ciRHq3jixy!&UU9AOrB39Ajf}7rd=fIxoD^FalVT_M8ftq= z)C8X@%GIf{X&aVyr%j=@I~`@(h8$){N$?rNHTXPW z%%Q$lMoscnqFh}So3>%;cl8u%ziUvYZOGwQDM`LoxF%maiqksS2n~zVdMQ+=^(l)M 
z;ee74?rq?Tm5i{Jd?PXs-5A>#lVT_NrcNv~hx*dyty8G|ZbO;2 zA&2c!l6?DcO};}Erya2o8WyLWQm9TlQ&ug)0VVJ2?dpo9jIfn_cQOy{VLM||>?GgQ ziEZXk-+ib_zIT+X`(V>HEdB16LhW~d%CrqR9GH^i2Zd|$gQGYdf(_8HI31Qkbvm4~ zYY`^-P~Pp`@1-8;lD&+umHlWkk39xk81Ar0X5lAjB@oPY}$sUi%CrqRT#=ICSB7iwtD-nv zjm^-oI9-!Mb@~%!-6HIN@VwivbHiRn*b06FnP=XJt&K^s6Z~c;)|o?X--?>xw?(;n zJ2q{@((aup)OPQpOxuvdJt+x(Z@31(FN)Ls*a!`a(}O8gr$1AcEy8~MoVUN1`mih3 zGQw8!N69?&F>Gs0ik;+7II++i>ia3wB!4=})n~A28Jr$r4NJRArBK^lnlf!e4$Gz__;TSIeEBF&D_|ou zEKVz?P@PtytXzcs_&INXFLf1HEN6tR5&qu9rgXcYVsV4LNL(lH?nPYx0evIBkrL(6BgdnnHEjjIwkQ4k-EH-WIM{&j?$| zw<7b%?MnsPFAjlYECLS9ipwZCLuR}@^EKYl-P@VRotX+fyO5WGo+Z78MVJrE*WFER7wm&AtPVxhs*lZ5kV&zV{ZuEGn?s$Sj+*ReM7eqdwYf`A~{)sYeLk`!aB>45=8vKSRPB&sRG%QXxr%;`4p{!nn z{STgZ`)zL6(Fj|??;!KcJFyiqDRzS2?Zj$xsO@`E6a2m?SMSHBZCKiUFon{t)#j9G z8*+F!CBYvF*Wiyvae53JpP0x9B$nTNiQ?T|^all%iG)|*3pe~6mo|BiC?BW&7+rQeTJ zsQrFInYJN^&r*{7^Kec6MHHtmu@M>;r>|3}PTx?LFTw#O@9TZ%iZzX}mHY=X5B(9_ zB9me#`7mB1_M1a}4~LrM!$-L~0yb^K((gzq)P6^%Oxuvds3}Q4TDT@3J&My9*Z>WS z(^x4~r?Dy97h#eQ<=x)>Ug|GgvZ)cavX4jRvEyTlWK!&8pU{aF=TPSpqbB<#QLavk zP1~?^Jb4PG<3YVCDAP9NFjY#jPaUq=r-|Y;EjB~L;xv5<)oBLG0!EnZXbAr`?|EC_ zOP$F-wW$%dg3m(cnX_VxWK!$|pWTVSSVL{kiJIVZMY%dRHf_Vw?z}0~k7_>3v<*2d zkdojFhHLPJqBt##&CswoEt*1gT8y%Q5%xcL-tB|ju&EKYf-gztnM+}dWK!$|U&e_A z=TO_rp(gn9QLe6lP1~@vyJ8CUb6SZqZ9@*Lq$K#N;Tn9kC{C+mBQz{dYo<`0ennZo z2>bDK-u_Y zGs?6LIc$-VV)*e012 zJIQx(V#ztw_im_3zI&9bJ#5;BrQbbMsQvCmnYJN^y;G8WpKwjSZxpBfun`&-rvp-` zP6tv}Fv0;P@9Q1xie-(kmHbdL4?PUqC6i(&`4LWRIfwc_3N^`(j&k)FY}$sU-{VrK z{T@%5wjqZTQ0PyPNVE#gh@V>cYF7Hsb{!kUn6W~|1Fuv zo`tQGNwJgt948i?L!F<8n(V)ea`k*{+J>d$3sR^ZUr3p@A%}}olKql!&HjfdPM2ac zG%QY+r%;`)psZnp$&QBfZg1^Jy~;neuMxI_|B=iyuff*Iq}U05trLH-hT6U!HNkI) za`i@R+J>dwn^UOm-a?tSA&1*i68!dX4Sq)yr#rD38WyL!Q>ae&P}VTQ{s+&y{XRGB zYlN-f50H80gV;Km6g$Bma$?On)b=B&3I1r5tB+yRHZ1KvkwR_vNy@YhIXs<`;Ln6> z@MohqJ%^3ZusFStLUnqPvV;-#lS#3Y{8c9wokM-U zhMMHBN4fe2Hf_Vw?^`LCfb8*=zA zCCR@J*W^D$arzM(pBp)|ilaCk0X?$#ehQ(>Z6sprilue8< z$%pc8?|v_J5|?akgstq8k$LRo*g}~UJK3jnV%<5^`P8V%K24OX(_+&$EFDjuLhX14 z%CrqR%#@PsGly&TS)w@2ip|ilIL)3yb(({+j1eZg|7{HE-QL=gI+q)^Ho{i$dB{9- zUTmLCik;x|JMkB5sO<$&6MUg4R~N>nZCKh}G==Ju@ih{CzhQ5&qZks|)z8z)Sh8%WCN%9@THTmEu zPCH>EG%QZLq)?r9r7UBF14=%)x4SFWHo{i&J;*$CPi&h^ik;+rPAoi!`rZdM$@h(N zbw6y{hNa&FQYifn>K#a#wjqauQmBWi z#f`9){8%y%Jq}wZlVT_N2~KQ0hx$GVHOYSy0OpsnaOaHso+dN|K)$ zuE~EJ#px_;fQH5CoD{0lxs-j3Fv*AVZts3C^>;4W-3VLRe^2JI7hnrzQtV{E$cd%r zQ0JGRCi@?vT)h;Vwqfb`@)T;vS5T&H$l2%(4U5yYDO9KHC@UFZ zvJd6m-uhnZ4Q|-o2wTB#BJ<3fv3)Wrc7osP#9yqTwr@vG@H?Vhy%U?ZVQKg76l%No zP^N9j;l7jvzdu}qKM=*~L2QPG#p$6Gs?)=im5i|e!Sil^)D62EVJrCKWS;p1woWF+ zPVlFkSa}Y${S0b?KO5!hbJ(;EOS>ic`tB>y4G)gQ5G8zrVe4d4>?9w@iM{7g-{Ybt`FK&Tj*m^- zu=G1&3bo&fDAP9NFiA?1Pa3YtCyU}VIW|DU;xuIn)oCiqMn;(ALw>i<*Zp4VG%nfR z2wT~wBlFnlv4t`zcCydt#OiaX^O;eTeU>O!XT_#%SUR3Ph0^h$-W-%^8*-Q{CE4c= z*X;8|aheyKp5&aE|Wt2sFtNn+mOTZDG9zpxCZ}a6sHxj85$O+l~brrt58-l z!u|)(yL~k`>~4gu;A@b1=9<_#nG`$0*K%U5&aW*z(g+O_zc zGHpW+8>J-p#^D-#lPFG`Vk0yxPMfDtKk+Rn`xxQSf6jg4&G%Bba>eFG*h;<)nTKwR zEt5&HlYDz8R-Z$C?}(b@gQHyC37fWI>35eD>IbzeW!i=uc27z2UbrUTBZ||W*a!`a zQ(p?zX>ZCxMmV74gM0hBVs#^ICEuUSLl3~V$)wmxevlK(&!N5#K~3^Qqg*`%; z_lOkgCv_xc+J+pCPD%1(!ZrD^QJjv$Mrc@^PDr6Tok-cp2nUqBulE~Q>~4guzUxF(-2iqrJi01b=Nj44#7nJ60>VUnXE{LOj3?)Oq>@vm)fgstqe zk$LRw*g}~UJK5)SVgowV`P`_sr zg8wqg)fKU68E6sptOl$DII|H1Qa zU)K%08(}N>`edG&|M6HSlVT_MhE8lhhuYp4HNiKDa&=Q|+J>dw%~PoDZb6y0A&0F} z5`61$4Zckjr){wj8WyMRQ>acmQ1&sxe*B!bzn41L6`LDjEBVf39=Z#*OeV!n^4*+R ze-8ECLrwBMqFmh*o3>%;w=ad-@7|PY8*-*W~+0aXJ7SpM_}7zQtTu@%8C8wP~XR(Ci$^Zt{#U?+pzR|LJFndLA?_x z(>CPro0KF!Ib4&U62<9MY=nlz>GTw;(;1YFjBr57`+C21#qLJfN`5w(hn|D2lS#3Y 
z{5&W2pF@40kDBDak82`?PWPrzo$jOTWQ55+ly`gUd#Mk&VR<8L z1^+XdXFi1OlS#1?{1GSqVhy$Z7;1t)9_8v2*t88xyHBN1KdPrG(>CPrY)XPZ7p}pd zkK*(KHbcYW^im4d=`WO(jIjT~^KSpE8+JFsR`9=(dFHFwI++wZ!C!M?`#IG18>k8X zW|XUMVbeA&?Y@&j{haVI6Zxe>OKe?sP=pJK~oQtTxE+==z)P~TsoCiz!Uu6~V8+pzTeZ3^{+`i?SfLk>Tr zB>9iwn*66IPQ%O+r(t1n8qWXOI1Nu($Os3Nd~k0>SFCP?t>hz;r-@UjPLohJ zGQt5R@9RzGirtN{m3#^^51kTQCzE0)`P5GAKZp9B7B$JIi*j{(Y}$sU-x*V=pZQFb zX&Z8wB_+vc4cFweMRA%P8=zrvnlpv!G#6zfBTRDtI~mfuz5Bh?d0eu+5w^0=N9M8f zV+&||fii4Evb=L@4I`yx@UE{aXtuynk53iY!ZM47fBhb2>zeW`HGzH}6)Ww03< z7N_M>s7}jMb~3_bAIiJE^}WEkRrQNkss2|nZlxZ7sST`lX*9+I+>ql|=H8w-T;j}i9c=e+&B)ID6Wxe>OK??vXJeb_RY6g$cHabo>B)c1a< zNxpxSs|R4yHZ1)fltTTW4yH`oki(%VNq$(kCOdTR8I)-oa`$gsqcFv6K7~C-$F1 zeP4>2l?G{7%$lzbnesyRm5-mX7aDp?+5P zQKoIk;enK7e=uCL|2c}&L)Z)ri_;@1RHsKNI~ifJ59Qt7`d;eeZdl$3Tfv_s^USBP zeKIL_fUX=c8PG0h_jAY4@cR>PPh#%CrqR{52)PUkTUXe~aSuDmFvI z;`CYy)#-K0N=DfK;CZ*d>4x2nuoe7mGS7SmTPKrZC-}QgY(IzEejhc#{~6`#2iUX? zOS>PYP(P=CQ>Jam;Xf$}{&Bbl|0Igjr`QM$i__;RRHrW}`xs$Ae$Lz9Oa00fn;T&( z`8Q-9`YpChCdE$j@10nG4)y&bYLfpHaUA%_uDl6<6a zO+IoIr%|vG8WyL~Qm7yK=#+(wa6rii_r`R^>PFZ~J~o+$j)QHJNwJfBTqpLQLw%2r zn&cBixjG>>ZNt*<#3|H%C!tK+ki%ptNj`bFCZ8gT)0Ef<4U5y%DO9IvC>t5!fRgw1 zrgO#aM%YR|1DS`;h^>=Jv6FmeC-$F1eb0)T_d6Cx4xIUh8vbQ!dCEKk$L7?*glyQJHgj+;xE=v+v}kw`1(<<{u-OMVQF{6 z6zWH{5oOwj95zWw@J+)t_-0X@HpgaYSe&*@p*n3vS;+|dA3X2&ZQQWC5w?PFN9LK^ zW9wv6>;&J@iS6f5+dH8q_|8$T?t)F*u(Z2d3bozcDbqIOut!RQ?-{Pa_ln}whmFv% zIPH@{b=sG*j}i9c=e+&B)cswtxe>OKA4uk*2Vu)(QtTu@#EJFiP~V54Ci&q}t{#C+ z+pzR|R0_4JzTWAwqa@a^Au{kUr?rP z$l04}shQ;ao6spq?lzoh_A3x{q@1_3aip`C)b&?ND=Apx3%VbjQ zBp<CNVW=fKe6|Tw0j^Z>9HbTSVG;Rvj zX*|k8MmV74gL@OWVs#^IedZI9dFaI0HklMV$tQJU|2fq6~4gu5&qE|@~?cOlBO4LK~5lH`kqYx2dSI4zD1(6Bfy zkwSG^lCqHzCi&R@-oDGOy`4AnlRW26JM6N}7QNomYL+y@R>Wn=xG#t8jY+W+aRnz9 zokI<-h?rz%P z!bBWJ#Q*z$sh*eB`VOsOgsp5Fkg?qm+YFOpC)>tO{OuX)Y*W-^+bqh}&9S)|mX5Ye zp?0(tWp0KXwn<60ZNoL&c2S(R$EIOeoOVp1It`{QSA??dwbvF_zU2R&bInzT9d7*L z$Cow#pJ|3U4D0{95Ps7? 
diff --git a/examples/stable-diffusion/quantize/measure_all/fp8_hooks_maxabs_mod_list.json b/examples/stable-diffusion/quantize/measure_all/fp8_hooks_maxabs_mod_list.json
deleted file mode 100644
index 4cd94ad59e..0000000000
--- a/examples/stable-diffusion/quantize/measure_all/fp8_hooks_maxabs_mod_list.json
+++ /dev/null
@@ -1,506 +0,0 @@
-[
- "time_text_embed.timestep_embedder.linear_1",
- "time_text_embed.timestep_embedder.linear_2",
- "time_text_embed.guidance_embedder.linear_1",
- "time_text_embed.guidance_embedder.linear_2",
- "time_text_embed.text_embedder.linear_1",
- "time_text_embed.text_embedder.linear_2",
- "context_embedder",
- "x_embedder",
- "transformer_blocks.0.norm1.linear",
- "transformer_blocks.0.norm1_context.linear",
- "transformer_blocks.0.attn.to_q",
- "transformer_blocks.0.attn.to_k",
- "transformer_blocks.0.attn.to_v",
- "transformer_blocks.0.attn.add_k_proj",
- "transformer_blocks.0.attn.add_v_proj",
- "transformer_blocks.0.attn.add_q_proj",
- "transformer_blocks.0.attn.to_out.0",
- "transformer_blocks.0.attn.to_add_out",
- "transformer_blocks.0.ff.net.0.proj",
- "transformer_blocks.0.ff.net.2",
- "transformer_blocks.0.ff_context.net.0.proj",
- "transformer_blocks.0.ff_context.net.2",
- "transformer_blocks.1.norm1.linear",
- "transformer_blocks.1.norm1_context.linear",
- "transformer_blocks.1.attn.to_q",
- "transformer_blocks.1.attn.to_k",
- "transformer_blocks.1.attn.to_v",
- "transformer_blocks.1.attn.add_k_proj",
- "transformer_blocks.1.attn.add_v_proj",
- "transformer_blocks.1.attn.add_q_proj",
- "transformer_blocks.1.attn.to_out.0",
- "transformer_blocks.1.attn.to_add_out",
- "transformer_blocks.1.ff.net.0.proj",
- "transformer_blocks.1.ff.net.2",
- "transformer_blocks.1.ff_context.net.0.proj",
- "transformer_blocks.1.ff_context.net.2",
- "transformer_blocks.2.norm1.linear",
- "transformer_blocks.2.norm1_context.linear",
- "transformer_blocks.2.attn.to_q",
- "transformer_blocks.2.attn.to_k",
- "transformer_blocks.2.attn.to_v",
- "transformer_blocks.2.attn.add_k_proj",
- "transformer_blocks.2.attn.add_v_proj",
- "transformer_blocks.2.attn.add_q_proj",
- "transformer_blocks.2.attn.to_out.0",
- "transformer_blocks.2.attn.to_add_out",
- "transformer_blocks.2.ff.net.0.proj",
- "transformer_blocks.2.ff.net.2",
- "transformer_blocks.2.ff_context.net.0.proj",
- "transformer_blocks.2.ff_context.net.2",
- "transformer_blocks.3.norm1.linear",
- "transformer_blocks.3.norm1_context.linear",
- "transformer_blocks.3.attn.to_q",
- "transformer_blocks.3.attn.to_k",
- "transformer_blocks.3.attn.to_v",
- "transformer_blocks.3.attn.add_k_proj",
- "transformer_blocks.3.attn.add_v_proj",
- "transformer_blocks.3.attn.add_q_proj",
- "transformer_blocks.3.attn.to_out.0",
- "transformer_blocks.3.attn.to_add_out",
- "transformer_blocks.3.ff.net.0.proj",
- "transformer_blocks.3.ff.net.2",
- "transformer_blocks.3.ff_context.net.0.proj",
- "transformer_blocks.3.ff_context.net.2",
- "transformer_blocks.4.norm1.linear",
- "transformer_blocks.4.norm1_context.linear",
- "transformer_blocks.4.attn.to_q",
- "transformer_blocks.4.attn.to_k",
- "transformer_blocks.4.attn.to_v",
- "transformer_blocks.4.attn.add_k_proj",
- "transformer_blocks.4.attn.add_v_proj",
- "transformer_blocks.4.attn.add_q_proj",
- "transformer_blocks.4.attn.to_out.0",
- "transformer_blocks.4.attn.to_add_out",
- "transformer_blocks.4.ff.net.0.proj",
- "transformer_blocks.4.ff.net.2",
- "transformer_blocks.4.ff_context.net.0.proj",
- "transformer_blocks.4.ff_context.net.2",
"transformer_blocks.5.norm1.linear", - "transformer_blocks.5.norm1_context.linear", - "transformer_blocks.5.attn.to_q", - "transformer_blocks.5.attn.to_k", - "transformer_blocks.5.attn.to_v", - "transformer_blocks.5.attn.add_k_proj", - "transformer_blocks.5.attn.add_v_proj", - "transformer_blocks.5.attn.add_q_proj", - "transformer_blocks.5.attn.to_out.0", - "transformer_blocks.5.attn.to_add_out", - "transformer_blocks.5.ff.net.0.proj", - "transformer_blocks.5.ff.net.2", - "transformer_blocks.5.ff_context.net.0.proj", - "transformer_blocks.5.ff_context.net.2", - "transformer_blocks.6.norm1.linear", - "transformer_blocks.6.norm1_context.linear", - "transformer_blocks.6.attn.to_q", - "transformer_blocks.6.attn.to_k", - "transformer_blocks.6.attn.to_v", - "transformer_blocks.6.attn.add_k_proj", - "transformer_blocks.6.attn.add_v_proj", - "transformer_blocks.6.attn.add_q_proj", - "transformer_blocks.6.attn.to_out.0", - "transformer_blocks.6.attn.to_add_out", - "transformer_blocks.6.ff.net.0.proj", - "transformer_blocks.6.ff.net.2", - "transformer_blocks.6.ff_context.net.0.proj", - "transformer_blocks.6.ff_context.net.2", - "transformer_blocks.7.norm1.linear", - "transformer_blocks.7.norm1_context.linear", - "transformer_blocks.7.attn.to_q", - "transformer_blocks.7.attn.to_k", - "transformer_blocks.7.attn.to_v", - "transformer_blocks.7.attn.add_k_proj", - "transformer_blocks.7.attn.add_v_proj", - "transformer_blocks.7.attn.add_q_proj", - "transformer_blocks.7.attn.to_out.0", - "transformer_blocks.7.attn.to_add_out", - "transformer_blocks.7.ff.net.0.proj", - "transformer_blocks.7.ff.net.2", - "transformer_blocks.7.ff_context.net.0.proj", - "transformer_blocks.7.ff_context.net.2", - "transformer_blocks.8.norm1.linear", - "transformer_blocks.8.norm1_context.linear", - "transformer_blocks.8.attn.to_q", - "transformer_blocks.8.attn.to_k", - "transformer_blocks.8.attn.to_v", - "transformer_blocks.8.attn.add_k_proj", - "transformer_blocks.8.attn.add_v_proj", - "transformer_blocks.8.attn.add_q_proj", - "transformer_blocks.8.attn.to_out.0", - "transformer_blocks.8.attn.to_add_out", - "transformer_blocks.8.ff.net.0.proj", - "transformer_blocks.8.ff.net.2", - "transformer_blocks.8.ff_context.net.0.proj", - "transformer_blocks.8.ff_context.net.2", - "transformer_blocks.9.norm1.linear", - "transformer_blocks.9.norm1_context.linear", - "transformer_blocks.9.attn.to_q", - "transformer_blocks.9.attn.to_k", - "transformer_blocks.9.attn.to_v", - "transformer_blocks.9.attn.add_k_proj", - "transformer_blocks.9.attn.add_v_proj", - "transformer_blocks.9.attn.add_q_proj", - "transformer_blocks.9.attn.to_out.0", - "transformer_blocks.9.attn.to_add_out", - "transformer_blocks.9.ff.net.0.proj", - "transformer_blocks.9.ff.net.2", - "transformer_blocks.9.ff_context.net.0.proj", - "transformer_blocks.9.ff_context.net.2", - "transformer_blocks.10.norm1.linear", - "transformer_blocks.10.norm1_context.linear", - "transformer_blocks.10.attn.to_q", - "transformer_blocks.10.attn.to_k", - "transformer_blocks.10.attn.to_v", - "transformer_blocks.10.attn.add_k_proj", - "transformer_blocks.10.attn.add_v_proj", - "transformer_blocks.10.attn.add_q_proj", - "transformer_blocks.10.attn.to_out.0", - "transformer_blocks.10.attn.to_add_out", - "transformer_blocks.10.ff.net.0.proj", - "transformer_blocks.10.ff.net.2", - "transformer_blocks.10.ff_context.net.0.proj", - "transformer_blocks.10.ff_context.net.2", - "transformer_blocks.11.norm1.linear", - "transformer_blocks.11.norm1_context.linear", - "transformer_blocks.11.attn.to_q", - 
"transformer_blocks.11.attn.to_k", - "transformer_blocks.11.attn.to_v", - "transformer_blocks.11.attn.add_k_proj", - "transformer_blocks.11.attn.add_v_proj", - "transformer_blocks.11.attn.add_q_proj", - "transformer_blocks.11.attn.to_out.0", - "transformer_blocks.11.attn.to_add_out", - "transformer_blocks.11.ff.net.0.proj", - "transformer_blocks.11.ff.net.2", - "transformer_blocks.11.ff_context.net.0.proj", - "transformer_blocks.11.ff_context.net.2", - "transformer_blocks.12.norm1.linear", - "transformer_blocks.12.norm1_context.linear", - "transformer_blocks.12.attn.to_q", - "transformer_blocks.12.attn.to_k", - "transformer_blocks.12.attn.to_v", - "transformer_blocks.12.attn.add_k_proj", - "transformer_blocks.12.attn.add_v_proj", - "transformer_blocks.12.attn.add_q_proj", - "transformer_blocks.12.attn.to_out.0", - "transformer_blocks.12.attn.to_add_out", - "transformer_blocks.12.ff.net.0.proj", - "transformer_blocks.12.ff.net.2", - "transformer_blocks.12.ff_context.net.0.proj", - "transformer_blocks.12.ff_context.net.2", - "transformer_blocks.13.norm1.linear", - "transformer_blocks.13.norm1_context.linear", - "transformer_blocks.13.attn.to_q", - "transformer_blocks.13.attn.to_k", - "transformer_blocks.13.attn.to_v", - "transformer_blocks.13.attn.add_k_proj", - "transformer_blocks.13.attn.add_v_proj", - "transformer_blocks.13.attn.add_q_proj", - "transformer_blocks.13.attn.to_out.0", - "transformer_blocks.13.attn.to_add_out", - "transformer_blocks.13.ff.net.0.proj", - "transformer_blocks.13.ff.net.2", - "transformer_blocks.13.ff_context.net.0.proj", - "transformer_blocks.13.ff_context.net.2", - "transformer_blocks.14.norm1.linear", - "transformer_blocks.14.norm1_context.linear", - "transformer_blocks.14.attn.to_q", - "transformer_blocks.14.attn.to_k", - "transformer_blocks.14.attn.to_v", - "transformer_blocks.14.attn.add_k_proj", - "transformer_blocks.14.attn.add_v_proj", - "transformer_blocks.14.attn.add_q_proj", - "transformer_blocks.14.attn.to_out.0", - "transformer_blocks.14.attn.to_add_out", - "transformer_blocks.14.ff.net.0.proj", - "transformer_blocks.14.ff.net.2", - "transformer_blocks.14.ff_context.net.0.proj", - "transformer_blocks.14.ff_context.net.2", - "transformer_blocks.15.norm1.linear", - "transformer_blocks.15.norm1_context.linear", - "transformer_blocks.15.attn.to_q", - "transformer_blocks.15.attn.to_k", - "transformer_blocks.15.attn.to_v", - "transformer_blocks.15.attn.add_k_proj", - "transformer_blocks.15.attn.add_v_proj", - "transformer_blocks.15.attn.add_q_proj", - "transformer_blocks.15.attn.to_out.0", - "transformer_blocks.15.attn.to_add_out", - "transformer_blocks.15.ff.net.0.proj", - "transformer_blocks.15.ff.net.2", - "transformer_blocks.15.ff_context.net.0.proj", - "transformer_blocks.15.ff_context.net.2", - "transformer_blocks.16.norm1.linear", - "transformer_blocks.16.norm1_context.linear", - "transformer_blocks.16.attn.to_q", - "transformer_blocks.16.attn.to_k", - "transformer_blocks.16.attn.to_v", - "transformer_blocks.16.attn.add_k_proj", - "transformer_blocks.16.attn.add_v_proj", - "transformer_blocks.16.attn.add_q_proj", - "transformer_blocks.16.attn.to_out.0", - "transformer_blocks.16.attn.to_add_out", - "transformer_blocks.16.ff.net.0.proj", - "transformer_blocks.16.ff.net.2", - "transformer_blocks.16.ff_context.net.0.proj", - "transformer_blocks.16.ff_context.net.2", - "transformer_blocks.17.norm1.linear", - "transformer_blocks.17.norm1_context.linear", - "transformer_blocks.17.attn.to_q", - "transformer_blocks.17.attn.to_k", - 
"transformer_blocks.17.attn.to_v", - "transformer_blocks.17.attn.add_k_proj", - "transformer_blocks.17.attn.add_v_proj", - "transformer_blocks.17.attn.add_q_proj", - "transformer_blocks.17.attn.to_out.0", - "transformer_blocks.17.attn.to_add_out", - "transformer_blocks.17.ff.net.0.proj", - "transformer_blocks.17.ff.net.2", - "transformer_blocks.17.ff_context.net.0.proj", - "transformer_blocks.17.ff_context.net.2", - "transformer_blocks.18.norm1.linear", - "transformer_blocks.18.norm1_context.linear", - "transformer_blocks.18.attn.to_q", - "transformer_blocks.18.attn.to_k", - "transformer_blocks.18.attn.to_v", - "transformer_blocks.18.attn.add_k_proj", - "transformer_blocks.18.attn.add_v_proj", - "transformer_blocks.18.attn.add_q_proj", - "transformer_blocks.18.attn.to_out.0", - "transformer_blocks.18.attn.to_add_out", - "transformer_blocks.18.ff.net.0.proj", - "transformer_blocks.18.ff.net.2", - "transformer_blocks.18.ff_context.net.0.proj", - "transformer_blocks.18.ff_context.net.2", - "single_transformer_blocks.0.norm.linear", - "single_transformer_blocks.0.proj_mlp", - "single_transformer_blocks.0.proj_out", - "single_transformer_blocks.0.attn.to_q", - "single_transformer_blocks.0.attn.to_k", - "single_transformer_blocks.0.attn.to_v", - "single_transformer_blocks.1.norm.linear", - "single_transformer_blocks.1.proj_mlp", - "single_transformer_blocks.1.proj_out", - "single_transformer_blocks.1.attn.to_q", - "single_transformer_blocks.1.attn.to_k", - "single_transformer_blocks.1.attn.to_v", - "single_transformer_blocks.2.norm.linear", - "single_transformer_blocks.2.proj_mlp", - "single_transformer_blocks.2.proj_out", - "single_transformer_blocks.2.attn.to_q", - "single_transformer_blocks.2.attn.to_k", - "single_transformer_blocks.2.attn.to_v", - "single_transformer_blocks.3.norm.linear", - "single_transformer_blocks.3.proj_mlp", - "single_transformer_blocks.3.proj_out", - "single_transformer_blocks.3.attn.to_q", - "single_transformer_blocks.3.attn.to_k", - "single_transformer_blocks.3.attn.to_v", - "single_transformer_blocks.4.norm.linear", - "single_transformer_blocks.4.proj_mlp", - "single_transformer_blocks.4.proj_out", - "single_transformer_blocks.4.attn.to_q", - "single_transformer_blocks.4.attn.to_k", - "single_transformer_blocks.4.attn.to_v", - "single_transformer_blocks.5.norm.linear", - "single_transformer_blocks.5.proj_mlp", - "single_transformer_blocks.5.proj_out", - "single_transformer_blocks.5.attn.to_q", - "single_transformer_blocks.5.attn.to_k", - "single_transformer_blocks.5.attn.to_v", - "single_transformer_blocks.6.norm.linear", - "single_transformer_blocks.6.proj_mlp", - "single_transformer_blocks.6.proj_out", - "single_transformer_blocks.6.attn.to_q", - "single_transformer_blocks.6.attn.to_k", - "single_transformer_blocks.6.attn.to_v", - "single_transformer_blocks.7.norm.linear", - "single_transformer_blocks.7.proj_mlp", - "single_transformer_blocks.7.proj_out", - "single_transformer_blocks.7.attn.to_q", - "single_transformer_blocks.7.attn.to_k", - "single_transformer_blocks.7.attn.to_v", - "single_transformer_blocks.8.norm.linear", - "single_transformer_blocks.8.proj_mlp", - "single_transformer_blocks.8.proj_out", - "single_transformer_blocks.8.attn.to_q", - "single_transformer_blocks.8.attn.to_k", - "single_transformer_blocks.8.attn.to_v", - "single_transformer_blocks.9.norm.linear", - "single_transformer_blocks.9.proj_mlp", - "single_transformer_blocks.9.proj_out", - "single_transformer_blocks.9.attn.to_q", - "single_transformer_blocks.9.attn.to_k", - 
"single_transformer_blocks.9.attn.to_v", - "single_transformer_blocks.10.norm.linear", - "single_transformer_blocks.10.proj_mlp", - "single_transformer_blocks.10.proj_out", - "single_transformer_blocks.10.attn.to_q", - "single_transformer_blocks.10.attn.to_k", - "single_transformer_blocks.10.attn.to_v", - "single_transformer_blocks.11.norm.linear", - "single_transformer_blocks.11.proj_mlp", - "single_transformer_blocks.11.proj_out", - "single_transformer_blocks.11.attn.to_q", - "single_transformer_blocks.11.attn.to_k", - "single_transformer_blocks.11.attn.to_v", - "single_transformer_blocks.12.norm.linear", - "single_transformer_blocks.12.proj_mlp", - "single_transformer_blocks.12.proj_out", - "single_transformer_blocks.12.attn.to_q", - "single_transformer_blocks.12.attn.to_k", - "single_transformer_blocks.12.attn.to_v", - "single_transformer_blocks.13.norm.linear", - "single_transformer_blocks.13.proj_mlp", - "single_transformer_blocks.13.proj_out", - "single_transformer_blocks.13.attn.to_q", - "single_transformer_blocks.13.attn.to_k", - "single_transformer_blocks.13.attn.to_v", - "single_transformer_blocks.14.norm.linear", - "single_transformer_blocks.14.proj_mlp", - "single_transformer_blocks.14.proj_out", - "single_transformer_blocks.14.attn.to_q", - "single_transformer_blocks.14.attn.to_k", - "single_transformer_blocks.14.attn.to_v", - "single_transformer_blocks.15.norm.linear", - "single_transformer_blocks.15.proj_mlp", - "single_transformer_blocks.15.proj_out", - "single_transformer_blocks.15.attn.to_q", - "single_transformer_blocks.15.attn.to_k", - "single_transformer_blocks.15.attn.to_v", - "single_transformer_blocks.16.norm.linear", - "single_transformer_blocks.16.proj_mlp", - "single_transformer_blocks.16.proj_out", - "single_transformer_blocks.16.attn.to_q", - "single_transformer_blocks.16.attn.to_k", - "single_transformer_blocks.16.attn.to_v", - "single_transformer_blocks.17.norm.linear", - "single_transformer_blocks.17.proj_mlp", - "single_transformer_blocks.17.proj_out", - "single_transformer_blocks.17.attn.to_q", - "single_transformer_blocks.17.attn.to_k", - "single_transformer_blocks.17.attn.to_v", - "single_transformer_blocks.18.norm.linear", - "single_transformer_blocks.18.proj_mlp", - "single_transformer_blocks.18.proj_out", - "single_transformer_blocks.18.attn.to_q", - "single_transformer_blocks.18.attn.to_k", - "single_transformer_blocks.18.attn.to_v", - "single_transformer_blocks.19.norm.linear", - "single_transformer_blocks.19.proj_mlp", - "single_transformer_blocks.19.proj_out", - "single_transformer_blocks.19.attn.to_q", - "single_transformer_blocks.19.attn.to_k", - "single_transformer_blocks.19.attn.to_v", - "single_transformer_blocks.20.norm.linear", - "single_transformer_blocks.20.proj_mlp", - "single_transformer_blocks.20.proj_out", - "single_transformer_blocks.20.attn.to_q", - "single_transformer_blocks.20.attn.to_k", - "single_transformer_blocks.20.attn.to_v", - "single_transformer_blocks.21.norm.linear", - "single_transformer_blocks.21.proj_mlp", - "single_transformer_blocks.21.proj_out", - "single_transformer_blocks.21.attn.to_q", - "single_transformer_blocks.21.attn.to_k", - "single_transformer_blocks.21.attn.to_v", - "single_transformer_blocks.22.norm.linear", - "single_transformer_blocks.22.proj_mlp", - "single_transformer_blocks.22.proj_out", - "single_transformer_blocks.22.attn.to_q", - "single_transformer_blocks.22.attn.to_k", - "single_transformer_blocks.22.attn.to_v", - "single_transformer_blocks.23.norm.linear", - 
"single_transformer_blocks.23.proj_mlp", - "single_transformer_blocks.23.proj_out", - "single_transformer_blocks.23.attn.to_q", - "single_transformer_blocks.23.attn.to_k", - "single_transformer_blocks.23.attn.to_v", - "single_transformer_blocks.24.norm.linear", - "single_transformer_blocks.24.proj_mlp", - "single_transformer_blocks.24.proj_out", - "single_transformer_blocks.24.attn.to_q", - "single_transformer_blocks.24.attn.to_k", - "single_transformer_blocks.24.attn.to_v", - "single_transformer_blocks.25.norm.linear", - "single_transformer_blocks.25.proj_mlp", - "single_transformer_blocks.25.proj_out", - "single_transformer_blocks.25.attn.to_q", - "single_transformer_blocks.25.attn.to_k", - "single_transformer_blocks.25.attn.to_v", - "single_transformer_blocks.26.norm.linear", - "single_transformer_blocks.26.proj_mlp", - "single_transformer_blocks.26.proj_out", - "single_transformer_blocks.26.attn.to_q", - "single_transformer_blocks.26.attn.to_k", - "single_transformer_blocks.26.attn.to_v", - "single_transformer_blocks.27.norm.linear", - "single_transformer_blocks.27.proj_mlp", - "single_transformer_blocks.27.proj_out", - "single_transformer_blocks.27.attn.to_q", - "single_transformer_blocks.27.attn.to_k", - "single_transformer_blocks.27.attn.to_v", - "single_transformer_blocks.28.norm.linear", - "single_transformer_blocks.28.proj_mlp", - "single_transformer_blocks.28.proj_out", - "single_transformer_blocks.28.attn.to_q", - "single_transformer_blocks.28.attn.to_k", - "single_transformer_blocks.28.attn.to_v", - "single_transformer_blocks.29.norm.linear", - "single_transformer_blocks.29.proj_mlp", - "single_transformer_blocks.29.proj_out", - "single_transformer_blocks.29.attn.to_q", - "single_transformer_blocks.29.attn.to_k", - "single_transformer_blocks.29.attn.to_v", - "single_transformer_blocks.30.norm.linear", - "single_transformer_blocks.30.proj_mlp", - "single_transformer_blocks.30.proj_out", - "single_transformer_blocks.30.attn.to_q", - "single_transformer_blocks.30.attn.to_k", - "single_transformer_blocks.30.attn.to_v", - "single_transformer_blocks.31.norm.linear", - "single_transformer_blocks.31.proj_mlp", - "single_transformer_blocks.31.proj_out", - "single_transformer_blocks.31.attn.to_q", - "single_transformer_blocks.31.attn.to_k", - "single_transformer_blocks.31.attn.to_v", - "single_transformer_blocks.32.norm.linear", - "single_transformer_blocks.32.proj_mlp", - "single_transformer_blocks.32.proj_out", - "single_transformer_blocks.32.attn.to_q", - "single_transformer_blocks.32.attn.to_k", - "single_transformer_blocks.32.attn.to_v", - "single_transformer_blocks.33.norm.linear", - "single_transformer_blocks.33.proj_mlp", - "single_transformer_blocks.33.proj_out", - "single_transformer_blocks.33.attn.to_q", - "single_transformer_blocks.33.attn.to_k", - "single_transformer_blocks.33.attn.to_v", - "single_transformer_blocks.34.norm.linear", - "single_transformer_blocks.34.proj_mlp", - "single_transformer_blocks.34.proj_out", - "single_transformer_blocks.34.attn.to_q", - "single_transformer_blocks.34.attn.to_k", - "single_transformer_blocks.34.attn.to_v", - "single_transformer_blocks.35.norm.linear", - "single_transformer_blocks.35.proj_mlp", - "single_transformer_blocks.35.proj_out", - "single_transformer_blocks.35.attn.to_q", - "single_transformer_blocks.35.attn.to_k", - "single_transformer_blocks.35.attn.to_v", - "single_transformer_blocks.36.norm.linear", - "single_transformer_blocks.36.proj_mlp", - "single_transformer_blocks.36.proj_out", - 
"single_transformer_blocks.36.attn.to_q", - "single_transformer_blocks.36.attn.to_k", - "single_transformer_blocks.36.attn.to_v", - "single_transformer_blocks.37.norm.linear", - "single_transformer_blocks.37.proj_mlp", - "single_transformer_blocks.37.proj_out", - "single_transformer_blocks.37.attn.to_q", - "single_transformer_blocks.37.attn.to_k", - "single_transformer_blocks.37.attn.to_v", - "norm_out.linear", - "proj_out" -] \ No newline at end of file diff --git a/examples/stable-diffusion/quantize/measure_all_500/fp8_hooks_maxabs.json b/examples/stable-diffusion/quantize/measure_all_500/fp8_hooks_maxabs.json deleted file mode 100644 index 775d712618..0000000000 --- a/examples/stable-diffusion/quantize/measure_all_500/fp8_hooks_maxabs.json +++ /dev/null @@ -1,8071 +0,0 @@ -{ - "GlobalRank": null, - "LocalRank": null, - "Mode": "DynamicRange", - "Nodes": { - "time_text_embed.timestep_embedder.linear_1": { - "inputs": [ - [ - [ - 1.0 - ] - ] - ], - "params": { - "weight": [ - [ - 0.28515625 - ] - ] - } - }, - "time_text_embed.timestep_embedder.linear_2": { - "inputs": [ - [ - [ - 3.28125 - ] - ] - ], - "params": { - "weight": [ - [ - 1.1484375 - ] - ] - } - }, - "time_text_embed.guidance_embedder.linear_1": { - "inputs": [ - [ - [ - 1.0 - ] - ] - ], - "params": { - "weight": [ - [ - 0.10400390625 - ] - ] - } - }, - "time_text_embed.guidance_embedder.linear_2": { - "inputs": [ - [ - [ - 0.60546875 - ] - ] - ], - "params": { - "weight": [ - [ - 0.1201171875 - ] - ] - } - }, - "time_text_embed.text_embedder.linear_1": { - "inputs": [ - [ - [ - 4.0625 - ] - ] - ], - "params": { - "weight": [ - [ - 0.263671875 - ] - ] - } - }, - "time_text_embed.text_embedder.linear_2": { - "inputs": [ - [ - [ - 0.373046875 - ] - ] - ], - "params": { - "weight": [ - [ - 1.171875 - ] - ] - } - }, - "context_embedder": { - "inputs": [ - [ - [ - 6.4375 - ] - ] - ], - "params": { - "weight": [ - [ - 0.72265625 - ] - ] - } - }, - "x_embedder": { - "inputs": [ - [ - [ - 5.15625 - ] - ] - ], - "params": { - "weight": [ - [ - 0.55859375 - ] - ] - } - }, - "transformer_blocks.0.norm1.linear": { - "inputs": [ - [ - [ - 6.4375 - ] - ] - ], - "params": { - "weight": [ - [ - 0.396484375 - ] - ] - } - }, - "transformer_blocks.0.norm1_context.linear": { - "inputs": [ - [ - [ - 6.4375 - ] - ] - ], - "params": { - "weight": [ - [ - 0.546875 - ] - ] - } - }, - "transformer_blocks.0.attn.to_q": { - "inputs": [ - [ - [ - 20.125 - ] - ] - ], - "params": { - "weight": [ - [ - 2.4375 - ] - ] - } - }, - "transformer_blocks.0.attn.to_k": { - "inputs": [ - [ - [ - 20.125 - ] - ] - ], - "params": { - "weight": [ - [ - 0.5390625 - ] - ] - } - }, - "transformer_blocks.0.attn.to_v": { - "inputs": [ - [ - [ - 20.125 - ] - ] - ], - "params": { - "weight": [ - [ - 0.38671875 - ] - ] - } - }, - "transformer_blocks.0.attn.add_k_proj": { - "inputs": [ - [ - [ - 14.25 - ] - ] - ], - "params": { - "weight": [ - [ - 0.5390625 - ] - ] - } - }, - "transformer_blocks.0.attn.add_v_proj": { - "inputs": [ - [ - [ - 14.25 - ] - ] - ], - "params": { - "weight": [ - [ - 0.236328125 - ] - ] - } - }, - "transformer_blocks.0.attn.add_q_proj": { - "inputs": [ - [ - [ - 14.25 - ] - ] - ], - "params": { - "weight": [ - [ - 0.37890625 - ] - ] - } - }, - "transformer_blocks.0.attn.to_out.0": { - "inputs": [ - [ - [ - 1.625 - ] - ] - ], - "params": { - "weight": [ - [ - 0.408203125 - ] - ] - } - }, - "transformer_blocks.0.attn.to_add_out": { - "inputs": [ - [ - [ - 7.46875 - ] - ] - ], - "params": { - "weight": [ - [ - 0.40625 - ] - ] - } - }, - 
"transformer_blocks.0.ff.net.0.proj": { - "inputs": [ - [ - [ - 5.46875 - ] - ] - ], - "params": { - "weight": [ - [ - 0.439453125 - ] - ] - } - }, - "transformer_blocks.0.ff.net.2": { - "inputs": [ - [ - [ - 9.375 - ] - ] - ], - "params": { - "weight": [ - [ - 0.60546875 - ] - ] - } - }, - "transformer_blocks.0.ff_context.net.0.proj": { - "inputs": [ - [ - [ - 10.5625 - ] - ] - ], - "params": { - "weight": [ - [ - 0.419921875 - ] - ] - } - }, - "transformer_blocks.0.ff_context.net.2": { - "inputs": [ - [ - [ - 39.75 - ] - ] - ], - "params": { - "weight": [ - [ - 0.50390625 - ] - ] - } - }, - "transformer_blocks.1.norm1.linear": { - "inputs": [ - [ - [ - 6.4375 - ] - ] - ], - "params": { - "weight": [ - [ - 0.71484375 - ] - ] - } - }, - "transformer_blocks.1.norm1_context.linear": { - "inputs": [ - [ - [ - 6.4375 - ] - ] - ], - "params": { - "weight": [ - [ - 0.546875 - ] - ] - } - }, - "transformer_blocks.1.attn.to_q": { - "inputs": [ - [ - [ - 21.0 - ] - ] - ], - "params": { - "weight": [ - [ - 0.72265625 - ] - ] - } - }, - "transformer_blocks.1.attn.to_k": { - "inputs": [ - [ - [ - 21.0 - ] - ] - ], - "params": { - "weight": [ - [ - 1.0 - ] - ] - } - }, - "transformer_blocks.1.attn.to_v": { - "inputs": [ - [ - [ - 21.0 - ] - ] - ], - "params": { - "weight": [ - [ - 0.546875 - ] - ] - } - }, - "transformer_blocks.1.attn.add_k_proj": { - "inputs": [ - [ - [ - 36.0 - ] - ] - ], - "params": { - "weight": [ - [ - 0.3828125 - ] - ] - } - }, - "transformer_blocks.1.attn.add_v_proj": { - "inputs": [ - [ - [ - 36.0 - ] - ] - ], - "params": { - "weight": [ - [ - 0.28125 - ] - ] - } - }, - "transformer_blocks.1.attn.add_q_proj": { - "inputs": [ - [ - [ - 36.0 - ] - ] - ], - "params": { - "weight": [ - [ - 0.376953125 - ] - ] - } - }, - "transformer_blocks.1.attn.to_out.0": { - "inputs": [ - [ - [ - 8.0625 - ] - ] - ], - "params": { - "weight": [ - [ - 0.4765625 - ] - ] - } - }, - "transformer_blocks.1.attn.to_add_out": { - "inputs": [ - [ - [ - 9.75 - ] - ] - ], - "params": { - "weight": [ - [ - 0.296875 - ] - ] - } - }, - "transformer_blocks.1.ff.net.0.proj": { - "inputs": [ - [ - [ - 10.5625 - ] - ] - ], - "params": { - "weight": [ - [ - 0.47265625 - ] - ] - } - }, - "transformer_blocks.1.ff.net.2": { - "inputs": [ - [ - [ - 13.3125 - ] - ] - ], - "params": { - "weight": [ - [ - 0.59765625 - ] - ] - } - }, - "transformer_blocks.1.ff_context.net.0.proj": { - "inputs": [ - [ - [ - 67.5 - ] - ] - ], - "params": { - "weight": [ - [ - 0.40234375 - ] - ] - } - }, - "transformer_blocks.1.ff_context.net.2": { - "inputs": [ - [ - [ - 83.0 - ] - ] - ], - "params": { - "weight": [ - [ - 0.765625 - ] - ] - } - }, - "transformer_blocks.2.norm1.linear": { - "inputs": [ - [ - [ - 6.4375 - ] - ] - ], - "params": { - "weight": [ - [ - 0.82421875 - ] - ] - } - }, - "transformer_blocks.2.norm1_context.linear": { - "inputs": [ - [ - [ - 6.4375 - ] - ] - ], - "params": { - "weight": [ - [ - 0.71484375 - ] - ] - } - }, - "transformer_blocks.2.attn.to_q": { - "inputs": [ - [ - [ - 14.875 - ] - ] - ], - "params": { - "weight": [ - [ - 0.97265625 - ] - ] - } - }, - "transformer_blocks.2.attn.to_k": { - "inputs": [ - [ - [ - 14.875 - ] - ] - ], - "params": { - "weight": [ - [ - 0.7421875 - ] - ] - } - }, - "transformer_blocks.2.attn.to_v": { - "inputs": [ - [ - [ - 14.875 - ] - ] - ], - "params": { - "weight": [ - [ - 0.44140625 - ] - ] - } - }, - "transformer_blocks.2.attn.add_k_proj": { - "inputs": [ - [ - [ - 34.25 - ] - ] - ], - "params": { - "weight": [ - [ - 0.6796875 - ] - ] - } - }, - 
"transformer_blocks.2.attn.add_v_proj": { - "inputs": [ - [ - [ - 34.25 - ] - ] - ], - "params": { - "weight": [ - [ - 0.26171875 - ] - ] - } - }, - "transformer_blocks.2.attn.add_q_proj": { - "inputs": [ - [ - [ - 34.25 - ] - ] - ], - "params": { - "weight": [ - [ - 0.423828125 - ] - ] - } - }, - "transformer_blocks.2.attn.to_out.0": { - "inputs": [ - [ - [ - 11.25 - ] - ] - ], - "params": { - "weight": [ - [ - 0.59765625 - ] - ] - } - }, - "transformer_blocks.2.attn.to_add_out": { - "inputs": [ - [ - [ - 6.09375 - ] - ] - ], - "params": { - "weight": [ - [ - 0.361328125 - ] - ] - } - }, - "transformer_blocks.2.ff.net.0.proj": { - "inputs": [ - [ - [ - 6.78125 - ] - ] - ], - "params": { - "weight": [ - [ - 0.76171875 - ] - ] - } - }, - "transformer_blocks.2.ff.net.2": { - "inputs": [ - [ - [ - 7.9375 - ] - ] - ], - "params": { - "weight": [ - [ - 0.59765625 - ] - ] - } - }, - "transformer_blocks.2.ff_context.net.0.proj": { - "inputs": [ - [ - [ - 66.0 - ] - ] - ], - "params": { - "weight": [ - [ - 0.482421875 - ] - ] - } - }, - "transformer_blocks.2.ff_context.net.2": { - "inputs": [ - [ - [ - 30.625 - ] - ] - ], - "params": { - "weight": [ - [ - 0.609375 - ] - ] - } - }, - "transformer_blocks.3.norm1.linear": { - "inputs": [ - [ - [ - 6.4375 - ] - ] - ], - "params": { - "weight": [ - [ - 0.68359375 - ] - ] - } - }, - "transformer_blocks.3.norm1_context.linear": { - "inputs": [ - [ - [ - 6.4375 - ] - ] - ], - "params": { - "weight": [ - [ - 0.412109375 - ] - ] - } - }, - "transformer_blocks.3.attn.to_q": { - "inputs": [ - [ - [ - 15.5625 - ] - ] - ], - "params": { - "weight": [ - [ - 1.1640625 - ] - ] - } - }, - "transformer_blocks.3.attn.to_k": { - "inputs": [ - [ - [ - 15.5625 - ] - ] - ], - "params": { - "weight": [ - [ - 0.6953125 - ] - ] - } - }, - "transformer_blocks.3.attn.to_v": { - "inputs": [ - [ - [ - 15.5625 - ] - ] - ], - "params": { - "weight": [ - [ - 0.484375 - ] - ] - } - }, - "transformer_blocks.3.attn.add_k_proj": { - "inputs": [ - [ - [ - 27.75 - ] - ] - ], - "params": { - "weight": [ - [ - 0.474609375 - ] - ] - } - }, - "transformer_blocks.3.attn.add_v_proj": { - "inputs": [ - [ - [ - 27.75 - ] - ] - ], - "params": { - "weight": [ - [ - 0.255859375 - ] - ] - } - }, - "transformer_blocks.3.attn.add_q_proj": { - "inputs": [ - [ - [ - 27.75 - ] - ] - ], - "params": { - "weight": [ - [ - 0.41796875 - ] - ] - } - }, - "transformer_blocks.3.attn.to_out.0": { - "inputs": [ - [ - [ - 12.4375 - ] - ] - ], - "params": { - "weight": [ - [ - 0.35546875 - ] - ] - } - }, - "transformer_blocks.3.attn.to_add_out": { - "inputs": [ - [ - [ - 4.21875 - ] - ] - ], - "params": { - "weight": [ - [ - 0.390625 - ] - ] - } - }, - "transformer_blocks.3.ff.net.0.proj": { - "inputs": [ - [ - [ - 13.8125 - ] - ] - ], - "params": { - "weight": [ - [ - 0.578125 - ] - ] - } - }, - "transformer_blocks.3.ff.net.2": { - "inputs": [ - [ - [ - 20.375 - ] - ] - ], - "params": { - "weight": [ - [ - 0.765625 - ] - ] - } - }, - "transformer_blocks.3.ff_context.net.0.proj": { - "inputs": [ - [ - [ - 9.5 - ] - ] - ], - "params": { - "weight": [ - [ - 0.5234375 - ] - ] - } - }, - "transformer_blocks.3.ff_context.net.2": { - "inputs": [ - [ - [ - 19.25 - ] - ] - ], - "params": { - "weight": [ - [ - 0.53515625 - ] - ] - } - }, - "transformer_blocks.4.norm1.linear": { - "inputs": [ - [ - [ - 6.4375 - ] - ] - ], - "params": { - "weight": [ - [ - 0.8515625 - ] - ] - } - }, - "transformer_blocks.4.norm1_context.linear": { - "inputs": [ - [ - [ - 6.4375 - ] - ] - ], - "params": { - "weight": [ - [ - 0.40625 - ] - ] - 
} - }, - "transformer_blocks.4.attn.to_q": { - "inputs": [ - [ - [ - 28.125 - ] - ] - ], - "params": { - "weight": [ - [ - 1.1796875 - ] - ] - } - }, - "transformer_blocks.4.attn.to_k": { - "inputs": [ - [ - [ - 28.125 - ] - ] - ], - "params": { - "weight": [ - [ - 0.75390625 - ] - ] - } - }, - "transformer_blocks.4.attn.to_v": { - "inputs": [ - [ - [ - 28.125 - ] - ] - ], - "params": { - "weight": [ - [ - 0.494140625 - ] - ] - } - }, - "transformer_blocks.4.attn.add_k_proj": { - "inputs": [ - [ - [ - 18.125 - ] - ] - ], - "params": { - "weight": [ - [ - 0.4765625 - ] - ] - } - }, - "transformer_blocks.4.attn.add_v_proj": { - "inputs": [ - [ - [ - 18.125 - ] - ] - ], - "params": { - "weight": [ - [ - 0.236328125 - ] - ] - } - }, - "transformer_blocks.4.attn.add_q_proj": { - "inputs": [ - [ - [ - 18.125 - ] - ] - ], - "params": { - "weight": [ - [ - 0.3671875 - ] - ] - } - }, - "transformer_blocks.4.attn.to_out.0": { - "inputs": [ - [ - [ - 14.6875 - ] - ] - ], - "params": { - "weight": [ - [ - 0.345703125 - ] - ] - } - }, - "transformer_blocks.4.attn.to_add_out": { - "inputs": [ - [ - [ - 6.9375 - ] - ] - ], - "params": { - "weight": [ - [ - 0.390625 - ] - ] - } - }, - "transformer_blocks.4.ff.net.0.proj": { - "inputs": [ - [ - [ - 24.75 - ] - ] - ], - "params": { - "weight": [ - [ - 0.55859375 - ] - ] - } - }, - "transformer_blocks.4.ff.net.2": { - "inputs": [ - [ - [ - 21.0 - ] - ] - ], - "params": { - "weight": [ - [ - 1.09375 - ] - ] - } - }, - "transformer_blocks.4.ff_context.net.0.proj": { - "inputs": [ - [ - [ - 8.125 - ] - ] - ], - "params": { - "weight": [ - [ - 0.6484375 - ] - ] - } - }, - "transformer_blocks.4.ff_context.net.2": { - "inputs": [ - [ - [ - 16.5 - ] - ] - ], - "params": { - "weight": [ - [ - 0.5390625 - ] - ] - } - }, - "transformer_blocks.5.norm1.linear": { - "inputs": [ - [ - [ - 6.4375 - ] - ] - ], - "params": { - "weight": [ - [ - 0.8125 - ] - ] - } - }, - "transformer_blocks.5.norm1_context.linear": { - "inputs": [ - [ - [ - 6.4375 - ] - ] - ], - "params": { - "weight": [ - [ - 0.380859375 - ] - ] - } - }, - "transformer_blocks.5.attn.to_q": { - "inputs": [ - [ - [ - 18.75 - ] - ] - ], - "params": { - "weight": [ - [ - 0.84765625 - ] - ] - } - }, - "transformer_blocks.5.attn.to_k": { - "inputs": [ - [ - [ - 18.75 - ] - ] - ], - "params": { - "weight": [ - [ - 0.5078125 - ] - ] - } - }, - "transformer_blocks.5.attn.to_v": { - "inputs": [ - [ - [ - 18.75 - ] - ] - ], - "params": { - "weight": [ - [ - 0.380859375 - ] - ] - } - }, - "transformer_blocks.5.attn.add_k_proj": { - "inputs": [ - [ - [ - 19.75 - ] - ] - ], - "params": { - "weight": [ - [ - 0.380859375 - ] - ] - } - }, - "transformer_blocks.5.attn.add_v_proj": { - "inputs": [ - [ - [ - 19.75 - ] - ] - ], - "params": { - "weight": [ - [ - 0.298828125 - ] - ] - } - }, - "transformer_blocks.5.attn.add_q_proj": { - "inputs": [ - [ - [ - 19.75 - ] - ] - ], - "params": { - "weight": [ - [ - 0.287109375 - ] - ] - } - }, - "transformer_blocks.5.attn.to_out.0": { - "inputs": [ - [ - [ - 10.0625 - ] - ] - ], - "params": { - "weight": [ - [ - 0.28125 - ] - ] - } - }, - "transformer_blocks.5.attn.to_add_out": { - "inputs": [ - [ - [ - 9.6875 - ] - ] - ], - "params": { - "weight": [ - [ - 0.306640625 - ] - ] - } - }, - "transformer_blocks.5.ff.net.0.proj": { - "inputs": [ - [ - [ - 24.75 - ] - ] - ], - "params": { - "weight": [ - [ - 0.6328125 - ] - ] - } - }, - "transformer_blocks.5.ff.net.2": { - "inputs": [ - [ - [ - 30.875 - ] - ] - ], - "params": { - "weight": [ - [ - 1.46875 - ] - ] - } - }, - 
"transformer_blocks.5.ff_context.net.0.proj": { - "inputs": [ - [ - [ - 12.25 - ] - ] - ], - "params": { - "weight": [ - [ - 0.54296875 - ] - ] - } - }, - "transformer_blocks.5.ff_context.net.2": { - "inputs": [ - [ - [ - 18.0 - ] - ] - ], - "params": { - "weight": [ - [ - 0.55859375 - ] - ] - } - }, - "transformer_blocks.6.norm1.linear": { - "inputs": [ - [ - [ - 6.4375 - ] - ] - ], - "params": { - "weight": [ - [ - 0.78515625 - ] - ] - } - }, - "transformer_blocks.6.norm1_context.linear": { - "inputs": [ - [ - [ - 6.4375 - ] - ] - ], - "params": { - "weight": [ - [ - 0.39453125 - ] - ] - } - }, - "transformer_blocks.6.attn.to_q": { - "inputs": [ - [ - [ - 18.0 - ] - ] - ], - "params": { - "weight": [ - [ - 0.56640625 - ] - ] - } - }, - "transformer_blocks.6.attn.to_k": { - "inputs": [ - [ - [ - 18.0 - ] - ] - ], - "params": { - "weight": [ - [ - 0.53515625 - ] - ] - } - }, - "transformer_blocks.6.attn.to_v": { - "inputs": [ - [ - [ - 18.0 - ] - ] - ], - "params": { - "weight": [ - [ - 0.333984375 - ] - ] - } - }, - "transformer_blocks.6.attn.add_k_proj": { - "inputs": [ - [ - [ - 16.75 - ] - ] - ], - "params": { - "weight": [ - [ - 0.515625 - ] - ] - } - }, - "transformer_blocks.6.attn.add_v_proj": { - "inputs": [ - [ - [ - 16.75 - ] - ] - ], - "params": { - "weight": [ - [ - 0.328125 - ] - ] - } - }, - "transformer_blocks.6.attn.add_q_proj": { - "inputs": [ - [ - [ - 16.75 - ] - ] - ], - "params": { - "weight": [ - [ - 0.3515625 - ] - ] - } - }, - "transformer_blocks.6.attn.to_out.0": { - "inputs": [ - [ - [ - 9.5 - ] - ] - ], - "params": { - "weight": [ - [ - 0.47265625 - ] - ] - } - }, - "transformer_blocks.6.attn.to_add_out": { - "inputs": [ - [ - [ - 12.1875 - ] - ] - ], - "params": { - "weight": [ - [ - 0.259765625 - ] - ] - } - }, - "transformer_blocks.6.ff.net.0.proj": { - "inputs": [ - [ - [ - 9.8125 - ] - ] - ], - "params": { - "weight": [ - [ - 0.515625 - ] - ] - } - }, - "transformer_blocks.6.ff.net.2": { - "inputs": [ - [ - [ - 32.5 - ] - ] - ], - "params": { - "weight": [ - [ - 0.80859375 - ] - ] - } - }, - "transformer_blocks.6.ff_context.net.0.proj": { - "inputs": [ - [ - [ - 10.6875 - ] - ] - ], - "params": { - "weight": [ - [ - 0.62109375 - ] - ] - } - }, - "transformer_blocks.6.ff_context.net.2": { - "inputs": [ - [ - [ - 19.875 - ] - ] - ], - "params": { - "weight": [ - [ - 0.52734375 - ] - ] - } - }, - "transformer_blocks.7.norm1.linear": { - "inputs": [ - [ - [ - 6.4375 - ] - ] - ], - "params": { - "weight": [ - [ - 0.73046875 - ] - ] - } - }, - "transformer_blocks.7.norm1_context.linear": { - "inputs": [ - [ - [ - 6.4375 - ] - ] - ], - "params": { - "weight": [ - [ - 0.41015625 - ] - ] - } - }, - "transformer_blocks.7.attn.to_q": { - "inputs": [ - [ - [ - 22.25 - ] - ] - ], - "params": { - "weight": [ - [ - 0.5859375 - ] - ] - } - }, - "transformer_blocks.7.attn.to_k": { - "inputs": [ - [ - [ - 22.25 - ] - ] - ], - "params": { - "weight": [ - [ - 0.84375 - ] - ] - } - }, - "transformer_blocks.7.attn.to_v": { - "inputs": [ - [ - [ - 22.25 - ] - ] - ], - "params": { - "weight": [ - [ - 0.36328125 - ] - ] - } - }, - "transformer_blocks.7.attn.add_k_proj": { - "inputs": [ - [ - [ - 24.625 - ] - ] - ], - "params": { - "weight": [ - [ - 0.443359375 - ] - ] - } - }, - "transformer_blocks.7.attn.add_v_proj": { - "inputs": [ - [ - [ - 24.625 - ] - ] - ], - "params": { - "weight": [ - [ - 0.365234375 - ] - ] - } - }, - "transformer_blocks.7.attn.add_q_proj": { - "inputs": [ - [ - [ - 24.625 - ] - ] - ], - "params": { - "weight": [ - [ - 0.39453125 - ] - ] - } - }, - 
"transformer_blocks.7.attn.to_out.0": { - "inputs": [ - [ - [ - 12.875 - ] - ] - ], - "params": { - "weight": [ - [ - 0.37109375 - ] - ] - } - }, - "transformer_blocks.7.attn.to_add_out": { - "inputs": [ - [ - [ - 8.0 - ] - ] - ], - "params": { - "weight": [ - [ - 0.375 - ] - ] - } - }, - "transformer_blocks.7.ff.net.0.proj": { - "inputs": [ - [ - [ - 15.5625 - ] - ] - ], - "params": { - "weight": [ - [ - 0.435546875 - ] - ] - } - }, - "transformer_blocks.7.ff.net.2": { - "inputs": [ - [ - [ - 49.0 - ] - ] - ], - "params": { - "weight": [ - [ - 0.94140625 - ] - ] - } - }, - "transformer_blocks.7.ff_context.net.0.proj": { - "inputs": [ - [ - [ - 9.375 - ] - ] - ], - "params": { - "weight": [ - [ - 0.5390625 - ] - ] - } - }, - "transformer_blocks.7.ff_context.net.2": { - "inputs": [ - [ - [ - 16.75 - ] - ] - ], - "params": { - "weight": [ - [ - 0.5859375 - ] - ] - } - }, - "transformer_blocks.8.norm1.linear": { - "inputs": [ - [ - [ - 6.4375 - ] - ] - ], - "params": { - "weight": [ - [ - 0.546875 - ] - ] - } - }, - "transformer_blocks.8.norm1_context.linear": { - "inputs": [ - [ - [ - 6.4375 - ] - ] - ], - "params": { - "weight": [ - [ - 0.51953125 - ] - ] - } - }, - "transformer_blocks.8.attn.to_q": { - "inputs": [ - [ - [ - 19.875 - ] - ] - ], - "params": { - "weight": [ - [ - 0.43359375 - ] - ] - } - }, - "transformer_blocks.8.attn.to_k": { - "inputs": [ - [ - [ - 19.875 - ] - ] - ], - "params": { - "weight": [ - [ - 0.640625 - ] - ] - } - }, - "transformer_blocks.8.attn.to_v": { - "inputs": [ - [ - [ - 19.875 - ] - ] - ], - "params": { - "weight": [ - [ - 0.39453125 - ] - ] - } - }, - "transformer_blocks.8.attn.add_k_proj": { - "inputs": [ - [ - [ - 17.25 - ] - ] - ], - "params": { - "weight": [ - [ - 0.3359375 - ] - ] - } - }, - "transformer_blocks.8.attn.add_v_proj": { - "inputs": [ - [ - [ - 17.25 - ] - ] - ], - "params": { - "weight": [ - [ - 0.41015625 - ] - ] - } - }, - "transformer_blocks.8.attn.add_q_proj": { - "inputs": [ - [ - [ - 17.25 - ] - ] - ], - "params": { - "weight": [ - [ - 0.326171875 - ] - ] - } - }, - "transformer_blocks.8.attn.to_out.0": { - "inputs": [ - [ - [ - 12.9375 - ] - ] - ], - "params": { - "weight": [ - [ - 0.3203125 - ] - ] - } - }, - "transformer_blocks.8.attn.to_add_out": { - "inputs": [ - [ - [ - 14.6875 - ] - ] - ], - "params": { - "weight": [ - [ - 0.2890625 - ] - ] - } - }, - "transformer_blocks.8.ff.net.0.proj": { - "inputs": [ - [ - [ - 10.9375 - ] - ] - ], - "params": { - "weight": [ - [ - 0.51171875 - ] - ] - } - }, - "transformer_blocks.8.ff.net.2": { - "inputs": [ - [ - [ - 20.25 - ] - ] - ], - "params": { - "weight": [ - [ - 0.55078125 - ] - ] - } - }, - "transformer_blocks.8.ff_context.net.0.proj": { - "inputs": [ - [ - [ - 15.4375 - ] - ] - ], - "params": { - "weight": [ - [ - 0.494140625 - ] - ] - } - }, - "transformer_blocks.8.ff_context.net.2": { - "inputs": [ - [ - [ - 17.75 - ] - ] - ], - "params": { - "weight": [ - [ - 0.44140625 - ] - ] - } - }, - "transformer_blocks.9.norm1.linear": { - "inputs": [ - [ - [ - 6.4375 - ] - ] - ], - "params": { - "weight": [ - [ - 0.59765625 - ] - ] - } - }, - "transformer_blocks.9.norm1_context.linear": { - "inputs": [ - [ - [ - 6.4375 - ] - ] - ], - "params": { - "weight": [ - [ - 0.470703125 - ] - ] - } - }, - "transformer_blocks.9.attn.to_q": { - "inputs": [ - [ - [ - 17.75 - ] - ] - ], - "params": { - "weight": [ - [ - 0.462890625 - ] - ] - } - }, - "transformer_blocks.9.attn.to_k": { - "inputs": [ - [ - [ - 17.75 - ] - ] - ], - "params": { - "weight": [ - [ - 0.427734375 - ] - ] - } - }, - 
"transformer_blocks.9.attn.to_v": { - "inputs": [ - [ - [ - 17.75 - ] - ] - ], - "params": { - "weight": [ - [ - 0.32421875 - ] - ] - } - }, - "transformer_blocks.9.attn.add_k_proj": { - "inputs": [ - [ - [ - 18.75 - ] - ] - ], - "params": { - "weight": [ - [ - 0.44140625 - ] - ] - } - }, - "transformer_blocks.9.attn.add_v_proj": { - "inputs": [ - [ - [ - 18.75 - ] - ] - ], - "params": { - "weight": [ - [ - 0.310546875 - ] - ] - } - }, - "transformer_blocks.9.attn.add_q_proj": { - "inputs": [ - [ - [ - 18.75 - ] - ] - ], - "params": { - "weight": [ - [ - 0.271484375 - ] - ] - } - }, - "transformer_blocks.9.attn.to_out.0": { - "inputs": [ - [ - [ - 15.9375 - ] - ] - ], - "params": { - "weight": [ - [ - 0.31640625 - ] - ] - } - }, - "transformer_blocks.9.attn.to_add_out": { - "inputs": [ - [ - [ - 7.5 - ] - ] - ], - "params": { - "weight": [ - [ - 0.302734375 - ] - ] - } - }, - "transformer_blocks.9.ff.net.0.proj": { - "inputs": [ - [ - [ - 12.0 - ] - ] - ], - "params": { - "weight": [ - [ - 0.5859375 - ] - ] - } - }, - "transformer_blocks.9.ff.net.2": { - "inputs": [ - [ - [ - 17.875 - ] - ] - ], - "params": { - "weight": [ - [ - 0.640625 - ] - ] - } - }, - "transformer_blocks.9.ff_context.net.0.proj": { - "inputs": [ - [ - [ - 13.6875 - ] - ] - ], - "params": { - "weight": [ - [ - 0.5078125 - ] - ] - } - }, - "transformer_blocks.9.ff_context.net.2": { - "inputs": [ - [ - [ - 22.625 - ] - ] - ], - "params": { - "weight": [ - [ - 0.55078125 - ] - ] - } - }, - "transformer_blocks.10.norm1.linear": { - "inputs": [ - [ - [ - 6.4375 - ] - ] - ], - "params": { - "weight": [ - [ - 0.58984375 - ] - ] - } - }, - "transformer_blocks.10.norm1_context.linear": { - "inputs": [ - [ - [ - 6.4375 - ] - ] - ], - "params": { - "weight": [ - [ - 0.3828125 - ] - ] - } - }, - "transformer_blocks.10.attn.to_q": { - "inputs": [ - [ - [ - 14.5 - ] - ] - ], - "params": { - "weight": [ - [ - 0.380859375 - ] - ] - } - }, - "transformer_blocks.10.attn.to_k": { - "inputs": [ - [ - [ - 14.5 - ] - ] - ], - "params": { - "weight": [ - [ - 0.478515625 - ] - ] - } - }, - "transformer_blocks.10.attn.to_v": { - "inputs": [ - [ - [ - 14.5 - ] - ] - ], - "params": { - "weight": [ - [ - 0.310546875 - ] - ] - } - }, - "transformer_blocks.10.attn.add_k_proj": { - "inputs": [ - [ - [ - 14.5625 - ] - ] - ], - "params": { - "weight": [ - [ - 0.333984375 - ] - ] - } - }, - "transformer_blocks.10.attn.add_v_proj": { - "inputs": [ - [ - [ - 14.5625 - ] - ] - ], - "params": { - "weight": [ - [ - 0.24609375 - ] - ] - } - }, - "transformer_blocks.10.attn.add_q_proj": { - "inputs": [ - [ - [ - 14.5625 - ] - ] - ], - "params": { - "weight": [ - [ - 0.376953125 - ] - ] - } - }, - "transformer_blocks.10.attn.to_out.0": { - "inputs": [ - [ - [ - 14.125 - ] - ] - ], - "params": { - "weight": [ - [ - 0.263671875 - ] - ] - } - }, - "transformer_blocks.10.attn.to_add_out": { - "inputs": [ - [ - [ - 8.3125 - ] - ] - ], - "params": { - "weight": [ - [ - 0.25390625 - ] - ] - } - }, - "transformer_blocks.10.ff.net.0.proj": { - "inputs": [ - [ - [ - 13.6875 - ] - ] - ], - "params": { - "weight": [ - [ - 0.48828125 - ] - ] - } - }, - "transformer_blocks.10.ff.net.2": { - "inputs": [ - [ - [ - 17.625 - ] - ] - ], - "params": { - "weight": [ - [ - 0.8203125 - ] - ] - } - }, - "transformer_blocks.10.ff_context.net.0.proj": { - "inputs": [ - [ - [ - 70.5 - ] - ] - ], - "params": { - "weight": [ - [ - 0.451171875 - ] - ] - } - }, - "transformer_blocks.10.ff_context.net.2": { - "inputs": [ - [ - [ - 34.75 - ] - ] - ], - "params": { - "weight": [ - [ - 0.71875 
- ] - ] - } - }, - "transformer_blocks.11.norm1.linear": { - "inputs": [ - [ - [ - 6.4375 - ] - ] - ], - "params": { - "weight": [ - [ - 0.640625 - ] - ] - } - }, - "transformer_blocks.11.norm1_context.linear": { - "inputs": [ - [ - [ - 6.4375 - ] - ] - ], - "params": { - "weight": [ - [ - 0.455078125 - ] - ] - } - }, - "transformer_blocks.11.attn.to_q": { - "inputs": [ - [ - [ - 17.375 - ] - ] - ], - "params": { - "weight": [ - [ - 0.52734375 - ] - ] - } - }, - "transformer_blocks.11.attn.to_k": { - "inputs": [ - [ - [ - 17.375 - ] - ] - ], - "params": { - "weight": [ - [ - 0.427734375 - ] - ] - } - }, - "transformer_blocks.11.attn.to_v": { - "inputs": [ - [ - [ - 17.375 - ] - ] - ], - "params": { - "weight": [ - [ - 0.33203125 - ] - ] - } - }, - "transformer_blocks.11.attn.add_k_proj": { - "inputs": [ - [ - [ - 31.625 - ] - ] - ], - "params": { - "weight": [ - [ - 0.7109375 - ] - ] - } - }, - "transformer_blocks.11.attn.add_v_proj": { - "inputs": [ - [ - [ - 31.625 - ] - ] - ], - "params": { - "weight": [ - [ - 0.2294921875 - ] - ] - } - }, - "transformer_blocks.11.attn.add_q_proj": { - "inputs": [ - [ - [ - 31.625 - ] - ] - ], - "params": { - "weight": [ - [ - 0.3125 - ] - ] - } - }, - "transformer_blocks.11.attn.to_out.0": { - "inputs": [ - [ - [ - 16.25 - ] - ] - ], - "params": { - "weight": [ - [ - 0.37109375 - ] - ] - } - }, - "transformer_blocks.11.attn.to_add_out": { - "inputs": [ - [ - [ - 9.75 - ] - ] - ], - "params": { - "weight": [ - [ - 0.318359375 - ] - ] - } - }, - "transformer_blocks.11.ff.net.0.proj": { - "inputs": [ - [ - [ - 10.9375 - ] - ] - ], - "params": { - "weight": [ - [ - 0.498046875 - ] - ] - } - }, - "transformer_blocks.11.ff.net.2": { - "inputs": [ - [ - [ - 15.75 - ] - ] - ], - "params": { - "weight": [ - [ - 0.6953125 - ] - ] - } - }, - "transformer_blocks.11.ff_context.net.0.proj": { - "inputs": [ - [ - [ - 42.75 - ] - ] - ], - "params": { - "weight": [ - [ - 0.58203125 - ] - ] - } - }, - "transformer_blocks.11.ff_context.net.2": { - "inputs": [ - [ - [ - 34.75 - ] - ] - ], - "params": { - "weight": [ - [ - 0.470703125 - ] - ] - } - }, - "transformer_blocks.12.norm1.linear": { - "inputs": [ - [ - [ - 6.4375 - ] - ] - ], - "params": { - "weight": [ - [ - 0.68359375 - ] - ] - } - }, - "transformer_blocks.12.norm1_context.linear": { - "inputs": [ - [ - [ - 6.4375 - ] - ] - ], - "params": { - "weight": [ - [ - 0.41796875 - ] - ] - } - }, - "transformer_blocks.12.attn.to_q": { - "inputs": [ - [ - [ - 15.6875 - ] - ] - ], - "params": { - "weight": [ - [ - 0.341796875 - ] - ] - } - }, - "transformer_blocks.12.attn.to_k": { - "inputs": [ - [ - [ - 15.6875 - ] - ] - ], - "params": { - "weight": [ - [ - 0.4609375 - ] - ] - } - }, - "transformer_blocks.12.attn.to_v": { - "inputs": [ - [ - [ - 15.6875 - ] - ] - ], - "params": { - "weight": [ - [ - 0.380859375 - ] - ] - } - }, - "transformer_blocks.12.attn.add_k_proj": { - "inputs": [ - [ - [ - 32.5 - ] - ] - ], - "params": { - "weight": [ - [ - 0.5390625 - ] - ] - } - }, - "transformer_blocks.12.attn.add_v_proj": { - "inputs": [ - [ - [ - 32.5 - ] - ] - ], - "params": { - "weight": [ - [ - 0.322265625 - ] - ] - } - }, - "transformer_blocks.12.attn.add_q_proj": { - "inputs": [ - [ - [ - 32.5 - ] - ] - ], - "params": { - "weight": [ - [ - 0.3359375 - ] - ] - } - }, - "transformer_blocks.12.attn.to_out.0": { - "inputs": [ - [ - [ - 29.375 - ] - ] - ], - "params": { - "weight": [ - [ - 0.322265625 - ] - ] - } - }, - "transformer_blocks.12.attn.to_add_out": { - "inputs": [ - [ - [ - 15.125 - ] - ] - ], - "params": { - 
"weight": [ - [ - 0.29296875 - ] - ] - } - }, - "transformer_blocks.12.ff.net.0.proj": { - "inputs": [ - [ - [ - 8.375 - ] - ] - ], - "params": { - "weight": [ - [ - 0.59375 - ] - ] - } - }, - "transformer_blocks.12.ff.net.2": { - "inputs": [ - [ - [ - 21.5 - ] - ] - ], - "params": { - "weight": [ - [ - 0.8359375 - ] - ] - } - }, - "transformer_blocks.12.ff_context.net.0.proj": { - "inputs": [ - [ - [ - 97.5 - ] - ] - ], - "params": { - "weight": [ - [ - 0.41015625 - ] - ] - } - }, - "transformer_blocks.12.ff_context.net.2": { - "inputs": [ - [ - [ - 25.375 - ] - ] - ], - "params": { - "weight": [ - [ - 0.5234375 - ] - ] - } - }, - "transformer_blocks.13.norm1.linear": { - "inputs": [ - [ - [ - 6.4375 - ] - ] - ], - "params": { - "weight": [ - [ - 0.65625 - ] - ] - } - }, - "transformer_blocks.13.norm1_context.linear": { - "inputs": [ - [ - [ - 6.4375 - ] - ] - ], - "params": { - "weight": [ - [ - 0.375 - ] - ] - } - }, - "transformer_blocks.13.attn.to_q": { - "inputs": [ - [ - [ - 15.375 - ] - ] - ], - "params": { - "weight": [ - [ - 0.447265625 - ] - ] - } - }, - "transformer_blocks.13.attn.to_k": { - "inputs": [ - [ - [ - 15.375 - ] - ] - ], - "params": { - "weight": [ - [ - 0.83203125 - ] - ] - } - }, - "transformer_blocks.13.attn.to_v": { - "inputs": [ - [ - [ - 15.375 - ] - ] - ], - "params": { - "weight": [ - [ - 0.5078125 - ] - ] - } - }, - "transformer_blocks.13.attn.add_k_proj": { - "inputs": [ - [ - [ - 32.75 - ] - ] - ], - "params": { - "weight": [ - [ - 0.578125 - ] - ] - } - }, - "transformer_blocks.13.attn.add_v_proj": { - "inputs": [ - [ - [ - 32.75 - ] - ] - ], - "params": { - "weight": [ - [ - 0.25 - ] - ] - } - }, - "transformer_blocks.13.attn.add_q_proj": { - "inputs": [ - [ - [ - 32.75 - ] - ] - ], - "params": { - "weight": [ - [ - 0.33203125 - ] - ] - } - }, - "transformer_blocks.13.attn.to_out.0": { - "inputs": [ - [ - [ - 16.5 - ] - ] - ], - "params": { - "weight": [ - [ - 0.314453125 - ] - ] - } - }, - "transformer_blocks.13.attn.to_add_out": { - "inputs": [ - [ - [ - 13.75 - ] - ] - ], - "params": { - "weight": [ - [ - 0.27734375 - ] - ] - } - }, - "transformer_blocks.13.ff.net.0.proj": { - "inputs": [ - [ - [ - 7.84375 - ] - ] - ], - "params": { - "weight": [ - [ - 0.5859375 - ] - ] - } - }, - "transformer_blocks.13.ff.net.2": { - "inputs": [ - [ - [ - 21.375 - ] - ] - ], - "params": { - "weight": [ - [ - 1.015625 - ] - ] - } - }, - "transformer_blocks.13.ff_context.net.0.proj": { - "inputs": [ - [ - [ - 138.0 - ] - ] - ], - "params": { - "weight": [ - [ - 0.369140625 - ] - ] - } - }, - "transformer_blocks.13.ff_context.net.2": { - "inputs": [ - [ - [ - 20.25 - ] - ] - ], - "params": { - "weight": [ - [ - 0.46875 - ] - ] - } - }, - "transformer_blocks.14.norm1.linear": { - "inputs": [ - [ - [ - 6.4375 - ] - ] - ], - "params": { - "weight": [ - [ - 0.68359375 - ] - ] - } - }, - "transformer_blocks.14.norm1_context.linear": { - "inputs": [ - [ - [ - 6.4375 - ] - ] - ], - "params": { - "weight": [ - [ - 0.3984375 - ] - ] - } - }, - "transformer_blocks.14.attn.to_q": { - "inputs": [ - [ - [ - 23.625 - ] - ] - ], - "params": { - "weight": [ - [ - 0.46484375 - ] - ] - } - }, - "transformer_blocks.14.attn.to_k": { - "inputs": [ - [ - [ - 23.625 - ] - ] - ], - "params": { - "weight": [ - [ - 0.578125 - ] - ] - } - }, - "transformer_blocks.14.attn.to_v": { - "inputs": [ - [ - [ - 23.625 - ] - ] - ], - "params": { - "weight": [ - [ - 0.73046875 - ] - ] - } - }, - "transformer_blocks.14.attn.add_k_proj": { - "inputs": [ - [ - [ - 17.375 - ] - ] - ], - "params": { - 
"weight": [ - [ - 0.49609375 - ] - ] - } - }, - "transformer_blocks.14.attn.add_v_proj": { - "inputs": [ - [ - [ - 17.375 - ] - ] - ], - "params": { - "weight": [ - [ - 0.279296875 - ] - ] - } - }, - "transformer_blocks.14.attn.add_q_proj": { - "inputs": [ - [ - [ - 17.375 - ] - ] - ], - "params": { - "weight": [ - [ - 0.392578125 - ] - ] - } - }, - "transformer_blocks.14.attn.to_out.0": { - "inputs": [ - [ - [ - 16.625 - ] - ] - ], - "params": { - "weight": [ - [ - 0.41015625 - ] - ] - } - }, - "transformer_blocks.14.attn.to_add_out": { - "inputs": [ - [ - [ - 11.375 - ] - ] - ], - "params": { - "weight": [ - [ - 0.318359375 - ] - ] - } - }, - "transformer_blocks.14.ff.net.0.proj": { - "inputs": [ - [ - [ - 6.34375 - ] - ] - ], - "params": { - "weight": [ - [ - 0.53125 - ] - ] - } - }, - "transformer_blocks.14.ff.net.2": { - "inputs": [ - [ - [ - 20.75 - ] - ] - ], - "params": { - "weight": [ - [ - 0.73046875 - ] - ] - } - }, - "transformer_blocks.14.ff_context.net.0.proj": { - "inputs": [ - [ - [ - 51.75 - ] - ] - ], - "params": { - "weight": [ - [ - 0.44140625 - ] - ] - } - }, - "transformer_blocks.14.ff_context.net.2": { - "inputs": [ - [ - [ - 32.25 - ] - ] - ], - "params": { - "weight": [ - [ - 0.7265625 - ] - ] - } - }, - "transformer_blocks.15.norm1.linear": { - "inputs": [ - [ - [ - 6.4375 - ] - ] - ], - "params": { - "weight": [ - [ - 0.78125 - ] - ] - } - }, - "transformer_blocks.15.norm1_context.linear": { - "inputs": [ - [ - [ - 6.4375 - ] - ] - ], - "params": { - "weight": [ - [ - 0.435546875 - ] - ] - } - }, - "transformer_blocks.15.attn.to_q": { - "inputs": [ - [ - [ - 15.375 - ] - ] - ], - "params": { - "weight": [ - [ - 0.56640625 - ] - ] - } - }, - "transformer_blocks.15.attn.to_k": { - "inputs": [ - [ - [ - 15.375 - ] - ] - ], - "params": { - "weight": [ - [ - 0.625 - ] - ] - } - }, - "transformer_blocks.15.attn.to_v": { - "inputs": [ - [ - [ - 15.375 - ] - ] - ], - "params": { - "weight": [ - [ - 0.515625 - ] - ] - } - }, - "transformer_blocks.15.attn.add_k_proj": { - "inputs": [ - [ - [ - 18.875 - ] - ] - ], - "params": { - "weight": [ - [ - 0.5625 - ] - ] - } - }, - "transformer_blocks.15.attn.add_v_proj": { - "inputs": [ - [ - [ - 18.875 - ] - ] - ], - "params": { - "weight": [ - [ - 0.34765625 - ] - ] - } - }, - "transformer_blocks.15.attn.add_q_proj": { - "inputs": [ - [ - [ - 18.875 - ] - ] - ], - "params": { - "weight": [ - [ - 0.2890625 - ] - ] - } - }, - "transformer_blocks.15.attn.to_out.0": { - "inputs": [ - [ - [ - 15.875 - ] - ] - ], - "params": { - "weight": [ - [ - 0.365234375 - ] - ] - } - }, - "transformer_blocks.15.attn.to_add_out": { - "inputs": [ - [ - [ - 9.125 - ] - ] - ], - "params": { - "weight": [ - [ - 0.275390625 - ] - ] - } - }, - "transformer_blocks.15.ff.net.0.proj": { - "inputs": [ - [ - [ - 5.6875 - ] - ] - ], - "params": { - "weight": [ - [ - 0.45703125 - ] - ] - } - }, - "transformer_blocks.15.ff.net.2": { - "inputs": [ - [ - [ - 24.125 - ] - ] - ], - "params": { - "weight": [ - [ - 0.8671875 - ] - ] - } - }, - "transformer_blocks.15.ff_context.net.0.proj": { - "inputs": [ - [ - [ - 45.25 - ] - ] - ], - "params": { - "weight": [ - [ - 0.5390625 - ] - ] - } - }, - "transformer_blocks.15.ff_context.net.2": { - "inputs": [ - [ - [ - 26.375 - ] - ] - ], - "params": { - "weight": [ - [ - 0.46875 - ] - ] - } - }, - "transformer_blocks.16.norm1.linear": { - "inputs": [ - [ - [ - 6.4375 - ] - ] - ], - "params": { - "weight": [ - [ - 0.890625 - ] - ] - } - }, - "transformer_blocks.16.norm1_context.linear": { - "inputs": [ - [ - [ - 6.4375 - ] - 
] - ], - "params": { - "weight": [ - [ - 0.52734375 - ] - ] - } - }, - "transformer_blocks.16.attn.to_q": { - "inputs": [ - [ - [ - 20.75 - ] - ] - ], - "params": { - "weight": [ - [ - 0.42578125 - ] - ] - } - }, - "transformer_blocks.16.attn.to_k": { - "inputs": [ - [ - [ - 20.75 - ] - ] - ], - "params": { - "weight": [ - [ - 0.68359375 - ] - ] - } - }, - "transformer_blocks.16.attn.to_v": { - "inputs": [ - [ - [ - 20.75 - ] - ] - ], - "params": { - "weight": [ - [ - 0.396484375 - ] - ] - } - }, - "transformer_blocks.16.attn.add_k_proj": { - "inputs": [ - [ - [ - 25.625 - ] - ] - ], - "params": { - "weight": [ - [ - 0.5859375 - ] - ] - } - }, - "transformer_blocks.16.attn.add_v_proj": { - "inputs": [ - [ - [ - 25.625 - ] - ] - ], - "params": { - "weight": [ - [ - 0.330078125 - ] - ] - } - }, - "transformer_blocks.16.attn.add_q_proj": { - "inputs": [ - [ - [ - 25.625 - ] - ] - ], - "params": { - "weight": [ - [ - 0.30078125 - ] - ] - } - }, - "transformer_blocks.16.attn.to_out.0": { - "inputs": [ - [ - [ - 23.875 - ] - ] - ], - "params": { - "weight": [ - [ - 0.298828125 - ] - ] - } - }, - "transformer_blocks.16.attn.to_add_out": { - "inputs": [ - [ - [ - 13.0 - ] - ] - ], - "params": { - "weight": [ - [ - 0.3125 - ] - ] - } - }, - "transformer_blocks.16.ff.net.0.proj": { - "inputs": [ - [ - [ - 8.0625 - ] - ] - ], - "params": { - "weight": [ - [ - 0.66796875 - ] - ] - } - }, - "transformer_blocks.16.ff.net.2": { - "inputs": [ - [ - [ - 34.5 - ] - ] - ], - "params": { - "weight": [ - [ - 1.0546875 - ] - ] - } - }, - "transformer_blocks.16.ff_context.net.0.proj": { - "inputs": [ - [ - [ - 44.25 - ] - ] - ], - "params": { - "weight": [ - [ - 1.0390625 - ] - ] - } - }, - "transformer_blocks.16.ff_context.net.2": { - "inputs": [ - [ - [ - 76.0 - ] - ] - ], - "params": { - "weight": [ - [ - 0.71875 - ] - ] - } - }, - "transformer_blocks.17.norm1.linear": { - "inputs": [ - [ - [ - 6.4375 - ] - ] - ], - "params": { - "weight": [ - [ - 0.9296875 - ] - ] - } - }, - "transformer_blocks.17.norm1_context.linear": { - "inputs": [ - [ - [ - 6.4375 - ] - ] - ], - "params": { - "weight": [ - [ - 0.83203125 - ] - ] - } - }, - "transformer_blocks.17.attn.to_q": { - "inputs": [ - [ - [ - 18.25 - ] - ] - ], - "params": { - "weight": [ - [ - 0.55859375 - ] - ] - } - }, - "transformer_blocks.17.attn.to_k": { - "inputs": [ - [ - [ - 18.25 - ] - ] - ], - "params": { - "weight": [ - [ - 0.86328125 - ] - ] - } - }, - "transformer_blocks.17.attn.to_v": { - "inputs": [ - [ - [ - 18.25 - ] - ] - ], - "params": { - "weight": [ - [ - 0.423828125 - ] - ] - } - }, - "transformer_blocks.17.attn.add_k_proj": { - "inputs": [ - [ - [ - 35.25 - ] - ] - ], - "params": { - "weight": [ - [ - 0.6796875 - ] - ] - } - }, - "transformer_blocks.17.attn.add_v_proj": { - "inputs": [ - [ - [ - 35.25 - ] - ] - ], - "params": { - "weight": [ - [ - 0.47265625 - ] - ] - } - }, - "transformer_blocks.17.attn.add_q_proj": { - "inputs": [ - [ - [ - 35.25 - ] - ] - ], - "params": { - "weight": [ - [ - 0.328125 - ] - ] - } - }, - "transformer_blocks.17.attn.to_out.0": { - "inputs": [ - [ - [ - 20.5 - ] - ] - ], - "params": { - "weight": [ - [ - 0.34765625 - ] - ] - } - }, - "transformer_blocks.17.attn.to_add_out": { - "inputs": [ - [ - [ - 20.5 - ] - ] - ], - "params": { - "weight": [ - [ - 0.24609375 - ] - ] - } - }, - "transformer_blocks.17.ff.net.0.proj": { - "inputs": [ - [ - [ - 7.65625 - ] - ] - ], - "params": { - "weight": [ - [ - 0.5234375 - ] - ] - } - }, - "transformer_blocks.17.ff.net.2": { - "inputs": [ - [ - [ - 65.5 - ] - ] - ], - 
"params": { - "weight": [ - [ - 1.15625 - ] - ] - } - }, - "transformer_blocks.17.ff_context.net.0.proj": { - "inputs": [ - [ - [ - 42.0 - ] - ] - ], - "params": { - "weight": [ - [ - 0.6328125 - ] - ] - } - }, - "transformer_blocks.17.ff_context.net.2": { - "inputs": [ - [ - [ - 71.5 - ] - ] - ], - "params": { - "weight": [ - [ - 0.75390625 - ] - ] - } - }, - "transformer_blocks.18.norm1.linear": { - "inputs": [ - [ - [ - 6.4375 - ] - ] - ], - "params": { - "weight": [ - [ - 1.5234375 - ] - ] - } - }, - "transformer_blocks.18.norm1_context.linear": { - "inputs": [ - [ - [ - 6.4375 - ] - ] - ], - "params": { - "weight": [ - [ - 0.7421875 - ] - ] - } - }, - "transformer_blocks.18.attn.to_q": { - "inputs": [ - [ - [ - 16.125 - ] - ] - ], - "params": { - "weight": [ - [ - 0.546875 - ] - ] - } - }, - "transformer_blocks.18.attn.to_k": { - "inputs": [ - [ - [ - 16.125 - ] - ] - ], - "params": { - "weight": [ - [ - 0.94921875 - ] - ] - } - }, - "transformer_blocks.18.attn.to_v": { - "inputs": [ - [ - [ - 16.125 - ] - ] - ], - "params": { - "weight": [ - [ - 0.375 - ] - ] - } - }, - "transformer_blocks.18.attn.add_k_proj": { - "inputs": [ - [ - [ - 25.125 - ] - ] - ], - "params": { - "weight": [ - [ - 0.4453125 - ] - ] - } - }, - "transformer_blocks.18.attn.add_v_proj": { - "inputs": [ - [ - [ - 25.125 - ] - ] - ], - "params": { - "weight": [ - [ - 0.369140625 - ] - ] - } - }, - "transformer_blocks.18.attn.add_q_proj": { - "inputs": [ - [ - [ - 25.125 - ] - ] - ], - "params": { - "weight": [ - [ - 0.384765625 - ] - ] - } - }, - "transformer_blocks.18.attn.to_out.0": { - "inputs": [ - [ - [ - 29.5 - ] - ] - ], - "params": { - "weight": [ - [ - 0.44140625 - ] - ] - } - }, - "transformer_blocks.18.attn.to_add_out": { - "inputs": [ - [ - [ - 17.125 - ] - ] - ], - "params": { - "weight": [ - [ - 0.36328125 - ] - ] - } - }, - "transformer_blocks.18.ff.net.0.proj": { - "inputs": [ - [ - [ - 11.8125 - ] - ] - ], - "params": { - "weight": [ - [ - 0.69921875 - ] - ] - } - }, - "transformer_blocks.18.ff.net.2": { - "inputs": [ - [ - [ - 217.0 - ] - ] - ], - "params": { - "weight": [ - [ - 1.4609375 - ] - ] - } - }, - "transformer_blocks.18.ff_context.net.0.proj": { - "inputs": [ - [ - [ - 138.0 - ] - ] - ], - "params": { - "weight": [ - [ - 0.9375 - ] - ] - } - }, - "transformer_blocks.18.ff_context.net.2": { - "inputs": [ - [ - [ - 225.0 - ] - ] - ], - "params": { - "weight": [ - [ - 0.80859375 - ] - ] - } - }, - "single_transformer_blocks.0.norm.linear": { - "inputs": [ - [ - [ - 6.4375 - ] - ] - ], - "params": { - "weight": [ - [ - 0.59375 - ] - ] - } - }, - "single_transformer_blocks.0.proj_mlp": { - "inputs": [ - [ - [ - 48.75 - ] - ] - ], - "params": { - "weight": [ - [ - 0.5390625 - ] - ] - } - }, - "single_transformer_blocks.0.proj_out": { - "inputs": [ - [ - [ - 20.625 - ] - ] - ], - "params": { - "weight": [ - [ - 1.3671875 - ] - ] - } - }, - "single_transformer_blocks.0.attn.to_q": { - "inputs": [ - [ - [ - 48.75 - ] - ] - ], - "params": { - "weight": [ - [ - 0.384765625 - ] - ] - } - }, - "single_transformer_blocks.0.attn.to_k": { - "inputs": [ - [ - [ - 48.75 - ] - ] - ], - "params": { - "weight": [ - [ - 0.51953125 - ] - ] - } - }, - "single_transformer_blocks.0.attn.to_v": { - "inputs": [ - [ - [ - 48.75 - ] - ] - ], - "params": { - "weight": [ - [ - 0.279296875 - ] - ] - } - }, - "single_transformer_blocks.1.norm.linear": { - "inputs": [ - [ - [ - 6.4375 - ] - ] - ], - "params": { - "weight": [ - [ - 1.0390625 - ] - ] - } - }, - "single_transformer_blocks.1.proj_mlp": { - "inputs": [ - [ - [ 
- 39.25 - ] - ] - ], - "params": { - "weight": [ - [ - 0.51171875 - ] - ] - } - }, - "single_transformer_blocks.1.proj_out": { - "inputs": [ - [ - [ - 18.375 - ] - ] - ], - "params": { - "weight": [ - [ - 1.5 - ] - ] - } - }, - "single_transformer_blocks.1.attn.to_q": { - "inputs": [ - [ - [ - 39.25 - ] - ] - ], - "params": { - "weight": [ - [ - 0.373046875 - ] - ] - } - }, - "single_transformer_blocks.1.attn.to_k": { - "inputs": [ - [ - [ - 39.25 - ] - ] - ], - "params": { - "weight": [ - [ - 0.484375 - ] - ] - } - }, - "single_transformer_blocks.1.attn.to_v": { - "inputs": [ - [ - [ - 39.25 - ] - ] - ], - "params": { - "weight": [ - [ - 0.318359375 - ] - ] - } - }, - "single_transformer_blocks.2.norm.linear": { - "inputs": [ - [ - [ - 6.4375 - ] - ] - ], - "params": { - "weight": [ - [ - 1.203125 - ] - ] - } - }, - "single_transformer_blocks.2.proj_mlp": { - "inputs": [ - [ - [ - 24.75 - ] - ] - ], - "params": { - "weight": [ - [ - 0.5234375 - ] - ] - } - }, - "single_transformer_blocks.2.proj_out": { - "inputs": [ - [ - [ - 19.0 - ] - ] - ], - "params": { - "weight": [ - [ - 1.4609375 - ] - ] - } - }, - "single_transformer_blocks.2.attn.to_q": { - "inputs": [ - [ - [ - 24.75 - ] - ] - ], - "params": { - "weight": [ - [ - 0.380859375 - ] - ] - } - }, - "single_transformer_blocks.2.attn.to_k": { - "inputs": [ - [ - [ - 24.75 - ] - ] - ], - "params": { - "weight": [ - [ - 0.478515625 - ] - ] - } - }, - "single_transformer_blocks.2.attn.to_v": { - "inputs": [ - [ - [ - 24.75 - ] - ] - ], - "params": { - "weight": [ - [ - 0.359375 - ] - ] - } - }, - "single_transformer_blocks.3.norm.linear": { - "inputs": [ - [ - [ - 6.4375 - ] - ] - ], - "params": { - "weight": [ - [ - 1.359375 - ] - ] - } - }, - "single_transformer_blocks.3.proj_mlp": { - "inputs": [ - [ - [ - 34.0 - ] - ] - ], - "params": { - "weight": [ - [ - 0.484375 - ] - ] - } - }, - "single_transformer_blocks.3.proj_out": { - "inputs": [ - [ - [ - 18.125 - ] - ] - ], - "params": { - "weight": [ - [ - 0.91796875 - ] - ] - } - }, - "single_transformer_blocks.3.attn.to_q": { - "inputs": [ - [ - [ - 34.0 - ] - ] - ], - "params": { - "weight": [ - [ - 0.40625 - ] - ] - } - }, - "single_transformer_blocks.3.attn.to_k": { - "inputs": [ - [ - [ - 34.0 - ] - ] - ], - "params": { - "weight": [ - [ - 0.58203125 - ] - ] - } - }, - "single_transformer_blocks.3.attn.to_v": { - "inputs": [ - [ - [ - 34.0 - ] - ] - ], - "params": { - "weight": [ - [ - 0.34375 - ] - ] - } - }, - "single_transformer_blocks.4.norm.linear": { - "inputs": [ - [ - [ - 6.4375 - ] - ] - ], - "params": { - "weight": [ - [ - 1.6015625 - ] - ] - } - }, - "single_transformer_blocks.4.proj_mlp": { - "inputs": [ - [ - [ - 35.75 - ] - ] - ], - "params": { - "weight": [ - [ - 0.458984375 - ] - ] - } - }, - "single_transformer_blocks.4.proj_out": { - "inputs": [ - [ - [ - 18.375 - ] - ] - ], - "params": { - "weight": [ - [ - 1.2421875 - ] - ] - } - }, - "single_transformer_blocks.4.attn.to_q": { - "inputs": [ - [ - [ - 35.75 - ] - ] - ], - "params": { - "weight": [ - [ - 0.333984375 - ] - ] - } - }, - "single_transformer_blocks.4.attn.to_k": { - "inputs": [ - [ - [ - 35.75 - ] - ] - ], - "params": { - "weight": [ - [ - 0.63671875 - ] - ] - } - }, - "single_transformer_blocks.4.attn.to_v": { - "inputs": [ - [ - [ - 35.75 - ] - ] - ], - "params": { - "weight": [ - [ - 0.234375 - ] - ] - } - }, - "single_transformer_blocks.5.norm.linear": { - "inputs": [ - [ - [ - 6.4375 - ] - ] - ], - "params": { - "weight": [ - [ - 1.6640625 - ] - ] - } - }, - "single_transformer_blocks.5.proj_mlp": 
{ - "inputs": [ - [ - [ - 27.875 - ] - ] - ], - "params": { - "weight": [ - [ - 0.5390625 - ] - ] - } - }, - "single_transformer_blocks.5.proj_out": { - "inputs": [ - [ - [ - 16.375 - ] - ] - ], - "params": { - "weight": [ - [ - 0.77734375 - ] - ] - } - }, - "single_transformer_blocks.5.attn.to_q": { - "inputs": [ - [ - [ - 27.875 - ] - ] - ], - "params": { - "weight": [ - [ - 0.3984375 - ] - ] - } - }, - "single_transformer_blocks.5.attn.to_k": { - "inputs": [ - [ - [ - 27.875 - ] - ] - ], - "params": { - "weight": [ - [ - 0.5078125 - ] - ] - } - }, - "single_transformer_blocks.5.attn.to_v": { - "inputs": [ - [ - [ - 27.875 - ] - ] - ], - "params": { - "weight": [ - [ - 0.23046875 - ] - ] - } - }, - "single_transformer_blocks.6.norm.linear": { - "inputs": [ - [ - [ - 6.4375 - ] - ] - ], - "params": { - "weight": [ - [ - 2.015625 - ] - ] - } - }, - "single_transformer_blocks.6.proj_mlp": { - "inputs": [ - [ - [ - 28.25 - ] - ] - ], - "params": { - "weight": [ - [ - 0.55078125 - ] - ] - } - }, - "single_transformer_blocks.6.proj_out": { - "inputs": [ - [ - [ - 19.125 - ] - ] - ], - "params": { - "weight": [ - [ - 1.2578125 - ] - ] - } - }, - "single_transformer_blocks.6.attn.to_q": { - "inputs": [ - [ - [ - 28.25 - ] - ] - ], - "params": { - "weight": [ - [ - 0.32421875 - ] - ] - } - }, - "single_transformer_blocks.6.attn.to_k": { - "inputs": [ - [ - [ - 28.25 - ] - ] - ], - "params": { - "weight": [ - [ - 0.6015625 - ] - ] - } - }, - "single_transformer_blocks.6.attn.to_v": { - "inputs": [ - [ - [ - 28.25 - ] - ] - ], - "params": { - "weight": [ - [ - 0.2451171875 - ] - ] - } - }, - "single_transformer_blocks.7.norm.linear": { - "inputs": [ - [ - [ - 6.4375 - ] - ] - ], - "params": { - "weight": [ - [ - 1.796875 - ] - ] - } - }, - "single_transformer_blocks.7.proj_mlp": { - "inputs": [ - [ - [ - 27.75 - ] - ] - ], - "params": { - "weight": [ - [ - 0.51171875 - ] - ] - } - }, - "single_transformer_blocks.7.proj_out": { - "inputs": [ - [ - [ - 15.125 - ] - ] - ], - "params": { - "weight": [ - [ - 0.75 - ] - ] - } - }, - "single_transformer_blocks.7.attn.to_q": { - "inputs": [ - [ - [ - 27.75 - ] - ] - ], - "params": { - "weight": [ - [ - 0.35546875 - ] - ] - } - }, - "single_transformer_blocks.7.attn.to_k": { - "inputs": [ - [ - [ - 27.75 - ] - ] - ], - "params": { - "weight": [ - [ - 0.5703125 - ] - ] - } - }, - "single_transformer_blocks.7.attn.to_v": { - "inputs": [ - [ - [ - 27.75 - ] - ] - ], - "params": { - "weight": [ - [ - 0.2578125 - ] - ] - } - }, - "single_transformer_blocks.8.norm.linear": { - "inputs": [ - [ - [ - 6.4375 - ] - ] - ], - "params": { - "weight": [ - [ - 2.078125 - ] - ] - } - }, - "single_transformer_blocks.8.proj_mlp": { - "inputs": [ - [ - [ - 21.125 - ] - ] - ], - "params": { - "weight": [ - [ - 0.57421875 - ] - ] - } - }, - "single_transformer_blocks.8.proj_out": { - "inputs": [ - [ - [ - 16.5 - ] - ] - ], - "params": { - "weight": [ - [ - 1.5546875 - ] - ] - } - }, - "single_transformer_blocks.8.attn.to_q": { - "inputs": [ - [ - [ - 21.125 - ] - ] - ], - "params": { - "weight": [ - [ - 0.423828125 - ] - ] - } - }, - "single_transformer_blocks.8.attn.to_k": { - "inputs": [ - [ - [ - 21.125 - ] - ] - ], - "params": { - "weight": [ - [ - 0.65234375 - ] - ] - } - }, - "single_transformer_blocks.8.attn.to_v": { - "inputs": [ - [ - [ - 21.125 - ] - ] - ], - "params": { - "weight": [ - [ - 0.3203125 - ] - ] - } - }, - "single_transformer_blocks.9.norm.linear": { - "inputs": [ - [ - [ - 6.4375 - ] - ] - ], - "params": { - "weight": [ - [ - 2.34375 - ] - ] - } - }, - 
"single_transformer_blocks.9.proj_mlp": { - "inputs": [ - [ - [ - 21.375 - ] - ] - ], - "params": { - "weight": [ - [ - 0.890625 - ] - ] - } - }, - "single_transformer_blocks.9.proj_out": { - "inputs": [ - [ - [ - 21.875 - ] - ] - ], - "params": { - "weight": [ - [ - 1.234375 - ] - ] - } - }, - "single_transformer_blocks.9.attn.to_q": { - "inputs": [ - [ - [ - 21.375 - ] - ] - ], - "params": { - "weight": [ - [ - 0.53125 - ] - ] - } - }, - "single_transformer_blocks.9.attn.to_k": { - "inputs": [ - [ - [ - 21.375 - ] - ] - ], - "params": { - "weight": [ - [ - 0.5390625 - ] - ] - } - }, - "single_transformer_blocks.9.attn.to_v": { - "inputs": [ - [ - [ - 21.375 - ] - ] - ], - "params": { - "weight": [ - [ - 0.396484375 - ] - ] - } - }, - "single_transformer_blocks.10.norm.linear": { - "inputs": [ - [ - [ - 6.4375 - ] - ] - ], - "params": { - "weight": [ - [ - 2.203125 - ] - ] - } - }, - "single_transformer_blocks.10.proj_mlp": { - "inputs": [ - [ - [ - 15.875 - ] - ] - ], - "params": { - "weight": [ - [ - 0.65234375 - ] - ] - } - }, - "single_transformer_blocks.10.proj_out": { - "inputs": [ - [ - [ - 18.625 - ] - ] - ], - "params": { - "weight": [ - [ - 1.109375 - ] - ] - } - }, - "single_transformer_blocks.10.attn.to_q": { - "inputs": [ - [ - [ - 15.875 - ] - ] - ], - "params": { - "weight": [ - [ - 0.439453125 - ] - ] - } - }, - "single_transformer_blocks.10.attn.to_k": { - "inputs": [ - [ - [ - 15.875 - ] - ] - ], - "params": { - "weight": [ - [ - 0.55859375 - ] - ] - } - }, - "single_transformer_blocks.10.attn.to_v": { - "inputs": [ - [ - [ - 15.875 - ] - ] - ], - "params": { - "weight": [ - [ - 0.251953125 - ] - ] - } - }, - "single_transformer_blocks.11.norm.linear": { - "inputs": [ - [ - [ - 6.4375 - ] - ] - ], - "params": { - "weight": [ - [ - 1.8125 - ] - ] - } - }, - "single_transformer_blocks.11.proj_mlp": { - "inputs": [ - [ - [ - 23.125 - ] - ] - ], - "params": { - "weight": [ - [ - 0.6640625 - ] - ] - } - }, - "single_transformer_blocks.11.proj_out": { - "inputs": [ - [ - [ - 22.625 - ] - ] - ], - "params": { - "weight": [ - [ - 1.046875 - ] - ] - } - }, - "single_transformer_blocks.11.attn.to_q": { - "inputs": [ - [ - [ - 23.125 - ] - ] - ], - "params": { - "weight": [ - [ - 0.4296875 - ] - ] - } - }, - "single_transformer_blocks.11.attn.to_k": { - "inputs": [ - [ - [ - 23.125 - ] - ] - ], - "params": { - "weight": [ - [ - 0.53125 - ] - ] - } - }, - "single_transformer_blocks.11.attn.to_v": { - "inputs": [ - [ - [ - 23.125 - ] - ] - ], - "params": { - "weight": [ - [ - 0.4375 - ] - ] - } - }, - "single_transformer_blocks.12.norm.linear": { - "inputs": [ - [ - [ - 6.4375 - ] - ] - ], - "params": { - "weight": [ - [ - 1.7578125 - ] - ] - } - }, - "single_transformer_blocks.12.proj_mlp": { - "inputs": [ - [ - [ - 19.875 - ] - ] - ], - "params": { - "weight": [ - [ - 0.8125 - ] - ] - } - }, - "single_transformer_blocks.12.proj_out": { - "inputs": [ - [ - [ - 15.625 - ] - ] - ], - "params": { - "weight": [ - [ - 0.953125 - ] - ] - } - }, - "single_transformer_blocks.12.attn.to_q": { - "inputs": [ - [ - [ - 19.875 - ] - ] - ], - "params": { - "weight": [ - [ - 0.419921875 - ] - ] - } - }, - "single_transformer_blocks.12.attn.to_k": { - "inputs": [ - [ - [ - 19.875 - ] - ] - ], - "params": { - "weight": [ - [ - 0.486328125 - ] - ] - } - }, - "single_transformer_blocks.12.attn.to_v": { - "inputs": [ - [ - [ - 19.875 - ] - ] - ], - "params": { - "weight": [ - [ - 0.44921875 - ] - ] - } - }, - "single_transformer_blocks.13.norm.linear": { - "inputs": [ - [ - [ - 6.4375 - ] - ] - ], - 
"params": { - "weight": [ - [ - 1.609375 - ] - ] - } - }, - "single_transformer_blocks.13.proj_mlp": { - "inputs": [ - [ - [ - 21.75 - ] - ] - ], - "params": { - "weight": [ - [ - 0.85546875 - ] - ] - } - }, - "single_transformer_blocks.13.proj_out": { - "inputs": [ - [ - [ - 23.25 - ] - ] - ], - "params": { - "weight": [ - [ - 1.0078125 - ] - ] - } - }, - "single_transformer_blocks.13.attn.to_q": { - "inputs": [ - [ - [ - 21.75 - ] - ] - ], - "params": { - "weight": [ - [ - 0.5390625 - ] - ] - } - }, - "single_transformer_blocks.13.attn.to_k": { - "inputs": [ - [ - [ - 21.75 - ] - ] - ], - "params": { - "weight": [ - [ - 0.44140625 - ] - ] - } - }, - "single_transformer_blocks.13.attn.to_v": { - "inputs": [ - [ - [ - 21.75 - ] - ] - ], - "params": { - "weight": [ - [ - 0.45703125 - ] - ] - } - }, - "single_transformer_blocks.14.norm.linear": { - "inputs": [ - [ - [ - 6.4375 - ] - ] - ], - "params": { - "weight": [ - [ - 2.015625 - ] - ] - } - }, - "single_transformer_blocks.14.proj_mlp": { - "inputs": [ - [ - [ - 26.75 - ] - ] - ], - "params": { - "weight": [ - [ - 1.109375 - ] - ] - } - }, - "single_transformer_blocks.14.proj_out": { - "inputs": [ - [ - [ - 14.6875 - ] - ] - ], - "params": { - "weight": [ - [ - 1.0390625 - ] - ] - } - }, - "single_transformer_blocks.14.attn.to_q": { - "inputs": [ - [ - [ - 26.75 - ] - ] - ], - "params": { - "weight": [ - [ - 0.392578125 - ] - ] - } - }, - "single_transformer_blocks.14.attn.to_k": { - "inputs": [ - [ - [ - 26.75 - ] - ] - ], - "params": { - "weight": [ - [ - 0.52734375 - ] - ] - } - }, - "single_transformer_blocks.14.attn.to_v": { - "inputs": [ - [ - [ - 26.75 - ] - ] - ], - "params": { - "weight": [ - [ - 0.5234375 - ] - ] - } - }, - "single_transformer_blocks.15.norm.linear": { - "inputs": [ - [ - [ - 6.4375 - ] - ] - ], - "params": { - "weight": [ - [ - 2.03125 - ] - ] - } - }, - "single_transformer_blocks.15.proj_mlp": { - "inputs": [ - [ - [ - 19.625 - ] - ] - ], - "params": { - "weight": [ - [ - 0.78125 - ] - ] - } - }, - "single_transformer_blocks.15.proj_out": { - "inputs": [ - [ - [ - 18.375 - ] - ] - ], - "params": { - "weight": [ - [ - 1.2578125 - ] - ] - } - }, - "single_transformer_blocks.15.attn.to_q": { - "inputs": [ - [ - [ - 19.625 - ] - ] - ], - "params": { - "weight": [ - [ - 0.49609375 - ] - ] - } - }, - "single_transformer_blocks.15.attn.to_k": { - "inputs": [ - [ - [ - 19.625 - ] - ] - ], - "params": { - "weight": [ - [ - 0.58203125 - ] - ] - } - }, - "single_transformer_blocks.15.attn.to_v": { - "inputs": [ - [ - [ - 19.625 - ] - ] - ], - "params": { - "weight": [ - [ - 0.57421875 - ] - ] - } - }, - "single_transformer_blocks.16.norm.linear": { - "inputs": [ - [ - [ - 6.4375 - ] - ] - ], - "params": { - "weight": [ - [ - 2.140625 - ] - ] - } - }, - "single_transformer_blocks.16.proj_mlp": { - "inputs": [ - [ - [ - 23.5 - ] - ] - ], - "params": { - "weight": [ - [ - 1.1796875 - ] - ] - } - }, - "single_transformer_blocks.16.proj_out": { - "inputs": [ - [ - [ - 15.4375 - ] - ] - ], - "params": { - "weight": [ - [ - 1.140625 - ] - ] - } - }, - "single_transformer_blocks.16.attn.to_q": { - "inputs": [ - [ - [ - 23.5 - ] - ] - ], - "params": { - "weight": [ - [ - 0.625 - ] - ] - } - }, - "single_transformer_blocks.16.attn.to_k": { - "inputs": [ - [ - [ - 23.5 - ] - ] - ], - "params": { - "weight": [ - [ - 0.5546875 - ] - ] - } - }, - "single_transformer_blocks.16.attn.to_v": { - "inputs": [ - [ - [ - 23.5 - ] - ] - ], - "params": { - "weight": [ - [ - 0.546875 - ] - ] - } - }, - 
"single_transformer_blocks.17.norm.linear": { - "inputs": [ - [ - [ - 6.4375 - ] - ] - ], - "params": { - "weight": [ - [ - 1.6015625 - ] - ] - } - }, - "single_transformer_blocks.17.proj_mlp": { - "inputs": [ - [ - [ - 19.875 - ] - ] - ], - "params": { - "weight": [ - [ - 0.7265625 - ] - ] - } - }, - "single_transformer_blocks.17.proj_out": { - "inputs": [ - [ - [ - 23.375 - ] - ] - ], - "params": { - "weight": [ - [ - 1.140625 - ] - ] - } - }, - "single_transformer_blocks.17.attn.to_q": { - "inputs": [ - [ - [ - 19.875 - ] - ] - ], - "params": { - "weight": [ - [ - 0.609375 - ] - ] - } - }, - "single_transformer_blocks.17.attn.to_k": { - "inputs": [ - [ - [ - 19.875 - ] - ] - ], - "params": { - "weight": [ - [ - 0.65625 - ] - ] - } - }, - "single_transformer_blocks.17.attn.to_v": { - "inputs": [ - [ - [ - 19.875 - ] - ] - ], - "params": { - "weight": [ - [ - 0.81640625 - ] - ] - } - }, - "single_transformer_blocks.18.norm.linear": { - "inputs": [ - [ - [ - 6.4375 - ] - ] - ], - "params": { - "weight": [ - [ - 1.8203125 - ] - ] - } - }, - "single_transformer_blocks.18.proj_mlp": { - "inputs": [ - [ - [ - 26.0 - ] - ] - ], - "params": { - "weight": [ - [ - 1.125 - ] - ] - } - }, - "single_transformer_blocks.18.proj_out": { - "inputs": [ - [ - [ - 15.8125 - ] - ] - ], - "params": { - "weight": [ - [ - 1.09375 - ] - ] - } - }, - "single_transformer_blocks.18.attn.to_q": { - "inputs": [ - [ - [ - 26.0 - ] - ] - ], - "params": { - "weight": [ - [ - 0.5390625 - ] - ] - } - }, - "single_transformer_blocks.18.attn.to_k": { - "inputs": [ - [ - [ - 26.0 - ] - ] - ], - "params": { - "weight": [ - [ - 0.62109375 - ] - ] - } - }, - "single_transformer_blocks.18.attn.to_v": { - "inputs": [ - [ - [ - 26.0 - ] - ] - ], - "params": { - "weight": [ - [ - 0.51171875 - ] - ] - } - }, - "single_transformer_blocks.19.norm.linear": { - "inputs": [ - [ - [ - 6.4375 - ] - ] - ], - "params": { - "weight": [ - [ - 1.7578125 - ] - ] - } - }, - "single_transformer_blocks.19.proj_mlp": { - "inputs": [ - [ - [ - 15.5 - ] - ] - ], - "params": { - "weight": [ - [ - 0.87109375 - ] - ] - } - }, - "single_transformer_blocks.19.proj_out": { - "inputs": [ - [ - [ - 16.75 - ] - ] - ], - "params": { - "weight": [ - [ - 1.4296875 - ] - ] - } - }, - "single_transformer_blocks.19.attn.to_q": { - "inputs": [ - [ - [ - 15.5 - ] - ] - ], - "params": { - "weight": [ - [ - 0.55078125 - ] - ] - } - }, - "single_transformer_blocks.19.attn.to_k": { - "inputs": [ - [ - [ - 15.5 - ] - ] - ], - "params": { - "weight": [ - [ - 0.55078125 - ] - ] - } - }, - "single_transformer_blocks.19.attn.to_v": { - "inputs": [ - [ - [ - 15.5 - ] - ] - ], - "params": { - "weight": [ - [ - 0.54296875 - ] - ] - } - }, - "single_transformer_blocks.20.norm.linear": { - "inputs": [ - [ - [ - 6.4375 - ] - ] - ], - "params": { - "weight": [ - [ - 2.046875 - ] - ] - } - }, - "single_transformer_blocks.20.proj_mlp": { - "inputs": [ - [ - [ - 17.875 - ] - ] - ], - "params": { - "weight": [ - [ - 0.87890625 - ] - ] - } - }, - "single_transformer_blocks.20.proj_out": { - "inputs": [ - [ - [ - 12.0 - ] - ] - ], - "params": { - "weight": [ - [ - 1.1484375 - ] - ] - } - }, - "single_transformer_blocks.20.attn.to_q": { - "inputs": [ - [ - [ - 17.875 - ] - ] - ], - "params": { - "weight": [ - [ - 0.57421875 - ] - ] - } - }, - "single_transformer_blocks.20.attn.to_k": { - "inputs": [ - [ - [ - 17.875 - ] - ] - ], - "params": { - "weight": [ - [ - 0.50390625 - ] - ] - } - }, - "single_transformer_blocks.20.attn.to_v": { - "inputs": [ - [ - [ - 17.875 - ] - ] - ], - "params": 
{ - "weight": [ - [ - 0.55078125 - ] - ] - } - }, - "single_transformer_blocks.21.norm.linear": { - "inputs": [ - [ - [ - 6.4375 - ] - ] - ], - "params": { - "weight": [ - [ - 1.5234375 - ] - ] - } - }, - "single_transformer_blocks.21.proj_mlp": { - "inputs": [ - [ - [ - 19.875 - ] - ] - ], - "params": { - "weight": [ - [ - 0.9140625 - ] - ] - } - }, - "single_transformer_blocks.21.proj_out": { - "inputs": [ - [ - [ - 13.0625 - ] - ] - ], - "params": { - "weight": [ - [ - 1.4140625 - ] - ] - } - }, - "single_transformer_blocks.21.attn.to_q": { - "inputs": [ - [ - [ - 19.875 - ] - ] - ], - "params": { - "weight": [ - [ - 0.482421875 - ] - ] - } - }, - "single_transformer_blocks.21.attn.to_k": { - "inputs": [ - [ - [ - 19.875 - ] - ] - ], - "params": { - "weight": [ - [ - 0.46875 - ] - ] - } - }, - "single_transformer_blocks.21.attn.to_v": { - "inputs": [ - [ - [ - 19.875 - ] - ] - ], - "params": { - "weight": [ - [ - 0.42578125 - ] - ] - } - }, - "single_transformer_blocks.22.norm.linear": { - "inputs": [ - [ - [ - 6.4375 - ] - ] - ], - "params": { - "weight": [ - [ - 1.2265625 - ] - ] - } - }, - "single_transformer_blocks.22.proj_mlp": { - "inputs": [ - [ - [ - 14.625 - ] - ] - ], - "params": { - "weight": [ - [ - 0.703125 - ] - ] - } - }, - "single_transformer_blocks.22.proj_out": { - "inputs": [ - [ - [ - 12.9375 - ] - ] - ], - "params": { - "weight": [ - [ - 0.87109375 - ] - ] - } - }, - "single_transformer_blocks.22.attn.to_q": { - "inputs": [ - [ - [ - 14.625 - ] - ] - ], - "params": { - "weight": [ - [ - 0.455078125 - ] - ] - } - }, - "single_transformer_blocks.22.attn.to_k": { - "inputs": [ - [ - [ - 14.625 - ] - ] - ], - "params": { - "weight": [ - [ - 0.6015625 - ] - ] - } - }, - "single_transformer_blocks.22.attn.to_v": { - "inputs": [ - [ - [ - 14.625 - ] - ] - ], - "params": { - "weight": [ - [ - 0.609375 - ] - ] - } - }, - "single_transformer_blocks.23.norm.linear": { - "inputs": [ - [ - [ - 6.4375 - ] - ] - ], - "params": { - "weight": [ - [ - 1.6640625 - ] - ] - } - }, - "single_transformer_blocks.23.proj_mlp": { - "inputs": [ - [ - [ - 20.875 - ] - ] - ], - "params": { - "weight": [ - [ - 0.71484375 - ] - ] - } - }, - "single_transformer_blocks.23.proj_out": { - "inputs": [ - [ - [ - 11.5625 - ] - ] - ], - "params": { - "weight": [ - [ - 1.0546875 - ] - ] - } - }, - "single_transformer_blocks.23.attn.to_q": { - "inputs": [ - [ - [ - 20.875 - ] - ] - ], - "params": { - "weight": [ - [ - 0.498046875 - ] - ] - } - }, - "single_transformer_blocks.23.attn.to_k": { - "inputs": [ - [ - [ - 20.875 - ] - ] - ], - "params": { - "weight": [ - [ - 0.7578125 - ] - ] - } - }, - "single_transformer_blocks.23.attn.to_v": { - "inputs": [ - [ - [ - 20.875 - ] - ] - ], - "params": { - "weight": [ - [ - 0.57421875 - ] - ] - } - }, - "single_transformer_blocks.24.norm.linear": { - "inputs": [ - [ - [ - 6.4375 - ] - ] - ], - "params": { - "weight": [ - [ - 1.7890625 - ] - ] - } - }, - "single_transformer_blocks.24.proj_mlp": { - "inputs": [ - [ - [ - 16.125 - ] - ] - ], - "params": { - "weight": [ - [ - 1.09375 - ] - ] - } - }, - "single_transformer_blocks.24.proj_out": { - "inputs": [ - [ - [ - 13.0 - ] - ] - ], - "params": { - "weight": [ - [ - 0.95703125 - ] - ] - } - }, - "single_transformer_blocks.24.attn.to_q": { - "inputs": [ - [ - [ - 16.125 - ] - ] - ], - "params": { - "weight": [ - [ - 0.453125 - ] - ] - } - }, - "single_transformer_blocks.24.attn.to_k": { - "inputs": [ - [ - [ - 16.125 - ] - ] - ], - "params": { - "weight": [ - [ - 0.57421875 - ] - ] - } - }, - 
"single_transformer_blocks.24.attn.to_v": { - "inputs": [ - [ - [ - 16.125 - ] - ] - ], - "params": { - "weight": [ - [ - 0.345703125 - ] - ] - } - }, - "single_transformer_blocks.25.norm.linear": { - "inputs": [ - [ - [ - 6.4375 - ] - ] - ], - "params": { - "weight": [ - [ - 1.078125 - ] - ] - } - }, - "single_transformer_blocks.25.proj_mlp": { - "inputs": [ - [ - [ - 15.625 - ] - ] - ], - "params": { - "weight": [ - [ - 1.15625 - ] - ] - } - }, - "single_transformer_blocks.25.proj_out": { - "inputs": [ - [ - [ - 13.0 - ] - ] - ], - "params": { - "weight": [ - [ - 1.3359375 - ] - ] - } - }, - "single_transformer_blocks.25.attn.to_q": { - "inputs": [ - [ - [ - 15.625 - ] - ] - ], - "params": { - "weight": [ - [ - 0.50390625 - ] - ] - } - }, - "single_transformer_blocks.25.attn.to_k": { - "inputs": [ - [ - [ - 15.625 - ] - ] - ], - "params": { - "weight": [ - [ - 0.515625 - ] - ] - } - }, - "single_transformer_blocks.25.attn.to_v": { - "inputs": [ - [ - [ - 15.625 - ] - ] - ], - "params": { - "weight": [ - [ - 0.3984375 - ] - ] - } - }, - "single_transformer_blocks.26.norm.linear": { - "inputs": [ - [ - [ - 6.4375 - ] - ] - ], - "params": { - "weight": [ - [ - 1.28125 - ] - ] - } - }, - "single_transformer_blocks.26.proj_mlp": { - "inputs": [ - [ - [ - 15.875 - ] - ] - ], - "params": { - "weight": [ - [ - 0.90234375 - ] - ] - } - }, - "single_transformer_blocks.26.proj_out": { - "inputs": [ - [ - [ - 13.0 - ] - ] - ], - "params": { - "weight": [ - [ - 1.453125 - ] - ] - } - }, - "single_transformer_blocks.26.attn.to_q": { - "inputs": [ - [ - [ - 15.875 - ] - ] - ], - "params": { - "weight": [ - [ - 0.490234375 - ] - ] - } - }, - "single_transformer_blocks.26.attn.to_k": { - "inputs": [ - [ - [ - 15.875 - ] - ] - ], - "params": { - "weight": [ - [ - 0.455078125 - ] - ] - } - }, - "single_transformer_blocks.26.attn.to_v": { - "inputs": [ - [ - [ - 15.875 - ] - ] - ], - "params": { - "weight": [ - [ - 0.51171875 - ] - ] - } - }, - "single_transformer_blocks.27.norm.linear": { - "inputs": [ - [ - [ - 6.4375 - ] - ] - ], - "params": { - "weight": [ - [ - 1.109375 - ] - ] - } - }, - "single_transformer_blocks.27.proj_mlp": { - "inputs": [ - [ - [ - 17.0 - ] - ] - ], - "params": { - "weight": [ - [ - 0.75 - ] - ] - } - }, - "single_transformer_blocks.27.proj_out": { - "inputs": [ - [ - [ - 13.875 - ] - ] - ], - "params": { - "weight": [ - [ - 0.80859375 - ] - ] - } - }, - "single_transformer_blocks.27.attn.to_q": { - "inputs": [ - [ - [ - 17.0 - ] - ] - ], - "params": { - "weight": [ - [ - 0.57421875 - ] - ] - } - }, - "single_transformer_blocks.27.attn.to_k": { - "inputs": [ - [ - [ - 17.0 - ] - ] - ], - "params": { - "weight": [ - [ - 0.6953125 - ] - ] - } - }, - "single_transformer_blocks.27.attn.to_v": { - "inputs": [ - [ - [ - 17.0 - ] - ] - ], - "params": { - "weight": [ - [ - 0.59765625 - ] - ] - } - }, - "single_transformer_blocks.28.norm.linear": { - "inputs": [ - [ - [ - 6.4375 - ] - ] - ], - "params": { - "weight": [ - [ - 1.1953125 - ] - ] - } - }, - "single_transformer_blocks.28.proj_mlp": { - "inputs": [ - [ - [ - 24.625 - ] - ] - ], - "params": { - "weight": [ - [ - 0.87109375 - ] - ] - } - }, - "single_transformer_blocks.28.proj_out": { - "inputs": [ - [ - [ - 13.9375 - ] - ] - ], - "params": { - "weight": [ - [ - 0.8828125 - ] - ] - } - }, - "single_transformer_blocks.28.attn.to_q": { - "inputs": [ - [ - [ - 24.625 - ] - ] - ], - "params": { - "weight": [ - [ - 0.55859375 - ] - ] - } - }, - "single_transformer_blocks.28.attn.to_k": { - "inputs": [ - [ - [ - 24.625 - ] - ] - ], - 
"params": { - "weight": [ - [ - 1.875 - ] - ] - } - }, - "single_transformer_blocks.28.attn.to_v": { - "inputs": [ - [ - [ - 24.625 - ] - ] - ], - "params": { - "weight": [ - [ - 0.48828125 - ] - ] - } - }, - "single_transformer_blocks.29.norm.linear": { - "inputs": [ - [ - [ - 6.4375 - ] - ] - ], - "params": { - "weight": [ - [ - 1.171875 - ] - ] - } - }, - "single_transformer_blocks.29.proj_mlp": { - "inputs": [ - [ - [ - 18.25 - ] - ] - ], - "params": { - "weight": [ - [ - 0.6953125 - ] - ] - } - }, - "single_transformer_blocks.29.proj_out": { - "inputs": [ - [ - [ - 13.25 - ] - ] - ], - "params": { - "weight": [ - [ - 1.109375 - ] - ] - } - }, - "single_transformer_blocks.29.attn.to_q": { - "inputs": [ - [ - [ - 18.25 - ] - ] - ], - "params": { - "weight": [ - [ - 0.53515625 - ] - ] - } - }, - "single_transformer_blocks.29.attn.to_k": { - "inputs": [ - [ - [ - 18.25 - ] - ] - ], - "params": { - "weight": [ - [ - 0.48828125 - ] - ] - } - }, - "single_transformer_blocks.29.attn.to_v": { - "inputs": [ - [ - [ - 18.25 - ] - ] - ], - "params": { - "weight": [ - [ - 0.52734375 - ] - ] - } - }, - "single_transformer_blocks.30.norm.linear": { - "inputs": [ - [ - [ - 6.4375 - ] - ] - ], - "params": { - "weight": [ - [ - 1.546875 - ] - ] - } - }, - "single_transformer_blocks.30.proj_mlp": { - "inputs": [ - [ - [ - 18.0 - ] - ] - ], - "params": { - "weight": [ - [ - 0.6328125 - ] - ] - } - }, - "single_transformer_blocks.30.proj_out": { - "inputs": [ - [ - [ - 16.0 - ] - ] - ], - "params": { - "weight": [ - [ - 1.3984375 - ] - ] - } - }, - "single_transformer_blocks.30.attn.to_q": { - "inputs": [ - [ - [ - 18.0 - ] - ] - ], - "params": { - "weight": [ - [ - 0.515625 - ] - ] - } - }, - "single_transformer_blocks.30.attn.to_k": { - "inputs": [ - [ - [ - 18.0 - ] - ] - ], - "params": { - "weight": [ - [ - 1.5859375 - ] - ] - } - }, - "single_transformer_blocks.30.attn.to_v": { - "inputs": [ - [ - [ - 18.0 - ] - ] - ], - "params": { - "weight": [ - [ - 0.54296875 - ] - ] - } - }, - "single_transformer_blocks.31.norm.linear": { - "inputs": [ - [ - [ - 6.4375 - ] - ] - ], - "params": { - "weight": [ - [ - 1.5859375 - ] - ] - } - }, - "single_transformer_blocks.31.proj_mlp": { - "inputs": [ - [ - [ - 22.375 - ] - ] - ], - "params": { - "weight": [ - [ - 0.65625 - ] - ] - } - }, - "single_transformer_blocks.31.proj_out": { - "inputs": [ - [ - [ - 15.5625 - ] - ] - ], - "params": { - "weight": [ - [ - 1.390625 - ] - ] - } - }, - "single_transformer_blocks.31.attn.to_q": { - "inputs": [ - [ - [ - 22.375 - ] - ] - ], - "params": { - "weight": [ - [ - 0.640625 - ] - ] - } - }, - "single_transformer_blocks.31.attn.to_k": { - "inputs": [ - [ - [ - 22.375 - ] - ] - ], - "params": { - "weight": [ - [ - 0.47265625 - ] - ] - } - }, - "single_transformer_blocks.31.attn.to_v": { - "inputs": [ - [ - [ - 22.375 - ] - ] - ], - "params": { - "weight": [ - [ - 0.73828125 - ] - ] - } - }, - "single_transformer_blocks.32.norm.linear": { - "inputs": [ - [ - [ - 6.4375 - ] - ] - ], - "params": { - "weight": [ - [ - 1.5078125 - ] - ] - } - }, - "single_transformer_blocks.32.proj_mlp": { - "inputs": [ - [ - [ - 21.625 - ] - ] - ], - "params": { - "weight": [ - [ - 0.7578125 - ] - ] - } - }, - "single_transformer_blocks.32.proj_out": { - "inputs": [ - [ - [ - 19.0 - ] - ] - ], - "params": { - "weight": [ - [ - 1.171875 - ] - ] - } - }, - "single_transformer_blocks.32.attn.to_q": { - "inputs": [ - [ - [ - 21.625 - ] - ] - ], - "params": { - "weight": [ - [ - 0.55859375 - ] - ] - } - }, - "single_transformer_blocks.32.attn.to_k": 
{ - "inputs": [ - [ - [ - 21.625 - ] - ] - ], - "params": { - "weight": [ - [ - 1.421875 - ] - ] - } - }, - "single_transformer_blocks.32.attn.to_v": { - "inputs": [ - [ - [ - 21.625 - ] - ] - ], - "params": { - "weight": [ - [ - 0.52734375 - ] - ] - } - }, - "single_transformer_blocks.33.norm.linear": { - "inputs": [ - [ - [ - 6.4375 - ] - ] - ], - "params": { - "weight": [ - [ - 1.6796875 - ] - ] - } - }, - "single_transformer_blocks.33.proj_mlp": { - "inputs": [ - [ - [ - 20.625 - ] - ] - ], - "params": { - "weight": [ - [ - 0.90234375 - ] - ] - } - }, - "single_transformer_blocks.33.proj_out": { - "inputs": [ - [ - [ - 17.625 - ] - ] - ], - "params": { - "weight": [ - [ - 1.5 - ] - ] - } - }, - "single_transformer_blocks.33.attn.to_q": { - "inputs": [ - [ - [ - 20.625 - ] - ] - ], - "params": { - "weight": [ - [ - 0.5078125 - ] - ] - } - }, - "single_transformer_blocks.33.attn.to_k": { - "inputs": [ - [ - [ - 20.625 - ] - ] - ], - "params": { - "weight": [ - [ - 1.3828125 - ] - ] - } - }, - "single_transformer_blocks.33.attn.to_v": { - "inputs": [ - [ - [ - 20.625 - ] - ] - ], - "params": { - "weight": [ - [ - 0.4609375 - ] - ] - } - }, - "single_transformer_blocks.34.norm.linear": { - "inputs": [ - [ - [ - 6.4375 - ] - ] - ], - "params": { - "weight": [ - [ - 1.640625 - ] - ] - } - }, - "single_transformer_blocks.34.proj_mlp": { - "inputs": [ - [ - [ - 21.0 - ] - ] - ], - "params": { - "weight": [ - [ - 0.9765625 - ] - ] - } - }, - "single_transformer_blocks.34.proj_out": { - "inputs": [ - [ - [ - 36.25 - ] - ] - ], - "params": { - "weight": [ - [ - 3.109375 - ] - ] - } - }, - "single_transformer_blocks.34.attn.to_q": { - "inputs": [ - [ - [ - 21.0 - ] - ] - ], - "params": { - "weight": [ - [ - 0.498046875 - ] - ] - } - }, - "single_transformer_blocks.34.attn.to_k": { - "inputs": [ - [ - [ - 21.0 - ] - ] - ], - "params": { - "weight": [ - [ - 2.015625 - ] - ] - } - }, - "single_transformer_blocks.34.attn.to_v": { - "inputs": [ - [ - [ - 21.0 - ] - ] - ], - "params": { - "weight": [ - [ - 0.447265625 - ] - ] - } - }, - "single_transformer_blocks.35.norm.linear": { - "inputs": [ - [ - [ - 6.4375 - ] - ] - ], - "params": { - "weight": [ - [ - 1.8125 - ] - ] - } - }, - "single_transformer_blocks.35.proj_mlp": { - "inputs": [ - [ - [ - 25.5 - ] - ] - ], - "params": { - "weight": [ - [ - 0.8828125 - ] - ] - } - }, - "single_transformer_blocks.35.proj_out": { - "inputs": [ - [ - [ - 24.125 - ] - ] - ], - "params": { - "weight": [ - [ - 3.0625 - ] - ] - } - }, - "single_transformer_blocks.35.attn.to_q": { - "inputs": [ - [ - [ - 25.5 - ] - ] - ], - "params": { - "weight": [ - [ - 0.63671875 - ] - ] - } - }, - "single_transformer_blocks.35.attn.to_k": { - "inputs": [ - [ - [ - 25.5 - ] - ] - ], - "params": { - "weight": [ - [ - 0.62890625 - ] - ] - } - }, - "single_transformer_blocks.35.attn.to_v": { - "inputs": [ - [ - [ - 25.5 - ] - ] - ], - "params": { - "weight": [ - [ - 0.5078125 - ] - ] - } - }, - "single_transformer_blocks.36.norm.linear": { - "inputs": [ - [ - [ - 6.4375 - ] - ] - ], - "params": { - "weight": [ - [ - 1.171875 - ] - ] - } - }, - "single_transformer_blocks.36.proj_mlp": { - "inputs": [ - [ - [ - 24.75 - ] - ] - ], - "params": { - "weight": [ - [ - 0.796875 - ] - ] - } - }, - "single_transformer_blocks.36.proj_out": { - "inputs": [ - [ - [ - 31.625 - ] - ] - ], - "params": { - "weight": [ - [ - 1.1015625 - ] - ] - } - }, - "single_transformer_blocks.36.attn.to_q": { - "inputs": [ - [ - [ - 24.75 - ] - ] - ], - "params": { - "weight": [ - [ - 0.53125 - ] - ] - } - }, - 
"single_transformer_blocks.36.attn.to_k": { - "inputs": [ - [ - [ - 24.75 - ] - ] - ], - "params": { - "weight": [ - [ - 0.6328125 - ] - ] - } - }, - "single_transformer_blocks.36.attn.to_v": { - "inputs": [ - [ - [ - 24.75 - ] - ] - ], - "params": { - "weight": [ - [ - 0.59375 - ] - ] - } - }, - "single_transformer_blocks.37.norm.linear": { - "inputs": [ - [ - [ - 6.4375 - ] - ] - ], - "params": { - "weight": [ - [ - 0.546875 - ] - ] - } - }, - "single_transformer_blocks.37.proj_mlp": { - "inputs": [ - [ - [ - 34.5 - ] - ] - ], - "params": { - "weight": [ - [ - 0.46875 - ] - ] - } - }, - "single_transformer_blocks.37.proj_out": { - "inputs": [ - [ - [ - 33.75 - ] - ] - ], - "params": { - "weight": [ - [ - 0.88671875 - ] - ] - } - }, - "single_transformer_blocks.37.attn.to_q": { - "inputs": [ - [ - [ - 34.5 - ] - ] - ], - "params": { - "weight": [ - [ - 0.46484375 - ] - ] - } - }, - "single_transformer_blocks.37.attn.to_k": { - "inputs": [ - [ - [ - 34.5 - ] - ] - ], - "params": { - "weight": [ - [ - 0.6015625 - ] - ] - } - }, - "single_transformer_blocks.37.attn.to_v": { - "inputs": [ - [ - [ - 34.5 - ] - ] - ], - "params": { - "weight": [ - [ - 0.361328125 - ] - ] - } - }, - "norm_out.linear": { - "inputs": [ - [ - [ - 6.4375 - ] - ] - ], - "params": { - "weight": [ - [ - 0.345703125 - ] - ] - } - }, - "proj_out": { - "inputs": [ - [ - [ - 28.875 - ] - ] - ], - "params": { - "weight": [ - [ - 0.1376953125 - ] - ] - } - } - } -} \ No newline at end of file diff --git a/examples/stable-diffusion/quantize/measure_all_500/fp8_hooks_maxabs.npz b/examples/stable-diffusion/quantize/measure_all_500/fp8_hooks_maxabs.npz deleted file mode 100644 index ea86a90ea6c9515267bb5a59d70e23bb3605abf6..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 97750 zcmbrn54_EHcJA-bU}Ov`gPJiI8AQfCguzJ7IR}Hvq_{g5F*W|?5FyxK+ff<+DuYT> zREm+N7#UQms1!9-REnBWR4%SG7nR~l6_uh%Q-1H~v-W45V`LxO>(~3b&sxu{&v&i$ zwO3OR{z13E0tFIQLE=mCXY&5J=Lt$^nR_{G;1|_VvydbFUHO zCztTQPkv#Zl1*UYazmVARBXm0M>IJ)fC2spOG# z_2;S}u-|vmH5UBd{PY)7>6(v!zv^?6Jw5$M-@bjHzRUjw_KOy~j@+IbwWj#DsC_+I^p^$^Y`q#8KJ*p-$iDzQF(RTN9rzN!NX@7U}nA0-hQBQpvDP$?s%_mAo{ve$(fQ#M)@aP zFn;WWk`c+UC(~b8AH8t&~YO8@Zqd`_C>)eRZT&)%&}T_u`iV^XTO%h=c{rWz(`{y?|HtDmAg&-z zX{#yiQt2msO8e}TRB^gPCjIS^BVJ&|jxYFLo%BTKRQfyqM3?LnuNS4C%A|iI*Ir|$ zjD3E@gi$4*^UuHQjP!TIUWIY5B26hyrMvl*?%65)Q|ysRf6w;nS+-X%J<&Ur{!M?P zPxgru-%UTANq_&L_iCShW=8tA!d`vjUi~zse=7YyeM%}j<%62N%6H`;^-|;7=WuI79lzusr{*iX7%ReVxq{{xS<{wRZ`i%69 zFnwm6K1;o3r_#Ucz2;`~{vsr27^=Y84EKkb%&H*E(i3)!BqP1 z{fR@_CmI*0f00T5GOu06r4P?Y|3ld2NZjS9Iv-1=|Is@iFWcq4O!|M@E+@)%IjJX3 zrPBY$pE#X;;*p~C`c}r2j2!bTw{tP5rN@ z(*N%LZ)E#F-!1)dCjB3_(ao}rZt02Jsq}yP6Q7j*)A=-${;&KtDz%M1o00zSu+g2k z(Ovo{mAKxdlFDG5l2n2CYDraCCiwyw7EG$C8vkpWR2QR74Pq+!BIs>us$~DgPYRPq zV437gV18St!`4YH9+;7Q8Px7cZIbNzNgd^ku1icMzosME3rIyoE(WWyom3#;EHeFQ8{(~q?o`PkP-vBH83XC3IFrkEh_yzhmG3mUn@#}0Lvu54QlmZPd@KI=%Yvbe?H5735M|4 zjO2%4&@q%G>KLY+1%?w-$p{^ZI!3BwI$C#2M!_=4^PuS{QPp*f7NgA=Vk&t7^fqHv zvNop*lNVu`WE?0REpmSe?$Oac8P5|ll9#|BVFF2%Fi|-!hMoNn?OPt8dF01Pq~l0+Gclrw)ZF_rwGjzk$tRB{=w!!pSq zfhJ?AsxD)h7;TmlQ^`+2Z?i%rYg4Nzc>|V7{uq>u9Cx=T3;gbuyvflS$)A8>_mw1Z z_f^Unx|*0ueySsJ_cbcn&_C^#yamf7KLc&|wW|8=>%?fYo|sDh6!bP5RI)Z*ijucs zndHyFymlX#Y~<*S|EMET!Eu#rblvXBd$3IMzd=)QLRD9AQj9jIh^gfNfZpb` zO4jD@3X}I?ndG0q{1n*TGx>lAW+eX?3<}PWL;X*?Z^}9+;8*8yFN^C5Z~IDQEO` zVk-G}9f=BVsN@PhhGmlf08PP7Rb9a?G1}ZFrjq{zz0D^oS)0EtOg@EWlK%pwAjdtl z?3Ksvp`USlM)Kca*#8bm-2bj}23O)QKXC$}53T~q4zAoSsS3jhfVO`%Rek^JVzjA2 z#0h}jrlv}^|9{{FU^oHr!C!%L_t08AfD-_Ng4!feK^^6cu1mxT=t%qu)KkeuH^vFT 
z2OlQI*5NSoG#J|*p@O$N62&|X;xI}jf{zw^@G(+2jfI(#!RRzz1$Qc;cuEkgGI+d+ zO<)Ia4uVPWiAbF}3FgHV6$`;9v%<^VAZ;H6<-re@V(V0xc^ZuE9-@M`dnk%|8pPo+ zl?Xmv?7F3)DiM3O*khk5h0|Fu!!j71 z&QZag&PDO8AXvrEJGqIS&ko)n1e4$wB6a3PFi)naSO|UzE4<7N()JuE4}O^xTQ7$h ztHIdrl`4348^m8pki0LiFL|vVo&fX4-bM#;-`>0>}i;HQ&cP@ewGy; zf(f)ra{gc+m3*yqbCc%QGJ7^6WPUQ>z0^TnR{bt#)asCe*)$4Kb2zZXE5_L z7@Ph=1#kLG6!SEQ!`CVie}UNJeKe2^3 z2*D)zFGwBwE6f8bDi)Ie&I-?TgY^9;lqdg7imkO8YHMvU_PYfCH1D?#idh=OVM&!p zzLeOLFD-@BGB9&87@g{=;K`Rm@wOnSor^cI<@uZ!2*D)vibx&U0Ot7=6$`0X zW`*auL3(Zo<*8SdV(V%!<1`q1T|))$bxjoGG>F66Dv`R8*i)|~h10q)qcRws8mr(= zO;Efo2r6}@n^;Wkzwz-Vwm#cO_Dv`cM?CCp5;nWdkY6hcIXBFJ33yOyZL8Y&B6YHGZ#Jce< zZxMn?HtZ^jBQbb~b93(6z+mSXGXFq1VH8{JX`Z*(gZlQoFLHYyQ$ zTd_ypP70^(VdiBpI_;o>JMD<#bwRMY$fIm0zU2WzFp0b~Qm17w&!?zZh`cK+yvq&J zaDOO|yt@=z2f)nKU~F_x6}-{CP|VaI4*RG?;CWu@LzHR(O~jq~Xy}9(jxuTgSpo(_n0Lyb9iE3B@!G z;xIuaA_uWYo+yRWB$yc)j82nPaHlCKUKIotxzbH6{$eLLv4h#c^Mha#yo}VDhrqm@ zqGBQVG*)<@8>H>&P#*knDYhN~GfjiB-6K`-c4wlPra>HLsYLLj#UA_^DV&alnUcZi zbi4}gbOMSO1;HwV$D7zm?BMA^FbRGNQfHnD^LC1gh2W>N!ZY0fq z8jS6pqk^}4E{b^?#Nm9E2!4UsgI_3x(?u{-G8mmMQNf)qMe(p8SY_~d6T6HZyg&#h z!LLB-%qwBuPf@WD{AyNsryHd0xlkVbS}C?(2QyEDvE3U~@OE!RF;9ay+^iD8ZxMU& zTcvQi4Q5UTqthKKxYL~|-WLQFywXi9|1*5yP3&&I=NUpUiG43pC*BA1a*B$D*blJ6 zGuIP|g0hGu7M(Y1n_7_mLRaduwi@ODPcXvOydk7NT^`IN50Kwfo zSO^v%IE3I90t9#W0KtLhuF;$skPZpZiU0!R9xypZwGP zd&5l7Njxl>hYg2qQAx3rcmyX_bVL1)gqp-7N4Yu*Hb%qJ=V&R^K1ZjF(U8NKDM>t5 zxF#MuiqkmQd<=`zxG7Yp@hIyBVG{q`Z(>PY=hD`1ViUM$8zF3c;1iK~^2FFOl@vSi zCv{?1H`M&(sEI#Cl&e!>^E50?Pn|+-dK$_+4LMAglK9hyYy26aIL(L+$gnugoI-V) zg|cT5CjQU;CiefupSL%gD>f3sR`NN>JakTMol1(WBZRHg%aVEEa@aDJ6kDm6?5*I$vTms7l~9v<uL73D(_nTNrUH=zfeiK{YHG2tREByv!9=sv8 zR3*hu`i-5~*bVi*DQePh7Uk;Z*klb$&s(NYd)|sNSwjxnq$K^e;hKKCC{EjBQ!^}1 zJEl;b22<7!!leJX-^3Pb{U)}ve`_-#Y(?Ic%+q$m_Nk=UiQIEyUpLh7o~VhuSCp%L z*klb$qkE@N8{LO8SwjxLO-bba!Zq^#QJfCI=4Du%4oaaq9ZcCZ2>Ta#aPLt6);dDi zihMYkryYSUQ%SKC`6ws0bVCgvgPO?4M!9+%HdDjW=m{y*Mo*;7)R4o;DT#baxJEuT ziqmP>qzsGG=_ypFGbmdIVgDlc_0IBdEh2=i$iFA^v~#d^Dk*j%pXYuvDm5VnHwr@mD@SCDsy&0RPVQKf)6l%M-QKo6g z;f|C9zcXBe|22x!UD%Wii__gHRHu6=`vqY?gO}gL{_ciFgs>I-elpK|0NbRJVkh`R zPAu()+I|Ez!5@us^)YOohNaynQmE}dNtvf1ho@5#{F!hK{%jPd=ddXm7N-|ds7^0Z z)(paa1~0#fz3hg4gs>I-RWi?f4O^y?Vkh|PPHgRl+I|x?!T%ZM>RZ@64NJT4q)^*^ zmoiU74*yC?@b|+t_yJ!}-G z;jm#D7N-$Xs7@nNRt~~`eBQ0!#71_*ZbH}!J}Q}Kj)v`0NwE`r3@5gBLv4?Rn&4wc zxjGIuR>RWnxGB_j$D@qZki!Hi2|i)C2A?R3)5O@A42#pGDO9J)C_4w?kU#J4Z(>vU z_f`|aR_v+BJaKAlpGu0I*wZ?(wi{}Cdep?8AJHaiPtoQ51`OG)h6 z!!`CCQJm()W@K2L=1!qH%|lr^2ow9~eiK`$`Auv-|I|`K*h;(rnTIWity4*{lXzh# z7Is7ZE{dAOi$%G*I5tMZ(&rK>)IOJ_jM0$8(kV&2Ot>aqHj2}7*nA9&(+Vk6rxhv7 z24NEa+;3t@T<6l(Z(=LEXEz~i#b1@olUKu*s-)P7zlIZ=yP@XSLQVW%M!C8+Hc!LS z^g1cjrq`v+(~!gZDT)8&jn^3k6!o>f%-^BjE`1AHQb7lYD z=CGA~3o;Mg65FbhVk`O5y{(;C;SKe@Eozc)7v<{q*enf8zdNQ-`W@68Oqr!2hn-WB ze3x)dzH1bx-LN?s7N=ecHTfQty@N2xf9^N2g}c9r?d6}_P6%76e?#Vhdt)nAQf#GO zvbV1jtGl6|_d`wU{i9qx02`-a>Ghx#YU+b2<22-OXi8Ea7OtrekK%L$HY&s7bW{q} z>1fK{L73D(_nTNrUH=zfeiJ*^HCqZ{EB*0g9()3}StZ3z`jed4-wpMC3To1y8s+L~ z*klb$&!?wQKdCb)lQraUR!Y*J9j@trAI0e$Y-)zZ>D&~m(|MF7gfQuU?l-Z8Tfd22 z;NRL)2wRc=Nakr5VLMe)>_ooAiS6A`!{ab4ZVJq_EWS;f}wooO-PUNSYSlSIW{0wR$KO5!hbJ#QuOQSEOP#b-bGEGAc zFQ+8(E8!aX)hJG{VKXu;POqm>o!+1<9E6GdbH9m||6#X&6Z@wdHWR{D@VCi4^BruJ zN{XG}?>Vu%8*2M~)CB(^%GD3CX&RPxKTe^x`)|rL4LN+8lHi|(Yw*vbIDLUl$*?$m zl|ps;nzCyU_A_|-P3&7YEG2}k;Qt}>%a5ICi0J)72Z*<4u{Rt zu(Ufu3box4Df2YsFmg(Qj}orIM~&h%8a5@v;xtAI)oD!1#zEN6;N>^5vE8tm5VnH< zg3L3=#g?h0*a<$q6Fa=2wkJeQ@QI>aofw;^VQF{L6l%MZQRZpLVTzOlpE6v7PZh;! 
zYHUu1#cA3Us?&6oy@N2pf9^N2<{kdu-^6C{@2w|dE(627L^n`v1fH+g*Vjn z?5K%7N0h5`V)HaC&CZ=dZFU~YJPkR_my+1?himKwqBt#x4a=}NEu2DiT7ZT9!dCD>WS+SMwo4_&PVl9i*x?Pey$ouCFB|3Ra@bf6OS>ziP}^ORGFC$l zE2kv*D&ZP@)hJG@VPi5ZPHUu4oz|r69)v^wyt}`N{nEd;q7b%X|BB2L*TMFwq}YkQ zo)b&Fp{9S0n%Emexw;`XPQ%jd#wpZhH=&Hvki%vviM@Ha#@-@|)0Wtb42#p&DO9Iz zD4PdiV*lK4VhcCFiEZbf+D!;siFY9LupO~=Dk-)SFWuY8iS^x3zq_C&@vc#>?uL!g zu=LqWq4c?AZx6~C4LR(UlEi)Cn)o+Soc6}%V_2N_O`$jq>iw3oau6o*&;2Hr#C0xh z{U)}*do~opR{R6WJozAOsY;5S_=h;L#~W(?Fx139Jj&H0uz4DmrjJUYHhnZ@o`xKb zO-cOY!ZrT!QJhY|24q;APD-JE;3rd755mO%x!=V8zxea^PIbkSLfA_FJ2DSF9owps zVkh~TPAv3>`aT;q$$uZ^>N(gf4NJf0rcjfgNA1}=A9A=LCCM)g*W`bU;&c%$R)hDL^S}F@=PbXF zT;`57g|HR<3NlZ<65FeiVk`QRy{nzr<_$G|Eo!3wIm*@Ru$dZ`hObYdHhcqRriL7D zN=fva!!`OXQJikY=4Du%Zcm{)-9gzu2ort!p+w)Ux9g6Z?Yz@&y}!D!fA22X%KbMo zkGvaOtde3U_q|Rm^M<;<4>h^(k8}owOQAabi?V(YCU@raA^lIi zS!?})8}<~!R`8F=Jo960vr3Ac;Ga0L$s20>Gt>nCJj&HCu<06>cE3uYeoS9erfbOI z+mrXAIZiGevQl8Jn76ahf%S>NFc=_aIF0A-~oCyDRd4Z}mCcv8E8VqR&O< zsdHnyRZ{FkpVx`)-caN7qbB+SQLZkCP1vwByl@J&;YBDDHsr8aN}?|wuF(fYaasbK zn_+QUDuwE_G-U@N9QxaBo;mAVeOVXmDuk`v%aeKJ3fOj)6g#nR)b%Q;$-Qcn ztE*uHHZ0w)kwWcuP0D}`Is7suxz`TY+`o$Av<@~n!{W4F3e{TN@rupx)-Qj&Z7 zaLv6#6sH}r@fjAUol>YyJ5#n1!lD0+H|y1Q@9pY_wS}-1e0MU>>|xtgQtSlZ(}~^Q zP}_Z|3I3ZXSNFySZCKjfH--8!{gyIlLk|0=B=`a08vMX0P6uIwGb~Prq)?p>rEDXF z0|`F3ceoqY7Q$BWBgs7TC~U_{ik;xcII-OuYWq0U1V28?)f2E08;ymG ziS6D{+ZUoH_#dNOy$GAIVQKf06l%MFqDug=_S`MRB?t8=PTrx;KUD^moceLYU}7p4I1V-s<}m9hYQ!>!$)x%0h^m)aT+Ow>NGNCF(DjC@WH)N-LSzBwt|mN=9yz)yH--{1Ru+Z z72i}X)|ii-sZ61x${=v z!Uc;9VJr7mWFEOSwrwTFPVQ};*!K-}y*+Aj?-1qcj@X0^OSe0vP`llkGGRjwyQU=f zZsD4H_b5(1Y;K0dY0ng@(_WOVgmCCTq2cRbSfl;m=giYA6w0lSjwcSH06E@^=~&8c zLO77%gL}ujVTmDZ1wWC@Gf%>Ht)$oqeu@+OzM-~HLrw7CMY(!9Hetik?wKjncF&?r z*pS2TQxg1~a1H*4C{E{Mb2BVX=ciDeE}(2EgaZlQ*ZZRzRvE%p@QcYj^Ac?1N{XG} zmpZZU8*2M<)C9jG%GE2e2^*GnuTG)1dktm6h8+H!lHk{cYw*8Bak?IxnqhIeF@@@M z6J)c9SfiT<}JSMSCq zY*-q;H-*yhpx)mp6E@^CF_X(?2OI3Sqx<=dJ#>3sxDzR_=GnJn}tk`$~$P-0wTF^Bd~= zL)7H{D9Y83u?ZWNZa+z(cKa!1!iF3^Pf6}C!Zr7oQJlWQ=4M!&zDc1veM{L<2#5YN z-ux$Icklhj4a*E+EBFs&p7|rTeI>V$=kmB+AuEu?ZWNb|+7vwmSu7!iF5CN=fjk!!`IcQJkj5=4M!&rca?d z%|O{w2nQ0puQ!t$))~T9@L9+_b5?BsN{XG}vpcc#8)|z_)C8X^%GJ5C2^*Gn=S`uu zJ0E4jh8z}1N$>^3HTXhNoEFBWW>}mSO`$q1Mp;$}6MV>T_5c1?Cja+VALNd`hOiZV zNit7e3fsVvVki1CPAvY08ea}I(U*^Mbp>p~hNa<^QYZ}%>a9$fupx(4Qxbi(aE-ot z6sI+?!5J2(wNj`~zoaZGgo!@nxB3FjTm4rqSZD}axz{E0$n~%lEGc$!|JsQ^O+#I8 zh??9RMY*~$Hetik?WQTz4{9^Ygbg`tk&@h7hHLJvqBw1h4bHGQZJR=M+K#fO5cWHF z-s(HJV4WdsP4K@&xq3Y|VZ+kyjVaW2Z=y`tki#u034Uw12EQ$e)9u*Q42#p9 zDO9JwQhWC9f(bt4xB7qoi&g)7tN+a%s|{f*`aNWxdM~ztCB;tk`<&PU4mJJ&YN9_F zkEBo<9@Kl3GGRjwkEbO16X6>D$tX@wVS_U)PS2!Jot~xaErf|a7wk8Lt=un?dE`sj8kQ70xnFVOPt#D>uc0RQKcZZH9hGsVO>Id~t%7hI$ zyq%KV?}TgaccVDHhYilKIK7`jb^3s^wh;C^ci!qBxnR8^Y~}tpnMZzt?P5u>llwC# zwtz!je}S6ZUq-q56*ghR((N}X)KBSK%7hI${3j*3zYo{kKSXi*5u2M~aT;c_w$reb zt%Y#tKjY1R+Gh9O@NQUd2wTBNB=gLXu#GG!c7l)M#1?Rdw!0W*!iF3Mr6l+g;Tn9&C{9aZQ!^}1%cM}9mZj`1gb6<6xB7qo>lgoft1s`4 z6^F1DeMK@)T?yOCl42+NDo(5dhZZ zh8zw|N$!KfHTS_$oDRX}W>}mKOQAX)PFYg7;Mwzf7hbvMN{K{|*epM8wtFgHm7N=`ds7`;TEH8uu3EtQHiyO8a!dCDb$UO5# zY%NQQo!~b+u@M|<`&QHhzb(qu+p!57mUizU2M4Zy`+ZA-~oC`%T*az11Id$BIMPivBQ}r#^!1Wl6CU{V^vtgF}r!ftu(~ zM!EVFHetik@G~ith6nYYrA*k6!}BSL{zAA$e=&;FOW5EHi_=3qce@*6*-(WjgQtagZ z&WXL?P}kq1Cif3fuKtKk*sydv%;ZhCgL=bKCTz%I_>|-xAzX8h7{zHMY;K0dX_OS| z=RGQAlOY`X&v^5dyWM-EyJ6)aYy}^a%rnQrHnXJI2|kV!d%>Z$$3;!>@uFNEADggY zX?MaD>c=z@Wx|FWCP_)~Ny9bxWKo;B>3RoG;UaV2wTCY zBlFDZvHdJ5c7o67#Aa})?U_*%e3mFzXT>ILSlXREh1%{MlnEPhm@6g0=MLB4^F(o) z7n_@5ahgAc>a+l5lOY^P@V?$cZdiE;TfrA0^UOuDEiEZ_f-mmGW^kzOB~TN5$tYKs 
z!X|83+Fd4v+U~NH2^(@)J|)3d2-n~%MsZpRo0?&9S|x?*v?^tjAx!WgzttD{zyB)4 z|K94WyJP7gY(-y_%v0CG_OztfiN3ZI+rgp6*FjD6b)#He51X)IY53PEl!gcOHlR${ zki$kPiN0~TM&Bfg)27(q42#p|DO9H|DBBESq7V74zDV;{-^vAB4`D0!He?>TEw-j5 z#ZK<+o%qu<)b)<2$vrsA)t#^j8$YJ-C6U)J&t`9;@?t`OTJp`MuVd?g; z6zZpRIAy|y9F9y$?xVsr_t8pzBZq8rv8!dCE; z$vpEEY)?yyo#3ZAu^Sv}`*hRL2eqI!( z^Rc-Z7N-kSs7`;REH#7!2|l=Yu^YA@!dCDIzu>+;C;Q@+_3i$ zwu0Y5=9zb5n_5!r1i#CP&EQbmccUiwJyEXSi%r2b<7Lzv(*5Ah3k^K`#l`lL%XAHr7lr^!6_8Ej`uikih-NWPdTr)t9gl80LK0KZLE||046u_pxm)DRzQ?=)@nVp|(FpP4It5x%vqT}AB4LN+7lHgy3Yw)k5IDLbS&agOrmqK;=4`rPp?0@jQ*ne=t-b2_5{u7yJ4l_lb zIV|i3AI^z&;ZWNnpeFc;QLc`J4cV}?J4y<*-BBq+HsmmRN`j9OuEEEQ;xraEI>X{L zP72lO7nG%jFv0U3@}K{DsZYH5e(89wSbqpx$tNK5&+)FhuL%GG(XF&mbC=TD*by8va(h8z}3N%Dom zHTfb@oEF7~XIPvTPoX*uqO3TC14`c4ThbNF5Me9%(qtaG47Rf+#ZL0&oY)u+^}PaW zlCK!$>Ppz04NJePq)_`^l`>~T4y&gm`5NJxe9b6MYhkl9EKX~uP@R57*>MPyd?@es z?pI9Lb;&+N*vh^>naBPbTi%jlC;Ns@tPO`c-xxL7H;Hm}Q*6+NrQ^+0C>;;#Z9y5d zA&0F}l6~uN&Av?(r){zM85XDQQ>acmP}Us6WFPVu$>(W($8@k8_94Pn@SVv#a~Eud zONyP~yE*YEYN+iVYJ%?(Nk{08*bDK-u{;98Ln7~2wTa| zBJY(W*h+p4nTKABEpbV)ll(d-c85cKUyqvPH$=I5 zBQ|Wq((lbF)P8TF4BL>yZ7E59d$=aQBZ||V*a!`a(_JZ4r@v8_9l`-6@9W*;inWNa zmHh8y9(o_P$0fy1@&}yQ9S-&V5NeV?9OddG*t88xzmKI*`+b};Z9@)ErX=}O;hOyE zC{E8{12im7&!teEo~JB5gh@V>cYF7XrZ2i=K_YBrf0@i<*#F>pw~y$C&4{oSd}K1u90l9y zl42+LXilsUhuR(kHNnS>a&;_h+J>dwaZ;$ke?ghHA&2o&5`6q{4L(5>rwOqU8WyLC zQ>adpP<9@|e*B!bziT>~E4CxTR`MyxJakHIolA5&qE}lZ|cMxUTh8&hmN%Ez_HTlv}oR+~x zXjq(OzTik;-EIk8b3>U#~;BwsVi)wQr` z8dw15&6T)q#|08*(@}CBY8~*WianaXJi}pr?asU8WyK>Qm9UUpsYcJ{rEX=f8+E#SN89n4_nDEAoI`*u^lfd zc9LJ@#9ncz?@LgV{7+G?UW!fIu=IO*3bo%WDAP9Na8*i@UmdQ=uZiMxEjB{K;&fdK z)#)#kJ&15X$p`mtaK*+%*h+pAnTOttEqO_?ll)dER*OS@-;SE(cSN~*CpK-v((hd< z)PDa)nYJN^ds348-f&I+_b5*HVIwpwP7kC|ogSpDLWBcK-q(BB6-yIgEBT{j9{L!z zV@+X{FF%I?p6l#(`9p&mX*t88xzt5#m`+c4=Z9@((rX=}G;hOyAC{C|n12im7 zucc6({z2{8dmSeEP~Pp`FP*;OlD&zrmHnS&9{U!y=q1HY_II4vFAjD79%{1xE6UaP zv1uEYjz3JHbUdi{5oOwj9R8h>?4N{d_D`cYeTL1@usD5@LUsC*vJep_`%vEPt#6%v z?S{>Xuoe7UGSB=DTlJD+C;0bH{KXn-`$yCS|0&AVVf+W~P1~@vJDmUXf9)>T8=f+4 zLk=URB=|_-8hqp^PNQHmG%QY|rBFZg(J6ZoVgG~Y-9Dxp7AV42@Uh7}a~y2dONyP~ z<2tcn9BO-f)C8X(%GC+6X&aVyCr+WZI|*glh8!kKN$|uE|%9;5&q zZkR&tcO%NQ4LNL*lH{9)Yx2#aIBkv%(6BgdnL>5iin0+ACizg_?cJ}QZsU^eiLjM@ zJ2H>m9$Wd6Vki5KPAnaVI^PL3*>{d|br)>fhNa`(QYak{>g`UMwjqZ-Qj&eoaLv9~ z6sJCHhK9vy?-Z)jK9tpnFxiLlZf||}^tW!S_b2nr1F)?xDRzP%>}3DgiS^@9=bxY^`=?Q^euhokuyp)I z3Z>&gy)P-#HstViO0s_wuGznh;`ALhL&M_qeG1j-2g>Do))A zJ}jAM4u>s*NwE`r1SkGt4YfTIYJ!g(pw@=`Py^65)iBCl4nG<7+U{dS^pVW!{<51g^qbB$i zQLavjP1~@vJ9P@R-DxP(Hsml}N`g-xuEA%B;xr>RLc`)Ta|+dI7Rtgz*pHv{_IFTc zbH!>!*h)SJnTO7a?Se_MlYDL`_K!n-&x@Mm^F_HjKQ?W{((i&P)P5JDOxuvdA}L9} zXt*X{EQ-_O*a!`a(-J9ErzI(i6XAf85AH4PiWQ5nm3&z;4_yvh29shZ`3g?#Acy*1 z2{p-Aj&gMsY}$sU-_=s6{jN@#wjqZ#Q<8kGa83TpC{Al*BQz{d>!eVf)}`!Bgab<6 z*IVBe+ZACe`37Vjx*@g?CdE$jjh)y(4)wh$YLagjA@CntZz`PTOMxG%QX#rcj**Q+6i8B%gVhA^yJ#>wXP&XP4|(gstqml6mZI*k+g% zJK1|qEFy3HuHO2>nG`%tEB$lX5T-G(*f8F4U5x3 zDO9I}DVq~vvd=KgP_ozmH;{+AVZ|bB1wWk3GmpS_!lc*ZZ^ou=So*y+h1&0JlxZ7sxFaRW?+n-Ee~sdF7dAq};&gWk)#)C}21Pia* zOt>b0Hj2}8*a!`a(+eq7rxz)U6ybo9_w`wX#aQhH*kYIzJK4W-VmUd~`8TM^{%w@2-(k}>EFFKJLg{!= z?+41X4LSUjlI+7wgKPF-qc{zR&CswojgUfh8j-S05hnW#!we;ReYcP7hOLXR6?{}O z&m0X~43lCf_!v(7#TsgREYt)aJId8@uxT5XcE?SjepKU8rftY!f|LZGFkFLA6vb&` zY=(x#Y0?y`(`1xoim?B|^KPHQ4OJ_oAptzF3s2i(}I^Ed4H#LhW}+%CrqRES-|%%Y%`ho;lCR;!hH|LywNR7% zmr<^+jZNFI^t(<9wcm9q(>CO=eoB)6I$V=)5XEUjY=nlzY2y^C(%;cgGZJzk?~$Hsr8#N|Nsq zuE}?e;0w_j1V^M%c>!8#0gG8(R;PVki5)PV6a% zI^Pd9+4qlf^#E+zhNa_!QYak{>K#m(wjqZ@QI}-X4LO{ZlHg~DYw+JkaXJT^pCOAO-h1a8?M3s9L4E6Y=nlz 
z>G~9^(+!lRim)F)=k0H$-sFn)i?Ega7BUaL726JzVki0SPV6g(`o0r2$^RPV>Rs5h z4NJdwr%*qrdnnU30Qliwf3=>cqnhQ;Zj6spt1l(mX*K*IBJ zYuE@4i__~VRHrv6ixuI3lK1ug>53JMu$BC6G7o(R+YpmtC;59$>@A1-ejhc-KZtVm zLu}fHrQeTJsQvz%GHpW+pQa@FXW^Rs^C(VVU;{KPPG6-^oxY}QR)k4Dyj;uu$BEkWFGrHwj?ISPWB(2SX~ZvKFqZKakIKS%GKepX&aV~M@XS`Jg7G!W!i=u zMovlgQNlI*s8O6o!)9n$oW@9@I*mzLt_YKT$nSR9>)bxJ8@4dQR`6etdFHs-l9&`b z!N+&vFV;}o6QU;gL{Y9zj7{6Hv^!}E^`n}MGHpW+Q=}yLl;IkDswhrVV>2`?PSd7P zou;EKSA_i!o_G5UZrH*ITft`{^URsCB{3;>g3s#2@^Yx{*-;aGjwn~>#HMXn+MPRv z+U`7*X&Z8wFD1d}57*!eL~&XW8=+xwS~!L3v5&qu8>0QcSXvy4LPiwlH{v|Yw}g2IIV__ z(6BhIkwSG^ld@b94k-EH-Y;FTh7q=s|BB2**TJ^Lq}WNmo)Zhqp}v2On&cZqxw;`X zZNt*<#wpZ(H=#`1ki%vvNxpfwCf_28)0Wr>4U5y(DO9IzDC-sBfRgw1wsXZIM%YTe z1DS{Ji0z3x*Ila!_sdrh1%~PlxZ7s*efN;`@%K(Z=yKu zjSbMSIPIH4b^0x3zamWXp}gC>-%H)!CA%15EBk?D9(xeBDkjBF_CuW5Vh(kF7;3T~ z9_8v0*t88x$48}5Iv&(Jnlf!e4#%b>`*Go#{rD(OCtx!)EKVn-P@PVutXPD}K9qNR z>wBrEx?vY1Yz6-vnP;Akt%^yp6Z}jk{$dTaeKu->|31pqbFgU}mUhogp?*~7QKoIk z;ewO|zc5^b|1pZwMc51ti_;}3RHr{tRxHB)2hY3xGB@mEgstFLka^~n*s7QmJHfAZ zVud->_O+-9{^uxHufwKoSlYclh59+&K$*57hnrFo{N`{CeoGXmTd@%u7N^@&s7`lK z7A(Sk{G7MHm-<&%tYU<%Lk|B;N%FVCHTm07oZi7kXjq)y zOQAabi?U=94k&qF?*mt?V}z~bACYTyA@Af&| zu#pkAg3m?fnR835A3YQJkzrftaKmnliUcDN@0RTQUnun`&-r}a{(PU}+^Ey4jM zAKcr(6)PEGEBQub9=b8MF($=M@=cvsW)AheIckz`5#{QZ*t88xzgwqJ``v~zZ9@** zr6l?G;hKDhC{8QcO!lG7?XB;n{@x9H8DT57`yV{-_CLE}FC%OP{|lLCUXQJfNwE|BMkm&pLv7!Tn&7uYxq2%$ zZNt*;?J3lD@1RWEki%b768x@k4gR+%PIqG?G%QZ{rcj;!PFc1H`|)$${$A?+u2{ELQV3w zqg;Ilo3>%;_q`NqzyG34+mOQtDM|iexF-K7iqpr~2n~zVCn;2?Pbmu*;ee9&^*(pS zYDU;f{w0}*euZt0NwJgs8z**}Lw$dTn&kh9a`k&`+J>dyA5*CP{zRF!r~f~PVNsF~ z7p}>NkK!}}HbBGTG*SxHX=KXAMVRD6dAE1JmpZCTwll(3_R+~ab_{HJOp2ZCV>z+b z9LhcpYO?<#%GGhPX&aV~$4{YjJg7GTW!i=uCQ3>6iNiJfBvG6u#b#(&oF-49I!!@Y zx(JhfDDU>x_fn^F!*)j43O)^)XHJVPk4do;e0nGTVhy!DBWi-r6y@s7*t88xyR)WH zKdRX%(>CNVM@oXv8Lq+SisCdkHbcYWG;a#kX+Fx*McDt~dABd%hV6{76?`Ew&s-Q= z9+P4x_@YiMHHX?>95ulQMY*~JHf_Vw?ouh#c9*71+mOSuDG9z@xCUQ7iqi_%2n~zV zN-0#Ql_@J1VLyJ(+uuuF)fLMbVJrFSWFEQ(wmT-pPV%*!SZfaTy*6r+|0>GWb+Bn0 zmVVbuq4v8zW!i=uHb_bG4Z}70Mp2wL#ztsZoHk9NI&DT-x(El9d~k0ISFC4*t>jyg zdFa;I_Lvkq$+vZ4u{qTD_NYm|LzJsKV$(J({qB@P?RRI&v<*4znv&$Zg=_NNqd4`j z5gHb!JyWPodr{Ud!T}}k>;1+R3mRc7`95SGx-YgrCdE$j{hZir4)uKiYLXuq55Hn*7KpPDfz_G%QZXq)?rXrR-gVNj{W!d-r>($Gc=l zBWz_qk<4RH!dA$n*vWp16U)t^&QC*4_TNRhdO9|3!_x7YDb$Y7qDwBqxbi%ukoU!pi&kIm4qINg{+b-IbNdJ*lYz4oK%rkGtR>-8-34W&&tIeUd??O%RzeTxvH#Tj<((b(}lydyPg1D; zeoC3PA&1XXlKhKsP5xyRr?0RP8WyK-Qm9VfQkE~m0VVJ2{l^t+8euE>4`d$tBeq2* z#ZK~JyhiLdhx#54HOYsMa&-i3+J>dyky5Drj!c=hA%{^@l6(?*vUSj6D!W4&L>7q_DP~#ofMn4 zVd;4C6iUZ~dQ(uQZOCD&lw_YeT(eIT#c5h>hK9vy`V^|u43q_oFxk-%{%hXzw!W7- zlYeScBWwkqh0HT&#TLn=*a<$n6MwOW+MW|N!RLx{b#83hhNazkQ>Y)+e3WS$a#$cG z!50kI;0r}@S{R$5VR2eCh3d2zWdS4XfAGB92f1NWBWwj+c6VQF`z6zb=+GG*F^99B(9@YTXK`07!d*1$$+Se({Mp*sDNvVIZv zCO=MM{!y8Lr8})$W!i=u{+N>N7lmu~i=#MQg3Zvd zI9-}Tb-IkQh7l$^8q&MHwIB5g|J1%l*b06XnP*;&t&>Ty6Z~2y{$dTaeI06o|0T-R z>#=DYmUeGUp|*PyW!i=uZb?b-Tf;T@ZBd+V$7X0)obF7aI{lThh7tBZc;4-QbHlzy z*b06RnP=XMt&>Ty6Z}3W)|^9aKY*Ix4@SBA5H@YY((WTE)OH`GOxuvd<0%RLM7Rcj zGK$kv*a!`a(=#blr)Mck7-2tt&fDKhecl!88euE>i)0@961Gex#ZK~9oLF=Y_5B)Z zlK&&h)z`6U8?HrpiB;!N-(R36`Ik|yeuYiju=M*)3bo&FDbqIO@Sl_< z|2|xk{}9FLM{I^g_~ z9t}0gM~`xK3~btlrQfkqsL98sOxuvdFH(|x+;B}kUKFSCu>l$urwLQ2P7_f!F~TGt z%DcV$z0^ruvb7PmvQI|lv6Ev9Wm4>9pVEnS=TPTUqbB<_QLavlP1~?^Jbena;~6N^ zHsml?GgZiFM~t-&>+4`BqV`ZjDXbu=Kla3N`t5lxZ7s*dZm!cMR9$gQGa@ zgpJU!IPH=`b=sA(j1dkf`QYB}u2|a$Tgmqz^Uyu9Z89l#lJ_~W@Eq!UZ`35;C(6}* zv1uEYe)mhE^gF1xKV{m691cuL@`J)P`N2_~4#7rfSey<^p*kH-S;q(ml)SHZq$?IT z!dCL5$vpHJY@JMso#e+kvGE-0`vlY^KQYSHldx$UmVQr3p?*@QQl@Rl;dd!XetNhj 
zKO>6Mnb-ggi__UDRHxrl_A$aFAIiJE`@PgZxMX)DY-K->%wx~T7Rsd9$$p^|OV6Rs zFG5ZBi=$k<1e>;D>G;wVYR8vRrftaKij-u(GF-D?6~*alY=(x#>Dm;k)1N6T8DX*y z<=x);Ug}@mu)7hqg5N;qnKxqlWK!$|zuAeuSVL{!ikjfJMY(!AHf_Vw?wu*rcK=G5 zwjqbVr6l;>;Trs&C{FicGc+tt_oYys?x(C|g#8bmcl(2G*xd+Q!5=2`%tx?wGAVX~ zKjy^BbExenP!s&gC|94trfpc-eI|w4?z5C>8*+F)CBa_^*WfQkae4_GpBenTNiKEt5&Hll(0wmYzd>zk{0O??$=$9yV>m((n5z z)P6spOxuvdM=44EakwV`cNC{jun`&-r_WNTPM=d2GQt5RAKd%W6{{OzEBV)C9{LTo zO(w-o^6#8jdk*#eJ!+Ew5asHR*t88xzr)Pb?sr(qv<*27pOWMwglqBgsL4J{l&iC1(>5#}&z?f*cu;Q+%CrqR%$1VtbBAm8 zd7?PYi_OrmIL)6zby|S3lMyESkl*dH*YBk+OKZ$svx+hWUP zQtTw(-ig)cP~SVECi&nfS9ijuZCLullDrqL$@hrjv?n$~!{XGJ zLUsBLWg#ORQ1ZdOeO$4+5w?>5mdr!L&eqI!(^RWRM z7N-kSs7`;RY-EHPDYsQLwUEizL$EN z8Jo8R$pG=CK;CDIk7i*~PyHOMTo+wxE#ingo+PyD@`cd6anYJN^2U8OK zp>PfUa1^IUuo)T_r^iyLPLESoGQ$1`&%6CeH|%bNt>90SdFC_NI++wZ!Jl(t`#IG1 z3#bYHVw9^dVbeA&?Y@#iZTD5mv<*4@BPGFK57*#tL~(i(8=+xwdMkzM^fqN5Bkae| zdHZ{*@48}hBWxxA7nz5?k1dl)v6K8mC)S@seSeIa7N_r0s80W(EM$ZONDis7}A2 zY-EH3O5WES&lS5HVJrCrWF9&pwoWF+&SyTc6Z_Ahz9&UZ^2wrHogACCVd;0u6l%Xy zQKoIkVVaaApEg{RPZz~$dTfA(#c9SAs?$uAjf^nK(GdRTJYV;Fsk8Xkwl~67_Swih zc6Mx`Op2ZCb2_mB9qN2;)MTG0%GG(XX&aV~=TD(@JgBz-W!i=u7D`F>g~K)bB2kaLtdmKx6MRD_wx2_7Z;YDYn?$*~DK>4x((dLd)ONR^OxuvdRw)U- zb+`uKCW_Ox*a!`a)AlJ;ryVH!7-2tt&fDKh9qfwDjj)w`XEG1n1zRSQVkh}-POLwN z`tG47`5sZO?ukv?u=LxPLhbiAlxZ7s*e4~)_YK$Nzm4LwA2ve6;&ea?)#*UWLPj{C zG!x4O2318$5Wt5!fRgw1&Tz%b(LZ-lMv*N}Pawb(+L6g%0kb7BKJ)cN(O$$mqWt2biPHY^?A zoI?GqZlO%uki%^$$$opdX1^nf)1BB14U5xVDO9JwQFb!IWFN}Az4g7+d)%ddLJ91VR8B}h3fPXWgjE#$Ip5Dd#V3+ z#pXuXO8zOChkk}FlS#3Y{0k@6pF@3rg_`7FN4fe9Hf_Vw?{_KG59&XZX&Z9*AtlLw z4Aua&&FwZ%0fmspyY#lBf4UBBWxufnao2+!M4ey*hxN`6Z_Ah zzQ;gK@-d@a9SfVbVd;0A6zV7S3(B+&IgFQ*d$#Z#!C)ga2W4LK~ClI%-`Yxbq1I4y(C(6Bfy zmqK+~p0bk>Ci_s{?XB;nuIPs4jj$DbWiroP1=}Z+Vkh`&PW;6hYI_aT1Ya}C)wQr` z8acGQC2d-{s+&yeG@nAZiKDi zn~{0u=GZ!!6g$DUbYlBC)b`e>3BFB~tJ`AJHZ1LKpF;hdcA!k#ki+1V1m7uKgYO*0 zX%}pShQ(>O6sptilzoh_A3x{q@1^eHip`C%m3%KU5ADO2$)wmxzPA(W&!N8eMNRVG zM!C8lHf_Vw?*S>)59&b5v<*2NoRZ{+glqCcqc|OgjnJ?-9g#wHI+C)G5e_K%;NH=$ zSltL)$&V%T(BrUeGAVYFpWwv)bExl=P?P-RC|6IxrfpdIJuQX$N&Sv8Z9@)cq$K&7 z;hOxcC{AZ%BQz{d=cG`b{y^Et2nUqBuXmm+b~nOS@(aj3^g?W%Op2Z47df&29P0ZL z)Fl5?l&hCw(>5&qUYoX5 zdFXf8GMN-R$-j4E{W;Y4kEluhQ{?GsQYyCN8+J+oPOiA*Q!ZrEG zQJhA>Mrc@^MoXc7};hKDkC{9yiBQz{dQ>Rd!rlD+Pgab<6*PG52 zyBlFE`3z(pIwQ7DCdE$jnVr~w4)r}NYLd?uBAI zntZ+}PV-{}G%QXFrcj+0qHJV@Nj{W!d-r>(i@0QaBWz_~jLc&f#}>+@*vY z&X+<>_NAj-T?U)BVd;3e6iUZ~ddpL$ZOCE8lw@BiT(hqn#c366hK9vywG^t;>Xe;~ zFxiLlZf|`rbxk)cZ-lMjza;a_wXuCNDRzRdUjRZNt*;hAGsK zY9q?D4LNL*lHi+$Yw*pYIBky2(6BgdnL>5iin5Xs_CI*u?c2CvcOz^C-;T^Px5w7W zq}U0*qZ8ZDp|*EIP4JzgT-^nmwqa>^w-jo-yHloZ$YGC^1m81UgYOl^sSg{WVR70! 
zh3d2qWgjE#$Ip5Dd#S&5#pXuXO1?jthaP|}lS#3Y{2(XRpF@2gf|}%qM!9+zHf_Vw z?-41~evhO~+mOT2DM@}zxF$a~iqmn}2n~zV2`N;k6DbQB;ee74?w#z4)s3)~{8Taz zJq_C?lVT_N=}zoFhx$GfHObG4a`kL%+J>dyb5f}N{(&-WLk{PqB>DN_n*4$&P8VV$ zG%QXRrBIzNrfg({14`c4`;#knH^Nr(%g8+Ra%`PUik;+FIicTcB)=xg)oZb7 z80vXK!c`B2{N-S4H|;gao* zu$BF<&nmX04tp?3TrW!i=u9!^R2N5VDx zqfwk5!)9n$oSsObIz36*$q18uDDU>x_fnsB!}3Ph3jQpaXFiARlS#1?`~@feVhy$Z z5^93K9OddO*t88xyRW5C+x-V++J+q7NJ;QF!!`Inqd2{V&Cswoy^}(9dY7`25%xcL z-tGT#!|q1d3jP6^XMTvSlS#1?{9`A!pF?ebf|}r;M!EVKHf_Vw?iVT4cE6-d+mOT8 zDGB~fxCZ|=iqm)42n~zV_bF7TA1M16VLyJ(+uuw5$rYO$XX_*%mdrzk!JamVa${yA1hpwj~&Hn9BhP!#cA9Ws?&Is zg^X}O$p`l)aK-9I*!s*TBJNE>wBO@G8^1j||uGrlOTgm4j^Uyi5buuY- zlF#kL{&T4Bc~O&mz9?7c$EIyq`du)E+V4V?X&Z7_Bqhlg4cFw0MR8gj8=zrvS|Ww& zv?OICBTVwK{k?scU3)uk<|ld1op#t|n=N|1rPVBHgsq6nl5t-S+Z&T&C*lfDEINl8 zTnRN1SB`RZ6>O-6rLomgsEw^o8LACmR{69Qs*w7Ze-e$hLu-iUstvcdJIT+UecOm?Se?H7G F{||AAV_yIO diff --git a/examples/stable-diffusion/quantize/measure_all_500/fp8_hooks_maxabs_mod_list.json b/examples/stable-diffusion/quantize/measure_all_500/fp8_hooks_maxabs_mod_list.json deleted file mode 100644 index 4cd94ad59e..0000000000 --- a/examples/stable-diffusion/quantize/measure_all_500/fp8_hooks_maxabs_mod_list.json +++ /dev/null @@ -1,506 +0,0 @@ -[ - "time_text_embed.timestep_embedder.linear_1", - "time_text_embed.timestep_embedder.linear_2", - "time_text_embed.guidance_embedder.linear_1", - "time_text_embed.guidance_embedder.linear_2", - "time_text_embed.text_embedder.linear_1", - "time_text_embed.text_embedder.linear_2", - "context_embedder", - "x_embedder", - "transformer_blocks.0.norm1.linear", - "transformer_blocks.0.norm1_context.linear", - "transformer_blocks.0.attn.to_q", - "transformer_blocks.0.attn.to_k", - "transformer_blocks.0.attn.to_v", - "transformer_blocks.0.attn.add_k_proj", - "transformer_blocks.0.attn.add_v_proj", - "transformer_blocks.0.attn.add_q_proj", - "transformer_blocks.0.attn.to_out.0", - "transformer_blocks.0.attn.to_add_out", - "transformer_blocks.0.ff.net.0.proj", - "transformer_blocks.0.ff.net.2", - "transformer_blocks.0.ff_context.net.0.proj", - "transformer_blocks.0.ff_context.net.2", - "transformer_blocks.1.norm1.linear", - "transformer_blocks.1.norm1_context.linear", - "transformer_blocks.1.attn.to_q", - "transformer_blocks.1.attn.to_k", - "transformer_blocks.1.attn.to_v", - "transformer_blocks.1.attn.add_k_proj", - "transformer_blocks.1.attn.add_v_proj", - "transformer_blocks.1.attn.add_q_proj", - "transformer_blocks.1.attn.to_out.0", - "transformer_blocks.1.attn.to_add_out", - "transformer_blocks.1.ff.net.0.proj", - "transformer_blocks.1.ff.net.2", - "transformer_blocks.1.ff_context.net.0.proj", - "transformer_blocks.1.ff_context.net.2", - "transformer_blocks.2.norm1.linear", - "transformer_blocks.2.norm1_context.linear", - "transformer_blocks.2.attn.to_q", - "transformer_blocks.2.attn.to_k", - "transformer_blocks.2.attn.to_v", - "transformer_blocks.2.attn.add_k_proj", - "transformer_blocks.2.attn.add_v_proj", - "transformer_blocks.2.attn.add_q_proj", - "transformer_blocks.2.attn.to_out.0", - "transformer_blocks.2.attn.to_add_out", - "transformer_blocks.2.ff.net.0.proj", - "transformer_blocks.2.ff.net.2", - "transformer_blocks.2.ff_context.net.0.proj", - "transformer_blocks.2.ff_context.net.2", - "transformer_blocks.3.norm1.linear", - "transformer_blocks.3.norm1_context.linear", - "transformer_blocks.3.attn.to_q", - "transformer_blocks.3.attn.to_k", - "transformer_blocks.3.attn.to_v", - "transformer_blocks.3.attn.add_k_proj", - "transformer_blocks.3.attn.add_v_proj", - 
"transformer_blocks.3.attn.add_q_proj", - "transformer_blocks.3.attn.to_out.0", - "transformer_blocks.3.attn.to_add_out", - "transformer_blocks.3.ff.net.0.proj", - "transformer_blocks.3.ff.net.2", - "transformer_blocks.3.ff_context.net.0.proj", - "transformer_blocks.3.ff_context.net.2", - "transformer_blocks.4.norm1.linear", - "transformer_blocks.4.norm1_context.linear", - "transformer_blocks.4.attn.to_q", - "transformer_blocks.4.attn.to_k", - "transformer_blocks.4.attn.to_v", - "transformer_blocks.4.attn.add_k_proj", - "transformer_blocks.4.attn.add_v_proj", - "transformer_blocks.4.attn.add_q_proj", - "transformer_blocks.4.attn.to_out.0", - "transformer_blocks.4.attn.to_add_out", - "transformer_blocks.4.ff.net.0.proj", - "transformer_blocks.4.ff.net.2", - "transformer_blocks.4.ff_context.net.0.proj", - "transformer_blocks.4.ff_context.net.2", - "transformer_blocks.5.norm1.linear", - "transformer_blocks.5.norm1_context.linear", - "transformer_blocks.5.attn.to_q", - "transformer_blocks.5.attn.to_k", - "transformer_blocks.5.attn.to_v", - "transformer_blocks.5.attn.add_k_proj", - "transformer_blocks.5.attn.add_v_proj", - "transformer_blocks.5.attn.add_q_proj", - "transformer_blocks.5.attn.to_out.0", - "transformer_blocks.5.attn.to_add_out", - "transformer_blocks.5.ff.net.0.proj", - "transformer_blocks.5.ff.net.2", - "transformer_blocks.5.ff_context.net.0.proj", - "transformer_blocks.5.ff_context.net.2", - "transformer_blocks.6.norm1.linear", - "transformer_blocks.6.norm1_context.linear", - "transformer_blocks.6.attn.to_q", - "transformer_blocks.6.attn.to_k", - "transformer_blocks.6.attn.to_v", - "transformer_blocks.6.attn.add_k_proj", - "transformer_blocks.6.attn.add_v_proj", - "transformer_blocks.6.attn.add_q_proj", - "transformer_blocks.6.attn.to_out.0", - "transformer_blocks.6.attn.to_add_out", - "transformer_blocks.6.ff.net.0.proj", - "transformer_blocks.6.ff.net.2", - "transformer_blocks.6.ff_context.net.0.proj", - "transformer_blocks.6.ff_context.net.2", - "transformer_blocks.7.norm1.linear", - "transformer_blocks.7.norm1_context.linear", - "transformer_blocks.7.attn.to_q", - "transformer_blocks.7.attn.to_k", - "transformer_blocks.7.attn.to_v", - "transformer_blocks.7.attn.add_k_proj", - "transformer_blocks.7.attn.add_v_proj", - "transformer_blocks.7.attn.add_q_proj", - "transformer_blocks.7.attn.to_out.0", - "transformer_blocks.7.attn.to_add_out", - "transformer_blocks.7.ff.net.0.proj", - "transformer_blocks.7.ff.net.2", - "transformer_blocks.7.ff_context.net.0.proj", - "transformer_blocks.7.ff_context.net.2", - "transformer_blocks.8.norm1.linear", - "transformer_blocks.8.norm1_context.linear", - "transformer_blocks.8.attn.to_q", - "transformer_blocks.8.attn.to_k", - "transformer_blocks.8.attn.to_v", - "transformer_blocks.8.attn.add_k_proj", - "transformer_blocks.8.attn.add_v_proj", - "transformer_blocks.8.attn.add_q_proj", - "transformer_blocks.8.attn.to_out.0", - "transformer_blocks.8.attn.to_add_out", - "transformer_blocks.8.ff.net.0.proj", - "transformer_blocks.8.ff.net.2", - "transformer_blocks.8.ff_context.net.0.proj", - "transformer_blocks.8.ff_context.net.2", - "transformer_blocks.9.norm1.linear", - "transformer_blocks.9.norm1_context.linear", - "transformer_blocks.9.attn.to_q", - "transformer_blocks.9.attn.to_k", - "transformer_blocks.9.attn.to_v", - "transformer_blocks.9.attn.add_k_proj", - "transformer_blocks.9.attn.add_v_proj", - "transformer_blocks.9.attn.add_q_proj", - "transformer_blocks.9.attn.to_out.0", - "transformer_blocks.9.attn.to_add_out", - 
"transformer_blocks.9.ff.net.0.proj", - "transformer_blocks.9.ff.net.2", - "transformer_blocks.9.ff_context.net.0.proj", - "transformer_blocks.9.ff_context.net.2", - "transformer_blocks.10.norm1.linear", - "transformer_blocks.10.norm1_context.linear", - "transformer_blocks.10.attn.to_q", - "transformer_blocks.10.attn.to_k", - "transformer_blocks.10.attn.to_v", - "transformer_blocks.10.attn.add_k_proj", - "transformer_blocks.10.attn.add_v_proj", - "transformer_blocks.10.attn.add_q_proj", - "transformer_blocks.10.attn.to_out.0", - "transformer_blocks.10.attn.to_add_out", - "transformer_blocks.10.ff.net.0.proj", - "transformer_blocks.10.ff.net.2", - "transformer_blocks.10.ff_context.net.0.proj", - "transformer_blocks.10.ff_context.net.2", - "transformer_blocks.11.norm1.linear", - "transformer_blocks.11.norm1_context.linear", - "transformer_blocks.11.attn.to_q", - "transformer_blocks.11.attn.to_k", - "transformer_blocks.11.attn.to_v", - "transformer_blocks.11.attn.add_k_proj", - "transformer_blocks.11.attn.add_v_proj", - "transformer_blocks.11.attn.add_q_proj", - "transformer_blocks.11.attn.to_out.0", - "transformer_blocks.11.attn.to_add_out", - "transformer_blocks.11.ff.net.0.proj", - "transformer_blocks.11.ff.net.2", - "transformer_blocks.11.ff_context.net.0.proj", - "transformer_blocks.11.ff_context.net.2", - "transformer_blocks.12.norm1.linear", - "transformer_blocks.12.norm1_context.linear", - "transformer_blocks.12.attn.to_q", - "transformer_blocks.12.attn.to_k", - "transformer_blocks.12.attn.to_v", - "transformer_blocks.12.attn.add_k_proj", - "transformer_blocks.12.attn.add_v_proj", - "transformer_blocks.12.attn.add_q_proj", - "transformer_blocks.12.attn.to_out.0", - "transformer_blocks.12.attn.to_add_out", - "transformer_blocks.12.ff.net.0.proj", - "transformer_blocks.12.ff.net.2", - "transformer_blocks.12.ff_context.net.0.proj", - "transformer_blocks.12.ff_context.net.2", - "transformer_blocks.13.norm1.linear", - "transformer_blocks.13.norm1_context.linear", - "transformer_blocks.13.attn.to_q", - "transformer_blocks.13.attn.to_k", - "transformer_blocks.13.attn.to_v", - "transformer_blocks.13.attn.add_k_proj", - "transformer_blocks.13.attn.add_v_proj", - "transformer_blocks.13.attn.add_q_proj", - "transformer_blocks.13.attn.to_out.0", - "transformer_blocks.13.attn.to_add_out", - "transformer_blocks.13.ff.net.0.proj", - "transformer_blocks.13.ff.net.2", - "transformer_blocks.13.ff_context.net.0.proj", - "transformer_blocks.13.ff_context.net.2", - "transformer_blocks.14.norm1.linear", - "transformer_blocks.14.norm1_context.linear", - "transformer_blocks.14.attn.to_q", - "transformer_blocks.14.attn.to_k", - "transformer_blocks.14.attn.to_v", - "transformer_blocks.14.attn.add_k_proj", - "transformer_blocks.14.attn.add_v_proj", - "transformer_blocks.14.attn.add_q_proj", - "transformer_blocks.14.attn.to_out.0", - "transformer_blocks.14.attn.to_add_out", - "transformer_blocks.14.ff.net.0.proj", - "transformer_blocks.14.ff.net.2", - "transformer_blocks.14.ff_context.net.0.proj", - "transformer_blocks.14.ff_context.net.2", - "transformer_blocks.15.norm1.linear", - "transformer_blocks.15.norm1_context.linear", - "transformer_blocks.15.attn.to_q", - "transformer_blocks.15.attn.to_k", - "transformer_blocks.15.attn.to_v", - "transformer_blocks.15.attn.add_k_proj", - "transformer_blocks.15.attn.add_v_proj", - "transformer_blocks.15.attn.add_q_proj", - "transformer_blocks.15.attn.to_out.0", - "transformer_blocks.15.attn.to_add_out", - "transformer_blocks.15.ff.net.0.proj", - 
"transformer_blocks.15.ff.net.2", - "transformer_blocks.15.ff_context.net.0.proj", - "transformer_blocks.15.ff_context.net.2", - "transformer_blocks.16.norm1.linear", - "transformer_blocks.16.norm1_context.linear", - "transformer_blocks.16.attn.to_q", - "transformer_blocks.16.attn.to_k", - "transformer_blocks.16.attn.to_v", - "transformer_blocks.16.attn.add_k_proj", - "transformer_blocks.16.attn.add_v_proj", - "transformer_blocks.16.attn.add_q_proj", - "transformer_blocks.16.attn.to_out.0", - "transformer_blocks.16.attn.to_add_out", - "transformer_blocks.16.ff.net.0.proj", - "transformer_blocks.16.ff.net.2", - "transformer_blocks.16.ff_context.net.0.proj", - "transformer_blocks.16.ff_context.net.2", - "transformer_blocks.17.norm1.linear", - "transformer_blocks.17.norm1_context.linear", - "transformer_blocks.17.attn.to_q", - "transformer_blocks.17.attn.to_k", - "transformer_blocks.17.attn.to_v", - "transformer_blocks.17.attn.add_k_proj", - "transformer_blocks.17.attn.add_v_proj", - "transformer_blocks.17.attn.add_q_proj", - "transformer_blocks.17.attn.to_out.0", - "transformer_blocks.17.attn.to_add_out", - "transformer_blocks.17.ff.net.0.proj", - "transformer_blocks.17.ff.net.2", - "transformer_blocks.17.ff_context.net.0.proj", - "transformer_blocks.17.ff_context.net.2", - "transformer_blocks.18.norm1.linear", - "transformer_blocks.18.norm1_context.linear", - "transformer_blocks.18.attn.to_q", - "transformer_blocks.18.attn.to_k", - "transformer_blocks.18.attn.to_v", - "transformer_blocks.18.attn.add_k_proj", - "transformer_blocks.18.attn.add_v_proj", - "transformer_blocks.18.attn.add_q_proj", - "transformer_blocks.18.attn.to_out.0", - "transformer_blocks.18.attn.to_add_out", - "transformer_blocks.18.ff.net.0.proj", - "transformer_blocks.18.ff.net.2", - "transformer_blocks.18.ff_context.net.0.proj", - "transformer_blocks.18.ff_context.net.2", - "single_transformer_blocks.0.norm.linear", - "single_transformer_blocks.0.proj_mlp", - "single_transformer_blocks.0.proj_out", - "single_transformer_blocks.0.attn.to_q", - "single_transformer_blocks.0.attn.to_k", - "single_transformer_blocks.0.attn.to_v", - "single_transformer_blocks.1.norm.linear", - "single_transformer_blocks.1.proj_mlp", - "single_transformer_blocks.1.proj_out", - "single_transformer_blocks.1.attn.to_q", - "single_transformer_blocks.1.attn.to_k", - "single_transformer_blocks.1.attn.to_v", - "single_transformer_blocks.2.norm.linear", - "single_transformer_blocks.2.proj_mlp", - "single_transformer_blocks.2.proj_out", - "single_transformer_blocks.2.attn.to_q", - "single_transformer_blocks.2.attn.to_k", - "single_transformer_blocks.2.attn.to_v", - "single_transformer_blocks.3.norm.linear", - "single_transformer_blocks.3.proj_mlp", - "single_transformer_blocks.3.proj_out", - "single_transformer_blocks.3.attn.to_q", - "single_transformer_blocks.3.attn.to_k", - "single_transformer_blocks.3.attn.to_v", - "single_transformer_blocks.4.norm.linear", - "single_transformer_blocks.4.proj_mlp", - "single_transformer_blocks.4.proj_out", - "single_transformer_blocks.4.attn.to_q", - "single_transformer_blocks.4.attn.to_k", - "single_transformer_blocks.4.attn.to_v", - "single_transformer_blocks.5.norm.linear", - "single_transformer_blocks.5.proj_mlp", - "single_transformer_blocks.5.proj_out", - "single_transformer_blocks.5.attn.to_q", - "single_transformer_blocks.5.attn.to_k", - "single_transformer_blocks.5.attn.to_v", - "single_transformer_blocks.6.norm.linear", - "single_transformer_blocks.6.proj_mlp", - "single_transformer_blocks.6.proj_out", - 
"single_transformer_blocks.6.attn.to_q", - "single_transformer_blocks.6.attn.to_k", - "single_transformer_blocks.6.attn.to_v", - "single_transformer_blocks.7.norm.linear", - "single_transformer_blocks.7.proj_mlp", - "single_transformer_blocks.7.proj_out", - "single_transformer_blocks.7.attn.to_q", - "single_transformer_blocks.7.attn.to_k", - "single_transformer_blocks.7.attn.to_v", - "single_transformer_blocks.8.norm.linear", - "single_transformer_blocks.8.proj_mlp", - "single_transformer_blocks.8.proj_out", - "single_transformer_blocks.8.attn.to_q", - "single_transformer_blocks.8.attn.to_k", - "single_transformer_blocks.8.attn.to_v", - "single_transformer_blocks.9.norm.linear", - "single_transformer_blocks.9.proj_mlp", - "single_transformer_blocks.9.proj_out", - "single_transformer_blocks.9.attn.to_q", - "single_transformer_blocks.9.attn.to_k", - "single_transformer_blocks.9.attn.to_v", - "single_transformer_blocks.10.norm.linear", - "single_transformer_blocks.10.proj_mlp", - "single_transformer_blocks.10.proj_out", - "single_transformer_blocks.10.attn.to_q", - "single_transformer_blocks.10.attn.to_k", - "single_transformer_blocks.10.attn.to_v", - "single_transformer_blocks.11.norm.linear", - "single_transformer_blocks.11.proj_mlp", - "single_transformer_blocks.11.proj_out", - "single_transformer_blocks.11.attn.to_q", - "single_transformer_blocks.11.attn.to_k", - "single_transformer_blocks.11.attn.to_v", - "single_transformer_blocks.12.norm.linear", - "single_transformer_blocks.12.proj_mlp", - "single_transformer_blocks.12.proj_out", - "single_transformer_blocks.12.attn.to_q", - "single_transformer_blocks.12.attn.to_k", - "single_transformer_blocks.12.attn.to_v", - "single_transformer_blocks.13.norm.linear", - "single_transformer_blocks.13.proj_mlp", - "single_transformer_blocks.13.proj_out", - "single_transformer_blocks.13.attn.to_q", - "single_transformer_blocks.13.attn.to_k", - "single_transformer_blocks.13.attn.to_v", - "single_transformer_blocks.14.norm.linear", - "single_transformer_blocks.14.proj_mlp", - "single_transformer_blocks.14.proj_out", - "single_transformer_blocks.14.attn.to_q", - "single_transformer_blocks.14.attn.to_k", - "single_transformer_blocks.14.attn.to_v", - "single_transformer_blocks.15.norm.linear", - "single_transformer_blocks.15.proj_mlp", - "single_transformer_blocks.15.proj_out", - "single_transformer_blocks.15.attn.to_q", - "single_transformer_blocks.15.attn.to_k", - "single_transformer_blocks.15.attn.to_v", - "single_transformer_blocks.16.norm.linear", - "single_transformer_blocks.16.proj_mlp", - "single_transformer_blocks.16.proj_out", - "single_transformer_blocks.16.attn.to_q", - "single_transformer_blocks.16.attn.to_k", - "single_transformer_blocks.16.attn.to_v", - "single_transformer_blocks.17.norm.linear", - "single_transformer_blocks.17.proj_mlp", - "single_transformer_blocks.17.proj_out", - "single_transformer_blocks.17.attn.to_q", - "single_transformer_blocks.17.attn.to_k", - "single_transformer_blocks.17.attn.to_v", - "single_transformer_blocks.18.norm.linear", - "single_transformer_blocks.18.proj_mlp", - "single_transformer_blocks.18.proj_out", - "single_transformer_blocks.18.attn.to_q", - "single_transformer_blocks.18.attn.to_k", - "single_transformer_blocks.18.attn.to_v", - "single_transformer_blocks.19.norm.linear", - "single_transformer_blocks.19.proj_mlp", - "single_transformer_blocks.19.proj_out", - "single_transformer_blocks.19.attn.to_q", - "single_transformer_blocks.19.attn.to_k", - "single_transformer_blocks.19.attn.to_v", - 
"single_transformer_blocks.20.norm.linear", - "single_transformer_blocks.20.proj_mlp", - "single_transformer_blocks.20.proj_out", - "single_transformer_blocks.20.attn.to_q", - "single_transformer_blocks.20.attn.to_k", - "single_transformer_blocks.20.attn.to_v", - "single_transformer_blocks.21.norm.linear", - "single_transformer_blocks.21.proj_mlp", - "single_transformer_blocks.21.proj_out", - "single_transformer_blocks.21.attn.to_q", - "single_transformer_blocks.21.attn.to_k", - "single_transformer_blocks.21.attn.to_v", - "single_transformer_blocks.22.norm.linear", - "single_transformer_blocks.22.proj_mlp", - "single_transformer_blocks.22.proj_out", - "single_transformer_blocks.22.attn.to_q", - "single_transformer_blocks.22.attn.to_k", - "single_transformer_blocks.22.attn.to_v", - "single_transformer_blocks.23.norm.linear", - "single_transformer_blocks.23.proj_mlp", - "single_transformer_blocks.23.proj_out", - "single_transformer_blocks.23.attn.to_q", - "single_transformer_blocks.23.attn.to_k", - "single_transformer_blocks.23.attn.to_v", - "single_transformer_blocks.24.norm.linear", - "single_transformer_blocks.24.proj_mlp", - "single_transformer_blocks.24.proj_out", - "single_transformer_blocks.24.attn.to_q", - "single_transformer_blocks.24.attn.to_k", - "single_transformer_blocks.24.attn.to_v", - "single_transformer_blocks.25.norm.linear", - "single_transformer_blocks.25.proj_mlp", - "single_transformer_blocks.25.proj_out", - "single_transformer_blocks.25.attn.to_q", - "single_transformer_blocks.25.attn.to_k", - "single_transformer_blocks.25.attn.to_v", - "single_transformer_blocks.26.norm.linear", - "single_transformer_blocks.26.proj_mlp", - "single_transformer_blocks.26.proj_out", - "single_transformer_blocks.26.attn.to_q", - "single_transformer_blocks.26.attn.to_k", - "single_transformer_blocks.26.attn.to_v", - "single_transformer_blocks.27.norm.linear", - "single_transformer_blocks.27.proj_mlp", - "single_transformer_blocks.27.proj_out", - "single_transformer_blocks.27.attn.to_q", - "single_transformer_blocks.27.attn.to_k", - "single_transformer_blocks.27.attn.to_v", - "single_transformer_blocks.28.norm.linear", - "single_transformer_blocks.28.proj_mlp", - "single_transformer_blocks.28.proj_out", - "single_transformer_blocks.28.attn.to_q", - "single_transformer_blocks.28.attn.to_k", - "single_transformer_blocks.28.attn.to_v", - "single_transformer_blocks.29.norm.linear", - "single_transformer_blocks.29.proj_mlp", - "single_transformer_blocks.29.proj_out", - "single_transformer_blocks.29.attn.to_q", - "single_transformer_blocks.29.attn.to_k", - "single_transformer_blocks.29.attn.to_v", - "single_transformer_blocks.30.norm.linear", - "single_transformer_blocks.30.proj_mlp", - "single_transformer_blocks.30.proj_out", - "single_transformer_blocks.30.attn.to_q", - "single_transformer_blocks.30.attn.to_k", - "single_transformer_blocks.30.attn.to_v", - "single_transformer_blocks.31.norm.linear", - "single_transformer_blocks.31.proj_mlp", - "single_transformer_blocks.31.proj_out", - "single_transformer_blocks.31.attn.to_q", - "single_transformer_blocks.31.attn.to_k", - "single_transformer_blocks.31.attn.to_v", - "single_transformer_blocks.32.norm.linear", - "single_transformer_blocks.32.proj_mlp", - "single_transformer_blocks.32.proj_out", - "single_transformer_blocks.32.attn.to_q", - "single_transformer_blocks.32.attn.to_k", - "single_transformer_blocks.32.attn.to_v", - "single_transformer_blocks.33.norm.linear", - "single_transformer_blocks.33.proj_mlp", - 
"single_transformer_blocks.33.proj_out", - "single_transformer_blocks.33.attn.to_q", - "single_transformer_blocks.33.attn.to_k", - "single_transformer_blocks.33.attn.to_v", - "single_transformer_blocks.34.norm.linear", - "single_transformer_blocks.34.proj_mlp", - "single_transformer_blocks.34.proj_out", - "single_transformer_blocks.34.attn.to_q", - "single_transformer_blocks.34.attn.to_k", - "single_transformer_blocks.34.attn.to_v", - "single_transformer_blocks.35.norm.linear", - "single_transformer_blocks.35.proj_mlp", - "single_transformer_blocks.35.proj_out", - "single_transformer_blocks.35.attn.to_q", - "single_transformer_blocks.35.attn.to_k", - "single_transformer_blocks.35.attn.to_v", - "single_transformer_blocks.36.norm.linear", - "single_transformer_blocks.36.proj_mlp", - "single_transformer_blocks.36.proj_out", - "single_transformer_blocks.36.attn.to_q", - "single_transformer_blocks.36.attn.to_k", - "single_transformer_blocks.36.attn.to_v", - "single_transformer_blocks.37.norm.linear", - "single_transformer_blocks.37.proj_mlp", - "single_transformer_blocks.37.proj_out", - "single_transformer_blocks.37.attn.to_q", - "single_transformer_blocks.37.attn.to_k", - "single_transformer_blocks.37.attn.to_v", - "norm_out.linear", - "proj_out" -] \ No newline at end of file From b5aee78273b35db3052630bc5ac2f5cebc3f5d19 Mon Sep 17 00:00:00 2001 From: baocheny Date: Mon, 14 Oct 2024 13:07:27 +0800 Subject: [PATCH 27/30] rem tmp tests files --- examples/stable-diffusion/readme.txt | 29 ------------------- examples/stable-diffusion/run_bf16.sh | 13 --------- .../stable-diffusion/run_bf16_prompts_100.sh | 13 --------- .../stable-diffusion/run_bf16_prompts_5.sh | 13 --------- examples/stable-diffusion/run_fp8.sh | 16 ---------- examples/stable-diffusion/run_fp8_500.sh | 16 ---------- .../stable-diffusion/run_fp8_500_hybrid.sh | 16 ---------- .../run_fp8_500_hybrid_prompts_5.sh | 16 ---------- 8 files changed, 132 deletions(-) delete mode 100644 examples/stable-diffusion/readme.txt delete mode 100755 examples/stable-diffusion/run_bf16.sh delete mode 100755 examples/stable-diffusion/run_bf16_prompts_100.sh delete mode 100755 examples/stable-diffusion/run_bf16_prompts_5.sh delete mode 100755 examples/stable-diffusion/run_fp8.sh delete mode 100755 examples/stable-diffusion/run_fp8_500.sh delete mode 100755 examples/stable-diffusion/run_fp8_500_hybrid.sh delete mode 100755 examples/stable-diffusion/run_fp8_500_hybrid_prompts_5.sh diff --git a/examples/stable-diffusion/readme.txt b/examples/stable-diffusion/readme.txt deleted file mode 100644 index e72ebcd497..0000000000 --- a/examples/stable-diffusion/readme.txt +++ /dev/null @@ -1,29 +0,0 @@ -This is experimental PR for Fal.ai ask based on current FLUX PR https://github.com/huggingface/optimum-habana/pull/1331 -* PR is fixed with timing (HPU device sync and include VAD into timing measure) -* Added FP8 quantization support - -To run sample with 1 image 1 batch in BF16 precision: -./run_bf16.sh - -To run sample with 1 image 1 batch in FP8 precision (quant weights were tuned with 1 prompt): -./run_fp8.sh - -To run sample with 1 image 1 batch in FP8 precision (quant weights were tuned with 500 prompts): -./run_fp8_500.sh - -* Added batching -* Added --prompt_file option for large number of input prompts - -To run sample with 5 prompts (batch size 1) in BF16 precision: -./run_bf16_prompts_5.sh - -To run sample with 100 prompts (batch size 1) in BF16 precision: -./run_bf16_prompts_100.sh - -* Added hybrid (mixed fp9 and bf16) precision denoising - -To run 
sample with 1 image 1 batch in hybrid precision: -./run_fp8_500_hybrid.sh - -To run sample with 5 prompts (batch size 1) in hybrid precision: -./run_fp8_500_hybrid_prompts_5.sh diff --git a/examples/stable-diffusion/run_bf16.sh b/examples/stable-diffusion/run_bf16.sh deleted file mode 100755 index e8b74d3312..0000000000 --- a/examples/stable-diffusion/run_bf16.sh +++ /dev/null @@ -1,13 +0,0 @@ -#!/bin/bash -python text_to_image_generation.py \ - --model_name_or_path black-forest-labs/FLUX.1-dev \ - --prompts "A cat holding a sign that says hello world" \ - --num_images_per_prompt 1 \ - --batch_size 1 \ - --num_inference_steps 30 \ - --image_save_dir /tmp/flux_1_images \ - --scheduler flow_match_euler_discrete \ - --use_habana \ - --use_hpu_graphs \ - --gaudi_config Habana/stable-diffusion \ - --bf16 diff --git a/examples/stable-diffusion/run_bf16_prompts_100.sh b/examples/stable-diffusion/run_bf16_prompts_100.sh deleted file mode 100755 index d229d5fe68..0000000000 --- a/examples/stable-diffusion/run_bf16_prompts_100.sh +++ /dev/null @@ -1,13 +0,0 @@ -#!/bin/bash -python text_to_image_generation.py \ - --model_name_or_path black-forest-labs/FLUX.1-dev \ - --prompts_file prompts_100.txt \ - --num_images_per_prompt 1 \ - --batch_size 1 \ - --num_inference_steps 30 \ - --image_save_dir /tmp/flux_1_images \ - --scheduler flow_match_euler_discrete \ - --use_habana \ - --use_hpu_graphs \ - --gaudi_config Habana/stable-diffusion \ - --bf16 diff --git a/examples/stable-diffusion/run_bf16_prompts_5.sh b/examples/stable-diffusion/run_bf16_prompts_5.sh deleted file mode 100755 index 22cc40dc0c..0000000000 --- a/examples/stable-diffusion/run_bf16_prompts_5.sh +++ /dev/null @@ -1,13 +0,0 @@ -#!/bin/bash -python text_to_image_generation.py \ - --model_name_or_path black-forest-labs/FLUX.1-dev \ - --prompts_file prompts_5.txt \ - --num_images_per_prompt 1 \ - --batch_size 1 \ - --num_inference_steps 30 \ - --image_save_dir /tmp/flux_1_images \ - --scheduler flow_match_euler_discrete \ - --use_habana \ - --use_hpu_graphs \ - --gaudi_config Habana/stable-diffusion \ - --bf16 diff --git a/examples/stable-diffusion/run_fp8.sh b/examples/stable-diffusion/run_fp8.sh deleted file mode 100755 index e914a3421d..0000000000 --- a/examples/stable-diffusion/run_fp8.sh +++ /dev/null @@ -1,16 +0,0 @@ -#!/bin/bash -QUANT_CONFIG=quantize/quant_config.json \ -PT_HPU_WEIGHT_SHARING=0 \ -python text_to_image_generation.py \ - --model_name_or_path black-forest-labs/FLUX.1-dev \ - --prompts "A cat holding a sign that says hello world" \ - --num_images_per_prompt 1 \ - --batch_size 1 \ - --num_inference_steps 30 \ - --image_save_dir /tmp/flux_1_images_fp8 \ - --scheduler flow_match_euler_discrete \ - --use_habana \ - --use_hpu_graphs \ - --gaudi_config Habana/stable-diffusion \ - --bf16 \ - --quant_mode quantize diff --git a/examples/stable-diffusion/run_fp8_500.sh b/examples/stable-diffusion/run_fp8_500.sh deleted file mode 100755 index 50ffc36b66..0000000000 --- a/examples/stable-diffusion/run_fp8_500.sh +++ /dev/null @@ -1,16 +0,0 @@ -#!/bin/bash -QUANT_CONFIG=quantize/quant_config_500.json \ -PT_HPU_WEIGHT_SHARING=0 \ -python text_to_image_generation.py \ - --model_name_or_path black-forest-labs/FLUX.1-dev \ - --prompts "A cat holding a sign that says hello world" \ - --num_images_per_prompt 1 \ - --batch_size 1 \ - --num_inference_steps 30 \ - --image_save_dir /tmp/flux_1_images_fp8_500 \ - --scheduler flow_match_euler_discrete \ - --use_habana \ - --use_hpu_graphs \ - --gaudi_config Habana/stable-diffusion \ - --bf16 \ - 
    --quant_mode quantize
diff --git a/examples/stable-diffusion/run_fp8_500_hybrid.sh b/examples/stable-diffusion/run_fp8_500_hybrid.sh
deleted file mode 100755
index 6de1397574..0000000000
--- a/examples/stable-diffusion/run_fp8_500_hybrid.sh
+++ /dev/null
@@ -1,16 +0,0 @@
-#!/bin/bash
-QUANT_CONFIG=quantize/quant_config_500.json \
-PT_HPU_WEIGHT_SHARING=0 \
-python text_to_image_generation.py \
-    --model_name_or_path black-forest-labs/FLUX.1-dev \
-    --prompts "A cat holding a sign that says hello world" \
-    --num_images_per_prompt 1 \
-    --batch_size 1 \
-    --num_inference_steps 30 \
-    --image_save_dir /tmp/flux_1_images_fp8_500 \
-    --scheduler flow_match_euler_discrete \
-    --use_habana \
-    --use_hpu_graphs \
-    --gaudi_config Habana/stable-diffusion \
-    --bf16 \
-    --quant_mode quantize-mixed
diff --git a/examples/stable-diffusion/run_fp8_500_hybrid_prompts_5.sh b/examples/stable-diffusion/run_fp8_500_hybrid_prompts_5.sh
deleted file mode 100755
index 29d4ac1d22..0000000000
--- a/examples/stable-diffusion/run_fp8_500_hybrid_prompts_5.sh
+++ /dev/null
@@ -1,16 +0,0 @@
-#!/bin/bash
-QUANT_CONFIG=quantize/quant_config_500.json \
-PT_HPU_WEIGHT_SHARING=0 \
-python text_to_image_generation.py \
-    --model_name_or_path black-forest-labs/FLUX.1-dev \
-    --prompts_file prompts_5.txt \
-    --num_images_per_prompt 1 \
-    --batch_size 1 \
-    --num_inference_steps 30 \
-    --image_save_dir /tmp/flux_1_images \
-    --scheduler flow_match_euler_discrete \
-    --use_habana \
-    --use_hpu_graphs \
-    --gaudi_config Habana/stable-diffusion \
-    --bf16 \
-    --quant_mode quantize-mixed

From 44a48c72105d2e9c89fafdbdd42f1140f94204e3 Mon Sep 17 00:00:00 2001
From: baocheny
Date: Mon, 14 Oct 2024 14:02:06 +0800
Subject: [PATCH 28/30] rem text_ids image_ids from split into batches

---
 optimum/habana/diffusers/pipelines/flux/pipeline_flux.py | 2 --
 1 file changed, 2 deletions(-)

diff --git a/optimum/habana/diffusers/pipelines/flux/pipeline_flux.py b/optimum/habana/diffusers/pipelines/flux/pipeline_flux.py
index 7606b6a2b5..7727f5fedf 100644
--- a/optimum/habana/diffusers/pipelines/flux/pipeline_flux.py
+++ b/optimum/habana/diffusers/pipelines/flux/pipeline_flux.py
@@ -201,8 +201,6 @@ def _split_inputs_into_batches(cls, batch_size, latents, prompt_embeds, pooled_p
         latents_batches = torch.stack(latents_batches)
         prompt_embeds_batches = torch.stack(prompt_embeds_batches)
         pooled_prompt_embeds_batches = torch.stack(pooled_prompt_embeds_batches)
-        text_ids_batches = torch.stack(text_ids_batches)
-        latent_image_ids_batches = torch.stack(latent_image_ids_batches)
         guidance_batches = torch.stack(guidance_batches)
 
         return (

From 6bd7351ea4c0f0bb1e88bda26a7af513c30b572c Mon Sep 17 00:00:00 2001
From: "Zhou, Huijuan"
Date: Mon, 21 Oct 2024 23:11:24 -0700
Subject: [PATCH 29/30] fix guidance nan boolean tensor ambiguous err keep text_ids latent_image_ids split for diffuser 0.30.x

---
 .../diffusers/pipelines/flux/pipeline_flux.py | 46 ++++++++++++++++---
 1 file changed, 39 insertions(+), 7 deletions(-)

diff --git a/optimum/habana/diffusers/pipelines/flux/pipeline_flux.py b/optimum/habana/diffusers/pipelines/flux/pipeline_flux.py
index 7727f5fedf..d0d9fd7c35 100644
--- a/optimum/habana/diffusers/pipelines/flux/pipeline_flux.py
+++ b/optimum/habana/diffusers/pipelines/flux/pipeline_flux.py
@@ -155,12 +155,16 @@ def __init__(
             transformer = wrap_in_hpu_graph(transformer)
 
     @classmethod
-    def _split_inputs_into_batches(cls, batch_size, latents, prompt_embeds, pooled_prompt_embeds, guidance):
+    def _split_inputs_into_batches(cls, batch_size, latents, prompt_embeds, pooled_prompt_embeds, text_ids, latent_image_ids, guidance):
         # Use torch.split to generate num_batches batches of size batch_size
         latents_batches = list(torch.split(latents, batch_size))
         prompt_embeds_batches = list(torch.split(prompt_embeds, batch_size))
         if pooled_prompt_embeds is not None:
             pooled_prompt_embeds_batches = list(torch.split(pooled_prompt_embeds, batch_size))
+        if text_ids is not None and text_ids.ndim == 3:
+            text_ids_batches = list(torch.split(text_ids, batch_size))
+        if latent_image_ids is not None and latent_image_ids.ndim == 3:
+            latent_image_ids_batches = list(torch.split(latent_image_ids, batch_size))
         if guidance is not None:
             guidance_batches = list(torch.split(guidance, batch_size))
         else:
@@ -190,6 +194,20 @@ def _split_inputs_into_batches(cls, batch_size, latents, prompt_embeds, pooled_p
                 )
                 pooled_prompt_embeds_batches[-1] = torch.vstack(sequence_to_stack)
 
+            # Pad text_ids_batches if necessary
+            if text_ids is not None and text_ids.ndim == 3:
+                sequence_to_stack = (text_ids_batches[-1],) + tuple(
+                    torch.zeros_like(text_ids_batches[-1][0][None, :]) for _ in range(num_dummy_samples)
+                )
+                text_ids_batches[-1] = torch.vstack(sequence_to_stack)
+
+            # Pad latent_image_ids if necessary
+            if latent_image_ids is not None and latent_image_ids.ndim == 3:
+                sequence_to_stack = (latent_image_ids_batches[-1],) + tuple(
+                    torch.zeros_like(latent_image_ids_batches[-1][0][None, :]) for _ in range(num_dummy_samples)
+                )
+                latent_image_ids_batches[-1] = torch.vstack(sequence_to_stack)
+
             # Pad guidance if necessary
             if guidance is not None:
                 sequence_to_stack = (guidance_batches[-1],) + tuple(
@@ -201,12 +219,18 @@ def _split_inputs_into_batches(cls, batch_size, latents, prompt_embeds, pooled_p
         latents_batches = torch.stack(latents_batches)
         prompt_embeds_batches = torch.stack(prompt_embeds_batches)
         pooled_prompt_embeds_batches = torch.stack(pooled_prompt_embeds_batches)
+        if text_ids is not None and text_ids.ndim == 3:
+            text_ids_batches = torch.stack(text_ids_batches)
+        if latent_image_ids is not None and latent_image_ids.ndim == 3:
+            latent_image_ids_batches = torch.stack(latent_image_ids_batches)
         guidance_batches = torch.stack(guidance_batches)
 
         return (
             latents_batches,
             prompt_embeds_batches,
             pooled_prompt_embeds_batches,
+            text_ids_batches if text_ids.ndim == 3 else text_ids,
+            latent_image_ids_batches if latent_image_ids.ndim == 3 else latent_image_ids,
             guidance_batches,
             num_dummy_samples,
         )
@@ -451,10 +475,12 @@ def __call__(
             latents_batches,
             text_embeddings_batches,
             pooled_prompt_embeddings_batches,
+            text_ids_batches,
+            latent_image_ids_batches,
             guidance_batches,
             num_dummy_samples,
         ) = self._split_inputs_into_batches(
-            batch_size, latents, prompt_embeds, pooled_prompt_embeds, guidance
+            batch_size, latents, prompt_embeds, pooled_prompt_embeds, text_ids, latent_image_ids, guidance
         )
 
         outputs = {
@@ -475,7 +501,13 @@ def __call__(
             text_embeddings_batches = torch.roll(text_embeddings_batches, shifts=-1, dims=0)
             pooled_prompt_embeddings_batch = pooled_prompt_embeddings_batches[0]
             pooled_prompt_embeddings_batches = torch.roll(pooled_prompt_embeddings_batches, shifts=-1, dims=0)
-            guidance_batch = None if guidance_batches[0].isnan() else guidance_batches[0]
+            if text_ids.ndim == 3:
+                text_ids_batch = text_ids_batches[0]
+                text_ids_batches = torch.roll(text_ids_batches, shifts=-1, dims=0)
+            if latent_image_ids.ndim == 3:
+                latent_image_ids_batch = latent_image_ids_batches[0]
+                latent_image_ids_batches = torch.roll(latent_image_ids_batches, shifts=-1, dims=0)
+            guidance_batch = None if guidance_batches[0].isnan().any() else guidance_batches[0]
             guidance_batches = torch.roll(guidance_batches, shifts=-1, dims=0)
 
             if hasattr(self.scheduler, "_init_step_index"):
@@ -512,8 +544,8 @@ def __call__(
                     guidance=guidance_batch,
                     pooled_projections=pooled_prompt_embeddings_batch,
                     encoder_hidden_states=text_embeddings_batch,
-                    txt_ids=text_ids,
-                    img_ids=latent_image_ids,
+                    txt_ids=text_ids_batch if text_ids.ndim == 3 else text_ids,
+                    img_ids=latent_image_ids_batch if latent_image_ids.ndim ==3 else latent_image_ids,
                     joint_attention_kwargs=self.joint_attention_kwargs,
                     return_dict=False,
                 )[0]
@@ -524,8 +556,8 @@ def __call__(
                     guidance=guidance_batch,
                     pooled_projections=pooled_prompt_embeddings_batch,
                     encoder_hidden_states=text_embeddings_batch,
-                    txt_ids=text_ids,
-                    img_ids=latent_image_ids,
+                    txt_ids=text_ids_batch if text_ids.ndim == 3 else text_ids,
+                    img_ids=latent_image_ids_batch if latent_image_ids.ndim == 3 else latent_image_ids,
                     joint_attention_kwargs=self.joint_attention_kwargs,
                     return_dict=False,
                 )[0]

From 40d12e797425a8c63c9b597780dec0c19f31dd29 Mon Sep 17 00:00:00 2001
From: baocheny
Date: Thu, 24 Oct 2024 11:38:35 +0800
Subject: [PATCH 30/30] upgrade diffusers to 0.31.0 release version in requirements

---
 setup.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/setup.py b/setup.py
index 388df57fcf..e8e693a108 100644
--- a/setup.py
+++ b/setup.py
@@ -33,7 +33,7 @@
     "optimum",
     "torch",
     "accelerate >= 0.33.0, < 0.34.0",
-    "diffusers @ git+https://github.com/huggingface/diffusers.git@main",
+    "diffusers == 0.31.0",
     "huggingface_hub >= 0.24.7",
     "sentence-transformers[train] == 3.0.1",
 ]