From ed8c82cc4c4cd51632d7b12a6eff93e32963000b Mon Sep 17 00:00:00 2001 From: Patrick von Platen Date: Mon, 14 Nov 2022 19:45:15 +0000 Subject: [PATCH 01/49] up --- v1-inference.yaml | 70 ----------------------------------------------- 1 file changed, 70 deletions(-) delete mode 100644 v1-inference.yaml diff --git a/v1-inference.yaml b/v1-inference.yaml deleted file mode 100644 index d4effe569e89..000000000000 --- a/v1-inference.yaml +++ /dev/null @@ -1,70 +0,0 @@ -model: - base_learning_rate: 1.0e-04 - target: ldm.models.diffusion.ddpm.LatentDiffusion - params: - linear_start: 0.00085 - linear_end: 0.0120 - num_timesteps_cond: 1 - log_every_t: 200 - timesteps: 1000 - first_stage_key: "jpg" - cond_stage_key: "txt" - image_size: 64 - channels: 4 - cond_stage_trainable: false # Note: different from the one we trained before - conditioning_key: crossattn - monitor: val/loss_simple_ema - scale_factor: 0.18215 - use_ema: False - - scheduler_config: # 10000 warmup steps - target: ldm.lr_scheduler.LambdaLinearScheduler - params: - warm_up_steps: [ 10000 ] - cycle_lengths: [ 10000000000000 ] # incredibly large number to prevent corner cases - f_start: [ 1.e-6 ] - f_max: [ 1. ] - f_min: [ 1. ] - - unet_config: - target: ldm.modules.diffusionmodules.openaimodel.UNetModel - params: - image_size: 32 # unused - in_channels: 4 - out_channels: 4 - model_channels: 320 - attention_resolutions: [ 4, 2, 1 ] - num_res_blocks: 2 - channel_mult: [ 1, 2, 4, 4 ] - num_heads: 8 - use_spatial_transformer: True - transformer_depth: 1 - context_dim: 768 - use_checkpoint: True - legacy: False - - first_stage_config: - target: ldm.models.autoencoder.AutoencoderKL - params: - embed_dim: 4 - monitor: val/rec_loss - ddconfig: - double_z: true - z_channels: 4 - resolution: 256 - in_channels: 3 - out_ch: 3 - ch: 128 - ch_mult: - - 1 - - 2 - - 4 - - 4 - num_res_blocks: 2 - attn_resolutions: [] - dropout: 0.0 - lossconfig: - target: torch.nn.Identity - - cond_stage_config: - target: ldm.modules.encoders.modules.FrozenCLIPEmbedder From d1e8a50bae592fa21b8b9dd5b7406ca9d6abba7b Mon Sep 17 00:00:00 2001 From: anton-l Date: Tue, 15 Nov 2022 14:14:02 +0100 Subject: [PATCH 02/49] convert dual unet --- ...onvert_versatile_diffusion_to_diffusers.py | 47 ++- src/diffusers/models/attention.py | 167 ++++++++ src/diffusers/models/unet_2d_blocks.py | 378 +++++++++++++++++- src/diffusers/models/unet_2d_condition.py | 4 +- 4 files changed, 578 insertions(+), 18 deletions(-) diff --git a/scripts/convert_versatile_diffusion_to_diffusers.py b/scripts/convert_versatile_diffusion_to_diffusers.py index 20ac78f944ea..07a4c2cd72b8 100644 --- a/scripts/convert_versatile_diffusion_to_diffusers.py +++ b/scripts/convert_versatile_diffusion_to_diffusers.py @@ -207,28 +207,30 @@ def conv_attn_to_linear(checkpoint): checkpoint[key] = checkpoint[key][:, :, 0] -def create_unet_diffusers_config(original_config): +def create_unet_diffusers_config(unet_params): """ Creates a config for the diffusers based on the config of the LDM model. 
""" - unet_params = original_config.model.params.unet_config.params block_out_channels = [unet_params.model_channels * mult for mult in unet_params.channel_mult] down_block_types = [] resolution = 1 for i in range(len(block_out_channels)): - block_type = "CrossAttnDownBlock2D" if resolution in unet_params.attention_resolutions else "DownBlock2D" + block_type = "DualCrossAttnDownBlock2D" if resolution in unet_params.attention_resolutions else "DownBlock2D" down_block_types.append(block_type) if i != len(block_out_channels) - 1: resolution *= 2 up_block_types = [] for i in range(len(block_out_channels)): - block_type = "CrossAttnUpBlock2D" if resolution in unet_params.attention_resolutions else "UpBlock2D" + block_type = "DualCrossAttnUpBlock2D" if resolution in unet_params.attention_resolutions else "UpBlock2D" up_block_types.append(block_type) resolution //= 2 + if not all(n == unet_params.num_res_blocks[0] for n in unet_params.num_res_blocks): + raise ValueError("Not all num_res_blocks are equal, which is not supported in this script.") + config = dict( sample_size=unet_params.image_size, in_channels=unet_params.in_channels, @@ -236,7 +238,7 @@ def create_unet_diffusers_config(original_config): down_block_types=tuple(down_block_types), up_block_types=tuple(up_block_types), block_out_channels=tuple(block_out_channels), - layers_per_block=unet_params.num_res_blocks, + layers_per_block=unet_params.num_res_blocks[0], cross_attention_dim=unet_params.context_dim, attention_head_dim=unet_params.num_heads, ) @@ -288,7 +290,7 @@ def create_ldm_bert_config(original_config): return config -def convert_ldm_unet_checkpoint(checkpoint, config, path=None, extract_ema=False): +def convert_vd_unet_checkpoint(checkpoint, config, path=None, extract_ema=False): """ Takes a state dict and a config, and returns a converted checkpoint. """ @@ -419,7 +421,6 @@ def convert_ldm_unet_checkpoint(checkpoint, config, path=None, extract_ema=False resnets = [key for key in output_blocks[i] if f"output_blocks.{i}.0" in key] attentions = [key for key in output_blocks[i] if f"output_blocks.{i}.1" in key] - resnet_0_paths = renew_resnet_paths(resnets) paths = renew_resnet_paths(resnets) meta_path = {"old": f"output_blocks.{i}.0", "new": f"up_blocks.{block_id}.resnets.{layer_in_block_id}"} @@ -457,7 +458,7 @@ def convert_ldm_unet_checkpoint(checkpoint, config, path=None, extract_ema=False new_checkpoint[new_path] = unet_state_dict[old_path] - return new_checkpoint + return new_checkpoint def convert_ldm_vae_checkpoint(checkpoint, config): @@ -656,6 +657,15 @@ def convert_ldm_clip_checkpoint(checkpoint): type=str, help="Type of scheduler to use. Should be one of ['pndm', 'lms', 'ddim', 'euler', 'euler-ancest', 'dpm']", ) + parser.add_argument( + "--extract_ema", + action="store_true", + help=( + "Only relevant for checkpoints that have both EMA and non-EMA weights. Whether to extract the EMA weights" + " or not. Defaults to `False`. Add `--extract_ema` to extract the EMA weights. EMA weights usually yield" + " higher quality images for inference. Non-EMA weights are usually better to continue fine-tuning." + ), + ) parser.add_argument("--dump_path", default=None, type=str, required=True, help="Path to the output model.") args = parser.parse_args() @@ -704,14 +714,19 @@ def convert_ldm_clip_checkpoint(checkpoint): raise ValueError(f"Scheduler of type {args.scheduler_type} doesn't exist!") # Convert the UNet2DConditionModel model. 
-# checkpoint = torch.load(args.unet_checkpoint_path) -# unet_config = create_unet_diffusers_config(original_config) -# converted_unet_checkpoint = convert_ldm_unet_checkpoint( -# checkpoint, unet_config, path=args.checkpoint_path, extract_ema=args.extract_ema -# ) -# -# unet = UNet2DConditionModel(**unet_config) -# unet.load_state_dict(converted_unet_checkpoint) + checkpoint = torch.load(args.unet_checkpoint_path) + # FIXME: temporary, extracted from a resolved cfg.model.unet_config object + # fmt: off + unet_config = {'image_size': None, 'in_channels': 4, 'out_channels': 4, 'model_channels': 320, 'attention_resolutions': [4, 2, 1], 'num_res_blocks': [2, 2, 2, 2], 'channel_mult': [1, 2, 4, 4], 'num_heads': 8, 'use_spatial_transformer': True, 'transformer_depth': 1, 'context_dim': 768, 'use_checkpoint': True, 'legacy': False} + unet_config = argparse.Namespace(**unet_config) + # fmt: on + unet_config = create_unet_diffusers_config(unet_config) + converted_unet_checkpoint = convert_vd_unet_checkpoint( + checkpoint, unet_config, path=args.unet_checkpoint_path, extract_ema=args.extract_ema + ) + + unet = UNet2DConditionModel(**unet_config) + unet.load_state_dict(converted_unet_checkpoint) # Convert the VAE model. if args.vae_checkpoint_path is not None: diff --git a/src/diffusers/models/attention.py b/src/diffusers/models/attention.py index e8ea37970e04..c436d472692b 100644 --- a/src/diffusers/models/attention.py +++ b/src/diffusers/models/attention.py @@ -225,6 +225,173 @@ def _set_use_memory_efficient_attention_xformers(self, use_memory_efficient_atte block._set_use_memory_efficient_attention_xformers(use_memory_efficient_attention_xformers) +class DualTransformer2DModel(ModelMixin, ConfigMixin): + """ + Transformer model for image-like data. Takes either discrete (classes of vector embeddings) or continuous (actual + embeddings) inputs. + + When input is continuous: First, project the input (aka embedding) and reshape to b, t, d. Then apply standard + transformer action. Finally, reshape to image. + + When input is discrete: First, input (classes of latent pixels) is converted to embeddings and has positional + embeddings applied, see `ImagePositionalEmbeddings`. Then apply standard transformer action. Finally, predict + classes of unnoised image. + + Note that it is assumed one of the input classes is the masked latent pixel. The predicted classes of the unnoised + image do not contain a prediction for the masked pixel as the unnoised image cannot be masked. + + Parameters: + num_attention_heads (`int`, *optional*, defaults to 16): The number of heads to use for multi-head attention. + attention_head_dim (`int`, *optional*, defaults to 88): The number of channels in each head. + in_channels (`int`, *optional*): + Pass if the input is continuous. The number of channels in the input and output. + num_layers (`int`, *optional*, defaults to 1): The number of layers of Transformer blocks to use. + dropout (`float`, *optional*, defaults to 0.1): The dropout probability to use. + cross_attention_dim (`int`, *optional*): The number of context dimensions to use. + sample_size (`int`, *optional*): Pass if the input is discrete. The width of the latent images. + Note that this is fixed at training time as it is used for learning a number of position embeddings. See + `ImagePositionalEmbeddings`. + num_vector_embeds (`int`, *optional*): + Pass if the input is discrete. The number of classes of the vector embeddings of the latent pixels. + Includes the class for the masked latent pixel. 
+ activation_fn (`str`, *optional*, defaults to `"geglu"`): Activation function to be used in feed-forward. + num_embeds_ada_norm ( `int`, *optional*): Pass if at least one of the norm_layers is `AdaLayerNorm`. + The number of diffusion steps used during training. Note that this is fixed at training time as it is used + to learn a number of embeddings that are added to the hidden states. During inference, you can denoise for + up to but not more than steps than `num_embeds_ada_norm`. + attention_bias (`bool`, *optional*): + Configure if the TransformerBlocks' attention should contain a bias parameter. + """ + + @register_to_config + def __init__( + self, + num_attention_heads: int = 16, + attention_head_dim: int = 88, + in_channels: Optional[int] = None, + num_layers: int = 1, + dropout: float = 0.0, + norm_num_groups: int = 32, + cross_attention_dim: Optional[int] = None, + attention_bias: bool = False, + activation_fn: str = "geglu", + num_embeds_ada_norm: Optional[int] = None, + ): + super().__init__() + self.num_attention_heads = num_attention_heads + self.attention_head_dim = attention_head_dim + inner_dim = num_attention_heads * attention_head_dim + + # 1. DualTransformer2DModel can process both standard continous images of shape `(batch_size, num_channels, width, height)` as well as quantized image embeddings of shape `(batch_size, num_image_vectors)` + # Define whether input is continuous or discrete depending on configuration + self.is_input_continuous = in_channels is not None + + # 2. Define input layers + self.in_channels = in_channels + + self.norm_0 = torch.nn.GroupNorm(num_groups=norm_num_groups, num_channels=in_channels, eps=1e-6, affine=True) + self.proj_in_0 = nn.Conv2d(in_channels, inner_dim, kernel_size=1, stride=1, padding=0) + self.norm_1 = torch.nn.GroupNorm(num_groups=norm_num_groups, num_channels=in_channels, eps=1e-6, affine=True) + self.proj_in_1 = nn.Conv2d(in_channels, inner_dim, kernel_size=1, stride=1, padding=0) + + # 3. Define transformers blocks + self.transformer_blocks_0 = nn.ModuleList( + [ + BasicTransformerBlock( + inner_dim, + num_attention_heads, + attention_head_dim, + dropout=dropout, + cross_attention_dim=cross_attention_dim, + activation_fn=activation_fn, + num_embeds_ada_norm=num_embeds_ada_norm, + attention_bias=attention_bias, + ) + for d in range(num_layers) + ] + ) + self.transformer_blocks_1 = nn.ModuleList( + [ + BasicTransformerBlock( + inner_dim, + num_attention_heads, + attention_head_dim, + dropout=dropout, + cross_attention_dim=cross_attention_dim, + activation_fn=activation_fn, + num_embeds_ada_norm=num_embeds_ada_norm, + attention_bias=attention_bias, + ) + for d in range(num_layers) + ] + ) + + # 4. Define output layers + self.proj_out_0 = nn.Conv2d(inner_dim, in_channels, kernel_size=1, stride=1, padding=0) + self.proj_out_1 = nn.Conv2d(inner_dim, in_channels, kernel_size=1, stride=1, padding=0) + + def _set_attention_slice(self, slice_size): + for block in self.transformer_blocks: + block._set_attention_slice(slice_size) + + def forward(self, hidden_states, encoder_hidden_states=None, timestep=None, return_dict: bool = True): + """ + Args: + hidden_states ( When discrete, `torch.LongTensor` of shape `(batch size, num latent pixels)`. + When continous, `torch.FloatTensor` of shape `(batch size, channel, height, width)`): Input + hidden_states + encoder_hidden_states ( `torch.LongTensor` of shape `(batch size, context dim)`, *optional*): + Conditional embeddings for cross attention layer. 
If not given, cross-attention defaults to + self-attention. + timestep ( `torch.long`, *optional*): + Optional timestep to be applied as an embedding in AdaLayerNorm's. Used to indicate denoising step. + return_dict (`bool`, *optional*, defaults to `True`): + Whether or not to return a [`models.unet_2d_condition.UNet2DConditionOutput`] instead of a plain tuple. + + Returns: + [`~models.attention.Transformer2DModelOutput`] or `tuple`: [`~models.attention.Transformer2DModelOutput`] + if `return_dict` is True, otherwise a `tuple`. When returning a tuple, the first element is the sample + tensor. + """ + # 1. Input + if self.is_input_continuous: + batch, channel, height, weight = hidden_states.shape + residual = hidden_states + hidden_states = self.norm(hidden_states) + hidden_states = self.proj_in(hidden_states) + inner_dim = hidden_states.shape[1] + hidden_states = hidden_states.permute(0, 2, 3, 1).reshape(batch, height * weight, inner_dim) + elif self.is_input_vectorized: + hidden_states = self.latent_image_embedding(hidden_states) + + # 2. Blocks + for block in self.transformer_blocks: + hidden_states = block(hidden_states, context=encoder_hidden_states, timestep=timestep) + + # 3. Output + if self.is_input_continuous: + hidden_states = hidden_states.reshape(batch, height, weight, inner_dim).permute(0, 3, 1, 2) + hidden_states = self.proj_out(hidden_states) + output = hidden_states + residual + elif self.is_input_vectorized: + hidden_states = self.norm_out(hidden_states) + logits = self.out(hidden_states) + # (batch, self.num_vector_embeds - 1, self.num_latent_pixels) + logits = logits.permute(0, 2, 1) + + # log(p(x_0)) + output = F.log_softmax(logits.double(), dim=1).float() + + if not return_dict: + return (output,) + + return Transformer2DModelOutput(sample=output) + + def _set_use_memory_efficient_attention_xformers(self, use_memory_efficient_attention_xformers: bool): + for block in self.transformer_blocks: + block._set_use_memory_efficient_attention_xformers(use_memory_efficient_attention_xformers) + + class AttentionBlock(nn.Module): """ An attention block that allows spatial positions to attend to each other. 
Originally ported from here, but adapted diff --git a/src/diffusers/models/unet_2d_blocks.py b/src/diffusers/models/unet_2d_blocks.py index 770043f053b2..eef099eff288 100644 --- a/src/diffusers/models/unet_2d_blocks.py +++ b/src/diffusers/models/unet_2d_blocks.py @@ -15,7 +15,7 @@ import torch from torch import nn -from .attention import AttentionBlock, Transformer2DModel +from .attention import AttentionBlock, Transformer2DModel, DualTransformer2DModel from .resnet import Downsample2D, FirDownsample2D, FirUpsample2D, ResnetBlock2D, Upsample2D @@ -75,6 +75,22 @@ def get_down_block( cross_attention_dim=cross_attention_dim, attn_num_head_channels=attn_num_head_channels, ) + elif down_block_type == "DualCrossAttnDownBlock2D": + if cross_attention_dim is None: + raise ValueError("cross_attention_dim must be specified for CrossAttnDownBlock2D") + return DualCrossAttnDownBlock2D( + num_layers=num_layers, + in_channels=in_channels, + out_channels=out_channels, + temb_channels=temb_channels, + add_downsample=add_downsample, + resnet_eps=resnet_eps, + resnet_act_fn=resnet_act_fn, + resnet_groups=resnet_groups, + downsample_padding=downsample_padding, + cross_attention_dim=cross_attention_dim, + attn_num_head_channels=attn_num_head_channels, + ) elif down_block_type == "SkipDownBlock2D": return SkipDownBlock2D( num_layers=num_layers, @@ -167,6 +183,22 @@ def get_up_block( cross_attention_dim=cross_attention_dim, attn_num_head_channels=attn_num_head_channels, ) + elif up_block_type == "DualCrossAttnUpBlock2D": + if cross_attention_dim is None: + raise ValueError("cross_attention_dim must be specified for CrossAttnUpBlock2D") + return DualCrossAttnUpBlock2D( + num_layers=num_layers, + in_channels=in_channels, + out_channels=out_channels, + prev_output_channel=prev_output_channel, + temb_channels=temb_channels, + add_upsample=add_upsample, + resnet_eps=resnet_eps, + resnet_act_fn=resnet_act_fn, + resnet_groups=resnet_groups, + cross_attention_dim=cross_attention_dim, + attn_num_head_channels=attn_num_head_channels, + ) elif up_block_type == "AttnUpBlock2D": return AttnUpBlock2D( num_layers=num_layers, @@ -404,6 +436,103 @@ def forward(self, hidden_states, temb=None, encoder_hidden_states=None): return hidden_states +class UNetMidBlock2DDualCrossAttn(nn.Module): + def __init__( + self, + in_channels: int, + temb_channels: int, + dropout: float = 0.0, + num_layers: int = 1, + resnet_eps: float = 1e-6, + resnet_time_scale_shift: str = "default", + resnet_act_fn: str = "swish", + resnet_groups: int = 32, + resnet_pre_norm: bool = True, + attn_num_head_channels=1, + attention_type="default", + output_scale_factor=1.0, + cross_attention_dim=1280, + **kwargs, + ): + super().__init__() + + self.attention_type = attention_type + self.attn_num_head_channels = attn_num_head_channels + resnet_groups = resnet_groups if resnet_groups is not None else min(in_channels // 4, 32) + + # there is always at least one resnet + resnets = [ + ResnetBlock2D( + in_channels=in_channels, + out_channels=in_channels, + temb_channels=temb_channels, + eps=resnet_eps, + groups=resnet_groups, + dropout=dropout, + time_embedding_norm=resnet_time_scale_shift, + non_linearity=resnet_act_fn, + output_scale_factor=output_scale_factor, + pre_norm=resnet_pre_norm, + ) + ] + attentions = [] + + for _ in range(num_layers): + attentions.append( + DualTransformer2DModel( + attn_num_head_channels, + in_channels // attn_num_head_channels, + in_channels=in_channels, + num_layers=1, + cross_attention_dim=cross_attention_dim, + 
norm_num_groups=resnet_groups, + ) + ) + resnets.append( + ResnetBlock2D( + in_channels=in_channels, + out_channels=in_channels, + temb_channels=temb_channels, + eps=resnet_eps, + groups=resnet_groups, + dropout=dropout, + time_embedding_norm=resnet_time_scale_shift, + non_linearity=resnet_act_fn, + output_scale_factor=output_scale_factor, + pre_norm=resnet_pre_norm, + ) + ) + + self.attentions = nn.ModuleList(attentions) + self.resnets = nn.ModuleList(resnets) + + def set_attention_slice(self, slice_size): + if slice_size is not None and self.attn_num_head_channels % slice_size != 0: + raise ValueError( + f"Make sure slice_size {slice_size} is a divisor of " + f"the number of heads used in cross_attention {self.attn_num_head_channels}" + ) + if slice_size is not None and slice_size > self.attn_num_head_channels: + raise ValueError( + f"Chunk_size {slice_size} has to be smaller or equal to " + f"the number of heads used in cross_attention {self.attn_num_head_channels}" + ) + + for attn in self.attentions: + attn._set_attention_slice(slice_size) + + def set_use_memory_efficient_attention_xformers(self, use_memory_efficient_attention_xformers: bool): + for attn in self.attentions: + attn._set_use_memory_efficient_attention_xformers(use_memory_efficient_attention_xformers) + + def forward(self, hidden_states, temb=None, encoder_hidden_states=None): + hidden_states = self.resnets[0](hidden_states, temb) + for attn, resnet in zip(self.attentions, self.resnets[1:]): + hidden_states = attn(hidden_states, encoder_hidden_states).sample + hidden_states = resnet(hidden_states, temb) + + return hidden_states + class AttnDownBlock2D(nn.Module): def __init__( self, @@ -607,6 +736,127 @@ def custom_forward(*inputs): return hidden_states, output_states +class DualCrossAttnDownBlock2D(nn.Module): + def __init__( + self, + in_channels: int, + out_channels: int, + temb_channels: int, + dropout: float = 0.0, + num_layers: int = 1, + resnet_eps: float = 1e-6, + resnet_time_scale_shift: str = "default", + resnet_act_fn: str = "swish", + resnet_groups: int = 32, + resnet_pre_norm: bool = True, + attn_num_head_channels=1, + cross_attention_dim=1280, + attention_type="default", + output_scale_factor=1.0, + downsample_padding=1, + add_downsample=True, + ): + super().__init__() + resnets = [] + attentions = [] + + self.attention_type = attention_type + self.attn_num_head_channels = attn_num_head_channels + + for i in range(num_layers): + in_channels = in_channels if i == 0 else out_channels + resnets.append( + ResnetBlock2D( + in_channels=in_channels, + out_channels=out_channels, + temb_channels=temb_channels, + eps=resnet_eps, + groups=resnet_groups, + dropout=dropout, + time_embedding_norm=resnet_time_scale_shift, + non_linearity=resnet_act_fn, + output_scale_factor=output_scale_factor, + pre_norm=resnet_pre_norm, + ) + ) + attentions.append( + DualTransformer2DModel( + attn_num_head_channels, + out_channels // attn_num_head_channels, + in_channels=out_channels, + num_layers=1, + cross_attention_dim=cross_attention_dim, + norm_num_groups=resnet_groups, + ) + ) + self.attentions = nn.ModuleList(attentions) + self.resnets = nn.ModuleList(resnets) + + if add_downsample: + self.downsamplers = nn.ModuleList( + [ + Downsample2D( + out_channels, use_conv=True, out_channels=out_channels, padding=downsample_padding, name="op" + ) + ] + ) + else: + self.downsamplers = None + + self.gradient_checkpointing = False + + def set_attention_slice(self, slice_size): + if slice_size is not None and self.attn_num_head_channels % 
slice_size != 0: + raise ValueError( + f"Make sure slice_size {slice_size} is a divisor of " + f"the number of heads used in cross_attention {self.attn_num_head_channels}" + ) + if slice_size is not None and slice_size > self.attn_num_head_channels: + raise ValueError( + f"Chunk_size {slice_size} has to be smaller or equal to " + f"the number of heads used in cross_attention {self.attn_num_head_channels}" + ) + + for attn in self.attentions: + attn._set_attention_slice(slice_size) + + def set_use_memory_efficient_attention_xformers(self, use_memory_efficient_attention_xformers: bool): + for attn in self.attentions: + attn._set_use_memory_efficient_attention_xformers(use_memory_efficient_attention_xformers) + + def forward(self, hidden_states, temb=None, encoder_hidden_states=None): + output_states = () + + for resnet, attn in zip(self.resnets, self.attentions): + if self.training and self.gradient_checkpointing: + + def create_custom_forward(module, return_dict=None): + def custom_forward(*inputs): + if return_dict is not None: + return module(*inputs, return_dict=return_dict) + else: + return module(*inputs) + + return custom_forward + + hidden_states = torch.utils.checkpoint.checkpoint(create_custom_forward(resnet), hidden_states, temb) + hidden_states = torch.utils.checkpoint.checkpoint( + create_custom_forward(attn, return_dict=False), hidden_states, encoder_hidden_states + )[0] + else: + hidden_states = resnet(hidden_states, temb) + hidden_states = attn(hidden_states, encoder_hidden_states=encoder_hidden_states).sample + + output_states += (hidden_states,) + + if self.downsamplers is not None: + for downsampler in self.downsamplers: + hidden_states = downsampler(hidden_states) + + output_states += (hidden_states,) + + return hidden_states, output_states + class DownBlock2D(nn.Module): def __init__( @@ -1196,6 +1446,132 @@ def custom_forward(*inputs): return hidden_states +class DualCrossAttnUpBlock2D(nn.Module): + def __init__( + self, + in_channels: int, + out_channels: int, + prev_output_channel: int, + temb_channels: int, + dropout: float = 0.0, + num_layers: int = 1, + resnet_eps: float = 1e-6, + resnet_time_scale_shift: str = "default", + resnet_act_fn: str = "swish", + resnet_groups: int = 32, + resnet_pre_norm: bool = True, + attn_num_head_channels=1, + cross_attention_dim=1280, + attention_type="default", + output_scale_factor=1.0, + add_upsample=True, + ): + super().__init__() + resnets = [] + attentions = [] + + self.attention_type = attention_type + self.attn_num_head_channels = attn_num_head_channels + + for i in range(num_layers): + res_skip_channels = in_channels if (i == num_layers - 1) else out_channels + resnet_in_channels = prev_output_channel if i == 0 else out_channels + + resnets.append( + ResnetBlock2D( + in_channels=resnet_in_channels + res_skip_channels, + out_channels=out_channels, + temb_channels=temb_channels, + eps=resnet_eps, + groups=resnet_groups, + dropout=dropout, + time_embedding_norm=resnet_time_scale_shift, + non_linearity=resnet_act_fn, + output_scale_factor=output_scale_factor, + pre_norm=resnet_pre_norm, + ) + ) + attentions.append( + DualTransformer2DModel( + attn_num_head_channels, + out_channels // attn_num_head_channels, + in_channels=out_channels, + num_layers=1, + cross_attention_dim=cross_attention_dim, + norm_num_groups=resnet_groups, + ) + ) + self.attentions = nn.ModuleList(attentions) + self.resnets = nn.ModuleList(resnets) + + if add_upsample: + self.upsamplers = nn.ModuleList([Upsample2D(out_channels, use_conv=True, 
out_channels=out_channels)]) + else: + self.upsamplers = None + + self.gradient_checkpointing = False + + def set_attention_slice(self, slice_size): + if slice_size is not None and self.attn_num_head_channels % slice_size != 0: + raise ValueError( + f"Make sure slice_size {slice_size} is a divisor of " + f"the number of heads used in cross_attention {self.attn_num_head_channels}" + ) + if slice_size is not None and slice_size > self.attn_num_head_channels: + raise ValueError( + f"Chunk_size {slice_size} has to be smaller or equal to " + f"the number of heads used in cross_attention {self.attn_num_head_channels}" + ) + + for attn in self.attentions: + attn._set_attention_slice(slice_size) + + self.gradient_checkpointing = False + + def set_use_memory_efficient_attention_xformers(self, use_memory_efficient_attention_xformers: bool): + for attn in self.attentions: + attn._set_use_memory_efficient_attention_xformers(use_memory_efficient_attention_xformers) + + def forward( + self, + hidden_states, + res_hidden_states_tuple, + temb=None, + encoder_hidden_states=None, + upsample_size=None, + ): + for resnet, attn in zip(self.resnets, self.attentions): + # pop res hidden states + res_hidden_states = res_hidden_states_tuple[-1] + res_hidden_states_tuple = res_hidden_states_tuple[:-1] + hidden_states = torch.cat([hidden_states, res_hidden_states], dim=1) + + if self.training and self.gradient_checkpointing: + + def create_custom_forward(module, return_dict=None): + def custom_forward(*inputs): + if return_dict is not None: + return module(*inputs, return_dict=return_dict) + else: + return module(*inputs) + + return custom_forward + + hidden_states = torch.utils.checkpoint.checkpoint(create_custom_forward(resnet), hidden_states, temb) + hidden_states = torch.utils.checkpoint.checkpoint( + create_custom_forward(attn, return_dict=False), hidden_states, encoder_hidden_states + )[0] + else: + hidden_states = resnet(hidden_states, temb) + hidden_states = attn(hidden_states, encoder_hidden_states=encoder_hidden_states).sample + + if self.upsamplers is not None: + for upsampler in self.upsamplers: + hidden_states = upsampler(hidden_states, upsample_size) + + return hidden_states + + class UpBlock2D(nn.Module): def __init__( self, diff --git a/src/diffusers/models/unet_2d_condition.py b/src/diffusers/models/unet_2d_condition.py index 7f7f3ecd4435..5dcc705466ea 100644 --- a/src/diffusers/models/unet_2d_condition.py +++ b/src/diffusers/models/unet_2d_condition.py @@ -27,6 +27,7 @@ CrossAttnUpBlock2D, DownBlock2D, UNetMidBlock2DCrossAttn, + UNetMidBlock2DDualCrossAttn, UpBlock2D, get_down_block, get_up_block, @@ -149,7 +150,8 @@ def __init__( self.down_blocks.append(down_block) # mid - self.mid_block = UNetMidBlock2DCrossAttn( + # TODO: temporary, need to add get_mid_block() + self.mid_block = UNetMidBlock2DDualCrossAttn( in_channels=block_out_channels[-1], temb_channels=time_embed_dim, resnet_eps=norm_eps, From e00a9cf2d99ff619b50102e7c5364c485cb828d6 Mon Sep 17 00:00:00 2001 From: anton-l Date: Tue, 15 Nov 2022 17:53:26 +0100 Subject: [PATCH 03/49] revert dual attn --- src/diffusers/models/attention.py | 167 ---------- src/diffusers/models/unet_2d_blocks.py | 378 +--------------------- src/diffusers/models/unet_2d_condition.py | 4 +- 3 files changed, 2 insertions(+), 547 deletions(-) diff --git a/src/diffusers/models/attention.py b/src/diffusers/models/attention.py index c436d472692b..e8ea37970e04 100644 --- a/src/diffusers/models/attention.py +++ b/src/diffusers/models/attention.py @@ -225,173 +225,6 @@ def 
_set_use_memory_efficient_attention_xformers(self, use_memory_efficient_atte block._set_use_memory_efficient_attention_xformers(use_memory_efficient_attention_xformers) -class DualTransformer2DModel(ModelMixin, ConfigMixin): - """ - Transformer model for image-like data. Takes either discrete (classes of vector embeddings) or continuous (actual - embeddings) inputs. - - When input is continuous: First, project the input (aka embedding) and reshape to b, t, d. Then apply standard - transformer action. Finally, reshape to image. - - When input is discrete: First, input (classes of latent pixels) is converted to embeddings and has positional - embeddings applied, see `ImagePositionalEmbeddings`. Then apply standard transformer action. Finally, predict - classes of unnoised image. - - Note that it is assumed one of the input classes is the masked latent pixel. The predicted classes of the unnoised - image do not contain a prediction for the masked pixel as the unnoised image cannot be masked. - - Parameters: - num_attention_heads (`int`, *optional*, defaults to 16): The number of heads to use for multi-head attention. - attention_head_dim (`int`, *optional*, defaults to 88): The number of channels in each head. - in_channels (`int`, *optional*): - Pass if the input is continuous. The number of channels in the input and output. - num_layers (`int`, *optional*, defaults to 1): The number of layers of Transformer blocks to use. - dropout (`float`, *optional*, defaults to 0.1): The dropout probability to use. - cross_attention_dim (`int`, *optional*): The number of context dimensions to use. - sample_size (`int`, *optional*): Pass if the input is discrete. The width of the latent images. - Note that this is fixed at training time as it is used for learning a number of position embeddings. See - `ImagePositionalEmbeddings`. - num_vector_embeds (`int`, *optional*): - Pass if the input is discrete. The number of classes of the vector embeddings of the latent pixels. - Includes the class for the masked latent pixel. - activation_fn (`str`, *optional*, defaults to `"geglu"`): Activation function to be used in feed-forward. - num_embeds_ada_norm ( `int`, *optional*): Pass if at least one of the norm_layers is `AdaLayerNorm`. - The number of diffusion steps used during training. Note that this is fixed at training time as it is used - to learn a number of embeddings that are added to the hidden states. During inference, you can denoise for - up to but not more than steps than `num_embeds_ada_norm`. - attention_bias (`bool`, *optional*): - Configure if the TransformerBlocks' attention should contain a bias parameter. - """ - - @register_to_config - def __init__( - self, - num_attention_heads: int = 16, - attention_head_dim: int = 88, - in_channels: Optional[int] = None, - num_layers: int = 1, - dropout: float = 0.0, - norm_num_groups: int = 32, - cross_attention_dim: Optional[int] = None, - attention_bias: bool = False, - activation_fn: str = "geglu", - num_embeds_ada_norm: Optional[int] = None, - ): - super().__init__() - self.num_attention_heads = num_attention_heads - self.attention_head_dim = attention_head_dim - inner_dim = num_attention_heads * attention_head_dim - - # 1. 
DualTransformer2DModel can process both standard continous images of shape `(batch_size, num_channels, width, height)` as well as quantized image embeddings of shape `(batch_size, num_image_vectors)` - # Define whether input is continuous or discrete depending on configuration - self.is_input_continuous = in_channels is not None - - # 2. Define input layers - self.in_channels = in_channels - - self.norm_0 = torch.nn.GroupNorm(num_groups=norm_num_groups, num_channels=in_channels, eps=1e-6, affine=True) - self.proj_in_0 = nn.Conv2d(in_channels, inner_dim, kernel_size=1, stride=1, padding=0) - self.norm_1 = torch.nn.GroupNorm(num_groups=norm_num_groups, num_channels=in_channels, eps=1e-6, affine=True) - self.proj_in_1 = nn.Conv2d(in_channels, inner_dim, kernel_size=1, stride=1, padding=0) - - # 3. Define transformers blocks - self.transformer_blocks_0 = nn.ModuleList( - [ - BasicTransformerBlock( - inner_dim, - num_attention_heads, - attention_head_dim, - dropout=dropout, - cross_attention_dim=cross_attention_dim, - activation_fn=activation_fn, - num_embeds_ada_norm=num_embeds_ada_norm, - attention_bias=attention_bias, - ) - for d in range(num_layers) - ] - ) - self.transformer_blocks_1 = nn.ModuleList( - [ - BasicTransformerBlock( - inner_dim, - num_attention_heads, - attention_head_dim, - dropout=dropout, - cross_attention_dim=cross_attention_dim, - activation_fn=activation_fn, - num_embeds_ada_norm=num_embeds_ada_norm, - attention_bias=attention_bias, - ) - for d in range(num_layers) - ] - ) - - # 4. Define output layers - self.proj_out_0 = nn.Conv2d(inner_dim, in_channels, kernel_size=1, stride=1, padding=0) - self.proj_out_1 = nn.Conv2d(inner_dim, in_channels, kernel_size=1, stride=1, padding=0) - - def _set_attention_slice(self, slice_size): - for block in self.transformer_blocks: - block._set_attention_slice(slice_size) - - def forward(self, hidden_states, encoder_hidden_states=None, timestep=None, return_dict: bool = True): - """ - Args: - hidden_states ( When discrete, `torch.LongTensor` of shape `(batch size, num latent pixels)`. - When continous, `torch.FloatTensor` of shape `(batch size, channel, height, width)`): Input - hidden_states - encoder_hidden_states ( `torch.LongTensor` of shape `(batch size, context dim)`, *optional*): - Conditional embeddings for cross attention layer. If not given, cross-attention defaults to - self-attention. - timestep ( `torch.long`, *optional*): - Optional timestep to be applied as an embedding in AdaLayerNorm's. Used to indicate denoising step. - return_dict (`bool`, *optional*, defaults to `True`): - Whether or not to return a [`models.unet_2d_condition.UNet2DConditionOutput`] instead of a plain tuple. - - Returns: - [`~models.attention.Transformer2DModelOutput`] or `tuple`: [`~models.attention.Transformer2DModelOutput`] - if `return_dict` is True, otherwise a `tuple`. When returning a tuple, the first element is the sample - tensor. - """ - # 1. Input - if self.is_input_continuous: - batch, channel, height, weight = hidden_states.shape - residual = hidden_states - hidden_states = self.norm(hidden_states) - hidden_states = self.proj_in(hidden_states) - inner_dim = hidden_states.shape[1] - hidden_states = hidden_states.permute(0, 2, 3, 1).reshape(batch, height * weight, inner_dim) - elif self.is_input_vectorized: - hidden_states = self.latent_image_embedding(hidden_states) - - # 2. Blocks - for block in self.transformer_blocks: - hidden_states = block(hidden_states, context=encoder_hidden_states, timestep=timestep) - - # 3. 
Output - if self.is_input_continuous: - hidden_states = hidden_states.reshape(batch, height, weight, inner_dim).permute(0, 3, 1, 2) - hidden_states = self.proj_out(hidden_states) - output = hidden_states + residual - elif self.is_input_vectorized: - hidden_states = self.norm_out(hidden_states) - logits = self.out(hidden_states) - # (batch, self.num_vector_embeds - 1, self.num_latent_pixels) - logits = logits.permute(0, 2, 1) - - # log(p(x_0)) - output = F.log_softmax(logits.double(), dim=1).float() - - if not return_dict: - return (output,) - - return Transformer2DModelOutput(sample=output) - - def _set_use_memory_efficient_attention_xformers(self, use_memory_efficient_attention_xformers: bool): - for block in self.transformer_blocks: - block._set_use_memory_efficient_attention_xformers(use_memory_efficient_attention_xformers) - - class AttentionBlock(nn.Module): """ An attention block that allows spatial positions to attend to each other. Originally ported from here, but adapted diff --git a/src/diffusers/models/unet_2d_blocks.py b/src/diffusers/models/unet_2d_blocks.py index eef099eff288..770043f053b2 100644 --- a/src/diffusers/models/unet_2d_blocks.py +++ b/src/diffusers/models/unet_2d_blocks.py @@ -15,7 +15,7 @@ import torch from torch import nn -from .attention import AttentionBlock, Transformer2DModel, DualTransformer2DModel +from .attention import AttentionBlock, Transformer2DModel from .resnet import Downsample2D, FirDownsample2D, FirUpsample2D, ResnetBlock2D, Upsample2D @@ -75,22 +75,6 @@ def get_down_block( cross_attention_dim=cross_attention_dim, attn_num_head_channels=attn_num_head_channels, ) - elif down_block_type == "DualCrossAttnDownBlock2D": - if cross_attention_dim is None: - raise ValueError("cross_attention_dim must be specified for CrossAttnDownBlock2D") - return DualCrossAttnDownBlock2D( - num_layers=num_layers, - in_channels=in_channels, - out_channels=out_channels, - temb_channels=temb_channels, - add_downsample=add_downsample, - resnet_eps=resnet_eps, - resnet_act_fn=resnet_act_fn, - resnet_groups=resnet_groups, - downsample_padding=downsample_padding, - cross_attention_dim=cross_attention_dim, - attn_num_head_channels=attn_num_head_channels, - ) elif down_block_type == "SkipDownBlock2D": return SkipDownBlock2D( num_layers=num_layers, @@ -183,22 +167,6 @@ def get_up_block( cross_attention_dim=cross_attention_dim, attn_num_head_channels=attn_num_head_channels, ) - elif up_block_type == "DualCrossAttnUpBlock2D": - if cross_attention_dim is None: - raise ValueError("cross_attention_dim must be specified for CrossAttnUpBlock2D") - return DualCrossAttnUpBlock2D( - num_layers=num_layers, - in_channels=in_channels, - out_channels=out_channels, - prev_output_channel=prev_output_channel, - temb_channels=temb_channels, - add_upsample=add_upsample, - resnet_eps=resnet_eps, - resnet_act_fn=resnet_act_fn, - resnet_groups=resnet_groups, - cross_attention_dim=cross_attention_dim, - attn_num_head_channels=attn_num_head_channels, - ) elif up_block_type == "AttnUpBlock2D": return AttnUpBlock2D( num_layers=num_layers, @@ -436,103 +404,6 @@ def forward(self, hidden_states, temb=None, encoder_hidden_states=None): return hidden_states -class UNetMidBlock2DDualCrossAttn(nn.Module): - def __init__( - self, - in_channels: int, - temb_channels: int, - dropout: float = 0.0, - num_layers: int = 1, - resnet_eps: float = 1e-6, - resnet_time_scale_shift: str = "default", - resnet_act_fn: str = "swish", - resnet_groups: int = 32, - resnet_pre_norm: bool = True, - attn_num_head_channels=1, - 
attention_type="default", - output_scale_factor=1.0, - cross_attention_dim=1280, - **kwargs, - ): - super().__init__() - - self.attention_type = attention_type - self.attn_num_head_channels = attn_num_head_channels - resnet_groups = resnet_groups if resnet_groups is not None else min(in_channels // 4, 32) - - # there is always at least one resnet - resnets = [ - ResnetBlock2D( - in_channels=in_channels, - out_channels=in_channels, - temb_channels=temb_channels, - eps=resnet_eps, - groups=resnet_groups, - dropout=dropout, - time_embedding_norm=resnet_time_scale_shift, - non_linearity=resnet_act_fn, - output_scale_factor=output_scale_factor, - pre_norm=resnet_pre_norm, - ) - ] - attentions = [] - - for _ in range(num_layers): - attentions.append( - DualTransformer2DModel( - attn_num_head_channels, - in_channels // attn_num_head_channels, - in_channels=in_channels, - num_layers=1, - cross_attention_dim=cross_attention_dim, - norm_num_groups=resnet_groups, - ) - ) - resnets.append( - ResnetBlock2D( - in_channels=in_channels, - out_channels=in_channels, - temb_channels=temb_channels, - eps=resnet_eps, - groups=resnet_groups, - dropout=dropout, - time_embedding_norm=resnet_time_scale_shift, - non_linearity=resnet_act_fn, - output_scale_factor=output_scale_factor, - pre_norm=resnet_pre_norm, - ) - ) - - self.attentions = nn.ModuleList(attentions) - self.resnets = nn.ModuleList(resnets) - - def set_attention_slice(self, slice_size): - if slice_size is not None and self.attn_num_head_channels % slice_size != 0: - raise ValueError( - f"Make sure slice_size {slice_size} is a divisor of " - f"the number of heads used in cross_attention {self.attn_num_head_channels}" - ) - if slice_size is not None and slice_size > self.attn_num_head_channels: - raise ValueError( - f"Chunk_size {slice_size} has to be smaller or equal to " - f"the number of heads used in cross_attention {self.attn_num_head_channels}" - ) - - for attn in self.attentions: - attn._set_attention_slice(slice_size) - - def set_use_memory_efficient_attention_xformers(self, use_memory_efficient_attention_xformers: bool): - for attn in self.attentions: - attn._set_use_memory_efficient_attention_xformers(use_memory_efficient_attention_xformers) - - def forward(self, hidden_states, temb=None, encoder_hidden_states=None): - hidden_states = self.resnets[0](hidden_states, temb) - for attn, resnet in zip(self.attentions, self.resnets[1:]): - hidden_states = attn(hidden_states, encoder_hidden_states).sample - hidden_states = resnet(hidden_states, temb) - - return hidden_states - class AttnDownBlock2D(nn.Module): def __init__( self, @@ -736,127 +607,6 @@ def custom_forward(*inputs): return hidden_states, output_states -class DualCrossAttnDownBlock2D(nn.Module): - def __init__( - self, - in_channels: int, - out_channels: int, - temb_channels: int, - dropout: float = 0.0, - num_layers: int = 1, - resnet_eps: float = 1e-6, - resnet_time_scale_shift: str = "default", - resnet_act_fn: str = "swish", - resnet_groups: int = 32, - resnet_pre_norm: bool = True, - attn_num_head_channels=1, - cross_attention_dim=1280, - attention_type="default", - output_scale_factor=1.0, - downsample_padding=1, - add_downsample=True, - ): - super().__init__() - resnets = [] - attentions = [] - - self.attention_type = attention_type - self.attn_num_head_channels = attn_num_head_channels - - for i in range(num_layers): - in_channels = in_channels if i == 0 else out_channels - resnets.append( - ResnetBlock2D( - in_channels=in_channels, - out_channels=out_channels, - 
temb_channels=temb_channels, - eps=resnet_eps, - groups=resnet_groups, - dropout=dropout, - time_embedding_norm=resnet_time_scale_shift, - non_linearity=resnet_act_fn, - output_scale_factor=output_scale_factor, - pre_norm=resnet_pre_norm, - ) - ) - attentions.append( - DualTransformer2DModel( - attn_num_head_channels, - out_channels // attn_num_head_channels, - in_channels=out_channels, - num_layers=1, - cross_attention_dim=cross_attention_dim, - norm_num_groups=resnet_groups, - ) - ) - self.attentions = nn.ModuleList(attentions) - self.resnets = nn.ModuleList(resnets) - - if add_downsample: - self.downsamplers = nn.ModuleList( - [ - Downsample2D( - out_channels, use_conv=True, out_channels=out_channels, padding=downsample_padding, name="op" - ) - ] - ) - else: - self.downsamplers = None - - self.gradient_checkpointing = False - - def set_attention_slice(self, slice_size): - if slice_size is not None and self.attn_num_head_channels % slice_size != 0: - raise ValueError( - f"Make sure slice_size {slice_size} is a divisor of " - f"the number of heads used in cross_attention {self.attn_num_head_channels}" - ) - if slice_size is not None and slice_size > self.attn_num_head_channels: - raise ValueError( - f"Chunk_size {slice_size} has to be smaller or equal to " - f"the number of heads used in cross_attention {self.attn_num_head_channels}" - ) - - for attn in self.attentions: - attn._set_attention_slice(slice_size) - - def set_use_memory_efficient_attention_xformers(self, use_memory_efficient_attention_xformers: bool): - for attn in self.attentions: - attn._set_use_memory_efficient_attention_xformers(use_memory_efficient_attention_xformers) - - def forward(self, hidden_states, temb=None, encoder_hidden_states=None): - output_states = () - - for resnet, attn in zip(self.resnets, self.attentions): - if self.training and self.gradient_checkpointing: - - def create_custom_forward(module, return_dict=None): - def custom_forward(*inputs): - if return_dict is not None: - return module(*inputs, return_dict=return_dict) - else: - return module(*inputs) - - return custom_forward - - hidden_states = torch.utils.checkpoint.checkpoint(create_custom_forward(resnet), hidden_states, temb) - hidden_states = torch.utils.checkpoint.checkpoint( - create_custom_forward(attn, return_dict=False), hidden_states, encoder_hidden_states - )[0] - else: - hidden_states = resnet(hidden_states, temb) - hidden_states = attn(hidden_states, encoder_hidden_states=encoder_hidden_states).sample - - output_states += (hidden_states,) - - if self.downsamplers is not None: - for downsampler in self.downsamplers: - hidden_states = downsampler(hidden_states) - - output_states += (hidden_states,) - - return hidden_states, output_states - class DownBlock2D(nn.Module): def __init__( @@ -1446,132 +1196,6 @@ def custom_forward(*inputs): return hidden_states -class DualCrossAttnUpBlock2D(nn.Module): - def __init__( - self, - in_channels: int, - out_channels: int, - prev_output_channel: int, - temb_channels: int, - dropout: float = 0.0, - num_layers: int = 1, - resnet_eps: float = 1e-6, - resnet_time_scale_shift: str = "default", - resnet_act_fn: str = "swish", - resnet_groups: int = 32, - resnet_pre_norm: bool = True, - attn_num_head_channels=1, - cross_attention_dim=1280, - attention_type="default", - output_scale_factor=1.0, - add_upsample=True, - ): - super().__init__() - resnets = [] - attentions = [] - - self.attention_type = attention_type - self.attn_num_head_channels = attn_num_head_channels - - for i in range(num_layers): - 
res_skip_channels = in_channels if (i == num_layers - 1) else out_channels - resnet_in_channels = prev_output_channel if i == 0 else out_channels - - resnets.append( - ResnetBlock2D( - in_channels=resnet_in_channels + res_skip_channels, - out_channels=out_channels, - temb_channels=temb_channels, - eps=resnet_eps, - groups=resnet_groups, - dropout=dropout, - time_embedding_norm=resnet_time_scale_shift, - non_linearity=resnet_act_fn, - output_scale_factor=output_scale_factor, - pre_norm=resnet_pre_norm, - ) - ) - attentions.append( - DualTransformer2DModel( - attn_num_head_channels, - out_channels // attn_num_head_channels, - in_channels=out_channels, - num_layers=1, - cross_attention_dim=cross_attention_dim, - norm_num_groups=resnet_groups, - ) - ) - self.attentions = nn.ModuleList(attentions) - self.resnets = nn.ModuleList(resnets) - - if add_upsample: - self.upsamplers = nn.ModuleList([Upsample2D(out_channels, use_conv=True, out_channels=out_channels)]) - else: - self.upsamplers = None - - self.gradient_checkpointing = False - - def set_attention_slice(self, slice_size): - if slice_size is not None and self.attn_num_head_channels % slice_size != 0: - raise ValueError( - f"Make sure slice_size {slice_size} is a divisor of " - f"the number of heads used in cross_attention {self.attn_num_head_channels}" - ) - if slice_size is not None and slice_size > self.attn_num_head_channels: - raise ValueError( - f"Chunk_size {slice_size} has to be smaller or equal to " - f"the number of heads used in cross_attention {self.attn_num_head_channels}" - ) - - for attn in self.attentions: - attn._set_attention_slice(slice_size) - - self.gradient_checkpointing = False - - def set_use_memory_efficient_attention_xformers(self, use_memory_efficient_attention_xformers: bool): - for attn in self.attentions: - attn._set_use_memory_efficient_attention_xformers(use_memory_efficient_attention_xformers) - - def forward( - self, - hidden_states, - res_hidden_states_tuple, - temb=None, - encoder_hidden_states=None, - upsample_size=None, - ): - for resnet, attn in zip(self.resnets, self.attentions): - # pop res hidden states - res_hidden_states = res_hidden_states_tuple[-1] - res_hidden_states_tuple = res_hidden_states_tuple[:-1] - hidden_states = torch.cat([hidden_states, res_hidden_states], dim=1) - - if self.training and self.gradient_checkpointing: - - def create_custom_forward(module, return_dict=None): - def custom_forward(*inputs): - if return_dict is not None: - return module(*inputs, return_dict=return_dict) - else: - return module(*inputs) - - return custom_forward - - hidden_states = torch.utils.checkpoint.checkpoint(create_custom_forward(resnet), hidden_states, temb) - hidden_states = torch.utils.checkpoint.checkpoint( - create_custom_forward(attn, return_dict=False), hidden_states, encoder_hidden_states - )[0] - else: - hidden_states = resnet(hidden_states, temb) - hidden_states = attn(hidden_states, encoder_hidden_states=encoder_hidden_states).sample - - if self.upsamplers is not None: - for upsampler in self.upsamplers: - hidden_states = upsampler(hidden_states, upsample_size) - - return hidden_states - - class UpBlock2D(nn.Module): def __init__( self, diff --git a/src/diffusers/models/unet_2d_condition.py b/src/diffusers/models/unet_2d_condition.py index 5dcc705466ea..7f7f3ecd4435 100644 --- a/src/diffusers/models/unet_2d_condition.py +++ b/src/diffusers/models/unet_2d_condition.py @@ -27,7 +27,6 @@ CrossAttnUpBlock2D, DownBlock2D, UNetMidBlock2DCrossAttn, - UNetMidBlock2DDualCrossAttn, UpBlock2D, 
get_down_block, get_up_block, @@ -150,8 +149,7 @@ def __init__( self.down_blocks.append(down_block) # mid - # TODO: temporary, need to add get_mid_block() - self.mid_block = UNetMidBlock2DDualCrossAttn( + self.mid_block = UNetMidBlock2DCrossAttn( in_channels=block_out_channels[-1], temb_channels=time_embed_dim, resnet_eps=norm_eps, From 833cd1de1c4ad297ba92e6c0a5a2d2760cfeeb44 Mon Sep 17 00:00:00 2001 From: anton-l Date: Tue, 15 Nov 2022 21:26:40 +0100 Subject: [PATCH 04/49] adapt for vd-official --- ...onvert_versatile_diffusion_to_diffusers.py | 144 +++++++++++------- 1 file changed, 91 insertions(+), 53 deletions(-) diff --git a/scripts/convert_versatile_diffusion_to_diffusers.py b/scripts/convert_versatile_diffusion_to_diffusers.py index 07a4c2cd72b8..825c31970fef 100644 --- a/scripts/convert_versatile_diffusion_to_diffusers.py +++ b/scripts/convert_versatile_diffusion_to_diffusers.py @@ -16,34 +16,79 @@ import argparse import os +from argparse import Namespace import torch - -try: - from omegaconf import OmegaConf -except ImportError: - raise ImportError( - "OmegaConf is required to convert the LDM checkpoints. Please install it with `pip install OmegaConf`." - ) - from diffusers import ( AutoencoderKL, DDIMScheduler, DPMSolverMultistepScheduler, EulerAncestralDiscreteScheduler, EulerDiscreteScheduler, - LDMTextToImagePipeline, LMSDiscreteScheduler, PNDMScheduler, StableDiffusionPipeline, UNet2DConditionModel, ) from diffusers.pipelines.latent_diffusion.pipeline_latent_diffusion import LDMBertConfig, LDMBertModel -from diffusers.pipelines.stable_diffusion import StableDiffusionSafetyChecker from transformers import AutoFeatureExtractor, BertTokenizerFast, CLIPTextModel, CLIPTokenizer +SCHEDULER_CONFIG = Namespace( + **{ + "beta_linear_start": 0.00085, + "beta_linear_end": 0.012, + "timesteps": 1000, + "scale_factor": 0.18215, + } +) + +UNET_IMAGE_CONFIG = Namespace( + **{ + "input_channels": 4, + "model_channels": 320, + "output_channels": 4, + "num_noattn_blocks": [2, 2, 2, 2], + "channel_mult": [1, 2, 4, 4], + "with_attn": [True, True, True, False], + "num_heads": 8, + "context_dim": 768, + "use_checkpoint": True, + } +) + +UNET_TEXT_CONFIG = Namespace( + **{ + "input_channels": 768, + "model_channels": 320, + "output_channels": 768, + "num_noattn_blocks": [2, 2, 2, 2], + "channel_mult": [1, 2, 4, 4], + "second_dim": [4, 4, 4, 4], + "with_attn": [True, True, True, False], + "num_heads": 8, + "context_dim": 768, + "use_checkpoint": True, + } +) + +AUTOENCODER_CONFIG = Namespace( + **{ + "double_z": True, + "z_channels": 4, + "resolution": 256, + "in_channels": 3, + "out_ch": 3, + "ch": 128, + "ch_mult": [1, 2, 4, 4], + "num_res_blocks": 2, + "attn_resolutions": [], + "dropout": 0.0, + } +) + + def shave_segments(path, n_shave_prefix_segments=1): """ Removes segments. Positive values shave the first segments, negative shave the last segments. 
@@ -217,28 +262,28 @@ def create_unet_diffusers_config(unet_params): down_block_types = [] resolution = 1 for i in range(len(block_out_channels)): - block_type = "DualCrossAttnDownBlock2D" if resolution in unet_params.attention_resolutions else "DownBlock2D" + block_type = "CrossAttnDownBlock2D" if unet_params.with_attn[i] else "DownBlock2D" down_block_types.append(block_type) if i != len(block_out_channels) - 1: resolution *= 2 up_block_types = [] for i in range(len(block_out_channels)): - block_type = "DualCrossAttnUpBlock2D" if resolution in unet_params.attention_resolutions else "UpBlock2D" + block_type = "CrossAttnUpBlock2D" if unet_params.with_attn[-i - 1] else "UpBlock2D" up_block_types.append(block_type) resolution //= 2 - if not all(n == unet_params.num_res_blocks[0] for n in unet_params.num_res_blocks): + if not all(n == unet_params.num_noattn_blocks[0] for n in unet_params.num_noattn_blocks): raise ValueError("Not all num_res_blocks are equal, which is not supported in this script.") config = dict( - sample_size=unet_params.image_size, - in_channels=unet_params.in_channels, - out_channels=unet_params.out_channels, + sample_size=None, + in_channels=unet_params.input_channels, + out_channels=unet_params.output_channels, down_block_types=tuple(down_block_types), up_block_types=tuple(up_block_types), block_out_channels=tuple(block_out_channels), - layers_per_block=unet_params.num_res_blocks[0], + layers_per_block=unet_params.num_noattn_blocks[0], cross_attention_dim=unet_params.context_dim, attention_head_dim=unet_params.num_heads, ) @@ -246,12 +291,10 @@ def create_unet_diffusers_config(unet_params): return config -def create_vae_diffusers_config(original_config): +def create_vae_diffusers_config(vae_params): """ - Creates a config for the diffusers based on the config of the LDM model. + Creates a config for the diffusers based on the config of the VD model. """ - vae_params = original_config.model.params.first_stage_config.params.ddconfig - _ = original_config.model.params.first_stage_config.params.embed_dim block_out_channels = [vae_params.ch * mult for mult in vae_params.ch_mult] down_block_types = ["DownEncoderBlock2D"] * len(block_out_channels) @@ -290,7 +333,7 @@ def create_ldm_bert_config(original_config): return config -def convert_vd_unet_checkpoint(checkpoint, config, path=None, extract_ema=False): +def convert_vd_unet_checkpoint(checkpoint, config, unet_key, extract_ema=False): """ Takes a state dict and a config, and returns a converted checkpoint. """ @@ -299,10 +342,9 @@ def convert_vd_unet_checkpoint(checkpoint, config, path=None, extract_ema=False) unet_state_dict = {} keys = list(checkpoint.keys()) - unet_key = "model.diffusion_model." # at least a 100 parameters have to start with `model_ema` in order for the checkpoint to be EMA if sum(k.startswith("model_ema") for k in keys) > 100: - print(f"Checkpoint {path} has both EMA and non-EMA weights.") + print(f"Checkpoint has both EMA and non-EMA weights.") if extract_ema: print( "In this conversion only the EMA weights are extracted. 
If you want to instead extract the non-EMA" @@ -324,10 +366,10 @@ def convert_vd_unet_checkpoint(checkpoint, config, path=None, extract_ema=False) new_checkpoint = {} - new_checkpoint["time_embedding.linear_1.weight"] = unet_state_dict["time_embed.0.weight"] - new_checkpoint["time_embedding.linear_1.bias"] = unet_state_dict["time_embed.0.bias"] - new_checkpoint["time_embedding.linear_2.weight"] = unet_state_dict["time_embed.2.weight"] - new_checkpoint["time_embedding.linear_2.bias"] = unet_state_dict["time_embed.2.bias"] + new_checkpoint["time_embedding.linear_1.weight"] = checkpoint["model.diffusion_model.time_embed.0.weight"] + new_checkpoint["time_embedding.linear_1.bias"] = checkpoint["model.diffusion_model.time_embed.0.bias"] + new_checkpoint["time_embedding.linear_2.weight"] = checkpoint["model.diffusion_model.time_embed.2.weight"] + new_checkpoint["time_embedding.linear_2.bias"] = checkpoint["model.diffusion_model.time_embed.2.bias"] new_checkpoint["conv_in.weight"] = unet_state_dict["input_blocks.0.0.weight"] new_checkpoint["conv_in.bias"] = unet_state_dict["input_blocks.0.0.bias"] @@ -458,7 +500,7 @@ def convert_vd_unet_checkpoint(checkpoint, config, path=None, extract_ema=False) new_checkpoint[new_path] = unet_state_dict[old_path] - return new_checkpoint + return new_checkpoint def convert_ldm_vae_checkpoint(checkpoint, config): @@ -670,18 +712,11 @@ def convert_ldm_clip_checkpoint(checkpoint): args = parser.parse_args() - if args.original_config_file is None: - os.system( - "wget https://raw.githubusercontent.com/CompVis/stable-diffusion/main/configs/stable-diffusion/v1-inference.yaml" - ) - args.original_config_file = "./v1-inference.yaml" - - original_config = OmegaConf.load(args.original_config_file) - + scheduler_config = SCHEDULER_CONFIG - num_train_timesteps = original_config.model.params.timesteps - beta_start = original_config.model.params.linear_start - beta_end = original_config.model.params.linear_end + num_train_timesteps = scheduler_config.timesteps + beta_start = scheduler_config.beta_linear_start + beta_end = scheduler_config.beta_linear_end if args.scheduler_type == "pndm": scheduler = PNDMScheduler( beta_end=beta_end, @@ -714,23 +749,26 @@ def convert_ldm_clip_checkpoint(checkpoint): raise ValueError(f"Scheduler of type {args.scheduler_type} doesn't exist!") # Convert the UNet2DConditionModel model. 
- checkpoint = torch.load(args.unet_checkpoint_path) - # FIXME: temporary, extracted from a resolved cfg.model.unet_config object - # fmt: off - unet_config = {'image_size': None, 'in_channels': 4, 'out_channels': 4, 'model_channels': 320, 'attention_resolutions': [4, 2, 1], 'num_res_blocks': [2, 2, 2, 2], 'channel_mult': [1, 2, 4, 4], 'num_heads': 8, 'use_spatial_transformer': True, 'transformer_depth': 1, 'context_dim': 768, 'use_checkpoint': True, 'legacy': False} - unet_config = argparse.Namespace(**unet_config) - # fmt: on - unet_config = create_unet_diffusers_config(unet_config) - converted_unet_checkpoint = convert_vd_unet_checkpoint( - checkpoint, unet_config, path=args.unet_checkpoint_path, extract_ema=args.extract_ema - ) + if args.unet_checkpoint_path is not None: + unet_image_config = create_unet_diffusers_config(UNET_IMAGE_CONFIG) + checkpoint = torch.load(args.unet_checkpoint_path) + converted_unet_image_checkpoint = convert_vd_unet_checkpoint( + checkpoint, unet_image_config, unet_key="model.diffusion_model.unet_image.", extract_ema=args.extract_ema + ) + unet_image = UNet2DConditionModel(**unet_image_config) + unet_image.load_state_dict(converted_unet_image_checkpoint) + unet_image.save_pretrained(os.path.join(args.dump_path, "unet_image")) - unet = UNet2DConditionModel(**unet_config) - unet.load_state_dict(converted_unet_checkpoint) + # unet_text_config = create_unet_diffusers_config(UNET_TEXT_CONFIG) + # converted_unet_text_checkpoint = convert_vd_unet_checkpoint( + # checkpoint, unet_text_config, unet_key="model.diffusion_model.unet_text.", extract_ema=args.extract_ema + # ) + # unet_text = UNet2DConditionModel(**unet_text_config) + # unet_text.load_state_dict(converted_unet_text_checkpoint) # Convert the VAE model. if args.vae_checkpoint_path is not None: - vae_config = create_vae_diffusers_config(original_config) + vae_config = create_vae_diffusers_config(AUTOENCODER_CONFIG) checkpoint = torch.load(args.vae_checkpoint_path) converted_vae_checkpoint = convert_ldm_vae_checkpoint(checkpoint, vae_config) From e455921ff0da509720f244fa3fd12503b19e64ef Mon Sep 17 00:00:00 2001 From: anton-l Date: Wed, 16 Nov 2022 00:06:51 +0100 Subject: [PATCH 05/49] test the full pipeline --- ...onvert_versatile_diffusion_to_diffusers.py | 71 +++--- src/diffusers/__init__.py | 1 + src/diffusers/pipelines/__init__.py | 1 + .../pipelines/versatile_diffusion/__init__.py | 1 + .../pipeline_versatile_diffusion.py | 215 ++++++++++++++++++ .../pipelines/versatile_diffusion/__init__.py | 0 .../test_versatile_diffusion.py | 52 +++++ 7 files changed, 301 insertions(+), 40 deletions(-) create mode 100644 src/diffusers/pipelines/versatile_diffusion/__init__.py create mode 100644 src/diffusers/pipelines/versatile_diffusion/pipeline_versatile_diffusion.py create mode 100644 tests/pipelines/versatile_diffusion/__init__.py create mode 100644 tests/pipelines/versatile_diffusion/test_versatile_diffusion.py diff --git a/scripts/convert_versatile_diffusion_to_diffusers.py b/scripts/convert_versatile_diffusion_to_diffusers.py index 825c31970fef..27fe7c1ffa6b 100644 --- a/scripts/convert_versatile_diffusion_to_diffusers.py +++ b/scripts/convert_versatile_diffusion_to_diffusers.py @@ -28,11 +28,11 @@ EulerDiscreteScheduler, LMSDiscreteScheduler, PNDMScheduler, - StableDiffusionPipeline, UNet2DConditionModel, + VersatileDiffusionPipeline, ) from diffusers.pipelines.latent_diffusion.pipeline_latent_diffusion import LDMBertConfig, LDMBertModel -from transformers import AutoFeatureExtractor, BertTokenizerFast, 
CLIPTextModel, CLIPTokenizer +from transformers import CLIPProcessor, CLIPTextModelWithProjection, CLIPTokenizer, CLIPVisionModelWithProjection SCHEDULER_CONFIG = Namespace( @@ -44,7 +44,7 @@ } ) -UNET_IMAGE_CONFIG = Namespace( +IMAGE_UNET_CONFIG = Namespace( **{ "input_channels": 4, "model_channels": 320, @@ -58,7 +58,7 @@ } ) -UNET_TEXT_CONFIG = Namespace( +TEXT_UNET_CONFIG = Namespace( **{ "input_channels": 768, "model_channels": 320, @@ -750,21 +750,20 @@ def convert_ldm_clip_checkpoint(checkpoint): # Convert the UNet2DConditionModel model. if args.unet_checkpoint_path is not None: - unet_image_config = create_unet_diffusers_config(UNET_IMAGE_CONFIG) + image_unet_config = create_unet_diffusers_config(IMAGE_UNET_CONFIG) checkpoint = torch.load(args.unet_checkpoint_path) - converted_unet_image_checkpoint = convert_vd_unet_checkpoint( - checkpoint, unet_image_config, unet_key="model.diffusion_model.unet_image.", extract_ema=args.extract_ema + converted_image_unet_checkpoint = convert_vd_unet_checkpoint( + checkpoint, image_unet_config, unet_key="model.diffusion_model.unet_image.", extract_ema=args.extract_ema ) - unet_image = UNet2DConditionModel(**unet_image_config) - unet_image.load_state_dict(converted_unet_image_checkpoint) - unet_image.save_pretrained(os.path.join(args.dump_path, "unet_image")) + image_unet = UNet2DConditionModel(**image_unet_config) + image_unet.load_state_dict(converted_image_unet_checkpoint) - # unet_text_config = create_unet_diffusers_config(UNET_TEXT_CONFIG) - # converted_unet_text_checkpoint = convert_vd_unet_checkpoint( - # checkpoint, unet_text_config, unet_key="model.diffusion_model.unet_text.", extract_ema=args.extract_ema + # text_unet_config = create_unet_diffusers_config(TEXT_UNET_CONFIG) + # converted_text_unet_checkpoint = convert_vd_unet_checkpoint( + # checkpoint, text_unet_config, unet_key="model.diffusion_model.unet_text.", extract_ema=args.extract_ema # ) - # unet_text = UNet2DConditionModel(**unet_text_config) - # unet_text.load_state_dict(converted_unet_text_checkpoint) + # text_unet = UNet2DConditionModel(**text_unet_config) + # text_unet.load_state_dict(converted_text_unet_checkpoint) # Convert the VAE model. if args.vae_checkpoint_path is not None: @@ -774,28 +773,20 @@ def convert_ldm_clip_checkpoint(checkpoint): vae = AutoencoderKL(**vae_config) vae.load_state_dict(converted_vae_checkpoint) - vae.save_pretrained(os.path.join(args.dump_path, "vae")) - - # Convert the text model. 
-# text_model_type = original_config.model.params.cond_stage_config.target.split(".")[-1] -# if text_model_type == "FrozenCLIPEmbedder": -# text_model = convert_ldm_clip_checkpoint(checkpoint) -# tokenizer = CLIPTokenizer.from_pretrained("openai/clip-vit-large-patch14") -# safety_checker = StableDiffusionSafetyChecker.from_pretrained("CompVis/stable-diffusion-safety-checker") -# feature_extractor = AutoFeatureExtractor.from_pretrained("CompVis/stable-diffusion-safety-checker") -# pipe = StableDiffusionPipeline( -# vae=vae, -# text_encoder=text_model, -# tokenizer=tokenizer, -# unet=unet, -# scheduler=scheduler, -# safety_checker=safety_checker, -# feature_extractor=feature_extractor, -# ) -# else: -# text_config = create_ldm_bert_config(original_config) -# text_model = convert_ldm_bert_checkpoint(checkpoint, text_config) -# tokenizer = BertTokenizerFast.from_pretrained("bert-base-uncased") -# pipe = LDMTextToImagePipeline(vqvae=vae, bert=text_model, tokenizer=tokenizer, unet=unet, scheduler=scheduler) -# -# pipe.save_pretrained(args.dump_path) + + tokenizer = CLIPTokenizer.from_pretrained("openai/clip-vit-large-patch14") + image_processor = CLIPProcessor.from_pretrained("openai/clip-vit-large-patch14") + text_encoder = CLIPTextModelWithProjection.from_pretrained("openai/clip-vit-large-patch14") + image_encoder = CLIPVisionModelWithProjection.from_pretrained("openai/clip-vit-large-patch14") + + pipe = VersatileDiffusionPipeline( + scheduler=scheduler, + tokenizer=tokenizer, + image_processor=image_processor, + text_encoder=text_encoder, + image_encoder=image_encoder, + image_unet=image_unet, + # text_unet=text_unet, + vae=vae, + ) + pipe.save_pretrained(args.dump_path) diff --git a/src/diffusers/__init__.py b/src/diffusers/__init__.py index 86eda7371fe9..19558334af5a 100644 --- a/src/diffusers/__init__.py +++ b/src/diffusers/__init__.py @@ -71,6 +71,7 @@ StableDiffusionInpaintPipeline, StableDiffusionInpaintPipelineLegacy, StableDiffusionPipeline, + VersatileDiffusionPipeline, VQDiffusionPipeline, ) else: diff --git a/src/diffusers/pipelines/__init__.py b/src/diffusers/pipelines/__init__.py index ef4d23e5e6d0..abb09605e371 100644 --- a/src/diffusers/pipelines/__init__.py +++ b/src/diffusers/pipelines/__init__.py @@ -23,6 +23,7 @@ StableDiffusionInpaintPipelineLegacy, StableDiffusionPipeline, ) + from .versatile_diffusion import VersatileDiffusionPipeline from .vq_diffusion import VQDiffusionPipeline if is_transformers_available() and is_onnx_available(): diff --git a/src/diffusers/pipelines/versatile_diffusion/__init__.py b/src/diffusers/pipelines/versatile_diffusion/__init__.py new file mode 100644 index 000000000000..cd63bbfc28b6 --- /dev/null +++ b/src/diffusers/pipelines/versatile_diffusion/__init__.py @@ -0,0 +1 @@ +from .pipeline_versatile_diffusion import VersatileDiffusionPipeline diff --git a/src/diffusers/pipelines/versatile_diffusion/pipeline_versatile_diffusion.py b/src/diffusers/pipelines/versatile_diffusion/pipeline_versatile_diffusion.py new file mode 100644 index 000000000000..3e56e4b57ee8 --- /dev/null +++ b/src/diffusers/pipelines/versatile_diffusion/pipeline_versatile_diffusion.py @@ -0,0 +1,215 @@ +# Copyright 2022 The HuggingFace Team. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import inspect +from typing import List, Optional, Tuple, Union + +import torch +import torch.nn as nn +import torch.utils.checkpoint + +from transformers import CLIPProcessor, CLIPTextModel, CLIPTokenizer, CLIPVisionModel + +from ...models import AutoencoderKL, UNet2DConditionModel, UNet2DModel, VQModel +from ...pipeline_utils import DiffusionPipeline, ImagePipelineOutput +from ...schedulers import DDIMScheduler, LMSDiscreteScheduler, PNDMScheduler + + +class VersatileDiffusionPipeline(DiffusionPipeline): + r""" + This model inherits from [`DiffusionPipeline`]. Check the superclass documentation for the generic methods the + library implements for all the pipelines (such as downloading or saving, running on a particular device, etc.) + + Parameters: + vqvae ([`VQModel`]): + Vector-quantized (VQ) Model to encode and decode images to and from latent representations. + bert ([`LDMBertModel`]): + Text-encoder model based on [BERT](https://huggingface.co/docs/transformers/model_doc/bert) architecture. + tokenizer (`transformers.BertTokenizer`): + Tokenizer of class + [BertTokenizer](https://huggingface.co/docs/transformers/model_doc/bert#transformers.BertTokenizer). + unet ([`UNet2DConditionModel`]): Conditional U-Net architecture to denoise the encoded image latents. + scheduler ([`SchedulerMixin`]): + A scheduler to be used in combination with `unet` to denoise the encoded image latents. Can be one of + [`DDIMScheduler`], [`LMSDiscreteScheduler`], or [`PNDMScheduler`]. + """ + tokenizer: CLIPTokenizer + image_processor: CLIPProcessor + text_encoder: CLIPTextModel + image_encoder: CLIPVisionModel + image_unet: UNet2DConditionModel + vae: Union[VQModel, AutoencoderKL] + scheduler: Union[DDIMScheduler, PNDMScheduler, LMSDiscreteScheduler] + + def __init__( + self, + tokenizer: CLIPTokenizer, + image_processor: CLIPProcessor, + text_encoder: CLIPTextModel, + image_encoder: CLIPVisionModel, + image_unet: UNet2DConditionModel, + vae: Union[VQModel, AutoencoderKL], + scheduler: Union[DDIMScheduler, PNDMScheduler, LMSDiscreteScheduler], + ): + super().__init__() + self.register_modules( + tokenizer=tokenizer, + image_processor=image_processor, + text_encoder=text_encoder, + image_encoder=image_encoder, + image_unet=image_unet, + vae=vae, + scheduler=scheduler, + ) + + def _encode_prompt(self, prompt, do_classifier_free_guidance): + r""" + Encodes the prompt into text encoder hidden states. 
+ + Args: + prompt (`str` or `list(int)`): + prompt to be encoded + do_classifier_free_guidance (`bool`): + whether to use classifier free guidance or not + """ + + def _normalize_embeddings(encoder_output): + embeds = self.text_encoder.text_projection(encoder_output.last_hidden_state) + embeds_pooled = encoder_output.text_embeds + embeds = embeds / torch.norm(embeds_pooled.unsqueeze(1), dim=-1, keepdim=True) + return embeds + + batch_size = len(prompt) if isinstance(prompt, list) else 1 + + if do_classifier_free_guidance: + uncond_input = self.tokenizer([""] * batch_size, padding="max_length", max_length=77, return_tensors="pt") + uncond_embeddings = self.text_encoder(uncond_input.input_ids.to(self.device)) + uncond_embeddings = _normalize_embeddings(uncond_embeddings) + + # get prompt text embeddings + text_input = self.tokenizer(prompt, padding="max_length", max_length=77, return_tensors="pt") + text_embeddings = self.text_encoder(text_input.input_ids.to(self.device)) + text_embeddings = _normalize_embeddings(text_embeddings) + + # For classifier free guidance, we need to do two forward passes. + # Here we concatenate the unconditional and text embeddings into a single batch + # to avoid doing two forward passes + text_embeddings = torch.cat([uncond_embeddings, text_embeddings]) + + return text_embeddings + + @torch.no_grad() + def __call__( + self, + prompt: Union[str, List[str]], + height: Optional[int] = 512, + width: Optional[int] = 512, + num_inference_steps: Optional[int] = 50, + guidance_scale: Optional[float] = 1.0, + eta: Optional[float] = 0.0, + generator: Optional[torch.Generator] = None, + output_type: Optional[str] = "pil", + return_dict: bool = True, + **kwargs, + ) -> Union[Tuple, ImagePipelineOutput]: + r""" + Args: + prompt (`str` or `List[str]`): + The prompt or prompts to guide the image generation. + height (`int`, *optional*, defaults to 256): + The height in pixels of the generated image. + width (`int`, *optional*, defaults to 256): + The width in pixels of the generated image. + num_inference_steps (`int`, *optional*, defaults to 50): + The number of denoising steps. More denoising steps usually lead to a higher quality image at the + expense of slower inference. + guidance_scale (`float`, *optional*, defaults to 1.0): + Guidance scale as defined in [Classifier-Free Diffusion Guidance](https://arxiv.org/abs/2207.12598). + `guidance_scale` is defined as `w` of equation 2. of [Imagen + Paper](https://arxiv.org/pdf/2205.11487.pdf). Guidance scale is enabled by setting `guidance_scale > + 1`. Higher guidance scale encourages to generate images that are closely linked to the text `prompt` at + the, usually at the expense of lower image quality. + generator (`torch.Generator`, *optional*): + A [torch generator](https://pytorch.org/docs/stable/generated/torch.Generator.html) to make generation + deterministic. + output_type (`str`, *optional*, defaults to `"pil"`): + The output format of the generate image. Choose between + [PIL](https://pillow.readthedocs.io/en/stable/): `PIL.Image.Image` or `np.array`. + return_dict (`bool`, *optional*): + Whether or not to return a [`~pipeline_utils.ImagePipelineOutput`] instead of a plain tuple. + + Returns: + [`~pipeline_utils.ImagePipelineOutput`] or `tuple`: [`~pipelines.utils.ImagePipelineOutput`] if + `return_dict` is True, otherwise a `tuple. When returning a tuple, the first element is a list with the + generated images. 
+ """ + + if isinstance(prompt, str): + batch_size = 1 + elif isinstance(prompt, list): + batch_size = len(prompt) + else: + raise ValueError(f"`prompt` has to be of type `str` or `list` but is {type(prompt)}") + + if height % 8 != 0 or width % 8 != 0: + raise ValueError(f"`height` and `width` have to be divisible by 8 but are {height} and {width}.") + + do_classifier_free_guidance = guidance_scale > 1.0 + + text_embeddings = self._encode_prompt(prompt, do_classifier_free_guidance) + + latents = torch.randn( + (batch_size, self.image_unet.in_channels, height // 8, width // 8), + generator=generator, + ) + latents = latents.to(self.device) + + self.scheduler.set_timesteps(num_inference_steps) + + # prepare extra kwargs for the scheduler step, since not all schedulers have the same signature + accepts_eta = "eta" in set(inspect.signature(self.scheduler.step).parameters.keys()) + + extra_kwargs = {} + if accepts_eta: + extra_kwargs["eta"] = eta + + for t in self.progress_bar(self.scheduler.timesteps): + if not do_classifier_free_guidance: + latents_input = latents + else: + latents_input = torch.cat([latents] * 2) + + # predict the noise residual + noise_pred = self.image_unet(latents_input, t, encoder_hidden_states=text_embeddings).sample + # perform guidance + if guidance_scale != 1.0: + noise_pred_uncond, noise_prediction_text = noise_pred.chunk(2) + noise_pred = noise_pred_uncond + guidance_scale * (noise_prediction_text - noise_pred_uncond) + + # compute the previous noisy sample x_t -> x_t-1 + latents = self.scheduler.step(noise_pred, t, latents, **extra_kwargs).prev_sample + + # scale and decode the image latents with vae + latents = 1 / 0.18215 * latents + image = self.vae.decode(latents).sample + + image = (image / 2 + 0.5).clamp(0, 1) + image = image.cpu().permute(0, 2, 3, 1).numpy() + if output_type == "pil": + image = self.numpy_to_pil(image) + + if not return_dict: + return (image,) + + return ImagePipelineOutput(images=image) diff --git a/tests/pipelines/versatile_diffusion/__init__.py b/tests/pipelines/versatile_diffusion/__init__.py new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/tests/pipelines/versatile_diffusion/test_versatile_diffusion.py b/tests/pipelines/versatile_diffusion/test_versatile_diffusion.py new file mode 100644 index 000000000000..e0f7618034ea --- /dev/null +++ b/tests/pipelines/versatile_diffusion/test_versatile_diffusion.py @@ -0,0 +1,52 @@ +# coding=utf-8 +# Copyright 2022 HuggingFace Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import unittest + +import numpy as np +import torch + +from diffusers import VersatileDiffusionPipeline +from diffusers.utils.testing_utils import require_torch, slow, torch_device + +from ...test_pipelines_common import PipelineTesterMixin + + +torch.backends.cuda.matmul.allow_tf32 = False + + +class VersatileDiffusionPipelineFastTests(PipelineTesterMixin, unittest.TestCase): + pass + + +@slow +@require_torch +class VersatileDiffusionPipelineIntegrationTests(unittest.TestCase): + def test_inference_text2img(self): + pipe = VersatileDiffusionPipeline.from_pretrained("scripts/vd-diffusers") + pipe.to(torch_device) + pipe.set_progress_bar_config(disable=None) + + prompt = "A painting of a squirrel eating a burger" + generator = torch.manual_seed(0) + image = pipe( + [prompt], generator=generator, guidance_scale=7.5, num_inference_steps=50, output_type="numpy" + ).images + + image_slice = image[0, -3:, -3:, -1] + + assert image.shape == (1, 256, 256, 3) + expected_slice = np.array([0.9256, 0.9340, 0.8933, 0.9361, 0.9113, 0.8727, 0.9122, 0.8745, 0.8099]) + assert np.abs(image_slice.flatten() - expected_slice).max() < 1e-2 From 53f080f17afd197f8a67eb2212113893f3101871 Mon Sep 17 00:00:00 2001 From: anton-l Date: Wed, 16 Nov 2022 15:36:42 +0100 Subject: [PATCH 06/49] mixed inference --- ...onvert_versatile_diffusion_to_diffusers.py | 96 ++---------- .../pipeline_versatile_diffusion.py | 137 +++++++++++++++++- .../test_versatile_diffusion.py | 6 +- 3 files changed, 145 insertions(+), 94 deletions(-) diff --git a/scripts/convert_versatile_diffusion_to_diffusers.py b/scripts/convert_versatile_diffusion_to_diffusers.py index 27fe7c1ffa6b..77191e6cb6e5 100644 --- a/scripts/convert_versatile_diffusion_to_diffusers.py +++ b/scripts/convert_versatile_diffusion_to_diffusers.py @@ -313,7 +313,7 @@ def create_vae_diffusers_config(vae_params): return config -def create_diffusers_schedular(original_config): +def create_diffusers_scheduler(original_config): schedular = DDIMScheduler( num_train_timesteps=original_config.model.params.timesteps, beta_start=original_config.model.params.linear_start, @@ -323,16 +323,6 @@ def create_diffusers_schedular(original_config): return schedular -def create_ldm_bert_config(original_config): - bert_params = original_config.model.parms.cond_stage_config.params - config = LDMBertConfig( - d_model=bert_params.n_embed, - encoder_layers=bert_params.n_layer, - encoder_ffn_dim=bert_params.n_embed * 4, - ) - return config - - def convert_vd_unet_checkpoint(checkpoint, config, unet_key, extract_ema=False): """ Takes a state dict and a config, and returns a converted checkpoint. 
@@ -503,7 +493,7 @@ def convert_vd_unet_checkpoint(checkpoint, config, unet_key, extract_ema=False): return new_checkpoint -def convert_ldm_vae_checkpoint(checkpoint, config): +def convert_vd_vae_checkpoint(checkpoint, config): # extract state dict for VAE vae_state_dict = {} keys = list(checkpoint.keys()) @@ -608,72 +598,6 @@ def convert_ldm_vae_checkpoint(checkpoint, config): return new_checkpoint -def convert_ldm_bert_checkpoint(checkpoint, config): - def _copy_attn_layer(hf_attn_layer, pt_attn_layer): - hf_attn_layer.q_proj.weight.data = pt_attn_layer.to_q.weight - hf_attn_layer.k_proj.weight.data = pt_attn_layer.to_k.weight - hf_attn_layer.v_proj.weight.data = pt_attn_layer.to_v.weight - - hf_attn_layer.out_proj.weight = pt_attn_layer.to_out.weight - hf_attn_layer.out_proj.bias = pt_attn_layer.to_out.bias - - def _copy_linear(hf_linear, pt_linear): - hf_linear.weight = pt_linear.weight - hf_linear.bias = pt_linear.bias - - def _copy_layer(hf_layer, pt_layer): - # copy layer norms - _copy_linear(hf_layer.self_attn_layer_norm, pt_layer[0][0]) - _copy_linear(hf_layer.final_layer_norm, pt_layer[1][0]) - - # copy attn - _copy_attn_layer(hf_layer.self_attn, pt_layer[0][1]) - - # copy MLP - pt_mlp = pt_layer[1][1] - _copy_linear(hf_layer.fc1, pt_mlp.net[0][0]) - _copy_linear(hf_layer.fc2, pt_mlp.net[2]) - - def _copy_layers(hf_layers, pt_layers): - for i, hf_layer in enumerate(hf_layers): - if i != 0: - i += i - pt_layer = pt_layers[i : i + 2] - _copy_layer(hf_layer, pt_layer) - - hf_model = LDMBertModel(config).eval() - - # copy embeds - hf_model.model.embed_tokens.weight = checkpoint.transformer.token_emb.weight - hf_model.model.embed_positions.weight.data = checkpoint.transformer.pos_emb.emb.weight - - # copy layer norm - _copy_linear(hf_model.model.layer_norm, checkpoint.transformer.norm) - - # copy hidden layers - _copy_layers(hf_model.model.layers, checkpoint.transformer.attn_layers.layers) - - _copy_linear(hf_model.to_logits, checkpoint.transformer.to_logits) - - return hf_model - - -def convert_ldm_clip_checkpoint(checkpoint): - text_model = CLIPTextModel.from_pretrained("openai/clip-vit-large-patch14") - - keys = list(checkpoint.keys()) - - text_model_dict = {} - - for key in keys: - if key.startswith("cond_stage_model.transformer"): - text_model_dict[key[len("cond_stage_model.transformer.") :]] = checkpoint[key] - - text_model.load_state_dict(text_model_dict) - - return text_model - - if __name__ == "__main__": parser = argparse.ArgumentParser() @@ -758,18 +682,18 @@ def convert_ldm_clip_checkpoint(checkpoint): image_unet = UNet2DConditionModel(**image_unet_config) image_unet.load_state_dict(converted_image_unet_checkpoint) - # text_unet_config = create_unet_diffusers_config(TEXT_UNET_CONFIG) - # converted_text_unet_checkpoint = convert_vd_unet_checkpoint( - # checkpoint, text_unet_config, unet_key="model.diffusion_model.unet_text.", extract_ema=args.extract_ema - # ) - # text_unet = UNet2DConditionModel(**text_unet_config) - # text_unet.load_state_dict(converted_text_unet_checkpoint) + text_unet_config = create_unet_diffusers_config(TEXT_UNET_CONFIG) + converted_text_unet_checkpoint = convert_vd_unet_checkpoint( + checkpoint, text_unet_config, unet_key="model.diffusion_model.unet_text.", extract_ema=args.extract_ema + ) + text_unet = UNet2DConditionModel(**text_unet_config) + text_unet.load_state_dict(converted_text_unet_checkpoint, strict=False) # Convert the VAE model. 
if args.vae_checkpoint_path is not None: vae_config = create_vae_diffusers_config(AUTOENCODER_CONFIG) checkpoint = torch.load(args.vae_checkpoint_path) - converted_vae_checkpoint = convert_ldm_vae_checkpoint(checkpoint, vae_config) + converted_vae_checkpoint = convert_vd_vae_checkpoint(checkpoint, vae_config) vae = AutoencoderKL(**vae_config) vae.load_state_dict(converted_vae_checkpoint) @@ -786,7 +710,7 @@ def convert_ldm_clip_checkpoint(checkpoint): text_encoder=text_encoder, image_encoder=image_encoder, image_unet=image_unet, - # text_unet=text_unet, + text_unet=text_unet, vae=vae, ) pipe.save_pretrained(args.dump_path) diff --git a/src/diffusers/pipelines/versatile_diffusion/pipeline_versatile_diffusion.py b/src/diffusers/pipelines/versatile_diffusion/pipeline_versatile_diffusion.py index 3e56e4b57ee8..09835e4a0d71 100644 --- a/src/diffusers/pipelines/versatile_diffusion/pipeline_versatile_diffusion.py +++ b/src/diffusers/pipelines/versatile_diffusion/pipeline_versatile_diffusion.py @@ -21,7 +21,9 @@ from transformers import CLIPProcessor, CLIPTextModel, CLIPTokenizer, CLIPVisionModel -from ...models import AutoencoderKL, UNet2DConditionModel, UNet2DModel, VQModel +from ...models import AutoencoderKL, UNet2DConditionModel, VQModel +from ...models.unet_2d_condition import UNet2DConditionOutput +from ...models.attention import Transformer2DModel from ...pipeline_utils import DiffusionPipeline, ImagePipelineOutput from ...schedulers import DDIMScheduler, LMSDiscreteScheduler, PNDMScheduler @@ -59,6 +61,7 @@ def __init__( text_encoder: CLIPTextModel, image_encoder: CLIPVisionModel, image_unet: UNet2DConditionModel, + text_unet: UNet2DConditionModel, vae: Union[VQModel, AutoencoderKL], scheduler: Union[DDIMScheduler, PNDMScheduler, LMSDiscreteScheduler], ): @@ -72,6 +75,18 @@ def __init__( vae=vae, scheduler=scheduler, ) + self.image_transformer_blocks = { + name: module for name, module in image_unet.named_modules() if isinstance(module, Transformer2DModel) + } + self.text_transformer_blocks = { + name: module for name, module in text_unet.named_modules() if isinstance(module, Transformer2DModel) + } + + # text2img by default + for full_name, module in image_unet.named_modules(): + if isinstance(module, Transformer2DModel): + parent_name, name = full_name.rsplit('.', 1) + image_unet.get_submodule(parent_name)[name] = self.text_transformer_blocks[name] def _encode_prompt(self, prompt, do_classifier_free_guidance): r""" @@ -85,8 +100,8 @@ def _encode_prompt(self, prompt, do_classifier_free_guidance): """ def _normalize_embeddings(encoder_output): - embeds = self.text_encoder.text_projection(encoder_output.last_hidden_state) - embeds_pooled = encoder_output.text_embeds + embeds = self.text_encoder.text_projection(encoder_output.last_hidden_state) # sum == 19677.4570 + embeds_pooled = encoder_output.text_embeds # sum == 260.2655 embeds = embeds / torch.norm(embeds_pooled.unsqueeze(1), dim=-1, keepdim=True) return embeds @@ -171,9 +186,8 @@ def __call__( latents = torch.randn( (batch_size, self.image_unet.in_channels, height // 8, width // 8), - generator=generator, + generator=generator, device=self.device ) - latents = latents.to(self.device) self.scheduler.set_timesteps(num_inference_steps) @@ -185,6 +199,7 @@ def __call__( extra_kwargs["eta"] = eta for t in self.progress_bar(self.scheduler.timesteps): + t += 1 if not do_classifier_free_guidance: latents_input = latents else: @@ -213,3 +228,115 @@ def __call__( return (image,) return ImagePipelineOutput(images=image) + + +class 
VDMixedModelWrapper(nn.Module): + def __init__(self, image_unet: UNet2DConditionModel, text_unet: UNet2DConditionModel): + super().__init__() + self.image_unet = image_unet + self.text_unet = text_unet + self.time_embedding = self.unet_image.time_embedding + self.time_proj = self.unet_image.time_proj + + def embed_imesteps(self, timesteps, sample): + if not torch.is_tensor(timesteps): + timesteps = torch.tensor([timesteps], dtype=torch.long, device=sample.device) + elif torch.is_tensor(timesteps) and len(timesteps.shape) == 0: + timesteps = timesteps[None].to(sample.device) + # broadcast to batch dimension in a way that's compatible with ONNX/Core ML + timesteps = timesteps.expand(sample.shape[0]) + t_emb = self.time_proj(timesteps) + # timesteps does not contain any weights and will always return f32 tensors + # but time_embedding might actually be running in fp16. so we need to cast here. + # there might be better ways to encapsulate this. + t_emb = t_emb.to(dtype=self.dtype) + emb = self.time_embedding(t_emb) + return emb + + def forward(self, sample, timestep, encoder_hidden_states, latents_type="image", condition_type="text", return_dict: bool = True): + default_overall_up_factor = 2 ** self.image_unet.num_upsamplers + + # upsample size should be forwarded when sample is not a multiple of `default_overall_up_factor` + forward_upsample_size = False + upsample_size = None + + if any(s % default_overall_up_factor != 0 for s in sample.shape[-2:]): + forward_upsample_size = True + + # 1. time + emb = self.embed_imesteps(timestep, sample) + + # 2. pre-process + if latents_type == "image": + sample = self.image_unet.conv_in(sample) + elif latents_type == "text": + sample = self.text_unet.conv_in(sample) + + # 3. down + down_block_res_samples = (sample,) + for downsample_block in self.down_blocks: + if hasattr(downsample_block, "attentions") and downsample_block.attentions is not None: + sample, res_samples = downsample_block( + hidden_states=sample, + temb=emb, + encoder_hidden_states=encoder_hidden_states, + ) + else: + sample, res_samples = downsample_block(hidden_states=sample, temb=emb) + + down_block_res_samples += res_samples + + # 4. mid + sample = self.mid_block(sample, emb, encoder_hidden_states=encoder_hidden_states) + + # 5. up + for i, upsample_block in enumerate(self.up_blocks): + is_final_block = i == len(self.up_blocks) - 1 + + res_samples = down_block_res_samples[-len(upsample_block.resnets):] + down_block_res_samples = down_block_res_samples[: -len(upsample_block.resnets)] + + # if we have not reached the final block and need to forward the + # upsample size, we do it here + if not is_final_block and forward_upsample_size: + upsample_size = down_block_res_samples[-1].shape[2:] + + if hasattr(upsample_block, "attentions") and upsample_block.attentions is not None: + sample = upsample_block( + hidden_states=sample, + temb=emb, + res_hidden_states_tuple=res_samples, + encoder_hidden_states=encoder_hidden_states, + upsample_size=upsample_size, + ) + else: + sample = upsample_block( + hidden_states=sample, temb=emb, res_hidden_states_tuple=res_samples, upsample_size=upsample_size + ) + # 6. 
post-process + sample = self.conv_norm_out(sample) + sample = self.conv_act(sample) + sample = self.conv_out(sample) + + if not return_dict: + return (sample,) + + return UNet2DConditionOutput(sample=sample) + + + + def mixed_forward(self, image_module, text_module, hidden_state, timesteps_emb, condition, latents_type="image", condition_type="text"): + for ilayer, tlayer in zip(image_module, text_module): + if isinstance(ilayer, SpatialTransformer) and condition_type == 'image': + hidden_state = ilayer(hidden_state, condition) + elif isinstance(ilayer, SpatialTransformer) and condition_type == 'text': + hidden_state = tlayer(hidden_state, condition) + elif latents_type == 'image': + hidden_state = ilayer(hidden_state) + elif latents_type == 'text': + hidden_state = tlayer(hidden_state) + else: + raise ValueError(f"latents_type {latents_type} and condition_type {condition_type} not supported") + return hidden_state + + diff --git a/tests/pipelines/versatile_diffusion/test_versatile_diffusion.py b/tests/pipelines/versatile_diffusion/test_versatile_diffusion.py index e0f7618034ea..be6c826af1bc 100644 --- a/tests/pipelines/versatile_diffusion/test_versatile_diffusion.py +++ b/tests/pipelines/versatile_diffusion/test_versatile_diffusion.py @@ -37,10 +37,10 @@ class VersatileDiffusionPipelineIntegrationTests(unittest.TestCase): def test_inference_text2img(self): pipe = VersatileDiffusionPipeline.from_pretrained("scripts/vd-diffusers") pipe.to(torch_device) - pipe.set_progress_bar_config(disable=None) + #pipe.set_progress_bar_config(disable=None) - prompt = "A painting of a squirrel eating a burger" - generator = torch.manual_seed(0) + prompt = "a dream of a village in china, by Caspar David Friedrich, matte painting trending on artstation HQ" + generator = torch.Generator(device=torch_device).manual_seed(0) image = pipe( [prompt], generator=generator, guidance_scale=7.5, num_inference_steps=50, output_type="numpy" ).images From b5778e0ff369f158d00ec630dc6c75b56ee6936f Mon Sep 17 00:00:00 2001 From: anton-l Date: Wed, 16 Nov 2022 17:23:29 +0100 Subject: [PATCH 07/49] mixed inference for text2img --- ...onvert_versatile_diffusion_to_diffusers.py | 22 +- .../pipeline_versatile_diffusion.py | 189 +++++------------- .../test_versatile_diffusion.py | 6 +- 3 files changed, 76 insertions(+), 141 deletions(-) diff --git a/scripts/convert_versatile_diffusion_to_diffusers.py b/scripts/convert_versatile_diffusion_to_diffusers.py index 77191e6cb6e5..f09c8d683974 100644 --- a/scripts/convert_versatile_diffusion_to_diffusers.py +++ b/scripts/convert_versatile_diffusion_to_diffusers.py @@ -648,6 +648,7 @@ def convert_vd_vae_checkpoint(checkpoint, config): beta_start=beta_start, num_train_timesteps=num_train_timesteps, skip_prk_steps=True, + steps_offset=1, ) elif args.scheduler_type == "lms": scheduler = LMSDiscreteScheduler(beta_start=beta_start, beta_end=beta_end, beta_schedule="scaled_linear") @@ -668,12 +669,14 @@ def convert_vd_vae_checkpoint(checkpoint, config): beta_schedule="scaled_linear", clip_sample=False, set_alpha_to_one=False, + steps_offset=1, ) else: raise ValueError(f"Scheduler of type {args.scheduler_type} doesn't exist!") - # Convert the UNet2DConditionModel model. + # Convert the UNet2DConditionModel models. 
if args.unet_checkpoint_path is not None: + # image UNet image_unet_config = create_unet_diffusers_config(IMAGE_UNET_CONFIG) checkpoint = torch.load(args.unet_checkpoint_path) converted_image_unet_checkpoint = convert_vd_unet_checkpoint( @@ -682,11 +685,28 @@ def convert_vd_vae_checkpoint(checkpoint, config): image_unet = UNet2DConditionModel(**image_unet_config) image_unet.load_state_dict(converted_image_unet_checkpoint) + # text UNet text_unet_config = create_unet_diffusers_config(TEXT_UNET_CONFIG) converted_text_unet_checkpoint = convert_vd_unet_checkpoint( checkpoint, text_unet_config, unet_key="model.diffusion_model.unet_text.", extract_ema=args.extract_ema ) text_unet = UNet2DConditionModel(**text_unet_config) + # TEMP hack to skip converting the 1x1 blocks for the text unet + del converted_text_unet_checkpoint["conv_in.weight"] + del converted_text_unet_checkpoint["conv_in.bias"] + del converted_text_unet_checkpoint["conv_out.weight"] + for block in ["down_blocks", "mid_block", "up_blocks"]: + for i in range(4): + for j in range(3): + for module in ["time_emb_proj", "conv1", "norm1", "conv2", "norm2", "conv_shortcut"]: + for type in ["weight", "bias"]: + if block == "mid_block": + key = f"{block}.resnets.{j}.{module}.{type}" + else: + key = f"{block}.{i}.resnets.{j}.{module}.{type}" + if key in converted_text_unet_checkpoint: + del converted_text_unet_checkpoint[key] + # END TEMP hack text_unet.load_state_dict(converted_text_unet_checkpoint, strict=False) # Convert the VAE model. diff --git a/src/diffusers/pipelines/versatile_diffusion/pipeline_versatile_diffusion.py b/src/diffusers/pipelines/versatile_diffusion/pipeline_versatile_diffusion.py index 09835e4a0d71..f9f84737be62 100644 --- a/src/diffusers/pipelines/versatile_diffusion/pipeline_versatile_diffusion.py +++ b/src/diffusers/pipelines/versatile_diffusion/pipeline_versatile_diffusion.py @@ -13,21 +13,53 @@ # limitations under the License. import inspect +from functools import reduce from typing import List, Optional, Tuple, Union import torch -import torch.nn as nn import torch.utils.checkpoint from transformers import CLIPProcessor, CLIPTextModel, CLIPTokenizer, CLIPVisionModel from ...models import AutoencoderKL, UNet2DConditionModel, VQModel -from ...models.unet_2d_condition import UNet2DConditionOutput from ...models.attention import Transformer2DModel from ...pipeline_utils import DiffusionPipeline, ImagePipelineOutput from ...schedulers import DDIMScheduler, LMSDiscreteScheduler, PNDMScheduler +class VersatileMixedModel: + """ + A context managet that swaps the transformer modules between the image and text unet during inference, + depending on the latent type and condition type. 
+ """ + + def __init__(self, image_unet, text_unet, latent_type, condition_type): + self.image_unet = image_unet + self.text_unet = text_unet + self.latent_type = latent_type + self.condition_type = condition_type + + def swap_transformer_modules(self): + for name, module in self.image_unet.named_modules(): + if isinstance(module, Transformer2DModel): + parent_name, index = name.rsplit(".", 1) + index = int(index) + self.image_unet.get_submodule(parent_name)[index], self.text_unet.get_submodule(parent_name)[index] = ( + self.text_unet.get_submodule(parent_name)[index], + self.image_unet.get_submodule(parent_name)[index], + ) + + def __enter__(self): + if self.latent_type != self.condition_type: + self.swap_transformer_modules() + return self.image_unet if self.latent_type == "image" else self.text_unet + + def __exit__(self, *exc): + # swap the modules back + if self.latent_type != self.condition_type: + self.swap_transformer_modules() + + class VersatileDiffusionPipeline(DiffusionPipeline): r""" This model inherits from [`DiffusionPipeline`]. Check the superclass documentation for the generic methods the @@ -51,6 +83,7 @@ class VersatileDiffusionPipeline(DiffusionPipeline): text_encoder: CLIPTextModel image_encoder: CLIPVisionModel image_unet: UNet2DConditionModel + text_unet: UNet2DConditionModel vae: Union[VQModel, AutoencoderKL] scheduler: Union[DDIMScheduler, PNDMScheduler, LMSDiscreteScheduler] @@ -72,6 +105,7 @@ def __init__( text_encoder=text_encoder, image_encoder=image_encoder, image_unet=image_unet, + text_unet=text_unet, vae=vae, scheduler=scheduler, ) @@ -82,12 +116,6 @@ def __init__( name: module for name, module in text_unet.named_modules() if isinstance(module, Transformer2DModel) } - # text2img by default - for full_name, module in image_unet.named_modules(): - if isinstance(module, Transformer2DModel): - parent_name, name = full_name.rsplit('.', 1) - image_unet.get_submodule(parent_name)[name] = self.text_transformer_blocks[name] - def _encode_prompt(self, prompt, do_classifier_free_guidance): r""" Encodes the prompt into text encoder hidden states. 
@@ -100,7 +128,7 @@ def _encode_prompt(self, prompt, do_classifier_free_guidance): """ def _normalize_embeddings(encoder_output): - embeds = self.text_encoder.text_projection(encoder_output.last_hidden_state) # sum == 19677.4570 + embeds = self.text_encoder.text_projection(encoder_output.last_hidden_state) # sum == 19677.4570 embeds_pooled = encoder_output.text_embeds # sum == 260.2655 embeds = embeds / torch.norm(embeds_pooled.unsqueeze(1), dim=-1, keepdim=True) return embeds @@ -185,8 +213,7 @@ def __call__( text_embeddings = self._encode_prompt(prompt, do_classifier_free_guidance) latents = torch.randn( - (batch_size, self.image_unet.in_channels, height // 8, width // 8), - generator=generator, device=self.device + (batch_size, self.image_unet.in_channels, height // 8, width // 8), generator=generator, device=self.device ) self.scheduler.set_timesteps(num_inference_steps) @@ -198,22 +225,22 @@ def __call__( if accepts_eta: extra_kwargs["eta"] = eta - for t in self.progress_bar(self.scheduler.timesteps): - t += 1 - if not do_classifier_free_guidance: - latents_input = latents - else: - latents_input = torch.cat([latents] * 2) + with VersatileMixedModel(self.image_unet, self.text_unet, "image", "text") as unet: + for t in self.progress_bar(self.scheduler.timesteps): + if not do_classifier_free_guidance: + latents_input = latents + else: + latents_input = torch.cat([latents] * 2) - # predict the noise residual - noise_pred = self.image_unet(latents_input, t, encoder_hidden_states=text_embeddings).sample - # perform guidance - if guidance_scale != 1.0: - noise_pred_uncond, noise_prediction_text = noise_pred.chunk(2) - noise_pred = noise_pred_uncond + guidance_scale * (noise_prediction_text - noise_pred_uncond) + # predict the noise residual + noise_pred = unet(latents_input, t, encoder_hidden_states=text_embeddings).sample + # perform guidance + if guidance_scale != 1.0: + noise_pred_uncond, noise_prediction_text = noise_pred.chunk(2) + noise_pred = noise_pred_uncond + guidance_scale * (noise_prediction_text - noise_pred_uncond) - # compute the previous noisy sample x_t -> x_t-1 - latents = self.scheduler.step(noise_pred, t, latents, **extra_kwargs).prev_sample + # compute the previous noisy sample x_t -> x_t-1 + latents = self.scheduler.step(noise_pred, t, latents, **extra_kwargs).prev_sample # scale and decode the image latents with vae latents = 1 / 0.18215 * latents @@ -228,115 +255,3 @@ def __call__( return (image,) return ImagePipelineOutput(images=image) - - -class VDMixedModelWrapper(nn.Module): - def __init__(self, image_unet: UNet2DConditionModel, text_unet: UNet2DConditionModel): - super().__init__() - self.image_unet = image_unet - self.text_unet = text_unet - self.time_embedding = self.unet_image.time_embedding - self.time_proj = self.unet_image.time_proj - - def embed_imesteps(self, timesteps, sample): - if not torch.is_tensor(timesteps): - timesteps = torch.tensor([timesteps], dtype=torch.long, device=sample.device) - elif torch.is_tensor(timesteps) and len(timesteps.shape) == 0: - timesteps = timesteps[None].to(sample.device) - # broadcast to batch dimension in a way that's compatible with ONNX/Core ML - timesteps = timesteps.expand(sample.shape[0]) - t_emb = self.time_proj(timesteps) - # timesteps does not contain any weights and will always return f32 tensors - # but time_embedding might actually be running in fp16. so we need to cast here. - # there might be better ways to encapsulate this. 
- t_emb = t_emb.to(dtype=self.dtype) - emb = self.time_embedding(t_emb) - return emb - - def forward(self, sample, timestep, encoder_hidden_states, latents_type="image", condition_type="text", return_dict: bool = True): - default_overall_up_factor = 2 ** self.image_unet.num_upsamplers - - # upsample size should be forwarded when sample is not a multiple of `default_overall_up_factor` - forward_upsample_size = False - upsample_size = None - - if any(s % default_overall_up_factor != 0 for s in sample.shape[-2:]): - forward_upsample_size = True - - # 1. time - emb = self.embed_imesteps(timestep, sample) - - # 2. pre-process - if latents_type == "image": - sample = self.image_unet.conv_in(sample) - elif latents_type == "text": - sample = self.text_unet.conv_in(sample) - - # 3. down - down_block_res_samples = (sample,) - for downsample_block in self.down_blocks: - if hasattr(downsample_block, "attentions") and downsample_block.attentions is not None: - sample, res_samples = downsample_block( - hidden_states=sample, - temb=emb, - encoder_hidden_states=encoder_hidden_states, - ) - else: - sample, res_samples = downsample_block(hidden_states=sample, temb=emb) - - down_block_res_samples += res_samples - - # 4. mid - sample = self.mid_block(sample, emb, encoder_hidden_states=encoder_hidden_states) - - # 5. up - for i, upsample_block in enumerate(self.up_blocks): - is_final_block = i == len(self.up_blocks) - 1 - - res_samples = down_block_res_samples[-len(upsample_block.resnets):] - down_block_res_samples = down_block_res_samples[: -len(upsample_block.resnets)] - - # if we have not reached the final block and need to forward the - # upsample size, we do it here - if not is_final_block and forward_upsample_size: - upsample_size = down_block_res_samples[-1].shape[2:] - - if hasattr(upsample_block, "attentions") and upsample_block.attentions is not None: - sample = upsample_block( - hidden_states=sample, - temb=emb, - res_hidden_states_tuple=res_samples, - encoder_hidden_states=encoder_hidden_states, - upsample_size=upsample_size, - ) - else: - sample = upsample_block( - hidden_states=sample, temb=emb, res_hidden_states_tuple=res_samples, upsample_size=upsample_size - ) - # 6. 
post-process - sample = self.conv_norm_out(sample) - sample = self.conv_act(sample) - sample = self.conv_out(sample) - - if not return_dict: - return (sample,) - - return UNet2DConditionOutput(sample=sample) - - - - def mixed_forward(self, image_module, text_module, hidden_state, timesteps_emb, condition, latents_type="image", condition_type="text"): - for ilayer, tlayer in zip(image_module, text_module): - if isinstance(ilayer, SpatialTransformer) and condition_type == 'image': - hidden_state = ilayer(hidden_state, condition) - elif isinstance(ilayer, SpatialTransformer) and condition_type == 'text': - hidden_state = tlayer(hidden_state, condition) - elif latents_type == 'image': - hidden_state = ilayer(hidden_state) - elif latents_type == 'text': - hidden_state = tlayer(hidden_state) - else: - raise ValueError(f"latents_type {latents_type} and condition_type {condition_type} not supported") - return hidden_state - - diff --git a/tests/pipelines/versatile_diffusion/test_versatile_diffusion.py b/tests/pipelines/versatile_diffusion/test_versatile_diffusion.py index be6c826af1bc..82b6a6d66044 100644 --- a/tests/pipelines/versatile_diffusion/test_versatile_diffusion.py +++ b/tests/pipelines/versatile_diffusion/test_versatile_diffusion.py @@ -37,9 +37,9 @@ class VersatileDiffusionPipelineIntegrationTests(unittest.TestCase): def test_inference_text2img(self): pipe = VersatileDiffusionPipeline.from_pretrained("scripts/vd-diffusers") pipe.to(torch_device) - #pipe.set_progress_bar_config(disable=None) + # pipe.set_progress_bar_config(disable=None) - prompt = "a dream of a village in china, by Caspar David Friedrich, matte painting trending on artstation HQ" + prompt = "A painting of a squirrel eating a burger " generator = torch.Generator(device=torch_device).manual_seed(0) image = pipe( [prompt], generator=generator, guidance_scale=7.5, num_inference_steps=50, output_type="numpy" @@ -47,6 +47,6 @@ def test_inference_text2img(self): image_slice = image[0, -3:, -3:, -1] - assert image.shape == (1, 256, 256, 3) + assert image.shape == (1, 512, 512, 3) expected_slice = np.array([0.9256, 0.9340, 0.8933, 0.9361, 0.9113, 0.8727, 0.9122, 0.8745, 0.8099]) assert np.abs(image_slice.flatten() - expected_slice).max() < 1e-2 From 9a8114a8d63370685a8365797da1f6fe0a7528b4 Mon Sep 17 00:00:00 2001 From: anton-l Date: Wed, 16 Nov 2022 18:51:33 +0100 Subject: [PATCH 08/49] add image prompting --- .../pipeline_versatile_diffusion.py | 97 ++++++++++++++----- .../test_versatile_diffusion.py | 28 +++++- 2 files changed, 98 insertions(+), 27 deletions(-) diff --git a/src/diffusers/pipelines/versatile_diffusion/pipeline_versatile_diffusion.py b/src/diffusers/pipelines/versatile_diffusion/pipeline_versatile_diffusion.py index 4e3f25225186..a0f40d0390bf 100644 --- a/src/diffusers/pipelines/versatile_diffusion/pipeline_versatile_diffusion.py +++ b/src/diffusers/pipelines/versatile_diffusion/pipeline_versatile_diffusion.py @@ -13,7 +13,7 @@ # limitations under the License. 
import inspect -from functools import reduce +import PIL from typing import List, Optional, Tuple, Union import torch @@ -116,12 +116,6 @@ def __init__( name: module for name, module in text_unet.named_modules() if isinstance(module, Transformer2DModel) } - def _normalize_embeddings(self, encoder_output): - embeds = self.text_encoder.text_projection(encoder_output.last_hidden_state) - embeds_pooled = encoder_output.text_embeds - embeds = embeds / torch.norm(embeds_pooled.unsqueeze(1), dim=-1, keepdim=True) - return embeds - def _encode_prompt(self, prompt, do_classifier_free_guidance): r""" Encodes the prompt into text encoder hidden states. @@ -132,17 +126,23 @@ def _encode_prompt(self, prompt, do_classifier_free_guidance): do_classifier_free_guidance (`bool`): whether to use classifier free guidance or not """ + def normalize_embeddings(encoder_output): + embeds = self.text_encoder.text_projection(encoder_output.last_hidden_state) + embeds_pooled = encoder_output.text_embeds + embeds = embeds / torch.norm(embeds_pooled.unsqueeze(1), dim=-1, keepdim=True) + return embeds + batch_size = len(prompt) if isinstance(prompt, list) else 1 if do_classifier_free_guidance: uncond_input = self.tokenizer([""] * batch_size, padding="max_length", max_length=77, return_tensors="pt") uncond_embeddings = self.text_encoder(uncond_input.input_ids.to(self.device)) - uncond_embeddings = self._normalize_embeddings(uncond_embeddings) + uncond_embeddings = normalize_embeddings(uncond_embeddings) # get prompt text embeddings text_input = self.tokenizer(prompt, padding="max_length", max_length=77, return_tensors="pt") text_embeddings = self.text_encoder(text_input.input_ids.to(self.device)) - text_embeddings = self._normalize_embeddings(text_embeddings) + text_embeddings = normalize_embeddings(text_embeddings) # For classifier free guidance, we need to do two forward passes. # Here we concatenate the unconditional and text embeddings into a single batch @@ -151,10 +151,46 @@ def _encode_prompt(self, prompt, do_classifier_free_guidance): return text_embeddings + def _encode_image_prompt(self, prompt, do_classifier_free_guidance): + r""" + Encodes the image prompt into image encoder hidden states. + + Args: + prompt (`str` or `list(int)`): + prompt to be encoded + do_classifier_free_guidance (`bool`): + whether to use classifier free guidance or not + """ + def normalize_embeddings(encoder_output): + embeds = self.image_encoder.visual_projection(encoder_output.last_hidden_state) + embeds_pooled = encoder_output.image_embeds + embeds = embeds / torch.norm(embeds_pooled.unsqueeze(1), dim=-1, keepdim=True) + return embeds + + batch_size = len(prompt) if isinstance(prompt, list) else 1 + + if do_classifier_free_guidance: + dummy_images = torch.zeros((batch_size, 3, 224, 224)).to(self.device) + uncond_embeddings = self.image_encoder(dummy_images) + uncond_embeddings = normalize_embeddings(uncond_embeddings) + + # get prompt text embeddings + image_input = self.image_processor(images=prompt, return_tensors="pt") + image_embeddings = self.image_encoder(image_input.pixel_values.to(self.device)) + image_embeddings = normalize_embeddings(image_embeddings) + + # For classifier free guidance, we need to do two forward passes. 
+ # Here we concatenate the unconditional and image embeddings into a single batch + # to avoid doing two forward passes + image_embeddings = torch.cat([uncond_embeddings, image_embeddings]) + + return image_embeddings + @torch.no_grad() def __call__( self, - prompt: Union[str, List[str]], + prompt: Optional[Union[str, List[str]]] = None, + image_prompt: Optional[Union[torch.Tensor, PIL.Image.Image]] = None, height: Optional[int] = 512, width: Optional[int] = 512, num_inference_steps: Optional[int] = 50, @@ -196,20 +232,35 @@ def __call__( `return_dict` is True, otherwise a `tuple. When returning a tuple, the first element is a list with the generated images. """ - - if isinstance(prompt, str): - batch_size = 1 - elif isinstance(prompt, list): - batch_size = len(prompt) - else: - raise ValueError(f"`prompt` has to be of type `str` or `list` but is {type(prompt)}") + do_classifier_free_guidance = guidance_scale > 1.0 if height % 8 != 0 or width % 8 != 0: raise ValueError(f"`height` and `width` have to be divisible by 8 but are {height} and {width}.") - do_classifier_free_guidance = guidance_scale > 1.0 + if prompt is not None: + if isinstance(prompt, str): + batch_size = 1 + elif isinstance(prompt, list): + batch_size = len(prompt) + else: + raise ValueError(f"`prompt` has to be of type `str` or `list` but is {type(prompt)}") + + condition_embeddings = self._encode_prompt(prompt, do_classifier_free_guidance) + prompt_type = "text" + elif image_prompt is not None: + if isinstance(image_prompt, PIL.Image.Image): + batch_size = 1 + elif isinstance(image_prompt, torch.Tensor): + batch_size = image_prompt.shape[0] + else: + raise ValueError( + f"`image_prompt` has to be of type `PIL.Image.Image` or `torch.Tensor` but is {type(image_prompt)}" + ) - text_embeddings = self._encode_prompt(prompt, do_classifier_free_guidance) + condition_embeddings = self._encode_image_prompt(image_prompt, do_classifier_free_guidance) + prompt_type = "image" + else: + raise ValueError("Either `prompt` or `image_prompt` has to be provided.") latents = torch.randn( (batch_size, self.image_unet.in_channels, height // 8, width // 8), generator=generator, device=self.device @@ -224,7 +275,7 @@ def __call__( if accepts_eta: extra_kwargs["eta"] = eta - with VersatileMixedModel(self.image_unet, self.text_unet, "image", "text") as unet: + with VersatileMixedModel(self.image_unet, self.text_unet, "image", prompt_type) as unet: for t in self.progress_bar(self.scheduler.timesteps): if not do_classifier_free_guidance: latents_input = latents @@ -232,11 +283,11 @@ def __call__( latents_input = torch.cat([latents] * 2) # predict the noise residual - noise_pred = unet(latents_input, t, encoder_hidden_states=text_embeddings).sample + noise_pred = unet(latents_input, t, encoder_hidden_states=condition_embeddings).sample # perform guidance if guidance_scale != 1.0: - noise_pred_uncond, noise_prediction_text = noise_pred.chunk(2) - noise_pred = noise_pred_uncond + guidance_scale * (noise_prediction_text - noise_pred_uncond) + noise_pred_uncond, noise_prediction_cond = noise_pred.chunk(2) + noise_pred = noise_pred_uncond + guidance_scale * (noise_prediction_cond - noise_pred_uncond) # compute the previous noisy sample x_t -> x_t-1 latents = self.scheduler.step(noise_pred, t, latents, **extra_kwargs).prev_sample diff --git a/tests/pipelines/versatile_diffusion/test_versatile_diffusion.py b/tests/pipelines/versatile_diffusion/test_versatile_diffusion.py index 82b6a6d66044..5c37ebabee98 100644 --- 
a/tests/pipelines/versatile_diffusion/test_versatile_diffusion.py +++ b/tests/pipelines/versatile_diffusion/test_versatile_diffusion.py @@ -19,7 +19,7 @@ import torch from diffusers import VersatileDiffusionPipeline -from diffusers.utils.testing_utils import require_torch, slow, torch_device +from diffusers.utils.testing_utils import require_torch, slow, torch_device, load_image from ...test_pipelines_common import PipelineTesterMixin @@ -35,14 +35,34 @@ class VersatileDiffusionPipelineFastTests(PipelineTesterMixin, unittest.TestCase @require_torch class VersatileDiffusionPipelineIntegrationTests(unittest.TestCase): def test_inference_text2img(self): - pipe = VersatileDiffusionPipeline.from_pretrained("scripts/vd-diffusers") + pipe = VersatileDiffusionPipeline.from_pretrained("diffusers/vd-official-test") pipe.to(torch_device) - # pipe.set_progress_bar_config(disable=None) + pipe.set_progress_bar_config(disable=None) prompt = "A painting of a squirrel eating a burger " generator = torch.Generator(device=torch_device).manual_seed(0) image = pipe( - [prompt], generator=generator, guidance_scale=7.5, num_inference_steps=50, output_type="numpy" + prompt=prompt, generator=generator, guidance_scale=7.5, num_inference_steps=50, output_type="numpy" + ).images + + image_slice = image[0, -3:, -3:, -1] + + assert image.shape == (1, 512, 512, 3) + expected_slice = np.array([0.9256, 0.9340, 0.8933, 0.9361, 0.9113, 0.8727, 0.9122, 0.8745, 0.8099]) + assert np.abs(image_slice.flatten() - expected_slice).max() < 1e-2 + + def test_inference_image_variations(self): + pipe = VersatileDiffusionPipeline.from_pretrained("diffusers/vd-official-test") + pipe.to(torch_device) + pipe.set_progress_bar_config(disable=None) + + image_prompt = load_image( + "https://huggingface.co/datasets/hf-internal-testing/diffusers-images/resolve/main" + "/in_paint/overture-creations-5sI6fQgYIuo.png" + ) + generator = torch.Generator(device=torch_device).manual_seed(0) + image = pipe( + image_prompt=image_prompt, generator=generator, guidance_scale=7.5, num_inference_steps=50, output_type="numpy" ).images image_slice = image[0, -3:, -3:, -1] From b17475e6f017af65890ea9cfcead83f5a0753f1a Mon Sep 17 00:00:00 2001 From: anton-l Date: Wed, 16 Nov 2022 19:59:48 +0100 Subject: [PATCH 09/49] fix clip norm --- .../pipeline_versatile_diffusion.py | 21 ++++++++++++------- .../test_versatile_diffusion.py | 11 ++++++---- 2 files changed, 20 insertions(+), 12 deletions(-) diff --git a/src/diffusers/pipelines/versatile_diffusion/pipeline_versatile_diffusion.py b/src/diffusers/pipelines/versatile_diffusion/pipeline_versatile_diffusion.py index a0f40d0390bf..b5f3148a0b30 100644 --- a/src/diffusers/pipelines/versatile_diffusion/pipeline_versatile_diffusion.py +++ b/src/diffusers/pipelines/versatile_diffusion/pipeline_versatile_diffusion.py @@ -13,12 +13,13 @@ # limitations under the License. import inspect -import PIL from typing import List, Optional, Tuple, Union +import numpy as np import torch import torch.utils.checkpoint +import PIL from transformers import CLIPProcessor, CLIPTextModel, CLIPTokenizer, CLIPVisionModel from ...models import AutoencoderKL, UNet2DConditionModel, VQModel @@ -29,8 +30,8 @@ class VersatileMixedModel: """ - A context managet that swaps the transformer modules between the image and text unet during inference, - depending on the latent type and condition type. + A context managet that swaps the transformer modules between the image and text unet during inference, depending on + the latent type and condition type. 
""" def __init__(self, image_unet, text_unet, latent_type, condition_type): @@ -126,6 +127,7 @@ def _encode_prompt(self, prompt, do_classifier_free_guidance): do_classifier_free_guidance (`bool`): whether to use classifier free guidance or not """ + def normalize_embeddings(encoder_output): embeds = self.text_encoder.text_projection(encoder_output.last_hidden_state) embeds_pooled = encoder_output.text_embeds @@ -161,17 +163,20 @@ def _encode_image_prompt(self, prompt, do_classifier_free_guidance): do_classifier_free_guidance (`bool`): whether to use classifier free guidance or not """ + def normalize_embeddings(encoder_output): - embeds = self.image_encoder.visual_projection(encoder_output.last_hidden_state) - embeds_pooled = encoder_output.image_embeds - embeds = embeds / torch.norm(embeds_pooled.unsqueeze(1), dim=-1, keepdim=True) + embeds = self.image_encoder.vision_model.post_layernorm(encoder_output.last_hidden_state) + embeds = self.image_encoder.visual_projection(embeds) + embeds_pooled = embeds[:, 0:1] + embeds = embeds / torch.norm(embeds_pooled, dim=-1, keepdim=True) return embeds batch_size = len(prompt) if isinstance(prompt, list) else 1 if do_classifier_free_guidance: - dummy_images = torch.zeros((batch_size, 3, 224, 224)).to(self.device) - uncond_embeddings = self.image_encoder(dummy_images) + dummy_images = [np.zeros((512, 512, 3))] * batch_size + dummy_images = self.image_processor(images=dummy_images, return_tensors="pt") + uncond_embeddings = self.image_encoder(dummy_images.pixel_values.to(self.device)) uncond_embeddings = normalize_embeddings(uncond_embeddings) # get prompt text embeddings diff --git a/tests/pipelines/versatile_diffusion/test_versatile_diffusion.py b/tests/pipelines/versatile_diffusion/test_versatile_diffusion.py index 5c37ebabee98..4a34264952f5 100644 --- a/tests/pipelines/versatile_diffusion/test_versatile_diffusion.py +++ b/tests/pipelines/versatile_diffusion/test_versatile_diffusion.py @@ -19,7 +19,7 @@ import torch from diffusers import VersatileDiffusionPipeline -from diffusers.utils.testing_utils import require_torch, slow, torch_device, load_image +from diffusers.utils.testing_utils import load_image, require_torch, slow, torch_device from ...test_pipelines_common import PipelineTesterMixin @@ -57,12 +57,15 @@ def test_inference_image_variations(self): pipe.set_progress_bar_config(disable=None) image_prompt = load_image( - "https://huggingface.co/datasets/hf-internal-testing/diffusers-images/resolve/main" - "/in_paint/overture-creations-5sI6fQgYIuo.png" + "https://raw.githubusercontent.com/SHI-Labs/Versatile-Diffusion/master/assets/benz.jpg" ) generator = torch.Generator(device=torch_device).manual_seed(0) image = pipe( - image_prompt=image_prompt, generator=generator, guidance_scale=7.5, num_inference_steps=50, output_type="numpy" + image_prompt=image_prompt, + generator=generator, + guidance_scale=7.5, + num_inference_steps=50, + output_type="numpy", ).images image_slice = image[0, -3:, -3:, -1] From 74fde820167b0cd639f5b32e222b083d176861df Mon Sep 17 00:00:00 2001 From: anton-l Date: Mon, 21 Nov 2022 12:33:09 +0100 Subject: [PATCH 10/49] split text2img and img2img --- src/diffusers/__init__.py | 3 +- src/diffusers/pipelines/__init__.py | 2 +- .../pipelines/versatile_diffusion/__init__.py | 3 +- ...ne_versatile_diffusion_image_variation.py} | 153 +---- ...eline_versatile_diffusion_text_to_image.py | 621 ++++++++++++++++++ ...st_versatile_diffusion_image_variation.py} | 27 +- .../test_versatile_diffusion_text_to_image.py | 52 ++ 7 files changed, 
712 insertions(+), 149 deletions(-) rename src/diffusers/pipelines/versatile_diffusion/{pipeline_versatile_diffusion.py => pipeline_versatile_diffusion_image_variation.py} (58%) create mode 100644 src/diffusers/pipelines/versatile_diffusion/pipeline_versatile_diffusion_text_to_image.py rename tests/pipelines/versatile_diffusion/{test_versatile_diffusion.py => test_versatile_diffusion_image_variation.py} (60%) create mode 100644 tests/pipelines/versatile_diffusion/test_versatile_diffusion_text_to_image.py diff --git a/src/diffusers/__init__.py b/src/diffusers/__init__.py index 06855e3a6916..33b16881dfcb 100644 --- a/src/diffusers/__init__.py +++ b/src/diffusers/__init__.py @@ -73,7 +73,8 @@ StableDiffusionInpaintPipeline, StableDiffusionInpaintPipelineLegacy, StableDiffusionPipeline, - VersatileDiffusionPipeline, + VersatileDiffusionImageVariationPipeline, + VersatileDiffusionTextToImagePipeline, VQDiffusionPipeline, ) else: diff --git a/src/diffusers/pipelines/__init__.py b/src/diffusers/pipelines/__init__.py index c284855aacfb..821477204130 100644 --- a/src/diffusers/pipelines/__init__.py +++ b/src/diffusers/pipelines/__init__.py @@ -24,7 +24,7 @@ StableDiffusionInpaintPipelineLegacy, StableDiffusionPipeline, ) - from .versatile_diffusion import VersatileDiffusionPipeline + from .versatile_diffusion import VersatileDiffusionImageVariationPipeline, VersatileDiffusionTextToImagePipeline from .vq_diffusion import VQDiffusionPipeline if is_transformers_available() and is_onnx_available(): diff --git a/src/diffusers/pipelines/versatile_diffusion/__init__.py b/src/diffusers/pipelines/versatile_diffusion/__init__.py index cd63bbfc28b6..98975ba6cbee 100644 --- a/src/diffusers/pipelines/versatile_diffusion/__init__.py +++ b/src/diffusers/pipelines/versatile_diffusion/__init__.py @@ -1 +1,2 @@ -from .pipeline_versatile_diffusion import VersatileDiffusionPipeline +from .pipeline_versatile_diffusion_image_variation import VersatileDiffusionImageVariationPipeline +from .pipeline_versatile_diffusion_text_to_image import VersatileDiffusionTextToImagePipeline diff --git a/src/diffusers/pipelines/versatile_diffusion/pipeline_versatile_diffusion.py b/src/diffusers/pipelines/versatile_diffusion/pipeline_versatile_diffusion_image_variation.py similarity index 58% rename from src/diffusers/pipelines/versatile_diffusion/pipeline_versatile_diffusion.py rename to src/diffusers/pipelines/versatile_diffusion/pipeline_versatile_diffusion_image_variation.py index b5f3148a0b30..6c2c8fb77c54 100644 --- a/src/diffusers/pipelines/versatile_diffusion/pipeline_versatile_diffusion.py +++ b/src/diffusers/pipelines/versatile_diffusion/pipeline_versatile_diffusion_image_variation.py @@ -17,51 +17,18 @@ import numpy as np import torch +import torch.nn as nn import torch.utils.checkpoint import PIL from transformers import CLIPProcessor, CLIPTextModel, CLIPTokenizer, CLIPVisionModel from ...models import AutoencoderKL, UNet2DConditionModel, VQModel -from ...models.attention import Transformer2DModel from ...pipeline_utils import DiffusionPipeline, ImagePipelineOutput from ...schedulers import DDIMScheduler, LMSDiscreteScheduler, PNDMScheduler -class VersatileMixedModel: - """ - A context managet that swaps the transformer modules between the image and text unet during inference, depending on - the latent type and condition type. 
- """ - - def __init__(self, image_unet, text_unet, latent_type, condition_type): - self.image_unet = image_unet - self.text_unet = text_unet - self.latent_type = latent_type - self.condition_type = condition_type - - def swap_transformer_modules(self): - for name, module in self.image_unet.named_modules(): - if isinstance(module, Transformer2DModel): - parent_name, index = name.rsplit(".", 1) - index = int(index) - self.image_unet.get_submodule(parent_name)[index], self.text_unet.get_submodule(parent_name)[index] = ( - self.text_unet.get_submodule(parent_name)[index], - self.image_unet.get_submodule(parent_name)[index], - ) - - def __enter__(self): - if self.latent_type != self.condition_type: - self.swap_transformer_modules() - return self.image_unet if self.latent_type == "image" else self.text_unet - - def __exit__(self, *exc): - # swap the modules back - if self.latent_type != self.condition_type: - self.swap_transformer_modules() - - -class VersatileDiffusionPipeline(DiffusionPipeline): +class VersatileDiffusionImageVariationPipeline(DiffusionPipeline): r""" This model inherits from [`DiffusionPipeline`]. Check the superclass documentation for the generic methods the library implements for all the pipelines (such as downloading or saving, running on a particular device, etc.) @@ -92,10 +59,8 @@ def __init__( self, tokenizer: CLIPTokenizer, image_processor: CLIPProcessor, - text_encoder: CLIPTextModel, image_encoder: CLIPVisionModel, image_unet: UNet2DConditionModel, - text_unet: UNet2DConditionModel, vae: Union[VQModel, AutoencoderKL], scheduler: Union[DDIMScheduler, PNDMScheduler, LMSDiscreteScheduler], ): @@ -103,57 +68,13 @@ def __init__( self.register_modules( tokenizer=tokenizer, image_processor=image_processor, - text_encoder=text_encoder, image_encoder=image_encoder, image_unet=image_unet, - text_unet=text_unet, vae=vae, scheduler=scheduler, ) - self.image_transformer_blocks = { - name: module for name, module in image_unet.named_modules() if isinstance(module, Transformer2DModel) - } - self.text_transformer_blocks = { - name: module for name, module in text_unet.named_modules() if isinstance(module, Transformer2DModel) - } def _encode_prompt(self, prompt, do_classifier_free_guidance): - r""" - Encodes the prompt into text encoder hidden states. - - Args: - prompt (`str` or `list(int)`): - prompt to be encoded - do_classifier_free_guidance (`bool`): - whether to use classifier free guidance or not - """ - - def normalize_embeddings(encoder_output): - embeds = self.text_encoder.text_projection(encoder_output.last_hidden_state) - embeds_pooled = encoder_output.text_embeds - embeds = embeds / torch.norm(embeds_pooled.unsqueeze(1), dim=-1, keepdim=True) - return embeds - - batch_size = len(prompt) if isinstance(prompt, list) else 1 - - if do_classifier_free_guidance: - uncond_input = self.tokenizer([""] * batch_size, padding="max_length", max_length=77, return_tensors="pt") - uncond_embeddings = self.text_encoder(uncond_input.input_ids.to(self.device)) - uncond_embeddings = normalize_embeddings(uncond_embeddings) - - # get prompt text embeddings - text_input = self.tokenizer(prompt, padding="max_length", max_length=77, return_tensors="pt") - text_embeddings = self.text_encoder(text_input.input_ids.to(self.device)) - text_embeddings = normalize_embeddings(text_embeddings) - - # For classifier free guidance, we need to do two forward passes. 
- # Here we concatenate the unconditional and text embeddings into a single batch - # to avoid doing two forward passes - text_embeddings = torch.cat([uncond_embeddings, text_embeddings]) - - return text_embeddings - - def _encode_image_prompt(self, prompt, do_classifier_free_guidance): r""" Encodes the image prompt into image encoder hidden states. @@ -194,8 +115,7 @@ def normalize_embeddings(encoder_output): @torch.no_grad() def __call__( self, - prompt: Optional[Union[str, List[str]]] = None, - image_prompt: Optional[Union[torch.Tensor, PIL.Image.Image]] = None, + image: Optional[Union[torch.Tensor, PIL.Image.Image]] = None, height: Optional[int] = 512, width: Optional[int] = 512, num_inference_steps: Optional[int] = 50, @@ -221,8 +141,8 @@ def __call__( Guidance scale as defined in [Classifier-Free Diffusion Guidance](https://arxiv.org/abs/2207.12598). `guidance_scale` is defined as `w` of equation 2. of [Imagen Paper](https://arxiv.org/pdf/2205.11487.pdf). Guidance scale is enabled by setting `guidance_scale > - 1`. Higher guidance scale encourages to generate images that are closely linked to the text `prompt` at - the, usually at the expense of lower image quality. + 1`. Higher guidance scale encourages to generate images that are closely linked to the text `prompt`, + usually at the expense of lower image quality. generator (`torch.Generator`, *optional*): A [torch generator](https://pytorch.org/docs/stable/generated/torch.Generator.html) to make generation deterministic. @@ -242,30 +162,16 @@ def __call__( if height % 8 != 0 or width % 8 != 0: raise ValueError(f"`height` and `width` have to be divisible by 8 but are {height} and {width}.") - if prompt is not None: - if isinstance(prompt, str): - batch_size = 1 - elif isinstance(prompt, list): - batch_size = len(prompt) - else: - raise ValueError(f"`prompt` has to be of type `str` or `list` but is {type(prompt)}") - - condition_embeddings = self._encode_prompt(prompt, do_classifier_free_guidance) - prompt_type = "text" - elif image_prompt is not None: - if isinstance(image_prompt, PIL.Image.Image): - batch_size = 1 - elif isinstance(image_prompt, torch.Tensor): - batch_size = image_prompt.shape[0] - else: - raise ValueError( - f"`image_prompt` has to be of type `PIL.Image.Image` or `torch.Tensor` but is {type(image_prompt)}" - ) - - condition_embeddings = self._encode_image_prompt(image_prompt, do_classifier_free_guidance) - prompt_type = "image" + if isinstance(image, PIL.Image.Image): + batch_size = 1 + elif isinstance(image, torch.Tensor): + batch_size = image.shape[0] else: - raise ValueError("Either `prompt` or `image_prompt` has to be provided.") + raise ValueError( + f"`image_prompt` has to be of type `PIL.Image.Image` or `torch.Tensor` but is {type(image)}" + ) + + condition_embeddings = self._encode_prompt(image, do_classifier_free_guidance) latents = torch.randn( (batch_size, self.image_unet.in_channels, height // 8, width // 8), generator=generator, device=self.device @@ -280,22 +186,21 @@ def __call__( if accepts_eta: extra_kwargs["eta"] = eta - with VersatileMixedModel(self.image_unet, self.text_unet, "image", prompt_type) as unet: - for t in self.progress_bar(self.scheduler.timesteps): - if not do_classifier_free_guidance: - latents_input = latents - else: - latents_input = torch.cat([latents] * 2) - - # predict the noise residual - noise_pred = unet(latents_input, t, encoder_hidden_states=condition_embeddings).sample - # perform guidance - if guidance_scale != 1.0: - noise_pred_uncond, noise_prediction_cond = 
noise_pred.chunk(2) - noise_pred = noise_pred_uncond + guidance_scale * (noise_prediction_cond - noise_pred_uncond) - - # compute the previous noisy sample x_t -> x_t-1 - latents = self.scheduler.step(noise_pred, t, latents, **extra_kwargs).prev_sample + for t in self.progress_bar(self.scheduler.timesteps): + if not do_classifier_free_guidance: + latents_input = latents + else: + latents_input = torch.cat([latents] * 2) + + # predict the noise residual + noise_pred = self.image_unet(latents_input, t, encoder_hidden_states=condition_embeddings).sample + # perform guidance + if guidance_scale != 1.0: + noise_pred_uncond, noise_prediction_cond = noise_pred.chunk(2) + noise_pred = noise_pred_uncond + guidance_scale * (noise_prediction_cond - noise_pred_uncond) + + # compute the previous noisy sample x_t -> x_t-1 + latents = self.scheduler.step(noise_pred, t, latents, **extra_kwargs).prev_sample # scale and decode the image latents with vae latents = 1 / 0.18215 * latents diff --git a/src/diffusers/pipelines/versatile_diffusion/pipeline_versatile_diffusion_text_to_image.py b/src/diffusers/pipelines/versatile_diffusion/pipeline_versatile_diffusion_text_to_image.py new file mode 100644 index 000000000000..3e8881a7c9ef --- /dev/null +++ b/src/diffusers/pipelines/versatile_diffusion/pipeline_versatile_diffusion_text_to_image.py @@ -0,0 +1,621 @@ +# Copyright 2022 The HuggingFace Team. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import inspect +from typing import List, Optional, Tuple, Union + +import numpy as np +import torch +import torch.utils.checkpoint + +import PIL +from transformers import CLIPProcessor, CLIPTextModel, CLIPTokenizer, CLIPVisionModel + +from ...models import AutoencoderKL, UNet2DConditionModel, VQModel +from ...models.attention import Transformer2DModel +from ...pipeline_utils import DiffusionPipeline, ImagePipelineOutput +from ...schedulers import DDIMScheduler, LMSDiscreteScheduler, PNDMScheduler + + +class VersatileDiffusionTextToImagePipeline(DiffusionPipeline): + r""" + This model inherits from [`DiffusionPipeline`]. Check the superclass documentation for the generic methods the + library implements for all the pipelines (such as downloading or saving, running on a particular device, etc.) + + Parameters: + vqvae ([`VQModel`]): + Vector-quantized (VQ) Model to encode and decode images to and from latent representations. + bert ([`LDMBertModel`]): + Text-encoder model based on [BERT](https://huggingface.co/docs/transformers/model_doc/bert) architecture. + tokenizer (`transformers.BertTokenizer`): + Tokenizer of class + [BertTokenizer](https://huggingface.co/docs/transformers/model_doc/bert#transformers.BertTokenizer). + unet ([`UNet2DConditionModel`]): Conditional U-Net architecture to denoise the encoded image latents. + scheduler ([`SchedulerMixin`]): + A scheduler to be used in combination with `unet` to denoise the encoded image latents. Can be one of + [`DDIMScheduler`], [`LMSDiscreteScheduler`], or [`PNDMScheduler`]. 
+ """ + tokenizer: CLIPTokenizer + image_processor: CLIPProcessor + text_encoder: CLIPTextModel + image_encoder: CLIPVisionModel + image_unet: UNet2DConditionModel + text_unet: UNet2DConditionModel + vae: Union[VQModel, AutoencoderKL] + scheduler: Union[DDIMScheduler, PNDMScheduler, LMSDiscreteScheduler] + + def __init__( + self, + tokenizer: CLIPTokenizer, + image_processor: CLIPProcessor, + text_encoder: CLIPTextModel, + image_unet: UNet2DConditionModel, + text_unet: UNet2DConditionModel, + vae: Union[VQModel, AutoencoderKL], + scheduler: Union[DDIMScheduler, PNDMScheduler, LMSDiscreteScheduler], + ): + super().__init__() + self.register_modules( + tokenizer=tokenizer, + image_processor=image_processor, + text_encoder=text_encoder, + image_unet=image_unet, + text_unet=text_unet, + vae=vae, + scheduler=scheduler, + ) + for name, module in self.image_unet.named_modules(): + if isinstance(module, Transformer2DModel): + parent_name, index = name.rsplit(".", 1) + index = int(index) + self.image_unet.get_submodule(parent_name)[index], self.text_unet.get_submodule(parent_name)[index] = ( + self.text_unet.get_submodule(parent_name)[index], + self.image_unet.get_submodule(parent_name)[index], + ) + + def _encode_prompt(self, prompt, do_classifier_free_guidance): + r""" + Encodes the prompt into text encoder hidden states. + + Args: + prompt (`str` or `list(int)`): + prompt to be encoded + do_classifier_free_guidance (`bool`): + whether to use classifier free guidance or not + """ + + def normalize_embeddings(encoder_output): + embeds = self.text_encoder.text_projection(encoder_output.last_hidden_state) + embeds_pooled = encoder_output.text_embeds + embeds = embeds / torch.norm(embeds_pooled.unsqueeze(1), dim=-1, keepdim=True) + return embeds + + batch_size = len(prompt) if isinstance(prompt, list) else 1 + + if do_classifier_free_guidance: + uncond_input = self.tokenizer([""] * batch_size, padding="max_length", max_length=77, return_tensors="pt") + uncond_embeddings = self.text_encoder(uncond_input.input_ids.to(self.device)) + uncond_embeddings = normalize_embeddings(uncond_embeddings) + + # get prompt text embeddings + text_input = self.tokenizer(prompt, padding="max_length", max_length=77, return_tensors="pt") + text_embeddings = self.text_encoder(text_input.input_ids.to(self.device)) + text_embeddings = normalize_embeddings(text_embeddings) + + # For classifier free guidance, we need to do two forward passes. + # Here we concatenate the unconditional and text embeddings into a single batch + # to avoid doing two forward passes + text_embeddings = torch.cat([uncond_embeddings, text_embeddings]) + + return text_embeddings + + def _encode_image_prompt(self, prompt, do_classifier_free_guidance): + r""" + Encodes the image prompt into image encoder hidden states. 
+ + Args: + prompt (`str` or `list(int)`): + prompt to be encoded + do_classifier_free_guidance (`bool`): + whether to use classifier free guidance or not + """ + + def normalize_embeddings(encoder_output): + embeds = self.image_encoder.vision_model.post_layernorm(encoder_output.last_hidden_state) + embeds = self.image_encoder.visual_projection(embeds) + embeds_pooled = embeds[:, 0:1] + embeds = embeds / torch.norm(embeds_pooled, dim=-1, keepdim=True) + return embeds + + batch_size = len(prompt) if isinstance(prompt, list) else 1 + + if do_classifier_free_guidance: + dummy_images = [np.zeros((512, 512, 3))] * batch_size + dummy_images = self.image_processor(images=dummy_images, return_tensors="pt") + uncond_embeddings = self.image_encoder(dummy_images.pixel_values.to(self.device)) + uncond_embeddings = normalize_embeddings(uncond_embeddings) + + # get prompt text embeddings + image_input = self.image_processor(images=prompt, return_tensors="pt") + image_embeddings = self.image_encoder(image_input.pixel_values.to(self.device)) + image_embeddings = normalize_embeddings(image_embeddings) + + # For classifier free guidance, we need to do two forward passes. + # Here we concatenate the unconditional and image embeddings into a single batch + # to avoid doing two forward passes + image_embeddings = torch.cat([uncond_embeddings, image_embeddings]) + + return image_embeddings + + @torch.no_grad() + def __call__( + self, + prompt: Optional[Union[str, List[str]]] = None, + height: Optional[int] = 512, + width: Optional[int] = 512, + num_inference_steps: Optional[int] = 50, + guidance_scale: Optional[float] = 1.0, + eta: Optional[float] = 0.0, + generator: Optional[torch.Generator] = None, + output_type: Optional[str] = "pil", + return_dict: bool = True, + **kwargs, + ) -> Union[Tuple, ImagePipelineOutput]: + r""" + Args: + prompt (`str` or `List[str]`): + The prompt or prompts to guide the image generation. + height (`int`, *optional*, defaults to 256): + The height in pixels of the generated image. + width (`int`, *optional*, defaults to 256): + The width in pixels of the generated image. + num_inference_steps (`int`, *optional*, defaults to 50): + The number of denoising steps. More denoising steps usually lead to a higher quality image at the + expense of slower inference. + guidance_scale (`float`, *optional*, defaults to 1.0): + Guidance scale as defined in [Classifier-Free Diffusion Guidance](https://arxiv.org/abs/2207.12598). + `guidance_scale` is defined as `w` of equation 2. of [Imagen + Paper](https://arxiv.org/pdf/2205.11487.pdf). Guidance scale is enabled by setting `guidance_scale > + 1`. Higher guidance scale encourages to generate images that are closely linked to the text `prompt` at + the, usually at the expense of lower image quality. + generator (`torch.Generator`, *optional*): + A [torch generator](https://pytorch.org/docs/stable/generated/torch.Generator.html) to make generation + deterministic. + output_type (`str`, *optional*, defaults to `"pil"`): + The output format of the generate image. Choose between + [PIL](https://pillow.readthedocs.io/en/stable/): `PIL.Image.Image` or `np.array`. + return_dict (`bool`, *optional*): + Whether or not to return a [`~pipeline_utils.ImagePipelineOutput`] instead of a plain tuple. + + Returns: + [`~pipeline_utils.ImagePipelineOutput`] or `tuple`: [`~pipelines.utils.ImagePipelineOutput`] if + `return_dict` is True, otherwise a `tuple. When returning a tuple, the first element is a list with the + generated images. 
+ """ + do_classifier_free_guidance = guidance_scale > 1.0 + + if height % 8 != 0 or width % 8 != 0: + raise ValueError(f"`height` and `width` have to be divisible by 8 but are {height} and {width}.") + + if isinstance(prompt, str): + batch_size = 1 + elif isinstance(prompt, list): + batch_size = len(prompt) + else: + raise ValueError(f"`prompt` has to be of type `str` or `list` but is {type(prompt)}") + + condition_embeddings = self._encode_prompt(prompt, do_classifier_free_guidance) + + latents = torch.randn( + (batch_size, self.image_unet.in_channels, height // 8, width // 8), generator=generator, device=self.device + ) + + self.scheduler.set_timesteps(num_inference_steps) + + # prepare extra kwargs for the scheduler step, since not all schedulers have the same signature + accepts_eta = "eta" in set(inspect.signature(self.scheduler.step).parameters.keys()) + + extra_kwargs = {} + if accepts_eta: + extra_kwargs["eta"] = eta + + for t in self.progress_bar(self.scheduler.timesteps): + if not do_classifier_free_guidance: + latents_input = latents + else: + latents_input = torch.cat([latents] * 2) + + # predict the noise residual + noise_pred = self.image_unet(latents_input, t, encoder_hidden_states=condition_embeddings).sample + # perform guidance + if guidance_scale != 1.0: + noise_pred_uncond, noise_prediction_cond = noise_pred.chunk(2) + noise_pred = noise_pred_uncond + guidance_scale * (noise_prediction_cond - noise_pred_uncond) + + # compute the previous noisy sample x_t -> x_t-1 + latents = self.scheduler.step(noise_pred, t, latents, **extra_kwargs).prev_sample + + # scale and decode the image latents with vae + latents = 1 / 0.18215 * latents + image = self.vae.decode(latents).sample + + image = (image / 2 + 0.5).clamp(0, 1) + image = image.cpu().permute(0, 2, 3, 1).numpy() + if output_type == "pil": + image = self.numpy_to_pil(image) + + if not return_dict: + return (image,) + + return ImagePipelineOutput(images=image) + + +# class UNetMultiDimConditionModel(ModelMixin, ConfigMixin): +# r""" +# UNet2DConditionModel is a conditional 2D UNet model that takes in a noisy sample, conditional state, and a timestep # +and returns sample shaped output. # # This model inherits from [`ModelMixin`]. Check the superclass documentation for +the generic methods the library # implements for all the models (such as downloading or saving, etc.) # # Parameters: # +sample_size (`int`, *optional*): The size of the input sample. # in_channels (`int`, *optional*, defaults to 4): The +number of channels in the input sample. # out_channels (`int`, *optional*, defaults to 4): The number of channels in +the output. # center_input_sample (`bool`, *optional*, defaults to `False`): Whether to center the input sample. # +flip_sin_to_cos (`bool`, *optional*, defaults to `True`): # Whether to flip the sin to cos in the time embedding. # +freq_shift (`int`, *optional*, defaults to 0): The frequency shift to apply to the time embedding. # down_block_types +(`Tuple[str]`, *optional*, defaults to `("CrossAttnDownBlock2D", "CrossAttnDownBlock2D", "CrossAttnDownBlock2D", +"DownBlock2D")`): # The tuple of downsample blocks to use. # up_block_types (`Tuple[str]`, *optional*, defaults to +`("UpBlock2D", "CrossAttnUpBlock2D", "CrossAttnUpBlock2D", "CrossAttnUpBlock2D",)`): # The tuple of upsample blocks to +use. # block_out_channels (`Tuple[int]`, *optional*, defaults to `(320, 640, 1280, 1280)`): # The tuple of output +channels for each block. 
# layers_per_block (`int`, *optional*, defaults to 2): The number of layers per block. # +downsample_padding (`int`, *optional*, defaults to 1): The padding to use for the downsampling convolution. # +mid_block_scale_factor (`float`, *optional*, defaults to 1.0): The scale factor to use for the mid block. # act_fn +(`str`, *optional*, defaults to `"silu"`): The activation function to use. # norm_num_groups (`int`, *optional*, +defaults to 32): The number of groups to use for the normalization. # norm_eps (`float`, *optional*, defaults to 1e-5): +The epsilon to use for the normalization. # cross_attention_dim (`int`, *optional*, defaults to 1280): The dimension of +the cross attention features. # attention_head_dim (`int`, *optional*, defaults to 8): The dimension of the attention +heads. #""" +# +# _supports_gradient_checkpointing = True +# +# @register_to_config +# def __init__( +# self, +# sample_size: Optional[int] = None, +# in_channels: int = 4, +# out_channels: int = 4, +# center_input_sample: bool = False, +# flip_sin_to_cos: bool = True, +# freq_shift: int = 0, +# down_block_types: Tuple[str] = ( +# "CrossAttnDownBlockMultiDim", +# "CrossAttnDownBlockMultiDim", +# "CrossAttnDownBlockMultiDim", +# "DownBlockMultiDim", +# ), +# up_block_types: Tuple[str] = ( +# "UpBlockMultiDim", +# "CrossAttnUpBlockMultiDim", +# "CrossAttnUpBlockMultiDim", +# "CrossAttnUpBlockMultiDim" +# ), +# block_out_channels: Tuple[int] = (320, 640, 1280, 1280), +# block_second_dim: Tuple[int] = (4, 4, 4, 4), +# layers_per_block: int = 2, +# downsample_padding: int = 1, +# mid_block_scale_factor: float = 1, +# act_fn: str = "silu", +# norm_num_groups: int = 32, +# norm_eps: float = 1e-5, +# cross_attention_dim: int = 1280, +# attention_head_dim: int = 8, +# ): +# super().__init__() +# +# self.sample_size = sample_size +# time_embed_dim = block_out_channels[0] * 4 +# +# # input +# self.conv_in = LinearMultiDim([in_channels, 1, 1], block_out_channels[0], kernel_size=3, padding=(1, 1)) +# +# # time +# self.time_proj = Timesteps(block_out_channels[0], flip_sin_to_cos, freq_shift) +# timestep_input_dim = block_out_channels[0] +# +# self.time_embedding = TimestepEmbedding(timestep_input_dim, time_embed_dim) +# +# self.down_blocks = nn.ModuleList([]) +# self.mid_block = None +# self.up_blocks = nn.ModuleList([]) +# +# # down +# output_channel = block_out_channels[0] +# for i, down_block_type in enumerate(down_block_types): +# input_channel = output_channel +# output_channel = block_out_channels[i] +# is_final_block = i == len(block_out_channels) - 1 +# +# down_block = self.get_down_block( +# down_block_type, +# num_layers=layers_per_block, +# in_channels=input_channel, +# out_channels=output_channel, +# temb_channels=time_embed_dim, +# add_downsample=not is_final_block, +# resnet_eps=norm_eps, +# resnet_act_fn=act_fn, +# resnet_groups=norm_num_groups, +# cross_attention_dim=cross_attention_dim, +# attn_num_head_channels=attention_head_dim, +# downsample_padding=downsample_padding, +# ) +# self.down_blocks.append(down_block) +# +# # mid +# self.mid_block = UNetMidBlockMultiDimCrossAttn( +# in_channels=block_out_channels[-1], +# temb_channels=time_embed_dim, +# resnet_eps=norm_eps, +# resnet_act_fn=act_fn, +# output_scale_factor=mid_block_scale_factor, +# resnet_time_scale_shift="default", +# cross_attention_dim=cross_attention_dim, +# attn_num_head_channels=attention_head_dim, +# resnet_groups=norm_num_groups, +# ) +# +# # count how many layers upsample the images +# self.num_upsamplers = 0 +# +# # up +# 
reversed_block_out_channels = list(reversed(block_out_channels)) +# output_channel = reversed_block_out_channels[0] +# for i, up_block_type in enumerate(up_block_types): +# is_final_block = i == len(block_out_channels) - 1 +# +# prev_output_channel = output_channel +# output_channel = reversed_block_out_channels[i] +# input_channel = reversed_block_out_channels[min(i + 1, len(block_out_channels) - 1)] +# +# # add upsample block for all BUT final layer +# if not is_final_block: +# add_upsample = True +# self.num_upsamplers += 1 +# else: +# add_upsample = False +# +# up_block = self.get_up_block( +# up_block_type, +# num_layers=layers_per_block + 1, +# in_channels=input_channel, +# out_channels=output_channel, +# prev_output_channel=prev_output_channel, +# temb_channels=time_embed_dim, +# add_upsample=add_upsample, +# resnet_eps=norm_eps, +# resnet_act_fn=act_fn, +# resnet_groups=norm_num_groups, +# cross_attention_dim=cross_attention_dim, +# attn_num_head_channels=attention_head_dim, +# ) +# self.up_blocks.append(up_block) +# prev_output_channel = output_channel +# +# # out +# self.conv_norm_out = nn.GroupNorm(num_channels=block_out_channels[0], num_groups=norm_num_groups, eps=norm_eps) +# self.conv_act = nn.SiLU() +# self.conv_out = nn.Conv2d(block_out_channels[0], out_channels, 3, padding=1) +# +# def get_down_block( +# down_block_type, +# num_layers, +# in_channels, +# out_channels, +# temb_channels, +# add_downsample, +# resnet_eps, +# resnet_act_fn, +# attn_num_head_channels, +# resnet_groups=None, +# cross_attention_dim=None, +# downsample_padding=None, +# ): +# down_block_type = down_block_type[7:] if down_block_type.startswith("UNetRes") else down_block_type +# if down_block_type == "DownBlockMultiDim": +# return DownBlockMultiDim( +# num_layers=num_layers, +# in_channels=in_channels, +# out_channels=out_channels, +# temb_channels=temb_channels, +# add_downsample=add_downsample, +# resnet_eps=resnet_eps, +# resnet_act_fn=resnet_act_fn, +# resnet_groups=resnet_groups, +# downsample_padding=downsample_padding, +# ) +# elif down_block_type == "CrossAttnDownBlockMultiDim": +# if cross_attention_dim is None: +# raise ValueError("cross_attention_dim must be specified for CrossAttnDownBlock2D") +# return CrossAttnDownBlockMultiDim( +# num_layers=num_layers, +# in_channels=in_channels, +# out_channels=out_channels, +# temb_channels=temb_channels, +# add_downsample=add_downsample, +# resnet_eps=resnet_eps, +# resnet_act_fn=resnet_act_fn, +# resnet_groups=resnet_groups, +# downsample_padding=downsample_padding, +# cross_attention_dim=cross_attention_dim, +# attn_num_head_channels=attn_num_head_channels, +# ) +# +# def set_attention_slice(self, slice_size): +# if slice_size is not None and self.config.attention_head_dim % slice_size != 0: +# raise ValueError( +# f"Make sure slice_size {slice_size} is a divisor of " +# f"the number of heads used in cross_attention {self.config.attention_head_dim}" +# ) +# if slice_size is not None and slice_size > self.config.attention_head_dim: +# raise ValueError( +# f"Chunk_size {slice_size} has to be smaller or equal to " +# f"the number of heads used in cross_attention {self.config.attention_head_dim}" +# ) +# +# for block in self.down_blocks: +# if hasattr(block, "attentions") and block.attentions is not None: +# block.set_attention_slice(slice_size) +# +# self.mid_block.set_attention_slice(slice_size) +# +# for block in self.up_blocks: +# if hasattr(block, "attentions") and block.attentions is not None: +# block.set_attention_slice(slice_size) +# +# def 
set_use_memory_efficient_attention_xformers(self, use_memory_efficient_attention_xformers: bool): +# for block in self.down_blocks: +# if hasattr(block, "attentions") and block.attentions is not None: +# block.set_use_memory_efficient_attention_xformers(use_memory_efficient_attention_xformers) +# +# self.mid_block.set_use_memory_efficient_attention_xformers(use_memory_efficient_attention_xformers) +# +# for block in self.up_blocks: +# if hasattr(block, "attentions") and block.attentions is not None: +# block.set_use_memory_efficient_attention_xformers(use_memory_efficient_attention_xformers) +# +# def _set_gradient_checkpointing(self, module, value=False): +# if isinstance(module, (CrossAttnDownBlock2D, DownBlock2D, CrossAttnUpBlock2D, UpBlock2D)): +# module.gradient_checkpointing = value +# +# def forward( +# self, +# sample: torch.FloatTensor, +# timestep: Union[torch.Tensor, float, int], +# encoder_hidden_states: torch.Tensor, +# return_dict: bool = True, +# ) -> Union[UNet2DConditionOutput, Tuple]: +# r""" +# Args: # sample (`torch.FloatTensor`): (batch, channel, height, width) noisy inputs tensor # timestep +(`torch.FloatTensor` or `float` or `int`): (batch) timesteps # encoder_hidden_states (`torch.FloatTensor`): (batch, +channel, height, width) encoder hidden states # return_dict (`bool`, *optional*, defaults to `True`): # Whether or not +to return a [`models.unet_2d_condition.UNet2DConditionOutput`] instead of a plain tuple. # # Returns: # +[`~models.unet_2d_condition.UNet2DConditionOutput`] or `tuple`: # [`~models.unet_2d_condition.UNet2DConditionOutput`] +if `return_dict` is True, otherwise a `tuple`. When # returning a tuple, the first element is the sample tensor. #""" +# # By default samples have to be AT least a multiple of the overall upsampling factor. +# # The overall upsampling factor is equal to 2 ** (# num of upsampling layears). +# # However, the upsampling interpolation output size can be forced to fit any upsampling size +# # on the fly if necessary. +# default_overall_up_factor = 2**self.num_upsamplers +# +# # upsample size should be forwarded when sample is not a multiple of `default_overall_up_factor` +# forward_upsample_size = False +# upsample_size = None +# +# if any(s % default_overall_up_factor != 0 for s in sample.shape[-2:]): +# logger.info("Forward upsample size to force interpolation output size.") +# forward_upsample_size = True +# +# # 0. center input if necessary +# if self.config.center_input_sample: +# sample = 2 * sample - 1.0 +# +# # 1. time +# timesteps = timestep +# if not torch.is_tensor(timesteps): +# # TODO: this requires sync between CPU and GPU. So try to pass timesteps as tensors if you can +# timesteps = torch.tensor([timesteps], dtype=torch.long, device=sample.device) +# elif torch.is_tensor(timesteps) and len(timesteps.shape) == 0: +# timesteps = timesteps[None].to(sample.device) +# +# # broadcast to batch dimension in a way that's compatible with ONNX/Core ML +# timesteps = timesteps.expand(sample.shape[0]) +# +# t_emb = self.time_proj(timesteps) +# +# # timesteps does not contain any weights and will always return f32 tensors +# # but time_embedding might actually be running in fp16. so we need to cast here. +# # there might be better ways to encapsulate this. +# t_emb = t_emb.to(dtype=self.dtype) +# emb = self.time_embedding(t_emb) +# +# # 2. pre-process +# sample = self.conv_in(sample) +# +# # 3. 
down +# down_block_res_samples = (sample,) +# for downsample_block in self.down_blocks: +# if hasattr(downsample_block, "attentions") and downsample_block.attentions is not None: +# sample, res_samples = downsample_block( +# hidden_states=sample, +# temb=emb, +# encoder_hidden_states=encoder_hidden_states, +# ) +# else: +# sample, res_samples = downsample_block(hidden_states=sample, temb=emb) +# +# down_block_res_samples += res_samples +# +# # 4. mid +# sample = self.mid_block(sample, emb, encoder_hidden_states=encoder_hidden_states) +# +# # 5. up +# for i, upsample_block in enumerate(self.up_blocks): +# is_final_block = i == len(self.up_blocks) - 1 +# +# res_samples = down_block_res_samples[-len(upsample_block.resnets) :] +# down_block_res_samples = down_block_res_samples[: -len(upsample_block.resnets)] +# +# # if we have not reached the final block and need to forward the +# # upsample size, we do it here +# if not is_final_block and forward_upsample_size: +# upsample_size = down_block_res_samples[-1].shape[2:] +# +# if hasattr(upsample_block, "attentions") and upsample_block.attentions is not None: +# sample = upsample_block( +# hidden_states=sample, +# temb=emb, +# res_hidden_states_tuple=res_samples, +# encoder_hidden_states=encoder_hidden_states, +# upsample_size=upsample_size, +# ) +# else: +# sample = upsample_block( +# hidden_states=sample, temb=emb, res_hidden_states_tuple=res_samples, upsample_size=upsample_size +# ) +# # 6. post-process +# sample = self.conv_norm_out(sample) +# sample = self.conv_act(sample) +# sample = self.conv_out(sample) +# +# if not return_dict: +# return (sample,) +# +# return UNet2DConditionOutput(sample=sample) +# +# +# class LinearMultiDim(nn.Linear): +# def __init__(self, in_features, out_features, *args, **kwargs): +# in_features = [in_features] if isinstance(in_features, int) else list(in_features) +# out_features = [out_features] if isinstance(out_features, int) else list(out_features) +# self.in_features_multidim = in_features +# self.out_features_multidim = out_features +# super().__init__( +# np.array(in_features).prod(), +# np.array(out_features).prod(), +# *args, **kwargs) +# +# def forward(self, x): +# shape = x.shape +# n = len(self.in_features_multidim) +# x = x.view(*shape[0:-n], self.in_features) +# y = super().forward(x) +# y = y.view(*shape[0:-n], *self.out_features_multidim) +# return y diff --git a/tests/pipelines/versatile_diffusion/test_versatile_diffusion.py b/tests/pipelines/versatile_diffusion/test_versatile_diffusion_image_variation.py similarity index 60% rename from tests/pipelines/versatile_diffusion/test_versatile_diffusion.py rename to tests/pipelines/versatile_diffusion/test_versatile_diffusion_image_variation.py index 4a34264952f5..d990238c2194 100644 --- a/tests/pipelines/versatile_diffusion/test_versatile_diffusion.py +++ b/tests/pipelines/versatile_diffusion/test_versatile_diffusion_image_variation.py @@ -18,7 +18,7 @@ import numpy as np import torch -from diffusers import VersatileDiffusionPipeline +from diffusers import VersatileDiffusionImageVariationPipeline from diffusers.utils.testing_utils import load_image, require_torch, slow, torch_device from ...test_pipelines_common import PipelineTesterMixin @@ -27,32 +27,15 @@ torch.backends.cuda.matmul.allow_tf32 = False -class VersatileDiffusionPipelineFastTests(PipelineTesterMixin, unittest.TestCase): +class VersatileDiffusionImageVariationPipelineFastTests(PipelineTesterMixin, unittest.TestCase): pass @slow @require_torch -class 
VersatileDiffusionPipelineIntegrationTests(unittest.TestCase): - def test_inference_text2img(self): - pipe = VersatileDiffusionPipeline.from_pretrained("diffusers/vd-official-test") - pipe.to(torch_device) - pipe.set_progress_bar_config(disable=None) - - prompt = "A painting of a squirrel eating a burger " - generator = torch.Generator(device=torch_device).manual_seed(0) - image = pipe( - prompt=prompt, generator=generator, guidance_scale=7.5, num_inference_steps=50, output_type="numpy" - ).images - - image_slice = image[0, -3:, -3:, -1] - - assert image.shape == (1, 512, 512, 3) - expected_slice = np.array([0.9256, 0.9340, 0.8933, 0.9361, 0.9113, 0.8727, 0.9122, 0.8745, 0.8099]) - assert np.abs(image_slice.flatten() - expected_slice).max() < 1e-2 - +class VersatileDiffusionImageVariationPipelineIntegrationTests(unittest.TestCase): def test_inference_image_variations(self): - pipe = VersatileDiffusionPipeline.from_pretrained("diffusers/vd-official-test") + pipe = VersatileDiffusionImageVariationPipeline.from_pretrained("diffusers/vd-official-test") pipe.to(torch_device) pipe.set_progress_bar_config(disable=None) @@ -61,7 +44,7 @@ def test_inference_image_variations(self): ) generator = torch.Generator(device=torch_device).manual_seed(0) image = pipe( - image_prompt=image_prompt, + image=image_prompt, generator=generator, guidance_scale=7.5, num_inference_steps=50, diff --git a/tests/pipelines/versatile_diffusion/test_versatile_diffusion_text_to_image.py b/tests/pipelines/versatile_diffusion/test_versatile_diffusion_text_to_image.py new file mode 100644 index 000000000000..08a31366e1b1 --- /dev/null +++ b/tests/pipelines/versatile_diffusion/test_versatile_diffusion_text_to_image.py @@ -0,0 +1,52 @@ +# coding=utf-8 +# Copyright 2022 HuggingFace Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
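+# Usage sketch (illustrative only, not exercised by the tests below): the new
+# VersatileDiffusionTextToImagePipeline introduced in this commit is expected to be
+# callable roughly as follows. The checkpoint id "diffusers/vd-official-test" is the
+# temporary test checkpoint used in this file and may not be the final published weights;
+# the output filename is arbitrary.
+#
+#   import torch
+#   from diffusers import VersatileDiffusionTextToImagePipeline
+#
+#   # load the text-to-image variant and move it to GPU
+#   pipe = VersatileDiffusionTextToImagePipeline.from_pretrained("diffusers/vd-official-test")
+#   pipe.to("cuda")
+#
+#   # fixed seed for reproducible sampling
+#   generator = torch.Generator(device="cuda").manual_seed(0)
+#   image = pipe(
+#       prompt="A painting of a squirrel eating a burger",
+#       generator=generator,
+#       guidance_scale=7.5,
+#       num_inference_steps=50,
+#   ).images[0]  # default output_type="pil" returns a list of PIL images
+#   image.save("squirrel.png")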
+ +import unittest + +import numpy as np +import torch + +from diffusers import VersatileDiffusionTextToImagePipeline +from diffusers.utils.testing_utils import load_image, require_torch, slow, torch_device + +from ...test_pipelines_common import PipelineTesterMixin + + +torch.backends.cuda.matmul.allow_tf32 = False + + +class VersatileDiffusionTextToImagePipelineFastTests(PipelineTesterMixin, unittest.TestCase): + pass + + +@slow +@require_torch +class VersatileDiffusionTextToImagePipelineIntegrationTests(unittest.TestCase): + def test_inference_text2img(self): + pipe = VersatileDiffusionTextToImagePipeline.from_pretrained("diffusers/vd-official-test") + pipe.to(torch_device) + pipe.set_progress_bar_config(disable=None) + + prompt = "A painting of a squirrel eating a burger " + generator = torch.Generator(device=torch_device).manual_seed(0) + image = pipe( + prompt=prompt, generator=generator, guidance_scale=7.5, num_inference_steps=50, output_type="numpy" + ).images + + image_slice = image[0, -3:, -3:, -1] + + assert image.shape == (1, 512, 512, 3) + expected_slice = np.array([0.9256, 0.9340, 0.8933, 0.9361, 0.9113, 0.8727, 0.9122, 0.8745, 0.8099]) + assert np.abs(image_slice.flatten() - expected_slice).max() < 1e-2 From 22e6b5401b16ced8c9090f95c685ed3b1a144b94 Mon Sep 17 00:00:00 2001 From: anton-l Date: Mon, 21 Nov 2022 12:37:18 +0100 Subject: [PATCH 11/49] fix format --- ...eline_versatile_diffusion_text_to_image.py | 67 ++++++++++++------- 1 file changed, 41 insertions(+), 26 deletions(-) diff --git a/src/diffusers/pipelines/versatile_diffusion/pipeline_versatile_diffusion_text_to_image.py b/src/diffusers/pipelines/versatile_diffusion/pipeline_versatile_diffusion_text_to_image.py index 3e8881a7c9ef..3f833a6ba4d4 100644 --- a/src/diffusers/pipelines/versatile_diffusion/pipeline_versatile_diffusion_text_to_image.py +++ b/src/diffusers/pipelines/versatile_diffusion/pipeline_versatile_diffusion_text_to_image.py @@ -263,26 +263,35 @@ def __call__( # class UNetMultiDimConditionModel(ModelMixin, ConfigMixin): # r""" -# UNet2DConditionModel is a conditional 2D UNet model that takes in a noisy sample, conditional state, and a timestep # -and returns sample shaped output. # # This model inherits from [`ModelMixin`]. Check the superclass documentation for -the generic methods the library # implements for all the models (such as downloading or saving, etc.) # # Parameters: # -sample_size (`int`, *optional*): The size of the input sample. # in_channels (`int`, *optional*, defaults to 4): The -number of channels in the input sample. # out_channels (`int`, *optional*, defaults to 4): The number of channels in -the output. # center_input_sample (`bool`, *optional*, defaults to `False`): Whether to center the input sample. # -flip_sin_to_cos (`bool`, *optional*, defaults to `True`): # Whether to flip the sin to cos in the time embedding. # -freq_shift (`int`, *optional*, defaults to 0): The frequency shift to apply to the time embedding. # down_block_types -(`Tuple[str]`, *optional*, defaults to `("CrossAttnDownBlock2D", "CrossAttnDownBlock2D", "CrossAttnDownBlock2D", -"DownBlock2D")`): # The tuple of downsample blocks to use. # up_block_types (`Tuple[str]`, *optional*, defaults to -`("UpBlock2D", "CrossAttnUpBlock2D", "CrossAttnUpBlock2D", "CrossAttnUpBlock2D",)`): # The tuple of upsample blocks to -use. # block_out_channels (`Tuple[int]`, *optional*, defaults to `(320, 640, 1280, 1280)`): # The tuple of output -channels for each block. 
# layers_per_block (`int`, *optional*, defaults to 2): The number of layers per block. # -downsample_padding (`int`, *optional*, defaults to 1): The padding to use for the downsampling convolution. # -mid_block_scale_factor (`float`, *optional*, defaults to 1.0): The scale factor to use for the mid block. # act_fn -(`str`, *optional*, defaults to `"silu"`): The activation function to use. # norm_num_groups (`int`, *optional*, -defaults to 32): The number of groups to use for the normalization. # norm_eps (`float`, *optional*, defaults to 1e-5): -The epsilon to use for the normalization. # cross_attention_dim (`int`, *optional*, defaults to 1280): The dimension of -the cross attention features. # attention_head_dim (`int`, *optional*, defaults to 8): The dimension of the attention -heads. #""" +# UNet2DConditionModel is a conditional 2D UNet model that takes in a noisy sample, conditional state, and a timestep +# and returns sample shaped output. +# +# This model inherits from [`ModelMixin`]. Check the superclass documentation for the generic methods the library +# implements for all the models (such as downloading or saving, etc.) +# +# Parameters: +# sample_size (`int`, *optional*): The size of the input sample. +# in_channels (`int`, *optional*, defaults to 4): The number of channels in the input sample. +# out_channels (`int`, *optional*, defaults to 4): The number of channels in the output. +# center_input_sample (`bool`, *optional*, defaults to `False`): Whether to center the input sample. +# flip_sin_to_cos (`bool`, *optional*, defaults to `True`): +# Whether to flip the sin to cos in the time embedding. +# freq_shift (`int`, *optional*, defaults to 0): The frequency shift to apply to the time embedding. +# down_block_types (`Tuple[str]`, *optional*, defaults to `("CrossAttnDownBlock2D", "CrossAttnDownBlock2D", "CrossAttnDownBlock2D", "DownBlock2D")`): +# The tuple of downsample blocks to use. +# up_block_types (`Tuple[str]`, *optional*, defaults to `("UpBlock2D", "CrossAttnUpBlock2D", "CrossAttnUpBlock2D", "CrossAttnUpBlock2D",)`): +# The tuple of upsample blocks to use. +# block_out_channels (`Tuple[int]`, *optional*, defaults to `(320, 640, 1280, 1280)`): +# The tuple of output channels for each block. +# layers_per_block (`int`, *optional*, defaults to 2): The number of layers per block. +# downsample_padding (`int`, *optional*, defaults to 1): The padding to use for the downsampling convolution. +# mid_block_scale_factor (`float`, *optional*, defaults to 1.0): The scale factor to use for the mid block. +# act_fn (`str`, *optional*, defaults to `"silu"`): The activation function to use. +# norm_num_groups (`int`, *optional*, defaults to 32): The number of groups to use for the normalization. +# norm_eps (`float`, *optional*, defaults to 1e-5): The epsilon to use for the normalization. +# cross_attention_dim (`int`, *optional*, defaults to 1280): The dimension of the cross attention features. +# attention_head_dim (`int`, *optional*, defaults to 8): The dimension of the attention heads. 
+# """ # # _supports_gradient_checkpointing = True # @@ -503,12 +512,18 @@ def __call__( # return_dict: bool = True, # ) -> Union[UNet2DConditionOutput, Tuple]: # r""" -# Args: # sample (`torch.FloatTensor`): (batch, channel, height, width) noisy inputs tensor # timestep -(`torch.FloatTensor` or `float` or `int`): (batch) timesteps # encoder_hidden_states (`torch.FloatTensor`): (batch, -channel, height, width) encoder hidden states # return_dict (`bool`, *optional*, defaults to `True`): # Whether or not -to return a [`models.unet_2d_condition.UNet2DConditionOutput`] instead of a plain tuple. # # Returns: # -[`~models.unet_2d_condition.UNet2DConditionOutput`] or `tuple`: # [`~models.unet_2d_condition.UNet2DConditionOutput`] -if `return_dict` is True, otherwise a `tuple`. When # returning a tuple, the first element is the sample tensor. #""" +# Args: +# sample (`torch.FloatTensor`): (batch, channel, height, width) noisy inputs tensor +# timestep (`torch.FloatTensor` or `float` or `int`): (batch) timesteps +# encoder_hidden_states (`torch.FloatTensor`): (batch, channel, height, width) encoder hidden states +# return_dict (`bool`, *optional*, defaults to `True`): +# Whether or not to return a [`models.unet_2d_condition.UNet2DConditionOutput`] instead of a plain tuple. +# +# Returns: +# [`~models.unet_2d_condition.UNet2DConditionOutput`] or `tuple`: +# [`~models.unet_2d_condition.UNet2DConditionOutput`] if `return_dict` is True, otherwise a `tuple`. When +# returning a tuple, the first element is the sample tensor. +# """ # # By default samples have to be AT least a multiple of the overall upsampling factor. # # The overall upsampling factor is equal to 2 ** (# num of upsampling layears). # # However, the upsampling interpolation output size can be forced to fit any upsampling size From d36cf41b83e0c7fa1c40a3bc25c134c016a7e7a7 Mon Sep 17 00:00:00 2001 From: anton-l Date: Mon, 21 Nov 2022 13:40:10 +0100 Subject: [PATCH 12/49] refactor text2img --- .../pipeline_versatile_diffusion.py | 0 ...eline_versatile_diffusion_image_to_text.py | 412 +++++++++ ...ine_versatile_diffusion_image_variation.py | 1 - ...eline_versatile_diffusion_text_to_image.py | 801 ++++++++---------- .../dummy_torch_and_transformers_objects.py | 30 + .../test_versatile_diffusion_text_to_image.py | 8 +- 6 files changed, 777 insertions(+), 475 deletions(-) create mode 100644 src/diffusers/pipelines/versatile_diffusion/pipeline_versatile_diffusion.py create mode 100644 src/diffusers/pipelines/versatile_diffusion/pipeline_versatile_diffusion_image_to_text.py diff --git a/src/diffusers/pipelines/versatile_diffusion/pipeline_versatile_diffusion.py b/src/diffusers/pipelines/versatile_diffusion/pipeline_versatile_diffusion.py new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/src/diffusers/pipelines/versatile_diffusion/pipeline_versatile_diffusion_image_to_text.py b/src/diffusers/pipelines/versatile_diffusion/pipeline_versatile_diffusion_image_to_text.py new file mode 100644 index 000000000000..801fe2d39be6 --- /dev/null +++ b/src/diffusers/pipelines/versatile_diffusion/pipeline_versatile_diffusion_image_to_text.py @@ -0,0 +1,412 @@ +from typing import Optional, Tuple, Union + +import numpy as np +import torch +import torch.nn as nn + +from ...configuration_utils import ConfigMixin, register_to_config +from ...modeling_utils import ModelMixin +from ...models.embeddings import TimestepEmbedding, Timesteps +from ...models.unet_2d_condition import UNet2DConditionOutput +from ...utils import logging + + +logger = 
logging.get_logger(__name__) # pylint: disable=invalid-name + + +class UNetMultiDimConditionModel(ModelMixin, ConfigMixin): + r""" + UNet2DConditionModel is a conditional 2D UNet model that takes in a noisy sample, conditional state, and a timestep + and returns sample shaped output. + + This model inherits from [`ModelMixin`]. Check the superclass documentation for the generic methods the library + implements for all the models (such as downloading or saving, etc.) + + Parameters: + sample_size (`int`, *optional*): The size of the input sample. + in_channels (`int`, *optional*, defaults to 4): The number of channels in the input sample. + out_channels (`int`, *optional*, defaults to 4): The number of channels in the output. + center_input_sample (`bool`, *optional*, defaults to `False`): Whether to center the input sample. + flip_sin_to_cos (`bool`, *optional*, defaults to `True`): + Whether to flip the sin to cos in the time embedding. + freq_shift (`int`, *optional*, defaults to 0): The frequency shift to apply to the time embedding. + down_block_types (`Tuple[str]`, *optional*, defaults to `("CrossAttnDownBlock2D", "CrossAttnDownBlock2D", "CrossAttnDownBlock2D", "DownBlock2D")`): + The tuple of downsample blocks to use. + up_block_types (`Tuple[str]`, *optional*, defaults to `("UpBlock2D", "CrossAttnUpBlock2D", "CrossAttnUpBlock2D", "CrossAttnUpBlock2D",)`): + The tuple of upsample blocks to use. + block_out_channels (`Tuple[int]`, *optional*, defaults to `(320, 640, 1280, 1280)`): + The tuple of output channels for each block. + layers_per_block (`int`, *optional*, defaults to 2): The number of layers per block. + downsample_padding (`int`, *optional*, defaults to 1): The padding to use for the downsampling convolution. + mid_block_scale_factor (`float`, *optional*, defaults to 1.0): The scale factor to use for the mid block. + act_fn (`str`, *optional*, defaults to `"silu"`): The activation function to use. + norm_num_groups (`int`, *optional*, defaults to 32): The number of groups to use for the normalization. + norm_eps (`float`, *optional*, defaults to 1e-5): The epsilon to use for the normalization. + cross_attention_dim (`int`, *optional*, defaults to 1280): The dimension of the cross attention features. + attention_head_dim (`int`, *optional*, defaults to 8): The dimension of the attention heads. 
+ """ + + _supports_gradient_checkpointing = True + + @register_to_config + def __init__( + self, + sample_size: Optional[int] = None, + in_channels: int = 4, + out_channels: int = 4, + center_input_sample: bool = False, + flip_sin_to_cos: bool = True, + freq_shift: int = 0, + down_block_types: Tuple[str] = ( + "CrossAttnDownBlockMultiDim", + "CrossAttnDownBlockMultiDim", + "CrossAttnDownBlockMultiDim", + "DownBlockMultiDim", + ), + up_block_types: Tuple[str] = ( + "UpBlockMultiDim", + "CrossAttnUpBlockMultiDim", + "CrossAttnUpBlockMultiDim", + "CrossAttnUpBlockMultiDim", + ), + block_out_channels: Tuple[int] = (320, 640, 1280, 1280), + block_second_dim: Tuple[int] = (4, 4, 4, 4), + layers_per_block: int = 2, + downsample_padding: int = 1, + mid_block_scale_factor: float = 1, + act_fn: str = "silu", + norm_num_groups: int = 32, + norm_eps: float = 1e-5, + cross_attention_dim: int = 1280, + attention_head_dim: int = 8, + ): + super().__init__() + + self.sample_size = sample_size + time_embed_dim = block_out_channels[0] * 4 + + # input + self.conv_in = LinearMultiDim([in_channels, 1, 1], block_out_channels[0], kernel_size=3, padding=(1, 1)) + + # time + self.time_proj = Timesteps(block_out_channels[0], flip_sin_to_cos, freq_shift) + timestep_input_dim = block_out_channels[0] + + self.time_embedding = TimestepEmbedding(timestep_input_dim, time_embed_dim) + + self.down_blocks = nn.ModuleList([]) + self.mid_block = None + self.up_blocks = nn.ModuleList([]) + + # down + output_channel = block_out_channels[0] + for i, down_block_type in enumerate(down_block_types): + input_channel = output_channel + output_channel = block_out_channels[i] + is_final_block = i == len(block_out_channels) - 1 + + down_block = self.get_down_block( + down_block_type, + num_layers=layers_per_block, + in_channels=input_channel, + out_channels=output_channel, + temb_channels=time_embed_dim, + add_downsample=not is_final_block, + resnet_eps=norm_eps, + resnet_act_fn=act_fn, + resnet_groups=norm_num_groups, + cross_attention_dim=cross_attention_dim, + attn_num_head_channels=attention_head_dim, + downsample_padding=downsample_padding, + ) + self.down_blocks.append(down_block) + + # mid + self.mid_block = UNetMidBlockMultiDimCrossAttn( + in_channels=block_out_channels[-1], + temb_channels=time_embed_dim, + resnet_eps=norm_eps, + resnet_act_fn=act_fn, + output_scale_factor=mid_block_scale_factor, + resnet_time_scale_shift="default", + cross_attention_dim=cross_attention_dim, + attn_num_head_channels=attention_head_dim, + resnet_groups=norm_num_groups, + ) + + # count how many layers upsample the images + self.num_upsamplers = 0 + + # up + reversed_block_out_channels = list(reversed(block_out_channels)) + output_channel = reversed_block_out_channels[0] + for i, up_block_type in enumerate(up_block_types): + is_final_block = i == len(block_out_channels) - 1 + + prev_output_channel = output_channel + output_channel = reversed_block_out_channels[i] + input_channel = reversed_block_out_channels[min(i + 1, len(block_out_channels) - 1)] + + # add upsample block for all BUT final layer + if not is_final_block: + add_upsample = True + self.num_upsamplers += 1 + else: + add_upsample = False + + up_block = self.get_up_block( + up_block_type, + num_layers=layers_per_block + 1, + in_channels=input_channel, + out_channels=output_channel, + prev_output_channel=prev_output_channel, + temb_channels=time_embed_dim, + add_upsample=add_upsample, + resnet_eps=norm_eps, + resnet_act_fn=act_fn, + resnet_groups=norm_num_groups, + 
cross_attention_dim=cross_attention_dim, + attn_num_head_channels=attention_head_dim, + ) + self.up_blocks.append(up_block) + prev_output_channel = output_channel + + # out + self.conv_norm_out = nn.GroupNorm(num_channels=block_out_channels[0], num_groups=norm_num_groups, eps=norm_eps) + self.conv_act = nn.SiLU() + self.conv_out = nn.Conv2d(block_out_channels[0], out_channels, 3, padding=1) + + def get_down_block( + down_block_type, + num_layers, + in_channels, + out_channels, + temb_channels, + add_downsample, + resnet_eps, + resnet_act_fn, + attn_num_head_channels, + resnet_groups=None, + cross_attention_dim=None, + downsample_padding=None, + ): + down_block_type = down_block_type[7:] if down_block_type.startswith("UNetRes") else down_block_type + if down_block_type == "DownBlockMultiDim": + return DownBlockMultiDim( + num_layers=num_layers, + in_channels=in_channels, + out_channels=out_channels, + temb_channels=temb_channels, + add_downsample=add_downsample, + resnet_eps=resnet_eps, + resnet_act_fn=resnet_act_fn, + resnet_groups=resnet_groups, + downsample_padding=downsample_padding, + ) + elif down_block_type == "CrossAttnDownBlockMultiDim": + if cross_attention_dim is None: + raise ValueError("cross_attention_dim must be specified for CrossAttnDownBlock2D") + return CrossAttnDownBlockMultiDim( + num_layers=num_layers, + in_channels=in_channels, + out_channels=out_channels, + temb_channels=temb_channels, + add_downsample=add_downsample, + resnet_eps=resnet_eps, + resnet_act_fn=resnet_act_fn, + resnet_groups=resnet_groups, + downsample_padding=downsample_padding, + cross_attention_dim=cross_attention_dim, + attn_num_head_channels=attn_num_head_channels, + ) + + def set_attention_slice(self, slice_size): + if slice_size is not None and self.config.attention_head_dim % slice_size != 0: + raise ValueError( + f"Make sure slice_size {slice_size} is a divisor of " + f"the number of heads used in cross_attention {self.config.attention_head_dim}" + ) + if slice_size is not None and slice_size > self.config.attention_head_dim: + raise ValueError( + f"Chunk_size {slice_size} has to be smaller or equal to " + f"the number of heads used in cross_attention {self.config.attention_head_dim}" + ) + + for block in self.down_blocks: + if hasattr(block, "attentions") and block.attentions is not None: + block.set_attention_slice(slice_size) + + self.mid_block.set_attention_slice(slice_size) + + for block in self.up_blocks: + if hasattr(block, "attentions") and block.attentions is not None: + block.set_attention_slice(slice_size) + + def set_use_memory_efficient_attention_xformers(self, use_memory_efficient_attention_xformers: bool): + for block in self.down_blocks: + if hasattr(block, "attentions") and block.attentions is not None: + block.set_use_memory_efficient_attention_xformers(use_memory_efficient_attention_xformers) + + self.mid_block.set_use_memory_efficient_attention_xformers(use_memory_efficient_attention_xformers) + + for block in self.up_blocks: + if hasattr(block, "attentions") and block.attentions is not None: + block.set_use_memory_efficient_attention_xformers(use_memory_efficient_attention_xformers) + + def _set_gradient_checkpointing(self, module, value=False): + if isinstance( + module, (CrossAttnDownBlockMultiDim, DownBlockMultiDim, CrossAttnUpBlockMultiDim, UpBlockMultiDim) + ): + module.gradient_checkpointing = value + + def forward( + self, + sample: torch.FloatTensor, + timestep: Union[torch.Tensor, float, int], + encoder_hidden_states: torch.Tensor, + return_dict: bool = True, + ) 
-> Union[UNet2DConditionOutput, Tuple]: + r""" + Args: + sample (`torch.FloatTensor`): (batch, channel, height, width) noisy inputs tensor + timestep (`torch.FloatTensor` or `float` or `int`): (batch) timesteps + encoder_hidden_states (`torch.FloatTensor`): (batch, channel, height, width) encoder hidden states + return_dict (`bool`, *optional*, defaults to `True`): + Whether or not to return a [`models.unet_2d_condition.UNet2DConditionOutput`] instead of a plain tuple. + + Returns: + [`~models.unet_2d_condition.UNet2DConditionOutput`] or `tuple`: + [`~models.unet_2d_condition.UNet2DConditionOutput`] if `return_dict` is True, otherwise a `tuple`. When + returning a tuple, the first element is the sample tensor. + """ + # By default samples have to be AT least a multiple of the overall upsampling factor. + # The overall upsampling factor is equal to 2 ** (# num of upsampling layears). + # However, the upsampling interpolation output size can be forced to fit any upsampling size + # on the fly if necessary. + default_overall_up_factor = 2**self.num_upsamplers + + # upsample size should be forwarded when sample is not a multiple of `default_overall_up_factor` + forward_upsample_size = False + upsample_size = None + + if any(s % default_overall_up_factor != 0 for s in sample.shape[-2:]): + logger.info("Forward upsample size to force interpolation output size.") + forward_upsample_size = True + + # 0. center input if necessary + if self.config.center_input_sample: + sample = 2 * sample - 1.0 + + # 1. time + timesteps = timestep + if not torch.is_tensor(timesteps): + # TODO: this requires sync between CPU and GPU. So try to pass timesteps as tensors if you can + timesteps = torch.tensor([timesteps], dtype=torch.long, device=sample.device) + elif torch.is_tensor(timesteps) and len(timesteps.shape) == 0: + timesteps = timesteps[None].to(sample.device) + + # broadcast to batch dimension in a way that's compatible with ONNX/Core ML + timesteps = timesteps.expand(sample.shape[0]) + + t_emb = self.time_proj(timesteps) + + # timesteps does not contain any weights and will always return f32 tensors + # but time_embedding might actually be running in fp16. so we need to cast here. + # there might be better ways to encapsulate this. + t_emb = t_emb.to(dtype=self.dtype) + emb = self.time_embedding(t_emb) + + # 2. pre-process + sample = self.conv_in(sample) + + # 3. down + down_block_res_samples = (sample,) + for downsample_block in self.down_blocks: + if hasattr(downsample_block, "attentions") and downsample_block.attentions is not None: + sample, res_samples = downsample_block( + hidden_states=sample, + temb=emb, + encoder_hidden_states=encoder_hidden_states, + ) + else: + sample, res_samples = downsample_block(hidden_states=sample, temb=emb) + + down_block_res_samples += res_samples + + # 4. mid + sample = self.mid_block(sample, emb, encoder_hidden_states=encoder_hidden_states) + + # 5. 
up + for i, upsample_block in enumerate(self.up_blocks): + is_final_block = i == len(self.up_blocks) - 1 + + res_samples = down_block_res_samples[-len(upsample_block.resnets) :] + down_block_res_samples = down_block_res_samples[: -len(upsample_block.resnets)] + + # if we have not reached the final block and need to forward the + # upsample size, we do it here + if not is_final_block and forward_upsample_size: + upsample_size = down_block_res_samples[-1].shape[2:] + + if hasattr(upsample_block, "attentions") and upsample_block.attentions is not None: + sample = upsample_block( + hidden_states=sample, + temb=emb, + res_hidden_states_tuple=res_samples, + encoder_hidden_states=encoder_hidden_states, + upsample_size=upsample_size, + ) + else: + sample = upsample_block( + hidden_states=sample, temb=emb, res_hidden_states_tuple=res_samples, upsample_size=upsample_size + ) + # 6. post-process + sample = self.conv_norm_out(sample) + sample = self.conv_act(sample) + sample = self.conv_out(sample) + + if not return_dict: + return (sample,) + + return UNet2DConditionOutput(sample=sample) + + +class LinearMultiDim(nn.Linear): + def __init__(self, in_features, out_features, *args, **kwargs): + in_features = [in_features] if isinstance(in_features, int) else list(in_features) + out_features = [out_features] if isinstance(out_features, int) else list(out_features) + self.in_features_multidim = in_features + self.out_features_multidim = out_features + super().__init__(np.array(in_features).prod(), np.array(out_features).prod(), *args, **kwargs) + + def forward(self, x): + shape = x.shape + n = len(self.in_features_multidim) + x = x.view(*shape[0:-n], self.in_features) + y = super().forward(x) + y = y.view(*shape[0:-n], *self.out_features_multidim) + return y + + +class DownBlockMultiDim(nn.Module): + pass + + +class UNetMidBlockMultiDimCrossAttn(nn.Module): + pass + + +class DownBlockMultiDim(nn.Module): + pass + + +class CrossAttnDownBlockMultiDim(nn.Module): + pass + + +class UpBlockMultiDim(nn.Module): + pass + + +class CrossAttnUpBlockMultiDim(nn.Module): + pass diff --git a/src/diffusers/pipelines/versatile_diffusion/pipeline_versatile_diffusion_image_variation.py b/src/diffusers/pipelines/versatile_diffusion/pipeline_versatile_diffusion_image_variation.py index 6c2c8fb77c54..24c2df835d9a 100644 --- a/src/diffusers/pipelines/versatile_diffusion/pipeline_versatile_diffusion_image_variation.py +++ b/src/diffusers/pipelines/versatile_diffusion/pipeline_versatile_diffusion_image_variation.py @@ -17,7 +17,6 @@ import numpy as np import torch -import torch.nn as nn import torch.utils.checkpoint import PIL diff --git a/src/diffusers/pipelines/versatile_diffusion/pipeline_versatile_diffusion_text_to_image.py b/src/diffusers/pipelines/versatile_diffusion/pipeline_versatile_diffusion_text_to_image.py index 3f833a6ba4d4..ed2a67bbe33c 100644 --- a/src/diffusers/pipelines/versatile_diffusion/pipeline_versatile_diffusion_text_to_image.py +++ b/src/diffusers/pipelines/versatile_diffusion/pipeline_versatile_diffusion_text_to_image.py @@ -13,19 +13,22 @@ # limitations under the License. 
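Note on the LinearMultiDim layer defined in the UNet module above: it treats the trailing feature dimensions of its input as one flattened axis, runs an ordinary nn.Linear over that axis, and then restores the requested multi-dimensional output shape. Below is a minimal, self-contained sketch of that behaviour (the class name, the 768 -> (320, 1, 1) shapes, and the standalone structure are illustrative assumptions, not the library's code):

import numpy as np
import torch
from torch import nn


class LinearMultiDimSketch(nn.Linear):
    # mirrors the LinearMultiDim idea: flatten the trailing dims, apply a
    # plain linear projection, then reshape to the multi-dim output shape
    def __init__(self, in_features, out_features, *args, **kwargs):
        in_features = [in_features] if isinstance(in_features, int) else list(in_features)
        out_features = [out_features] if isinstance(out_features, int) else list(out_features)
        self.in_features_multidim = in_features
        self.out_features_multidim = out_features
        super().__init__(int(np.prod(in_features)), int(np.prod(out_features)), *args, **kwargs)

    def forward(self, x):
        shape = x.shape
        n = len(self.in_features_multidim)
        x = x.reshape(*shape[:-n], self.in_features)      # flatten trailing dims
        y = super().forward(x)                            # ordinary linear projection
        return y.reshape(*shape[:-n], *self.out_features_multidim)


# illustrative usage: map a (batch, 768) token-like feature to a (batch, 320, 1, 1) map
layer = LinearMultiDimSketch([768], [320, 1, 1])
out = layer(torch.randn(2, 768))
print(out.shape)  # torch.Size([2, 320, 1, 1])

This kind of reshape-through-a-linear appears to be what lets vector-shaped (text) features flow through blocks that otherwise expect image-like feature maps, though the exact wiring is defined by the MultiDim blocks above.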
import inspect -from typing import List, Optional, Tuple, Union +import logging +from typing import Callable, List, Optional, Union -import numpy as np import torch import torch.utils.checkpoint -import PIL from transformers import CLIPProcessor, CLIPTextModel, CLIPTokenizer, CLIPVisionModel from ...models import AutoencoderKL, UNet2DConditionModel, VQModel from ...models.attention import Transformer2DModel from ...pipeline_utils import DiffusionPipeline, ImagePipelineOutput from ...schedulers import DDIMScheduler, LMSDiscreteScheduler, PNDMScheduler +from ...utils import is_accelerate_available, logging + + +logger = logging.get_logger(__name__) # pylint: disable=invalid-name class VersatileDiffusionTextToImagePipeline(DiffusionPipeline): @@ -75,6 +78,8 @@ def __init__( vae=vae, scheduler=scheduler, ) + + def swap_unet_attention_blocks(self): for name, module in self.image_unet.named_modules(): if isinstance(module, Transformer2DModel): parent_name, index = name.rsplit(".", 1) @@ -84,15 +89,107 @@ def __init__( self.image_unet.get_submodule(parent_name)[index], ) - def _encode_prompt(self, prompt, do_classifier_free_guidance): + # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline.enable_xformers_memory_efficient_attention with unet->image_unet + def enable_xformers_memory_efficient_attention(self): + r""" + Enable memory efficient attention as implemented in xformers. + + When this option is enabled, you should observe lower GPU memory usage and a potential speed up at inference + time. Speed up at training time is not guaranteed. + + Warning: When Memory Efficient Attention and Sliced attention are both enabled, the Memory Efficient Attention + is used. + """ + self.image_unet.set_use_memory_efficient_attention_xformers(True) + + # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline.disable_xformers_memory_efficient_attention with unet->image_unet + def disable_xformers_memory_efficient_attention(self): + r""" + Disable memory efficient attention as implemented in xformers. + """ + self.image_unet.set_use_memory_efficient_attention_xformers(False) + + # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline.enable_attention_slicing with unet->image_unet + def enable_attention_slicing(self, slice_size: Optional[Union[str, int]] = "auto"): + r""" + Enable sliced attention computation. + + When this option is enabled, the attention module will split the input tensor in slices, to compute attention + in several steps. This is useful to save some memory in exchange for a small speed decrease. + + Args: + slice_size (`str` or `int`, *optional*, defaults to `"auto"`): + When `"auto"`, halves the input to the attention heads, so attention will be computed in two steps. If + a number is provided, uses as many slices as `attention_head_dim // slice_size`. In this case, + `attention_head_dim` must be a multiple of `slice_size`. + """ + if slice_size == "auto": + # half the attention head size is usually a good trade-off between + # speed and memory + slice_size = self.image_unet.config.attention_head_dim // 2 + self.image_unet.set_attention_slice(slice_size) + + # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline.disable_attention_slicing + def disable_attention_slicing(self): + r""" + Disable sliced attention computation. 
If `enable_attention_slicing` was previously invoked, this method will go + back to computing attention in one step. + """ + # set slice_size = `None` to disable `attention slicing` + self.enable_attention_slicing(None) + + def enable_sequential_cpu_offload(self, gpu_id=0): + r""" + Offloads all models to CPU using accelerate, significantly reducing memory usage. When called, unet, + text_encoder, vae and safety checker have their state dicts saved to CPU and then are moved to a + `torch.device('meta') and loaded to GPU only when their specific submodule has its `forward` method called. + """ + if is_accelerate_available(): + from accelerate import cpu_offload + else: + raise ImportError("Please install accelerate via `pip install accelerate`") + + device = torch.device(f"cuda:{gpu_id}") + + for cpu_offloaded_model in [self.image_unet, self.text_unet, self.text_encoder, self.vae]: + if cpu_offloaded_model is not None: + cpu_offload(cpu_offloaded_model, device) + + @property + # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline._execution_device with unet->image_unet + def _execution_device(self): + r""" + Returns the device on which the pipeline's models will be executed. After calling + `pipeline.enable_sequential_cpu_offload()` the execution device can only be inferred from Accelerate's module + hooks. + """ + if self.device != torch.device("meta") or not hasattr(self.image_unet, "_hf_hook"): + return self.device + for module in self.image_unet.modules(): + if ( + hasattr(module, "_hf_hook") + and hasattr(module._hf_hook, "execution_device") + and module._hf_hook.execution_device is not None + ): + return torch.device(module._hf_hook.execution_device) + return self.device + + def _encode_prompt(self, prompt, device, num_images_per_prompt, do_classifier_free_guidance, negative_prompt): r""" Encodes the prompt into text encoder hidden states. Args: prompt (`str` or `list(int)`): prompt to be encoded + device: (`torch.device`): + torch device + num_images_per_prompt (`int`): + number of images that should be generated per prompt do_classifier_free_guidance (`bool`): whether to use classifier free guidance or not + negative_prompt (`str` or `List[str]`): + The prompt or prompts not to guide the image generation. Ignored when not using guidance (i.e., ignored + if `guidance_scale` is less than `1`). 
""" def normalize_embeddings(encoder_output): @@ -103,155 +200,294 @@ def normalize_embeddings(encoder_output): batch_size = len(prompt) if isinstance(prompt, list) else 1 + text_inputs = self.tokenizer( + prompt, + padding="max_length", + max_length=self.tokenizer.model_max_length, + truncation=True, + return_tensors="pt", + ) + text_input_ids = text_inputs.input_ids + untruncated_ids = self.tokenizer(prompt, padding="max_length", return_tensors="pt").input_ids + + if not torch.equal(text_input_ids, untruncated_ids): + removed_text = self.tokenizer.batch_decode(untruncated_ids[:, self.tokenizer.model_max_length - 1 : -1]) + logger.warning( + "The following part of your input was truncated because CLIP can only handle sequences up to" + f" {self.tokenizer.model_max_length} tokens: {removed_text}" + ) + + if hasattr(self.text_encoder.config, "use_attention_mask") and self.text_encoder.config.use_attention_mask: + attention_mask = text_inputs.attention_mask.to(device) + else: + attention_mask = None + + text_embeddings = self.text_encoder( + text_input_ids.to(device), + attention_mask=attention_mask, + ) + text_embeddings = normalize_embeddings(text_embeddings) + + # duplicate text embeddings for each generation per prompt, using mps friendly method + bs_embed, seq_len, _ = text_embeddings.shape + text_embeddings = text_embeddings.repeat(1, num_images_per_prompt, 1) + text_embeddings = text_embeddings.view(bs_embed * num_images_per_prompt, seq_len, -1) + + # get unconditional embeddings for classifier free guidance if do_classifier_free_guidance: - uncond_input = self.tokenizer([""] * batch_size, padding="max_length", max_length=77, return_tensors="pt") - uncond_embeddings = self.text_encoder(uncond_input.input_ids.to(self.device)) + uncond_tokens: List[str] + if negative_prompt is None: + uncond_tokens = [""] * batch_size + elif type(prompt) is not type(negative_prompt): + raise TypeError( + f"`negative_prompt` should be the same type to `prompt`, but got {type(negative_prompt)} !=" + f" {type(prompt)}." + ) + elif isinstance(negative_prompt, str): + uncond_tokens = [negative_prompt] + elif batch_size != len(negative_prompt): + raise ValueError( + f"`negative_prompt`: {negative_prompt} has batch size {len(negative_prompt)}, but `prompt`:" + f" {prompt} has batch size {batch_size}. Please make sure that passed `negative_prompt` matches" + " the batch size of `prompt`." 
+ ) + else: + uncond_tokens = negative_prompt + + max_length = text_input_ids.shape[-1] + uncond_input = self.tokenizer( + uncond_tokens, + padding="max_length", + max_length=max_length, + truncation=True, + return_tensors="pt", + ) + + if hasattr(self.text_encoder.config, "use_attention_mask") and self.text_encoder.config.use_attention_mask: + attention_mask = uncond_input.attention_mask.to(device) + else: + attention_mask = None + + uncond_embeddings = self.text_encoder( + uncond_input.input_ids.to(device), + attention_mask=attention_mask, + ) uncond_embeddings = normalize_embeddings(uncond_embeddings) - # get prompt text embeddings - text_input = self.tokenizer(prompt, padding="max_length", max_length=77, return_tensors="pt") - text_embeddings = self.text_encoder(text_input.input_ids.to(self.device)) - text_embeddings = normalize_embeddings(text_embeddings) + # duplicate unconditional embeddings for each generation per prompt, using mps friendly method + seq_len = uncond_embeddings.shape[1] + uncond_embeddings = uncond_embeddings.repeat(1, num_images_per_prompt, 1) + uncond_embeddings = uncond_embeddings.view(batch_size * num_images_per_prompt, seq_len, -1) - # For classifier free guidance, we need to do two forward passes. - # Here we concatenate the unconditional and text embeddings into a single batch - # to avoid doing two forward passes - text_embeddings = torch.cat([uncond_embeddings, text_embeddings]) + # For classifier free guidance, we need to do two forward passes. + # Here we concatenate the unconditional and text embeddings into a single batch + # to avoid doing two forward passes + text_embeddings = torch.cat([uncond_embeddings, text_embeddings]) return text_embeddings - def _encode_image_prompt(self, prompt, do_classifier_free_guidance): - r""" - Encodes the image prompt into image encoder hidden states. + # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline.decode_latents + def decode_latents(self, latents): + latents = 1 / 0.18215 * latents + image = self.vae.decode(latents).sample + image = (image / 2 + 0.5).clamp(0, 1) + # we always cast to float32 as this does not cause significant overhead and is compatible with bfloa16 + image = image.cpu().permute(0, 2, 3, 1).float().numpy() + return image - Args: - prompt (`str` or `list(int)`): - prompt to be encoded - do_classifier_free_guidance (`bool`): - whether to use classifier free guidance or not - """ + # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline.prepare_extra_step_kwargs + def prepare_extra_step_kwargs(self, generator, eta): + # prepare extra kwargs for the scheduler step, since not all schedulers have the same signature + # eta (η) is only used with the DDIMScheduler, it will be ignored for other schedulers. 
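        # intuitively, eta = 0.0 keeps DDIM sampling deterministic, while values closer to 1.0
        # re-inject DDPM-like noise at each step; schedulers whose `step` has no `eta` argument skip it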
+ # eta corresponds to η in DDIM paper: https://arxiv.org/abs/2010.02502 + # and should be between [0, 1] - def normalize_embeddings(encoder_output): - embeds = self.image_encoder.vision_model.post_layernorm(encoder_output.last_hidden_state) - embeds = self.image_encoder.visual_projection(embeds) - embeds_pooled = embeds[:, 0:1] - embeds = embeds / torch.norm(embeds_pooled, dim=-1, keepdim=True) - return embeds + accepts_eta = "eta" in set(inspect.signature(self.scheduler.step).parameters.keys()) + extra_step_kwargs = {} + if accepts_eta: + extra_step_kwargs["eta"] = eta - batch_size = len(prompt) if isinstance(prompt, list) else 1 + # check if the scheduler accepts generator + accepts_generator = "generator" in set(inspect.signature(self.scheduler.step).parameters.keys()) + if accepts_generator: + extra_step_kwargs["generator"] = generator + return extra_step_kwargs - if do_classifier_free_guidance: - dummy_images = [np.zeros((512, 512, 3))] * batch_size - dummy_images = self.image_processor(images=dummy_images, return_tensors="pt") - uncond_embeddings = self.image_encoder(dummy_images.pixel_values.to(self.device)) - uncond_embeddings = normalize_embeddings(uncond_embeddings) + # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline.check_inputs + def check_inputs(self, prompt, height, width, callback_steps): + if not isinstance(prompt, str) and not isinstance(prompt, list): + raise ValueError(f"`prompt` has to be of type `str` or `list` but is {type(prompt)}") - # get prompt text embeddings - image_input = self.image_processor(images=prompt, return_tensors="pt") - image_embeddings = self.image_encoder(image_input.pixel_values.to(self.device)) - image_embeddings = normalize_embeddings(image_embeddings) + if height % 8 != 0 or width % 8 != 0: + raise ValueError(f"`height` and `width` have to be divisible by 8 but are {height} and {width}.") - # For classifier free guidance, we need to do two forward passes. - # Here we concatenate the unconditional and image embeddings into a single batch - # to avoid doing two forward passes - image_embeddings = torch.cat([uncond_embeddings, image_embeddings]) + if (callback_steps is None) or ( + callback_steps is not None and (not isinstance(callback_steps, int) or callback_steps <= 0) + ): + raise ValueError( + f"`callback_steps` has to be a positive integer but is {callback_steps} of type" + f" {type(callback_steps)}." 
+ ) + + # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline.prepare_latents + def prepare_latents(self, batch_size, num_channels_latents, height, width, dtype, device, generator, latents=None): + shape = (batch_size, num_channels_latents, height // 8, width // 8) + if latents is None: + if device.type == "mps": + # randn does not work reproducibly on mps + latents = torch.randn(shape, generator=generator, device="cpu", dtype=dtype).to(device) + else: + latents = torch.randn(shape, generator=generator, device=device, dtype=dtype) + else: + if latents.shape != shape: + raise ValueError(f"Unexpected latents shape, got {latents.shape}, expected {shape}") + latents = latents.to(device) - return image_embeddings + # scale the initial noise by the standard deviation required by the scheduler + latents = latents * self.scheduler.init_noise_sigma + return latents @torch.no_grad() def __call__( self, - prompt: Optional[Union[str, List[str]]] = None, - height: Optional[int] = 512, - width: Optional[int] = 512, - num_inference_steps: Optional[int] = 50, - guidance_scale: Optional[float] = 1.0, - eta: Optional[float] = 0.0, + prompt: Union[str, List[str]], + height: int = 512, + width: int = 512, + num_inference_steps: int = 50, + guidance_scale: float = 7.5, + negative_prompt: Optional[Union[str, List[str]]] = None, + num_images_per_prompt: Optional[int] = 1, + eta: float = 0.0, generator: Optional[torch.Generator] = None, + latents: Optional[torch.FloatTensor] = None, output_type: Optional[str] = "pil", return_dict: bool = True, + callback: Optional[Callable[[int, int, torch.FloatTensor], None]] = None, + callback_steps: Optional[int] = 1, **kwargs, - ) -> Union[Tuple, ImagePipelineOutput]: + ): r""" + Function invoked when calling the pipeline for generation. + Args: prompt (`str` or `List[str]`): The prompt or prompts to guide the image generation. - height (`int`, *optional*, defaults to 256): + height (`int`, *optional*, defaults to 512): The height in pixels of the generated image. - width (`int`, *optional*, defaults to 256): + width (`int`, *optional*, defaults to 512): The width in pixels of the generated image. num_inference_steps (`int`, *optional*, defaults to 50): The number of denoising steps. More denoising steps usually lead to a higher quality image at the expense of slower inference. - guidance_scale (`float`, *optional*, defaults to 1.0): + guidance_scale (`float`, *optional*, defaults to 7.5): Guidance scale as defined in [Classifier-Free Diffusion Guidance](https://arxiv.org/abs/2207.12598). `guidance_scale` is defined as `w` of equation 2. of [Imagen Paper](https://arxiv.org/pdf/2205.11487.pdf). Guidance scale is enabled by setting `guidance_scale > - 1`. Higher guidance scale encourages to generate images that are closely linked to the text `prompt` at - the, usually at the expense of lower image quality. + 1`. Higher guidance scale encourages to generate images that are closely linked to the text `prompt`, + usually at the expense of lower image quality. + negative_prompt (`str` or `List[str]`, *optional*): + The prompt or prompts not to guide the image generation. Ignored when not using guidance (i.e., ignored + if `guidance_scale` is less than `1`). + num_images_per_prompt (`int`, *optional*, defaults to 1): + The number of images to generate per prompt. + eta (`float`, *optional*, defaults to 0.0): + Corresponds to parameter eta (η) in the DDIM paper: https://arxiv.org/abs/2010.02502. 
Only applies to + [`schedulers.DDIMScheduler`], will be ignored for others. generator (`torch.Generator`, *optional*): A [torch generator](https://pytorch.org/docs/stable/generated/torch.Generator.html) to make generation deterministic. + latents (`torch.FloatTensor`, *optional*): + Pre-generated noisy latents, sampled from a Gaussian distribution, to be used as inputs for image + generation. Can be used to tweak the same generation with different prompts. If not provided, a latents + tensor will ge generated by sampling using the supplied random `generator`. output_type (`str`, *optional*, defaults to `"pil"`): The output format of the generate image. Choose between [PIL](https://pillow.readthedocs.io/en/stable/): `PIL.Image.Image` or `np.array`. - return_dict (`bool`, *optional*): - Whether or not to return a [`~pipeline_utils.ImagePipelineOutput`] instead of a plain tuple. + return_dict (`bool`, *optional*, defaults to `True`): + Whether or not to return a [`~pipelines.stable_diffusion.StableDiffusionPipelineOutput`] instead of a + plain tuple. + callback (`Callable`, *optional*): + A function that will be called every `callback_steps` steps during inference. The function will be + called with the following arguments: `callback(step: int, timestep: int, latents: torch.FloatTensor)`. + callback_steps (`int`, *optional*, defaults to 1): + The frequency at which the `callback` function will be called. If not specified, the callback will be + called at every step. Returns: - [`~pipeline_utils.ImagePipelineOutput`] or `tuple`: [`~pipelines.utils.ImagePipelineOutput`] if - `return_dict` is True, otherwise a `tuple. When returning a tuple, the first element is a list with the - generated images. + [`~pipelines.stable_diffusion.StableDiffusionPipelineOutput`] or `tuple`: + [`~pipelines.stable_diffusion.StableDiffusionPipelineOutput`] if `return_dict` is True, otherwise a `tuple. + When returning a tuple, the first element is a list with the generated images, and the second element is a + list of `bool`s denoting whether the corresponding generated image likely represents "not-safe-for-work" + (nsfw) content, according to the `safety_checker`. """ - do_classifier_free_guidance = guidance_scale > 1.0 - - if height % 8 != 0 or width % 8 != 0: - raise ValueError(f"`height` and `width` have to be divisible by 8 but are {height} and {width}.") - if isinstance(prompt, str): - batch_size = 1 - elif isinstance(prompt, list): - batch_size = len(prompt) - else: - raise ValueError(f"`prompt` has to be of type `str` or `list` but is {type(prompt)}") + # 1. Check inputs. Raise error if not correct + self.check_inputs(prompt, height, width, callback_steps) - condition_embeddings = self._encode_prompt(prompt, do_classifier_free_guidance) + # 2. Define call parameters + batch_size = 1 if isinstance(prompt, str) else len(prompt) + device = self._execution_device + # here `guidance_scale` is defined analog to the guidance weight `w` of equation (2) + # of the Imagen paper: https://arxiv.org/pdf/2205.11487.pdf . `guidance_scale = 1` + # corresponds to doing no classifier free guidance. + do_classifier_free_guidance = guidance_scale > 1.0 - latents = torch.randn( - (batch_size, self.image_unet.in_channels, height // 8, width // 8), generator=generator, device=self.device + # 3. Encode input prompt + text_embeddings = self._encode_prompt( + prompt, device, num_images_per_prompt, do_classifier_free_guidance, negative_prompt ) - self.scheduler.set_timesteps(num_inference_steps) + # 4. 
Prepare timesteps + self.scheduler.set_timesteps(num_inference_steps, device=device) + timesteps = self.scheduler.timesteps + + # 5. Prepare latent variables + num_channels_latents = self.image_unet.in_channels + latents = self.prepare_latents( + batch_size * num_images_per_prompt, + num_channels_latents, + height, + width, + text_embeddings.dtype, + device, + generator, + latents, + ) - # prepare extra kwargs for the scheduler step, since not all schedulers have the same signature - accepts_eta = "eta" in set(inspect.signature(self.scheduler.step).parameters.keys()) + # 6. Prepare extra step kwargs. + extra_step_kwargs = self.prepare_extra_step_kwargs(generator, eta) - extra_kwargs = {} - if accepts_eta: - extra_kwargs["eta"] = eta + # 7. Swap the attention blocks between the image and text UNets + self.swap_unet_attention_blocks() - for t in self.progress_bar(self.scheduler.timesteps): - if not do_classifier_free_guidance: - latents_input = latents - else: - latents_input = torch.cat([latents] * 2) + # 8. Denoising loop + for i, t in enumerate(self.progress_bar(timesteps)): + # expand the latents if we are doing classifier free guidance + latent_model_input = torch.cat([latents] * 2) if do_classifier_free_guidance else latents + latent_model_input = self.scheduler.scale_model_input(latent_model_input, t) # predict the noise residual - noise_pred = self.image_unet(latents_input, t, encoder_hidden_states=condition_embeddings).sample + noise_pred = self.image_unet(latent_model_input, t, encoder_hidden_states=text_embeddings).sample + # perform guidance - if guidance_scale != 1.0: - noise_pred_uncond, noise_prediction_cond = noise_pred.chunk(2) - noise_pred = noise_pred_uncond + guidance_scale * (noise_prediction_cond - noise_pred_uncond) + if do_classifier_free_guidance: + noise_pred_uncond, noise_pred_text = noise_pred.chunk(2) + noise_pred = noise_pred_uncond + guidance_scale * (noise_pred_text - noise_pred_uncond) # compute the previous noisy sample x_t -> x_t-1 - latents = self.scheduler.step(noise_pred, t, latents, **extra_kwargs).prev_sample + latents = self.scheduler.step(noise_pred, t, latents, **extra_step_kwargs).prev_sample - # scale and decode the image latents with vae - latents = 1 / 0.18215 * latents - image = self.vae.decode(latents).sample + # call the callback, if provided + if callback is not None and i % callback_steps == 0: + callback(i, t, latents) - image = (image / 2 + 0.5).clamp(0, 1) - image = image.cpu().permute(0, 2, 3, 1).numpy() + # 9. Swap the attention blocks backs in case the UNets are reused in another pipeline + self.swap_unet_attention_blocks() + + # 10. Post-processing + image = self.decode_latents(latents) + + # 11. Convert to PIL if output_type == "pil": image = self.numpy_to_pil(image) @@ -259,378 +495,3 @@ def __call__( return (image,) return ImagePipelineOutput(images=image) - - -# class UNetMultiDimConditionModel(ModelMixin, ConfigMixin): -# r""" -# UNet2DConditionModel is a conditional 2D UNet model that takes in a noisy sample, conditional state, and a timestep -# and returns sample shaped output. -# -# This model inherits from [`ModelMixin`]. Check the superclass documentation for the generic methods the library -# implements for all the models (such as downloading or saving, etc.) -# -# Parameters: -# sample_size (`int`, *optional*): The size of the input sample. -# in_channels (`int`, *optional*, defaults to 4): The number of channels in the input sample. -# out_channels (`int`, *optional*, defaults to 4): The number of channels in the output. 
-# center_input_sample (`bool`, *optional*, defaults to `False`): Whether to center the input sample. -# flip_sin_to_cos (`bool`, *optional*, defaults to `True`): -# Whether to flip the sin to cos in the time embedding. -# freq_shift (`int`, *optional*, defaults to 0): The frequency shift to apply to the time embedding. -# down_block_types (`Tuple[str]`, *optional*, defaults to `("CrossAttnDownBlock2D", "CrossAttnDownBlock2D", "CrossAttnDownBlock2D", "DownBlock2D")`): -# The tuple of downsample blocks to use. -# up_block_types (`Tuple[str]`, *optional*, defaults to `("UpBlock2D", "CrossAttnUpBlock2D", "CrossAttnUpBlock2D", "CrossAttnUpBlock2D",)`): -# The tuple of upsample blocks to use. -# block_out_channels (`Tuple[int]`, *optional*, defaults to `(320, 640, 1280, 1280)`): -# The tuple of output channels for each block. -# layers_per_block (`int`, *optional*, defaults to 2): The number of layers per block. -# downsample_padding (`int`, *optional*, defaults to 1): The padding to use for the downsampling convolution. -# mid_block_scale_factor (`float`, *optional*, defaults to 1.0): The scale factor to use for the mid block. -# act_fn (`str`, *optional*, defaults to `"silu"`): The activation function to use. -# norm_num_groups (`int`, *optional*, defaults to 32): The number of groups to use for the normalization. -# norm_eps (`float`, *optional*, defaults to 1e-5): The epsilon to use for the normalization. -# cross_attention_dim (`int`, *optional*, defaults to 1280): The dimension of the cross attention features. -# attention_head_dim (`int`, *optional*, defaults to 8): The dimension of the attention heads. -# """ -# -# _supports_gradient_checkpointing = True -# -# @register_to_config -# def __init__( -# self, -# sample_size: Optional[int] = None, -# in_channels: int = 4, -# out_channels: int = 4, -# center_input_sample: bool = False, -# flip_sin_to_cos: bool = True, -# freq_shift: int = 0, -# down_block_types: Tuple[str] = ( -# "CrossAttnDownBlockMultiDim", -# "CrossAttnDownBlockMultiDim", -# "CrossAttnDownBlockMultiDim", -# "DownBlockMultiDim", -# ), -# up_block_types: Tuple[str] = ( -# "UpBlockMultiDim", -# "CrossAttnUpBlockMultiDim", -# "CrossAttnUpBlockMultiDim", -# "CrossAttnUpBlockMultiDim" -# ), -# block_out_channels: Tuple[int] = (320, 640, 1280, 1280), -# block_second_dim: Tuple[int] = (4, 4, 4, 4), -# layers_per_block: int = 2, -# downsample_padding: int = 1, -# mid_block_scale_factor: float = 1, -# act_fn: str = "silu", -# norm_num_groups: int = 32, -# norm_eps: float = 1e-5, -# cross_attention_dim: int = 1280, -# attention_head_dim: int = 8, -# ): -# super().__init__() -# -# self.sample_size = sample_size -# time_embed_dim = block_out_channels[0] * 4 -# -# # input -# self.conv_in = LinearMultiDim([in_channels, 1, 1], block_out_channels[0], kernel_size=3, padding=(1, 1)) -# -# # time -# self.time_proj = Timesteps(block_out_channels[0], flip_sin_to_cos, freq_shift) -# timestep_input_dim = block_out_channels[0] -# -# self.time_embedding = TimestepEmbedding(timestep_input_dim, time_embed_dim) -# -# self.down_blocks = nn.ModuleList([]) -# self.mid_block = None -# self.up_blocks = nn.ModuleList([]) -# -# # down -# output_channel = block_out_channels[0] -# for i, down_block_type in enumerate(down_block_types): -# input_channel = output_channel -# output_channel = block_out_channels[i] -# is_final_block = i == len(block_out_channels) - 1 -# -# down_block = self.get_down_block( -# down_block_type, -# num_layers=layers_per_block, -# in_channels=input_channel, -# 
out_channels=output_channel, -# temb_channels=time_embed_dim, -# add_downsample=not is_final_block, -# resnet_eps=norm_eps, -# resnet_act_fn=act_fn, -# resnet_groups=norm_num_groups, -# cross_attention_dim=cross_attention_dim, -# attn_num_head_channels=attention_head_dim, -# downsample_padding=downsample_padding, -# ) -# self.down_blocks.append(down_block) -# -# # mid -# self.mid_block = UNetMidBlockMultiDimCrossAttn( -# in_channels=block_out_channels[-1], -# temb_channels=time_embed_dim, -# resnet_eps=norm_eps, -# resnet_act_fn=act_fn, -# output_scale_factor=mid_block_scale_factor, -# resnet_time_scale_shift="default", -# cross_attention_dim=cross_attention_dim, -# attn_num_head_channels=attention_head_dim, -# resnet_groups=norm_num_groups, -# ) -# -# # count how many layers upsample the images -# self.num_upsamplers = 0 -# -# # up -# reversed_block_out_channels = list(reversed(block_out_channels)) -# output_channel = reversed_block_out_channels[0] -# for i, up_block_type in enumerate(up_block_types): -# is_final_block = i == len(block_out_channels) - 1 -# -# prev_output_channel = output_channel -# output_channel = reversed_block_out_channels[i] -# input_channel = reversed_block_out_channels[min(i + 1, len(block_out_channels) - 1)] -# -# # add upsample block for all BUT final layer -# if not is_final_block: -# add_upsample = True -# self.num_upsamplers += 1 -# else: -# add_upsample = False -# -# up_block = self.get_up_block( -# up_block_type, -# num_layers=layers_per_block + 1, -# in_channels=input_channel, -# out_channels=output_channel, -# prev_output_channel=prev_output_channel, -# temb_channels=time_embed_dim, -# add_upsample=add_upsample, -# resnet_eps=norm_eps, -# resnet_act_fn=act_fn, -# resnet_groups=norm_num_groups, -# cross_attention_dim=cross_attention_dim, -# attn_num_head_channels=attention_head_dim, -# ) -# self.up_blocks.append(up_block) -# prev_output_channel = output_channel -# -# # out -# self.conv_norm_out = nn.GroupNorm(num_channels=block_out_channels[0], num_groups=norm_num_groups, eps=norm_eps) -# self.conv_act = nn.SiLU() -# self.conv_out = nn.Conv2d(block_out_channels[0], out_channels, 3, padding=1) -# -# def get_down_block( -# down_block_type, -# num_layers, -# in_channels, -# out_channels, -# temb_channels, -# add_downsample, -# resnet_eps, -# resnet_act_fn, -# attn_num_head_channels, -# resnet_groups=None, -# cross_attention_dim=None, -# downsample_padding=None, -# ): -# down_block_type = down_block_type[7:] if down_block_type.startswith("UNetRes") else down_block_type -# if down_block_type == "DownBlockMultiDim": -# return DownBlockMultiDim( -# num_layers=num_layers, -# in_channels=in_channels, -# out_channels=out_channels, -# temb_channels=temb_channels, -# add_downsample=add_downsample, -# resnet_eps=resnet_eps, -# resnet_act_fn=resnet_act_fn, -# resnet_groups=resnet_groups, -# downsample_padding=downsample_padding, -# ) -# elif down_block_type == "CrossAttnDownBlockMultiDim": -# if cross_attention_dim is None: -# raise ValueError("cross_attention_dim must be specified for CrossAttnDownBlock2D") -# return CrossAttnDownBlockMultiDim( -# num_layers=num_layers, -# in_channels=in_channels, -# out_channels=out_channels, -# temb_channels=temb_channels, -# add_downsample=add_downsample, -# resnet_eps=resnet_eps, -# resnet_act_fn=resnet_act_fn, -# resnet_groups=resnet_groups, -# downsample_padding=downsample_padding, -# cross_attention_dim=cross_attention_dim, -# attn_num_head_channels=attn_num_head_channels, -# ) -# -# def set_attention_slice(self, slice_size): -# 
if slice_size is not None and self.config.attention_head_dim % slice_size != 0: -# raise ValueError( -# f"Make sure slice_size {slice_size} is a divisor of " -# f"the number of heads used in cross_attention {self.config.attention_head_dim}" -# ) -# if slice_size is not None and slice_size > self.config.attention_head_dim: -# raise ValueError( -# f"Chunk_size {slice_size} has to be smaller or equal to " -# f"the number of heads used in cross_attention {self.config.attention_head_dim}" -# ) -# -# for block in self.down_blocks: -# if hasattr(block, "attentions") and block.attentions is not None: -# block.set_attention_slice(slice_size) -# -# self.mid_block.set_attention_slice(slice_size) -# -# for block in self.up_blocks: -# if hasattr(block, "attentions") and block.attentions is not None: -# block.set_attention_slice(slice_size) -# -# def set_use_memory_efficient_attention_xformers(self, use_memory_efficient_attention_xformers: bool): -# for block in self.down_blocks: -# if hasattr(block, "attentions") and block.attentions is not None: -# block.set_use_memory_efficient_attention_xformers(use_memory_efficient_attention_xformers) -# -# self.mid_block.set_use_memory_efficient_attention_xformers(use_memory_efficient_attention_xformers) -# -# for block in self.up_blocks: -# if hasattr(block, "attentions") and block.attentions is not None: -# block.set_use_memory_efficient_attention_xformers(use_memory_efficient_attention_xformers) -# -# def _set_gradient_checkpointing(self, module, value=False): -# if isinstance(module, (CrossAttnDownBlock2D, DownBlock2D, CrossAttnUpBlock2D, UpBlock2D)): -# module.gradient_checkpointing = value -# -# def forward( -# self, -# sample: torch.FloatTensor, -# timestep: Union[torch.Tensor, float, int], -# encoder_hidden_states: torch.Tensor, -# return_dict: bool = True, -# ) -> Union[UNet2DConditionOutput, Tuple]: -# r""" -# Args: -# sample (`torch.FloatTensor`): (batch, channel, height, width) noisy inputs tensor -# timestep (`torch.FloatTensor` or `float` or `int`): (batch) timesteps -# encoder_hidden_states (`torch.FloatTensor`): (batch, channel, height, width) encoder hidden states -# return_dict (`bool`, *optional*, defaults to `True`): -# Whether or not to return a [`models.unet_2d_condition.UNet2DConditionOutput`] instead of a plain tuple. -# -# Returns: -# [`~models.unet_2d_condition.UNet2DConditionOutput`] or `tuple`: -# [`~models.unet_2d_condition.UNet2DConditionOutput`] if `return_dict` is True, otherwise a `tuple`. When -# returning a tuple, the first element is the sample tensor. -# """ -# # By default samples have to be AT least a multiple of the overall upsampling factor. -# # The overall upsampling factor is equal to 2 ** (# num of upsampling layears). -# # However, the upsampling interpolation output size can be forced to fit any upsampling size -# # on the fly if necessary. -# default_overall_up_factor = 2**self.num_upsamplers -# -# # upsample size should be forwarded when sample is not a multiple of `default_overall_up_factor` -# forward_upsample_size = False -# upsample_size = None -# -# if any(s % default_overall_up_factor != 0 for s in sample.shape[-2:]): -# logger.info("Forward upsample size to force interpolation output size.") -# forward_upsample_size = True -# -# # 0. center input if necessary -# if self.config.center_input_sample: -# sample = 2 * sample - 1.0 -# -# # 1. time -# timesteps = timestep -# if not torch.is_tensor(timesteps): -# # TODO: this requires sync between CPU and GPU. 
So try to pass timesteps as tensors if you can -# timesteps = torch.tensor([timesteps], dtype=torch.long, device=sample.device) -# elif torch.is_tensor(timesteps) and len(timesteps.shape) == 0: -# timesteps = timesteps[None].to(sample.device) -# -# # broadcast to batch dimension in a way that's compatible with ONNX/Core ML -# timesteps = timesteps.expand(sample.shape[0]) -# -# t_emb = self.time_proj(timesteps) -# -# # timesteps does not contain any weights and will always return f32 tensors -# # but time_embedding might actually be running in fp16. so we need to cast here. -# # there might be better ways to encapsulate this. -# t_emb = t_emb.to(dtype=self.dtype) -# emb = self.time_embedding(t_emb) -# -# # 2. pre-process -# sample = self.conv_in(sample) -# -# # 3. down -# down_block_res_samples = (sample,) -# for downsample_block in self.down_blocks: -# if hasattr(downsample_block, "attentions") and downsample_block.attentions is not None: -# sample, res_samples = downsample_block( -# hidden_states=sample, -# temb=emb, -# encoder_hidden_states=encoder_hidden_states, -# ) -# else: -# sample, res_samples = downsample_block(hidden_states=sample, temb=emb) -# -# down_block_res_samples += res_samples -# -# # 4. mid -# sample = self.mid_block(sample, emb, encoder_hidden_states=encoder_hidden_states) -# -# # 5. up -# for i, upsample_block in enumerate(self.up_blocks): -# is_final_block = i == len(self.up_blocks) - 1 -# -# res_samples = down_block_res_samples[-len(upsample_block.resnets) :] -# down_block_res_samples = down_block_res_samples[: -len(upsample_block.resnets)] -# -# # if we have not reached the final block and need to forward the -# # upsample size, we do it here -# if not is_final_block and forward_upsample_size: -# upsample_size = down_block_res_samples[-1].shape[2:] -# -# if hasattr(upsample_block, "attentions") and upsample_block.attentions is not None: -# sample = upsample_block( -# hidden_states=sample, -# temb=emb, -# res_hidden_states_tuple=res_samples, -# encoder_hidden_states=encoder_hidden_states, -# upsample_size=upsample_size, -# ) -# else: -# sample = upsample_block( -# hidden_states=sample, temb=emb, res_hidden_states_tuple=res_samples, upsample_size=upsample_size -# ) -# # 6. 
post-process -# sample = self.conv_norm_out(sample) -# sample = self.conv_act(sample) -# sample = self.conv_out(sample) -# -# if not return_dict: -# return (sample,) -# -# return UNet2DConditionOutput(sample=sample) -# -# -# class LinearMultiDim(nn.Linear): -# def __init__(self, in_features, out_features, *args, **kwargs): -# in_features = [in_features] if isinstance(in_features, int) else list(in_features) -# out_features = [out_features] if isinstance(out_features, int) else list(out_features) -# self.in_features_multidim = in_features -# self.out_features_multidim = out_features -# super().__init__( -# np.array(in_features).prod(), -# np.array(out_features).prod(), -# *args, **kwargs) -# -# def forward(self, x): -# shape = x.shape -# n = len(self.in_features_multidim) -# x = x.view(*shape[0:-n], self.in_features) -# y = super().forward(x) -# y = y.view(*shape[0:-n], *self.out_features_multidim) -# return y diff --git a/src/diffusers/utils/dummy_torch_and_transformers_objects.py b/src/diffusers/utils/dummy_torch_and_transformers_objects.py index 92c163ba74ea..2ad0ead4403f 100644 --- a/src/diffusers/utils/dummy_torch_and_transformers_objects.py +++ b/src/diffusers/utils/dummy_torch_and_transformers_objects.py @@ -124,6 +124,36 @@ def from_pretrained(cls, *args, **kwargs): requires_backends(cls, ["torch", "transformers"]) +class VersatileDiffusionImageVariationPipeline(metaclass=DummyObject): + _backends = ["torch", "transformers"] + + def __init__(self, *args, **kwargs): + requires_backends(self, ["torch", "transformers"]) + + @classmethod + def from_config(cls, *args, **kwargs): + requires_backends(cls, ["torch", "transformers"]) + + @classmethod + def from_pretrained(cls, *args, **kwargs): + requires_backends(cls, ["torch", "transformers"]) + + +class VersatileDiffusionTextToImagePipeline(metaclass=DummyObject): + _backends = ["torch", "transformers"] + + def __init__(self, *args, **kwargs): + requires_backends(self, ["torch", "transformers"]) + + @classmethod + def from_config(cls, *args, **kwargs): + requires_backends(cls, ["torch", "transformers"]) + + @classmethod + def from_pretrained(cls, *args, **kwargs): + requires_backends(cls, ["torch", "transformers"]) + + class VQDiffusionPipeline(metaclass=DummyObject): _backends = ["torch", "transformers"] diff --git a/tests/pipelines/versatile_diffusion/test_versatile_diffusion_text_to_image.py b/tests/pipelines/versatile_diffusion/test_versatile_diffusion_text_to_image.py index 08a31366e1b1..3ba275df766f 100644 --- a/tests/pipelines/versatile_diffusion/test_versatile_diffusion_text_to_image.py +++ b/tests/pipelines/versatile_diffusion/test_versatile_diffusion_text_to_image.py @@ -19,7 +19,7 @@ import torch from diffusers import VersatileDiffusionTextToImagePipeline -from diffusers.utils.testing_utils import load_image, require_torch, slow, torch_device +from diffusers.utils.testing_utils import require_torch_gpu, slow, torch_device from ...test_pipelines_common import PipelineTesterMixin @@ -32,7 +32,7 @@ class VersatileDiffusionTextToImagePipelineFastTests(PipelineTesterMixin, unitte @slow -@require_torch +@require_torch_gpu class VersatileDiffusionTextToImagePipelineIntegrationTests(unittest.TestCase): def test_inference_text2img(self): pipe = VersatileDiffusionTextToImagePipeline.from_pretrained("diffusers/vd-official-test") @@ -45,8 +45,8 @@ def test_inference_text2img(self): prompt=prompt, generator=generator, guidance_scale=7.5, num_inference_steps=50, output_type="numpy" ).images - image_slice = image[0, -3:, -3:, -1] + 
image_slice = image[0, 253:256, 253:256, -1] assert image.shape == (1, 512, 512, 3) - expected_slice = np.array([0.9256, 0.9340, 0.8933, 0.9361, 0.9113, 0.8727, 0.9122, 0.8745, 0.8099]) + expected_slice = np.array([0.0657, 0.0529, 0.0455, 0.0802, 0.0570, 0.0179, 0.0267, 0.0483, 0.0769]) assert np.abs(image_slice.flatten() - expected_slice).max() < 1e-2 From 303052dc70d9e1df7f41f275d4289f1cad53689c Mon Sep 17 00:00:00 2001 From: anton-l Date: Mon, 21 Nov 2022 14:10:12 +0100 Subject: [PATCH 13/49] mega pipeline --- ...onvert_versatile_diffusion_to_diffusers.py | 21 +- src/diffusers/__init__.py | 1 + src/diffusers/pipelines/__init__.py | 6 +- .../pipelines/versatile_diffusion/__init__.py | 1 + .../pipeline_versatile_diffusion.py | 179 ++++++++++++++++++ ...eline_versatile_diffusion_image_to_text.py | 4 - ...eline_versatile_diffusion_text_to_image.py | 8 +- 7 files changed, 197 insertions(+), 23 deletions(-) diff --git a/scripts/convert_versatile_diffusion_to_diffusers.py b/scripts/convert_versatile_diffusion_to_diffusers.py index f09c8d683974..abdcc495ce73 100644 --- a/scripts/convert_versatile_diffusion_to_diffusers.py +++ b/scripts/convert_versatile_diffusion_to_diffusers.py @@ -31,8 +31,12 @@ UNet2DConditionModel, VersatileDiffusionPipeline, ) -from diffusers.pipelines.latent_diffusion.pipeline_latent_diffusion import LDMBertConfig, LDMBertModel -from transformers import CLIPProcessor, CLIPTextModelWithProjection, CLIPTokenizer, CLIPVisionModelWithProjection +from transformers import ( + CLIPFeatureExtractor, + CLIPTextModelWithProjection, + CLIPTokenizer, + CLIPVisionModelWithProjection, +) SCHEDULER_CONFIG = Namespace( @@ -334,7 +338,7 @@ def convert_vd_unet_checkpoint(checkpoint, config, unet_key, extract_ema=False): # at least a 100 parameters have to start with `model_ema` in order for the checkpoint to be EMA if sum(k.startswith("model_ema") for k in keys) > 100: - print(f"Checkpoint has both EMA and non-EMA weights.") + print("Checkpoint has both EMA and non-EMA weights.") if extract_ema: print( "In this conversion only the EMA weights are extracted. If you want to instead extract the non-EMA" @@ -610,13 +614,6 @@ def convert_vd_vae_checkpoint(checkpoint, config): parser.add_argument( "--optimus_checkpoint_path", default=None, type=str, required=False, help="Path to the checkpoint to convert." 
) - # !wget https://raw.githubusercontent.com/CompVis/stable-diffusion/main/configs/stable-diffusion/v1-inference.yaml - parser.add_argument( - "--original_config_file", - default=None, - type=str, - help="The YAML config file corresponding to the original architecture.", - ) parser.add_argument( "--scheduler_type", default="pndm", @@ -719,14 +716,14 @@ def convert_vd_vae_checkpoint(checkpoint, config): vae.load_state_dict(converted_vae_checkpoint) tokenizer = CLIPTokenizer.from_pretrained("openai/clip-vit-large-patch14") - image_processor = CLIPProcessor.from_pretrained("openai/clip-vit-large-patch14") + image_feature_extractor = CLIPFeatureExtractor.from_pretrained("openai/clip-vit-large-patch14") text_encoder = CLIPTextModelWithProjection.from_pretrained("openai/clip-vit-large-patch14") image_encoder = CLIPVisionModelWithProjection.from_pretrained("openai/clip-vit-large-patch14") pipe = VersatileDiffusionPipeline( scheduler=scheduler, tokenizer=tokenizer, - image_processor=image_processor, + image_feature_extractor=image_feature_extractor, text_encoder=text_encoder, image_encoder=image_encoder, image_unet=image_unet, diff --git a/src/diffusers/__init__.py b/src/diffusers/__init__.py index 7a5f8544fcab..b6bd0790e076 100644 --- a/src/diffusers/__init__.py +++ b/src/diffusers/__init__.py @@ -74,6 +74,7 @@ StableDiffusionInpaintPipelineLegacy, StableDiffusionPipeline, VersatileDiffusionImageVariationPipeline, + VersatileDiffusionPipeline, VersatileDiffusionTextToImagePipeline, VQDiffusionPipeline, ) diff --git a/src/diffusers/pipelines/__init__.py b/src/diffusers/pipelines/__init__.py index 2ebd051daf1f..60cde85f7916 100644 --- a/src/diffusers/pipelines/__init__.py +++ b/src/diffusers/pipelines/__init__.py @@ -24,7 +24,11 @@ StableDiffusionInpaintPipelineLegacy, StableDiffusionPipeline, ) - from .versatile_diffusion import VersatileDiffusionImageVariationPipeline, VersatileDiffusionTextToImagePipeline + from .versatile_diffusion import ( + VersatileDiffusionImageVariationPipeline, + VersatileDiffusionPipeline, + VersatileDiffusionTextToImagePipeline, + ) from .vq_diffusion import VQDiffusionPipeline if is_transformers_available() and is_onnx_available(): diff --git a/src/diffusers/pipelines/versatile_diffusion/__init__.py b/src/diffusers/pipelines/versatile_diffusion/__init__.py index 98975ba6cbee..b219335ba580 100644 --- a/src/diffusers/pipelines/versatile_diffusion/__init__.py +++ b/src/diffusers/pipelines/versatile_diffusion/__init__.py @@ -1,2 +1,3 @@ +from .pipeline_versatile_diffusion import VersatileDiffusionPipeline from .pipeline_versatile_diffusion_image_variation import VersatileDiffusionImageVariationPipeline from .pipeline_versatile_diffusion_text_to_image import VersatileDiffusionTextToImagePipeline diff --git a/src/diffusers/pipelines/versatile_diffusion/pipeline_versatile_diffusion.py b/src/diffusers/pipelines/versatile_diffusion/pipeline_versatile_diffusion.py index e69de29bb2d1..8b8b59bc26cc 100644 --- a/src/diffusers/pipelines/versatile_diffusion/pipeline_versatile_diffusion.py +++ b/src/diffusers/pipelines/versatile_diffusion/pipeline_versatile_diffusion.py @@ -0,0 +1,179 @@ +from typing import Any, Callable, Dict, List, Optional, Union + +import torch + +import PIL.Image +from transformers import CLIPFeatureExtractor, CLIPTextModel, CLIPTokenizer, CLIPVisionModel + +from ...models import AutoencoderKL, UNet2DConditionModel +from ...pipeline_utils import DiffusionPipeline +from ...schedulers import DDIMScheduler, LMSDiscreteScheduler, PNDMScheduler +from ...utils import 
logging +from . import VersatileDiffusionImageVariationPipeline, VersatileDiffusionTextToImagePipeline + + +logger = logging.get_logger(__name__) # pylint: disable=invalid-name + + +class VersatileDiffusionPipeline(DiffusionPipeline): + r""" + Pipeline for text-to-image generation using Stable Diffusion. + + This model inherits from [`DiffusionPipeline`]. Check the superclass documentation for the generic methods the + library implements for all the pipelines (such as downloading or saving, running on a particular device, etc.) + + Args: + vae ([`AutoencoderKL`]): + Variational Auto-Encoder (VAE) Model to encode and decode images to and from latent representations. + text_encoder ([`CLIPTextModel`]): + Frozen text-encoder. Stable Diffusion uses the text portion of + [CLIP](https://huggingface.co/docs/transformers/model_doc/clip#transformers.CLIPTextModel), specifically + the [clip-vit-large-patch14](https://huggingface.co/openai/clip-vit-large-patch14) variant. + tokenizer (`CLIPTokenizer`): + Tokenizer of class + [CLIPTokenizer](https://huggingface.co/docs/transformers/v4.21.0/en/model_doc/clip#transformers.CLIPTokenizer). + unet ([`UNet2DConditionModel`]): Conditional U-Net architecture to denoise the encoded image latents. + scheduler ([`SchedulerMixin`]): + A scheduler to be used in combination with `unet` to denoise the encoded image latents. Can be one of + [`DDIMScheduler`], [`LMSDiscreteScheduler`], or [`PNDMScheduler`]. + safety_checker ([`StableDiffusionMegaSafetyChecker`]): + Classification module that estimates whether generated images could be considered offensive or harmful. + Please, refer to the [model card](https://huggingface.co/runwayml/stable-diffusion-v1-5) for details. + feature_extractor ([`CLIPFeatureExtractor`]): + Model that extracts features from generated images to be used as inputs for the `safety_checker`. + """ + + tokenizer: CLIPTokenizer + image_feature_extractor: CLIPFeatureExtractor + text_encoder: CLIPTextModel + image_encoder: CLIPVisionModel + image_unet: UNet2DConditionModel + text_unet: UNet2DConditionModel + vae: AutoencoderKL + scheduler: Union[DDIMScheduler, PNDMScheduler, LMSDiscreteScheduler] + + def __init__( + self, + tokenizer: CLIPTokenizer, + image_feature_extractor: CLIPFeatureExtractor, + text_encoder: CLIPTextModel, + image_encoder: CLIPVisionModel, + image_unet: UNet2DConditionModel, + text_unet: UNet2DConditionModel, + vae: AutoencoderKL, + scheduler: Union[DDIMScheduler, PNDMScheduler, LMSDiscreteScheduler], + ): + super().__init__() + + self.register_modules( + tokenizer=tokenizer, + image_feature_extractor=image_feature_extractor, + text_encoder=text_encoder, + image_encoder=image_encoder, + image_unet=image_unet, + text_unet=text_unet, + vae=vae, + scheduler=scheduler, + ) + + @property + def components(self) -> Dict[str, Any]: + return {k: getattr(self, k) for k in self.config.keys() if not k.startswith("_")} + + def enable_attention_slicing(self, slice_size: Optional[Union[str, int]] = "auto"): + r""" + Enable sliced attention computation. + + When this option is enabled, the attention module will split the input tensor in slices, to compute attention + in several steps. This is useful to save some memory in exchange for a small speed decrease. + + Args: + slice_size (`str` or `int`, *optional*, defaults to `"auto"`): + When `"auto"`, halves the input to the attention heads, so attention will be computed in two steps. If + a number is provided, uses as many slices as `attention_head_dim // slice_size`. 
In this case, + `attention_head_dim` must be a multiple of `slice_size`. + """ + if slice_size == "auto": + # half the attention head size is usually a good trade-off between + # speed and memory + slice_size = self.image_unet.config.attention_head_dim // 2 + self.image_unet.set_attention_slice(slice_size) + self.text_unet.set_attention_slice(slice_size) + + def disable_attention_slicing(self): + r""" + Disable sliced attention computation. If `enable_attention_slicing` was previously invoked, this method will go + back to computing attention in one step. + """ + # set slice_size = `None` to disable `attention slicing` + self.enable_attention_slicing(None) + + @torch.no_grad() + def image_variation( + self, + image: Union[torch.FloatTensor, PIL.Image.Image], + height: int = 512, + width: int = 512, + num_inference_steps: int = 50, + guidance_scale: float = 7.5, + negative_prompt: Optional[Union[str, List[str]]] = None, + num_images_per_prompt: Optional[int] = 1, + eta: float = 0.0, + generator: Optional[torch.Generator] = None, + latents: Optional[torch.FloatTensor] = None, + output_type: Optional[str] = "pil", + return_dict: bool = True, + callback: Optional[Callable[[int, int, torch.FloatTensor], None]] = None, + callback_steps: Optional[int] = 1, + ): + return VersatileDiffusionImageVariationPipeline(**self.components)( + image=image, + height=height, + width=width, + num_inference_steps=num_inference_steps, + guidance_scale=guidance_scale, + negative_prompt=negative_prompt, + num_images_per_prompt=num_images_per_prompt, + eta=eta, + generator=generator, + latents=latents, + output_type=output_type, + return_dict=return_dict, + callback=callback, + callback_steps=callback_steps, + ) + + @torch.no_grad() + def text_to_image( + self, + prompt: Union[str, List[str]], + height: int = 512, + width: int = 512, + num_inference_steps: int = 50, + guidance_scale: float = 7.5, + negative_prompt: Optional[Union[str, List[str]]] = None, + num_images_per_prompt: Optional[int] = 1, + eta: float = 0.0, + generator: Optional[torch.Generator] = None, + latents: Optional[torch.FloatTensor] = None, + output_type: Optional[str] = "pil", + return_dict: bool = True, + callback: Optional[Callable[[int, int, torch.FloatTensor], None]] = None, + callback_steps: Optional[int] = 1, + ): + return VersatileDiffusionTextToImagePipeline(**self.components)( + prompt=prompt, + height=height, + width=width, + num_inference_steps=num_inference_steps, + guidance_scale=guidance_scale, + negative_prompt=negative_prompt, + num_images_per_prompt=num_images_per_prompt, + eta=eta, + generator=generator, + latents=latents, + output_type=output_type, + return_dict=return_dict, + callback=callback, + callback_steps=callback_steps, + ) diff --git a/src/diffusers/pipelines/versatile_diffusion/pipeline_versatile_diffusion_image_to_text.py b/src/diffusers/pipelines/versatile_diffusion/pipeline_versatile_diffusion_image_to_text.py index 801fe2d39be6..4d2441103b97 100644 --- a/src/diffusers/pipelines/versatile_diffusion/pipeline_versatile_diffusion_image_to_text.py +++ b/src/diffusers/pipelines/versatile_diffusion/pipeline_versatile_diffusion_image_to_text.py @@ -388,10 +388,6 @@ def forward(self, x): return y -class DownBlockMultiDim(nn.Module): - pass - - class UNetMidBlockMultiDimCrossAttn(nn.Module): pass diff --git a/src/diffusers/pipelines/versatile_diffusion/pipeline_versatile_diffusion_text_to_image.py b/src/diffusers/pipelines/versatile_diffusion/pipeline_versatile_diffusion_text_to_image.py index ed2a67bbe33c..4ab582b16913 
100644 --- a/src/diffusers/pipelines/versatile_diffusion/pipeline_versatile_diffusion_text_to_image.py +++ b/src/diffusers/pipelines/versatile_diffusion/pipeline_versatile_diffusion_text_to_image.py @@ -13,13 +13,12 @@ # limitations under the License. import inspect -import logging from typing import Callable, List, Optional, Union import torch import torch.utils.checkpoint -from transformers import CLIPProcessor, CLIPTextModel, CLIPTokenizer, CLIPVisionModel +from transformers import CLIPFeatureExtractor, CLIPTextModel, CLIPTokenizer from ...models import AutoencoderKL, UNet2DConditionModel, VQModel from ...models.attention import Transformer2DModel @@ -50,9 +49,8 @@ class VersatileDiffusionTextToImagePipeline(DiffusionPipeline): [`DDIMScheduler`], [`LMSDiscreteScheduler`], or [`PNDMScheduler`]. """ tokenizer: CLIPTokenizer - image_processor: CLIPProcessor + image_feature_extractor: CLIPFeatureExtractor text_encoder: CLIPTextModel - image_encoder: CLIPVisionModel image_unet: UNet2DConditionModel text_unet: UNet2DConditionModel vae: Union[VQModel, AutoencoderKL] @@ -61,7 +59,6 @@ class VersatileDiffusionTextToImagePipeline(DiffusionPipeline): def __init__( self, tokenizer: CLIPTokenizer, - image_processor: CLIPProcessor, text_encoder: CLIPTextModel, image_unet: UNet2DConditionModel, text_unet: UNet2DConditionModel, @@ -71,7 +68,6 @@ def __init__( super().__init__() self.register_modules( tokenizer=tokenizer, - image_processor=image_processor, text_encoder=text_encoder, image_unet=image_unet, text_unet=text_unet, From f2bc526d5672aad7dd8952272f3556ef985aa3a9 Mon Sep 17 00:00:00 2001 From: Patrick von Platen Date: Mon, 21 Nov 2022 13:44:04 +0000 Subject: [PATCH 14/49] add optimus --- .../pipelines/versatile_diffusion/__init__.py | 7 +- .../modeling_gpt2_optimus.py | 241 ++++++++++++++++++ 2 files changed, 247 insertions(+), 1 deletion(-) create mode 100644 src/diffusers/pipelines/versatile_diffusion/modeling_gpt2_optimus.py diff --git a/src/diffusers/pipelines/versatile_diffusion/__init__.py b/src/diffusers/pipelines/versatile_diffusion/__init__.py index cd63bbfc28b6..6c336bfa2fd8 100644 --- a/src/diffusers/pipelines/versatile_diffusion/__init__.py +++ b/src/diffusers/pipelines/versatile_diffusion/__init__.py @@ -1 +1,6 @@ -from .pipeline_versatile_diffusion import VersatileDiffusionPipeline +from ...utils import is_torch_available, is_transformers_available + + +if is_transformers_available() and is_torch_available(): + from .modeling_gpt2_optimus import GPT2OptimusForLatentConnector + from .pipeline_versatile_diffusion import VersatileDiffusionPipeline diff --git a/src/diffusers/pipelines/versatile_diffusion/modeling_gpt2_optimus.py b/src/diffusers/pipelines/versatile_diffusion/modeling_gpt2_optimus.py new file mode 100644 index 000000000000..02a0ba822c67 --- /dev/null +++ b/src/diffusers/pipelines/versatile_diffusion/modeling_gpt2_optimus.py @@ -0,0 +1,241 @@ +import torch +from torch import nn + +from transformers.modeling_outputs import CausalLMOutputWithCrossAttentions +from transformers.models.gpt2.modeling_gpt2 import GPT2Block, GPT2PreTrainedModel + + +class GPT2OptimusModel(GPT2PreTrainedModel): + def __init__(self, config, latent_as_gpt_emb, latent_as_gpt_memory, latent_size): + super().__init__(config) + self.latent_as_gpt_emb = latent_as_gpt_emb + self.latent_as_gpt_memory = latent_as_gpt_memory + self.latent_size = latent_size + self.output_hidden_states = config.output_hidden_states + self.output_attentions = config.output_attentions + + self.wte = 
nn.Embedding(config.vocab_size, config.n_embd) + self.wpe = nn.Embedding(config.n_positions, config.n_embd) + self.drop = nn.Dropout(config.embd_pdrop) + self.h = nn.ModuleList([GPT2Block(config, i) for i in range(config.n_layer)]) + self.ln_f = nn.LayerNorm(config.n_embd, eps=config.layer_norm_epsilon) + + self.linear = nn.Linear( + self.latent_size, config.hidden_size * config.n_layer, bias=False + ) # different latent vector for each layer + self.linear_emb = nn.Linear( + self.latent_size, config.hidden_size, bias=False + ) # share the same latent vector as the embeddings + + # Initialize weights and apply final processing + self.post_init() + + def forward( + self, + input_ids, + past=None, + attention_mask=None, + token_type_ids=None, + position_ids=None, + head_mask=None, + ): + if past is None: + past_length = 0 + past = [None] * len(self.h) + else: + if self.latent_as_gpt_emb: + past_emb = self.linear_emb(past) # used as embeddings to add on other three embeddings + + if self.latent_as_gpt_memory: + past = self.linear(past) + share_latent = False + if share_latent: + # the same latent vector shared by all layers + past = [past.unsqueeze(-2), past.unsqueeze(-2)] # query, key + past = [past] * len(self.h) + past_length = past[0][0].size(-2) + else: + # different latent vectors for each layer + past_split = torch.split(past.unsqueeze(1), self.config.hidden_size, dim=2) + past = list(zip(past_split, past_split)) + past_length = 1 # past[0][0].size(-2) + else: + past_length = 0 + past = [None] * len(self.h) + + if position_ids is None: + position_ids = torch.arange( + past_length, input_ids.size(-1) + past_length, dtype=torch.long, device=input_ids.device + ) + position_ids = position_ids.unsqueeze(0).expand_as(input_ids) + + # Attention mask. + if attention_mask is not None: + # We create a 3D attention mask from a 2D tensor mask. + # Sizes are [batch_size, 1, 1, to_seq_length] + # So we can broadcast to [batch_size, num_heads, from_seq_length, to_seq_length] + # this attention mask is more simple than the triangular masking of causal attention + # used in OpenAI GPT, we just need to prepare the broadcast dimension here. + attention_mask = attention_mask.unsqueeze(1).unsqueeze(2) + + # Since attention_mask is 1.0 for positions we want to attend and 0.0 for + # masked positions, this operation will create a tensor which is 0.0 for + # positions we want to attend and -10000.0 for masked positions. + # Since we are adding it to the raw scores before the softmax, this is + # effectively the same as removing these entirely. 
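+            # For example, a masked (padding) position gets (1.0 - 0.0) * -10000.0 = -10000.0 added to its
+            # attention logits, which the softmax then turns into an effectively zero attention weight.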
+ attention_mask = attention_mask.to(dtype=next(self.parameters()).dtype) # fp16 compatibility + attention_mask = (1.0 - attention_mask) * -10000.0 + + # Prepare head mask if needed + # 1.0 in head_mask indicate we keep the head + # attention_probs has shape bsz x n_heads x N x N + # head_mask has shape n_layer x batch x n_heads x N x N + if head_mask is not None: + if head_mask.dim() == 1: + head_mask = head_mask.unsqueeze(0).unsqueeze(0).unsqueeze(-1).unsqueeze(-1) + head_mask = head_mask.expand(self.config.n_layer, -1, -1, -1, -1) + elif head_mask.dim() == 2: + head_mask = ( + head_mask.unsqueeze(1).unsqueeze(-1).unsqueeze(-1) + ) # We can specify head_mask for each layer + head_mask = head_mask.to( + dtype=next(self.parameters()).dtype + ) # switch to fload if need + fp16 compatibility + else: + head_mask = [None] * self.config.n_layer + + input_shape = input_ids.size() + input_ids = input_ids.view(-1, input_ids.size(-1)) + position_ids = position_ids.view(-1, position_ids.size(-1)) + + inputs_embeds = self.wte(input_ids) + position_embeds = self.wpe(position_ids) + if token_type_ids is not None: + token_type_ids = token_type_ids.view(-1, token_type_ids.size(-1)) + token_type_embeds = self.wte(token_type_ids) + else: + token_type_embeds = 0 + + hidden_states = inputs_embeds + position_embeds + token_type_embeds + if self.latent_as_gpt_emb: + hidden_states = hidden_states + past_emb.unsqueeze(1) + + hidden_states = self.drop(hidden_states) + + output_shape = input_shape + (hidden_states.size(-1),) + + presents = () + all_attentions = [] + all_hidden_states = () + for i, (block, layer_past) in enumerate(zip(self.h, past)): + if self.output_hidden_states: + all_hidden_states = all_hidden_states + (hidden_states.view(*output_shape),) + + outputs = block( + hidden_states, layer_past=layer_past, attention_mask=attention_mask, head_mask=head_mask[i] + ) + + hidden_states, present = outputs[:2] + presents = presents + (present,) + + if self.output_attentions: + all_attentions.append(outputs[2]) + + hidden_states = self.ln_f(hidden_states) + + hidden_states = hidden_states.view(*output_shape) + # Add last hidden state + if self.output_hidden_states: + all_hidden_states = all_hidden_states + (hidden_states,) + + outputs = (hidden_states, presents) + if self.output_hidden_states: + outputs = outputs + (all_hidden_states,) + if self.output_attentions: + # let the number of heads free (-1) so we can extract attention even after head pruning + attention_output_shape = input_shape[:-1] + (-1,) + all_attentions[0].shape[-2:] + all_attentions = tuple(t.view(*attention_output_shape) for t in all_attentions) + outputs = outputs + (all_attentions,) + + return outputs # last hidden state, presents, (all hidden_states), (attentions) + + +class GPT2OptimusForLatentConnector(GPT2PreTrainedModel): + def __init__(self, config): + super().__init__(config) + self.latent_as_gpt_emb = True + self.latent_as_gpt_memory = True + self.latent_size = getattr(config, "latent_size", 32) + self.transformer = GPT2OptimusModel( + config, + latent_as_gpt_emb=self.latent_as_gpt_emb, + latent_as_gpt_memory=self.latent_as_gpt_memory, + latent_size=self.latent_size, + ) + self.lm_head = nn.Linear(config.n_embd, config.vocab_size, bias=False) + self.init_weights() + self.tie_weights() + + # Initialize weights and apply final processing + self.post_init() + self.tie_weights() + + def _tie_or_clone_weights(self, first_module, second_module): + """Tie or clone module weights depending of weither we are using TorchScript or not""" + if 
self.config.torchscript: + first_module.weight = nn.Parameter(second_module.weight.clone()) + else: + first_module.weight = second_module.weight + + if hasattr(first_module, "bias") and first_module.bias is not None: + first_module.bias.data = torch.nn.functional.pad( + first_module.bias.data, + (0, first_module.weight.shape[0] - first_module.bias.shape[0]), + "constant", + 0, + ) + + def tie_weights(self): + """Make sure we are sharing the input and output embeddings. + Export to TorchScript can't handle parameter sharing so we are cloning them instead. + """ + self._tie_or_clone_weights(self.lm_head, self.transformer.wte) + + def forward( + self, + input_ids, + past_key_values=None, + attention_mask=None, + token_type_ids=None, + position_ids=None, + head_mask=None, + output_attentions=None, + output_hidden_states=None, + return_dict=True, + ): + transformer_outputs = self.transformer( + input_ids, + past=past_key_values, + attention_mask=attention_mask, + token_type_ids=token_type_ids, + position_ids=position_ids, + head_mask=head_mask, + ) + hidden_states = transformer_outputs[0] + + lm_logits = self.lm_head(hidden_states) + + return CausalLMOutputWithCrossAttentions( + loss=None, + logits=lm_logits, + past_key_values=past_key_values, + hidden_states=None, + attentions=None, + cross_attentions=None, + ) + + def prepare_inputs_for_generation(self, input_ids, past, **kwargs): + return { + "input_ids": input_ids, + "past_key_values": past, + } From bc509b2e1cbbbd1e4d79b3b37adbf3cd24e69c83 Mon Sep 17 00:00:00 2001 From: anton-l Date: Mon, 21 Nov 2022 15:04:24 +0100 Subject: [PATCH 15/49] refactor image var --- ...ine_versatile_diffusion_image_variation.py | 392 ++++++++++++++---- ...eline_versatile_diffusion_text_to_image.py | 8 +- ...est_versatile_diffusion_image_variation.py | 8 +- 3 files changed, 319 insertions(+), 89 deletions(-) diff --git a/src/diffusers/pipelines/versatile_diffusion/pipeline_versatile_diffusion_image_variation.py b/src/diffusers/pipelines/versatile_diffusion/pipeline_versatile_diffusion_image_variation.py index 24c2df835d9a..e37010d1f811 100644 --- a/src/diffusers/pipelines/versatile_diffusion/pipeline_versatile_diffusion_image_variation.py +++ b/src/diffusers/pipelines/versatile_diffusion/pipeline_versatile_diffusion_image_variation.py @@ -13,18 +13,23 @@ # limitations under the License. import inspect -from typing import List, Optional, Tuple, Union +from typing import Callable, List, Optional, Union import numpy as np import torch import torch.utils.checkpoint import PIL -from transformers import CLIPProcessor, CLIPTextModel, CLIPTokenizer, CLIPVisionModel +from transformers import CLIPFeatureExtractor, CLIPVisionModelWithProjection from ...models import AutoencoderKL, UNet2DConditionModel, VQModel +from ...models.attention import Transformer2DModel from ...pipeline_utils import DiffusionPipeline, ImagePipelineOutput from ...schedulers import DDIMScheduler, LMSDiscreteScheduler, PNDMScheduler +from ...utils import is_accelerate_available, logging + + +logger = logging.get_logger(__name__) # pylint: disable=invalid-name class VersatileDiffusionImageVariationPipeline(DiffusionPipeline): @@ -45,43 +50,140 @@ class VersatileDiffusionImageVariationPipeline(DiffusionPipeline): A scheduler to be used in combination with `unet` to denoise the encoded image latents. Can be one of [`DDIMScheduler`], [`LMSDiscreteScheduler`], or [`PNDMScheduler`]. 
""" - tokenizer: CLIPTokenizer - image_processor: CLIPProcessor - text_encoder: CLIPTextModel - image_encoder: CLIPVisionModel + image_feature_extractor: CLIPFeatureExtractor + image_encoder: CLIPVisionModelWithProjection image_unet: UNet2DConditionModel - text_unet: UNet2DConditionModel - vae: Union[VQModel, AutoencoderKL] + vae: AutoencoderKL scheduler: Union[DDIMScheduler, PNDMScheduler, LMSDiscreteScheduler] def __init__( self, - tokenizer: CLIPTokenizer, - image_processor: CLIPProcessor, - image_encoder: CLIPVisionModel, + image_feature_extractor: CLIPFeatureExtractor, + image_encoder: CLIPVisionModelWithProjection, image_unet: UNet2DConditionModel, - vae: Union[VQModel, AutoencoderKL], + vae: AutoencoderKL, scheduler: Union[DDIMScheduler, PNDMScheduler, LMSDiscreteScheduler], ): super().__init__() self.register_modules( - tokenizer=tokenizer, - image_processor=image_processor, + image_feature_extractor=image_feature_extractor, image_encoder=image_encoder, image_unet=image_unet, vae=vae, scheduler=scheduler, ) - def _encode_prompt(self, prompt, do_classifier_free_guidance): + def swap_unet_attention_blocks(self): + for name, module in self.image_unet.named_modules(): + if isinstance(module, Transformer2DModel): + parent_name, index = name.rsplit(".", 1) + index = int(index) + self.image_unet.get_submodule(parent_name)[index], self.text_unet.get_submodule(parent_name)[index] = ( + self.text_unet.get_submodule(parent_name)[index], + self.image_unet.get_submodule(parent_name)[index], + ) + + # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline.enable_xformers_memory_efficient_attention with unet->image_unet + def enable_xformers_memory_efficient_attention(self): + r""" + Enable memory efficient attention as implemented in xformers. + + When this option is enabled, you should observe lower GPU memory usage and a potential speed up at inference + time. Speed up at training time is not guaranteed. + + Warning: When Memory Efficient Attention and Sliced attention are both enabled, the Memory Efficient Attention + is used. + """ + self.image_unet.set_use_memory_efficient_attention_xformers(True) + + # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline.disable_xformers_memory_efficient_attention with unet->image_unet + def disable_xformers_memory_efficient_attention(self): + r""" + Disable memory efficient attention as implemented in xformers. + """ + self.image_unet.set_use_memory_efficient_attention_xformers(False) + + # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline.enable_attention_slicing with unet->image_unet + def enable_attention_slicing(self, slice_size: Optional[Union[str, int]] = "auto"): r""" - Encodes the image prompt into image encoder hidden states. + Enable sliced attention computation. + + When this option is enabled, the attention module will split the input tensor in slices, to compute attention + in several steps. This is useful to save some memory in exchange for a small speed decrease. + + Args: + slice_size (`str` or `int`, *optional*, defaults to `"auto"`): + When `"auto"`, halves the input to the attention heads, so attention will be computed in two steps. If + a number is provided, uses as many slices as `attention_head_dim // slice_size`. In this case, + `attention_head_dim` must be a multiple of `slice_size`. 
+ """ + if slice_size == "auto": + # half the attention head size is usually a good trade-off between + # speed and memory + slice_size = self.image_unet.config.attention_head_dim // 2 + self.image_unet.set_attention_slice(slice_size) + + # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline.disable_attention_slicing + def disable_attention_slicing(self): + r""" + Disable sliced attention computation. If `enable_attention_slicing` was previously invoked, this method will go + back to computing attention in one step. + """ + # set slice_size = `None` to disable `attention slicing` + self.enable_attention_slicing(None) + + def enable_sequential_cpu_offload(self, gpu_id=0): + r""" + Offloads all models to CPU using accelerate, significantly reducing memory usage. When called, unet, + text_encoder, vae and safety checker have their state dicts saved to CPU and then are moved to a + `torch.device('meta') and loaded to GPU only when their specific submodule has its `forward` method called. + """ + if is_accelerate_available(): + from accelerate import cpu_offload + else: + raise ImportError("Please install accelerate via `pip install accelerate`") + + device = torch.device(f"cuda:{gpu_id}") + + for cpu_offloaded_model in [self.image_unet, self.text_unet, self.text_encoder, self.vae]: + if cpu_offloaded_model is not None: + cpu_offload(cpu_offloaded_model, device) + + @property + # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline._execution_device with unet->image_unet + def _execution_device(self): + r""" + Returns the device on which the pipeline's models will be executed. After calling + `pipeline.enable_sequential_cpu_offload()` the execution device can only be inferred from Accelerate's module + hooks. + """ + if self.device != torch.device("meta") or not hasattr(self.image_unet, "_hf_hook"): + return self.device + for module in self.image_unet.modules(): + if ( + hasattr(module, "_hf_hook") + and hasattr(module._hf_hook, "execution_device") + and module._hf_hook.execution_device is not None + ): + return torch.device(module._hf_hook.execution_device) + return self.device + + def _encode_prompt(self, prompt, device, num_images_per_prompt, do_classifier_free_guidance, negative_prompt): + r""" + Encodes the prompt into text encoder hidden states. Args: prompt (`str` or `list(int)`): prompt to be encoded + device: (`torch.device`): + torch device + num_images_per_prompt (`int`): + number of images that should be generated per prompt do_classifier_free_guidance (`bool`): whether to use classifier free guidance or not + negative_prompt (`str` or `List[str]`): + The prompt or prompts not to guide the image generation. Ignored when not using guidance (i.e., ignored + if `guidance_scale` is less than `1`). 
""" def normalize_embeddings(encoder_output): @@ -93,120 +195,248 @@ def normalize_embeddings(encoder_output): batch_size = len(prompt) if isinstance(prompt, list) else 1 - if do_classifier_free_guidance: - dummy_images = [np.zeros((512, 512, 3))] * batch_size - dummy_images = self.image_processor(images=dummy_images, return_tensors="pt") - uncond_embeddings = self.image_encoder(dummy_images.pixel_values.to(self.device)) - uncond_embeddings = normalize_embeddings(uncond_embeddings) - # get prompt text embeddings - image_input = self.image_processor(images=prompt, return_tensors="pt") + image_input = self.image_feature_extractor(images=prompt, return_tensors="pt") image_embeddings = self.image_encoder(image_input.pixel_values.to(self.device)) image_embeddings = normalize_embeddings(image_embeddings) - # For classifier free guidance, we need to do two forward passes. - # Here we concatenate the unconditional and image embeddings into a single batch - # to avoid doing two forward passes - image_embeddings = torch.cat([uncond_embeddings, image_embeddings]) + # duplicate image embeddings for each generation per prompt, using mps friendly method + bs_embed, seq_len, _ = image_embeddings.shape + image_embeddings = image_embeddings.repeat(1, num_images_per_prompt, 1) + image_embeddings = image_embeddings.view(bs_embed * num_images_per_prompt, seq_len, -1) + + # get unconditional embeddings for classifier free guidance + if do_classifier_free_guidance: + uncond_images: List[str] + if negative_prompt is None: + uncond_images = [np.zeros((512, 512, 3))] * batch_size + elif type(prompt) is not type(negative_prompt): + raise TypeError( + f"`negative_prompt` should be the same type to `prompt`, but got {type(negative_prompt)} !=" + f" {type(prompt)}." + ) + elif isinstance(negative_prompt, PIL.Image.Image): + uncond_images = [negative_prompt] + elif batch_size != len(negative_prompt): + raise ValueError( + f"`negative_prompt`: {negative_prompt} has batch size {len(negative_prompt)}, but `prompt`:" + f" {prompt} has batch size {batch_size}. Please make sure that passed `negative_prompt` matches" + " the batch size of `prompt`." + ) + else: + uncond_images = negative_prompt + + uncond_images = self.image_feature_extractor(images=uncond_images, return_tensors="pt") + uncond_embeddings = self.image_encoder(uncond_images.pixel_values.to(self.device)) + uncond_embeddings = normalize_embeddings(uncond_embeddings) + + # duplicate unconditional embeddings for each generation per prompt, using mps friendly method + seq_len = uncond_embeddings.shape[1] + uncond_embeddings = uncond_embeddings.repeat(1, num_images_per_prompt, 1) + uncond_embeddings = uncond_embeddings.view(batch_size * num_images_per_prompt, seq_len, -1) + + # For classifier free guidance, we need to do two forward passes. 
+ # Here we concatenate the unconditional and conditional embeddings into a single batch + # to avoid doing two forward passes + image_embeddings = torch.cat([uncond_embeddings, image_embeddings]) return image_embeddings + # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline.decode_latents + def decode_latents(self, latents): + latents = 1 / 0.18215 * latents + image = self.vae.decode(latents).sample + image = (image / 2 + 0.5).clamp(0, 1) + # we always cast to float32 as this does not cause significant overhead and is compatible with bfloa16 + image = image.cpu().permute(0, 2, 3, 1).float().numpy() + return image + + # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline.prepare_extra_step_kwargs + def prepare_extra_step_kwargs(self, generator, eta): + # prepare extra kwargs for the scheduler step, since not all schedulers have the same signature + # eta (η) is only used with the DDIMScheduler, it will be ignored for other schedulers. + # eta corresponds to η in DDIM paper: https://arxiv.org/abs/2010.02502 + # and should be between [0, 1] + + accepts_eta = "eta" in set(inspect.signature(self.scheduler.step).parameters.keys()) + extra_step_kwargs = {} + if accepts_eta: + extra_step_kwargs["eta"] = eta + + # check if the scheduler accepts generator + accepts_generator = "generator" in set(inspect.signature(self.scheduler.step).parameters.keys()) + if accepts_generator: + extra_step_kwargs["generator"] = generator + return extra_step_kwargs + + def check_inputs(self, image, height, width, callback_steps): + if not isinstance(image, PIL.Image.Image) and not isinstance(image, torch.Tensor): + raise ValueError(f"`image` has to be of type `PIL.Image.Image` or `torch.Tensor` but is {type(image)}") + + if height % 8 != 0 or width % 8 != 0: + raise ValueError(f"`height` and `width` have to be divisible by 8 but are {height} and {width}.") + + if (callback_steps is None) or ( + callback_steps is not None and (not isinstance(callback_steps, int) or callback_steps <= 0) + ): + raise ValueError( + f"`callback_steps` has to be a positive integer but is {callback_steps} of type" + f" {type(callback_steps)}." 
+ ) + + # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline.prepare_latents + def prepare_latents(self, batch_size, num_channels_latents, height, width, dtype, device, generator, latents=None): + shape = (batch_size, num_channels_latents, height // 8, width // 8) + if latents is None: + if device.type == "mps": + # randn does not work reproducibly on mps + latents = torch.randn(shape, generator=generator, device="cpu", dtype=dtype).to(device) + else: + latents = torch.randn(shape, generator=generator, device=device, dtype=dtype) + else: + if latents.shape != shape: + raise ValueError(f"Unexpected latents shape, got {latents.shape}, expected {shape}") + latents = latents.to(device) + + # scale the initial noise by the standard deviation required by the scheduler + latents = latents * self.scheduler.init_noise_sigma + return latents + @torch.no_grad() def __call__( self, - image: Optional[Union[torch.Tensor, PIL.Image.Image]] = None, - height: Optional[int] = 512, - width: Optional[int] = 512, - num_inference_steps: Optional[int] = 50, - guidance_scale: Optional[float] = 1.0, - eta: Optional[float] = 0.0, + image: Union[PIL.Image.Image, List[PIL.Image.Image], torch.Tensor], + height: int = 512, + width: int = 512, + num_inference_steps: int = 50, + guidance_scale: float = 7.5, + negative_prompt: Optional[Union[str, List[str]]] = None, + num_images_per_prompt: Optional[int] = 1, + eta: float = 0.0, generator: Optional[torch.Generator] = None, + latents: Optional[torch.FloatTensor] = None, output_type: Optional[str] = "pil", return_dict: bool = True, + callback: Optional[Callable[[int, int, torch.FloatTensor], None]] = None, + callback_steps: Optional[int] = 1, **kwargs, - ) -> Union[Tuple, ImagePipelineOutput]: + ): r""" + Function invoked when calling the pipeline for generation. + Args: - prompt (`str` or `List[str]`): - The prompt or prompts to guide the image generation. - height (`int`, *optional*, defaults to 256): + image (`PIL.Image.Image`, `List[PIL.Image.Image]` or `torch.Tensor`): + The image prompt or prompts to guide the image generation. + height (`int`, *optional*, defaults to 512): The height in pixels of the generated image. - width (`int`, *optional*, defaults to 256): + width (`int`, *optional*, defaults to 512): The width in pixels of the generated image. num_inference_steps (`int`, *optional*, defaults to 50): The number of denoising steps. More denoising steps usually lead to a higher quality image at the expense of slower inference. - guidance_scale (`float`, *optional*, defaults to 1.0): + guidance_scale (`float`, *optional*, defaults to 7.5): Guidance scale as defined in [Classifier-Free Diffusion Guidance](https://arxiv.org/abs/2207.12598). `guidance_scale` is defined as `w` of equation 2. of [Imagen Paper](https://arxiv.org/pdf/2205.11487.pdf). Guidance scale is enabled by setting `guidance_scale > 1`. Higher guidance scale encourages to generate images that are closely linked to the text `prompt`, usually at the expense of lower image quality. + negative_prompt (`str` or `List[str]`, *optional*): + The prompt or prompts not to guide the image generation. Ignored when not using guidance (i.e., ignored + if `guidance_scale` is less than `1`). + num_images_per_prompt (`int`, *optional*, defaults to 1): + The number of images to generate per prompt. + eta (`float`, *optional*, defaults to 0.0): + Corresponds to parameter eta (η) in the DDIM paper: https://arxiv.org/abs/2010.02502. 
Only applies to + [`schedulers.DDIMScheduler`], will be ignored for others. generator (`torch.Generator`, *optional*): A [torch generator](https://pytorch.org/docs/stable/generated/torch.Generator.html) to make generation deterministic. + latents (`torch.FloatTensor`, *optional*): + Pre-generated noisy latents, sampled from a Gaussian distribution, to be used as inputs for image + generation. Can be used to tweak the same generation with different prompts. If not provided, a latents + tensor will ge generated by sampling using the supplied random `generator`. output_type (`str`, *optional*, defaults to `"pil"`): The output format of the generate image. Choose between [PIL](https://pillow.readthedocs.io/en/stable/): `PIL.Image.Image` or `np.array`. - return_dict (`bool`, *optional*): - Whether or not to return a [`~pipeline_utils.ImagePipelineOutput`] instead of a plain tuple. + return_dict (`bool`, *optional*, defaults to `True`): + Whether or not to return a [`~pipelines.stable_diffusion.StableDiffusionPipelineOutput`] instead of a + plain tuple. + callback (`Callable`, *optional*): + A function that will be called every `callback_steps` steps during inference. The function will be + called with the following arguments: `callback(step: int, timestep: int, latents: torch.FloatTensor)`. + callback_steps (`int`, *optional*, defaults to 1): + The frequency at which the `callback` function will be called. If not specified, the callback will be + called at every step. Returns: - [`~pipeline_utils.ImagePipelineOutput`] or `tuple`: [`~pipelines.utils.ImagePipelineOutput`] if - `return_dict` is True, otherwise a `tuple. When returning a tuple, the first element is a list with the - generated images. + [`~pipelines.stable_diffusion.StableDiffusionPipelineOutput`] or `tuple`: + [`~pipelines.stable_diffusion.StableDiffusionPipelineOutput`] if `return_dict` is True, otherwise a `tuple. + When returning a tuple, the first element is a list with the generated images, and the second element is a + list of `bool`s denoting whether the corresponding generated image likely represents "not-safe-for-work" + (nsfw) content, according to the `safety_checker`. """ - do_classifier_free_guidance = guidance_scale > 1.0 - if height % 8 != 0 or width % 8 != 0: - raise ValueError(f"`height` and `width` have to be divisible by 8 but are {height} and {width}.") - - if isinstance(image, PIL.Image.Image): - batch_size = 1 - elif isinstance(image, torch.Tensor): - batch_size = image.shape[0] - else: - raise ValueError( - f"`image_prompt` has to be of type `PIL.Image.Image` or `torch.Tensor` but is {type(image)}" - ) + # 1. Check inputs. Raise error if not correct + self.check_inputs(image, height, width, callback_steps) - condition_embeddings = self._encode_prompt(image, do_classifier_free_guidance) + # 2. Define call parameters + batch_size = 1 if isinstance(image, PIL.Image.Image) else len(image) + device = self._execution_device + # here `guidance_scale` is defined analog to the guidance weight `w` of equation (2) + # of the Imagen paper: https://arxiv.org/pdf/2205.11487.pdf . `guidance_scale = 1` + # corresponds to doing no classifier free guidance. + do_classifier_free_guidance = guidance_scale > 1.0 - latents = torch.randn( - (batch_size, self.image_unet.in_channels, height // 8, width // 8), generator=generator, device=self.device + # 3. 
Encode input prompt + image_embeddings = self._encode_prompt( + image, device, num_images_per_prompt, do_classifier_free_guidance, negative_prompt ) - self.scheduler.set_timesteps(num_inference_steps) - - # prepare extra kwargs for the scheduler step, since not all schedulers have the same signature - accepts_eta = "eta" in set(inspect.signature(self.scheduler.step).parameters.keys()) + # 4. Prepare timesteps + self.scheduler.set_timesteps(num_inference_steps, device=device) + timesteps = self.scheduler.timesteps + + # 5. Prepare latent variables + num_channels_latents = self.image_unet.in_channels + latents = self.prepare_latents( + batch_size * num_images_per_prompt, + num_channels_latents, + height, + width, + image_embeddings.dtype, + device, + generator, + latents, + ) - extra_kwargs = {} - if accepts_eta: - extra_kwargs["eta"] = eta + # 6. Prepare extra step kwargs. + extra_step_kwargs = self.prepare_extra_step_kwargs(generator, eta) - for t in self.progress_bar(self.scheduler.timesteps): - if not do_classifier_free_guidance: - latents_input = latents - else: - latents_input = torch.cat([latents] * 2) + # 7. Denoising loop + for i, t in enumerate(self.progress_bar(timesteps)): + # expand the latents if we are doing classifier free guidance + latent_model_input = torch.cat([latents] * 2) if do_classifier_free_guidance else latents + latent_model_input = self.scheduler.scale_model_input(latent_model_input, t) # predict the noise residual - noise_pred = self.image_unet(latents_input, t, encoder_hidden_states=condition_embeddings).sample + noise_pred = self.image_unet(latent_model_input, t, encoder_hidden_states=image_embeddings).sample + # perform guidance - if guidance_scale != 1.0: - noise_pred_uncond, noise_prediction_cond = noise_pred.chunk(2) - noise_pred = noise_pred_uncond + guidance_scale * (noise_prediction_cond - noise_pred_uncond) + if do_classifier_free_guidance: + noise_pred_uncond, noise_pred_text = noise_pred.chunk(2) + noise_pred = noise_pred_uncond + guidance_scale * (noise_pred_text - noise_pred_uncond) # compute the previous noisy sample x_t -> x_t-1 - latents = self.scheduler.step(noise_pred, t, latents, **extra_kwargs).prev_sample + latents = self.scheduler.step(noise_pred, t, latents, **extra_step_kwargs).prev_sample - # scale and decode the image latents with vae - latents = 1 / 0.18215 * latents - image = self.vae.decode(latents).sample + # call the callback, if provided + if callback is not None and i % callback_steps == 0: + callback(i, t, latents) - image = (image / 2 + 0.5).clamp(0, 1) - image = image.cpu().permute(0, 2, 3, 1).numpy() + # 8. Post-processing + image = self.decode_latents(latents) + + # 9. 
Convert to PIL if output_type == "pil": image = self.numpy_to_pil(image) diff --git a/src/diffusers/pipelines/versatile_diffusion/pipeline_versatile_diffusion_text_to_image.py b/src/diffusers/pipelines/versatile_diffusion/pipeline_versatile_diffusion_text_to_image.py index 4ab582b16913..991b58c357b0 100644 --- a/src/diffusers/pipelines/versatile_diffusion/pipeline_versatile_diffusion_text_to_image.py +++ b/src/diffusers/pipelines/versatile_diffusion/pipeline_versatile_diffusion_text_to_image.py @@ -18,7 +18,7 @@ import torch import torch.utils.checkpoint -from transformers import CLIPFeatureExtractor, CLIPTextModel, CLIPTokenizer +from transformers import CLIPFeatureExtractor, CLIPTextModelWithProjection, CLIPTokenizer from ...models import AutoencoderKL, UNet2DConditionModel, VQModel from ...models.attention import Transformer2DModel @@ -50,16 +50,16 @@ class VersatileDiffusionTextToImagePipeline(DiffusionPipeline): """ tokenizer: CLIPTokenizer image_feature_extractor: CLIPFeatureExtractor - text_encoder: CLIPTextModel + text_encoder: CLIPTextModelWithProjection image_unet: UNet2DConditionModel text_unet: UNet2DConditionModel - vae: Union[VQModel, AutoencoderKL] + vae: AutoencoderKL scheduler: Union[DDIMScheduler, PNDMScheduler, LMSDiscreteScheduler] def __init__( self, tokenizer: CLIPTokenizer, - text_encoder: CLIPTextModel, + text_encoder: CLIPTextModelWithProjection, image_unet: UNet2DConditionModel, text_unet: UNet2DConditionModel, vae: Union[VQModel, AutoencoderKL], diff --git a/tests/pipelines/versatile_diffusion/test_versatile_diffusion_image_variation.py b/tests/pipelines/versatile_diffusion/test_versatile_diffusion_image_variation.py index d990238c2194..b8297ea63cf8 100644 --- a/tests/pipelines/versatile_diffusion/test_versatile_diffusion_image_variation.py +++ b/tests/pipelines/versatile_diffusion/test_versatile_diffusion_image_variation.py @@ -19,7 +19,7 @@ import torch from diffusers import VersatileDiffusionImageVariationPipeline -from diffusers.utils.testing_utils import load_image, require_torch, slow, torch_device +from diffusers.utils.testing_utils import load_image, require_torch_gpu, slow, torch_device from ...test_pipelines_common import PipelineTesterMixin @@ -32,7 +32,7 @@ class VersatileDiffusionImageVariationPipelineFastTests(PipelineTesterMixin, uni @slow -@require_torch +@require_torch_gpu class VersatileDiffusionImageVariationPipelineIntegrationTests(unittest.TestCase): def test_inference_image_variations(self): pipe = VersatileDiffusionImageVariationPipeline.from_pretrained("diffusers/vd-official-test") @@ -51,8 +51,8 @@ def test_inference_image_variations(self): output_type="numpy", ).images - image_slice = image[0, -3:, -3:, -1] + image_slice = image[0, 253:256, 253:256, -1] assert image.shape == (1, 512, 512, 3) - expected_slice = np.array([0.9256, 0.9340, 0.8933, 0.9361, 0.9113, 0.8727, 0.9122, 0.8745, 0.8099]) + expected_slice = np.array([0.1811, 0.0430, 0.0433, 0.1082, 0.0144, 0.0306, 0.0683, 0.0248, 0.0876]) assert np.abs(image_slice.flatten() - expected_slice).max() < 1e-2 From 8c989ebe40e962c819d239b2f5a42cbb895b0272 Mon Sep 17 00:00:00 2001 From: anton-l Date: Mon, 21 Nov 2022 17:18:31 +0100 Subject: [PATCH 16/49] wip text_unet --- ...onvert_versatile_diffusion_to_diffusers.py | 68 +- .../versatile_diffusion/modeling_text_unet.py | 971 ++++++++++++++++++ ...eline_versatile_diffusion_image_to_text.py | 408 -------- 3 files changed, 1017 insertions(+), 430 deletions(-) create mode 100644 src/diffusers/pipelines/versatile_diffusion/modeling_text_unet.py 
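The text UNet introduced in modeling_text_unet.py below operates on per-token feature vectors reshaped into (channels, 4, 1) blocks, so 1x1 convolutions and LinearMultiDim projections stand in for the spatial layers of the image UNet. A rough standalone sketch of the LinearMultiDim reshape trick (the 77-token / 768-dim shapes here are illustrative assumptions, not the exact VD configuration):

import numpy as np
import torch
from torch import nn

class LinearMultiDimSketch(nn.Linear):
    # Flattens the trailing "multi-dim" feature axes, applies a plain Linear, and restores them.
    def __init__(self, in_features, out_features):
        self.in_features_multidim = list(in_features)    # e.g. [768, 1, 1]
        self.out_features_multidim = list(out_features)  # e.g. [320, 4, 1]
        super().__init__(int(np.prod(in_features)), int(np.prod(out_features)))

    def forward(self, x):
        shape = x.shape
        n = len(self.in_features_multidim)
        x = x.view(*shape[:-n], self.in_features)         # (batch, tokens, 768)
        y = super().forward(x)
        return y.view(*shape[:-n], *self.out_features_multidim)

tokens = torch.randn(2, 77, 768, 1, 1)                    # (batch, tokens, channels, 1, 1)
out = LinearMultiDimSketch([768, 1, 1], [320, 4, 1])(tokens)
print(out.shape)                                           # torch.Size([2, 77, 320, 4, 1])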
diff --git a/scripts/convert_versatile_diffusion_to_diffusers.py b/scripts/convert_versatile_diffusion_to_diffusers.py index abdcc495ce73..ca54f05f05d5 100644 --- a/scripts/convert_versatile_diffusion_to_diffusers.py +++ b/scripts/convert_versatile_diffusion_to_diffusers.py @@ -37,6 +37,7 @@ CLIPTokenizer, CLIPVisionModelWithProjection, ) +from diffusers.pipelines.versatile_diffusion.modeling_text_unet import UNetMultiDimConditionModel SCHEDULER_CONFIG = Namespace( @@ -256,9 +257,9 @@ def conv_attn_to_linear(checkpoint): checkpoint[key] = checkpoint[key][:, :, 0] -def create_unet_diffusers_config(unet_params): +def create_image_unet_diffusers_config(unet_params): """ - Creates a config for the diffusers based on the config of the LDM model. + Creates a config for the diffusers based on the config of the VD model. """ block_out_channels = [unet_params.model_channels * mult for mult in unet_params.channel_mult] @@ -295,6 +296,45 @@ def create_unet_diffusers_config(unet_params): return config +def create_text_unet_diffusers_config(unet_params): + """ + Creates a config for the diffusers based on the config of the VD model. + """ + + block_out_channels = [unet_params.model_channels * mult for mult in unet_params.channel_mult] + + down_block_types = [] + resolution = 1 + for i in range(len(block_out_channels)): + block_type = "CrossAttnDownBlockMultiDim" if unet_params.with_attn[i] else "DownBlockMultiDim" + down_block_types.append(block_type) + if i != len(block_out_channels) - 1: + resolution *= 2 + + up_block_types = [] + for i in range(len(block_out_channels)): + block_type = "CrossAttnUpBlockMultiDim" if unet_params.with_attn[-i - 1] else "UpBlockMultiDim" + up_block_types.append(block_type) + resolution //= 2 + + if not all(n == unet_params.num_noattn_blocks[0] for n in unet_params.num_noattn_blocks): + raise ValueError("Not all num_res_blocks are equal, which is not supported in this script.") + + config = dict( + sample_size=None, + in_channels=unet_params.input_channels, + out_channels=unet_params.output_channels, + down_block_types=tuple(down_block_types), + up_block_types=tuple(up_block_types), + block_out_channels=tuple(block_out_channels), + layers_per_block=unet_params.num_noattn_blocks[0], + cross_attention_dim=unet_params.context_dim, + attention_head_dim=unet_params.num_heads, + ) + + return config + + def create_vae_diffusers_config(vae_params): """ Creates a config for the diffusers based on the config of the VD model. @@ -674,7 +714,7 @@ def convert_vd_vae_checkpoint(checkpoint, config): # Convert the UNet2DConditionModel models. 
if args.unet_checkpoint_path is not None: # image UNet - image_unet_config = create_unet_diffusers_config(IMAGE_UNET_CONFIG) + image_unet_config = create_image_unet_diffusers_config(IMAGE_UNET_CONFIG) checkpoint = torch.load(args.unet_checkpoint_path) converted_image_unet_checkpoint = convert_vd_unet_checkpoint( checkpoint, image_unet_config, unet_key="model.diffusion_model.unet_image.", extract_ema=args.extract_ema @@ -683,28 +723,12 @@ def convert_vd_vae_checkpoint(checkpoint, config): image_unet.load_state_dict(converted_image_unet_checkpoint) # text UNet - text_unet_config = create_unet_diffusers_config(TEXT_UNET_CONFIG) + text_unet_config = create_text_unet_diffusers_config(TEXT_UNET_CONFIG) converted_text_unet_checkpoint = convert_vd_unet_checkpoint( checkpoint, text_unet_config, unet_key="model.diffusion_model.unet_text.", extract_ema=args.extract_ema ) - text_unet = UNet2DConditionModel(**text_unet_config) - # TEMP hack to skip converting the 1x1 blocks for the text unet - del converted_text_unet_checkpoint["conv_in.weight"] - del converted_text_unet_checkpoint["conv_in.bias"] - del converted_text_unet_checkpoint["conv_out.weight"] - for block in ["down_blocks", "mid_block", "up_blocks"]: - for i in range(4): - for j in range(3): - for module in ["time_emb_proj", "conv1", "norm1", "conv2", "norm2", "conv_shortcut"]: - for type in ["weight", "bias"]: - if block == "mid_block": - key = f"{block}.resnets.{j}.{module}.{type}" - else: - key = f"{block}.{i}.resnets.{j}.{module}.{type}" - if key in converted_text_unet_checkpoint: - del converted_text_unet_checkpoint[key] - # END TEMP hack - text_unet.load_state_dict(converted_text_unet_checkpoint, strict=False) + text_unet = UNetMultiDimConditionModel(**text_unet_config) + text_unet.load_state_dict(converted_text_unet_checkpoint) # Convert the VAE model. 
if args.vae_checkpoint_path is not None: diff --git a/src/diffusers/pipelines/versatile_diffusion/modeling_text_unet.py b/src/diffusers/pipelines/versatile_diffusion/modeling_text_unet.py new file mode 100644 index 000000000000..5c8aecfeb122 --- /dev/null +++ b/src/diffusers/pipelines/versatile_diffusion/modeling_text_unet.py @@ -0,0 +1,971 @@ +from typing import Optional, Tuple, Union + +import numpy as np +import torch +import torch.nn as nn + +from ...configuration_utils import ConfigMixin, register_to_config +from ...modeling_utils import ModelMixin +from ...models.embeddings import TimestepEmbedding, Timesteps +from ...models.attention import Transformer2DModel +from ...models.unet_2d_condition import UNet2DConditionOutput +from ...utils import logging + + +logger = logging.get_logger(__name__) # pylint: disable=invalid-name + + +def get_down_block_multi_dim( + down_block_type, + num_layers, + in_channels, + out_channels, + temb_channels, + add_downsample, + resnet_eps, + attn_num_head_channels, + resnet_groups=None, + cross_attention_dim=None, +): + down_block_type = down_block_type[7:] if down_block_type.startswith("UNetRes") else down_block_type + if down_block_type == "DownBlockMultiDim": + return DownBlockMultiDim( + num_layers=num_layers, + in_channels=in_channels, + out_channels=out_channels, + temb_channels=temb_channels, + add_downsample=add_downsample, + resnet_eps=resnet_eps, + resnet_groups=resnet_groups, + ) + elif down_block_type == "CrossAttnDownBlockMultiDim": + if cross_attention_dim is None: + raise ValueError("cross_attention_dim must be specified for CrossAttnDownBlockMultiDim") + return CrossAttnDownBlockMultiDim( + num_layers=num_layers, + in_channels=in_channels, + out_channels=out_channels, + temb_channels=temb_channels, + add_downsample=add_downsample, + resnet_eps=resnet_eps, + resnet_groups=resnet_groups, + cross_attention_dim=cross_attention_dim, + attn_num_head_channels=attn_num_head_channels, + ) + raise ValueError(f"{down_block_type} is not supported.") + +def get_up_block_multi_dim( + up_block_type, + num_layers, + in_channels, + out_channels, + prev_output_channel, + temb_channels, + add_upsample, + resnet_eps, + attn_num_head_channels, + resnet_groups=None, + cross_attention_dim=None, +): + up_block_type = up_block_type[7:] if up_block_type.startswith("UNetRes") else up_block_type + if up_block_type == "UpBlockMultiDim": + return UpBlockMultiDim( + num_layers=num_layers, + in_channels=in_channels, + out_channels=out_channels, + prev_output_channel=prev_output_channel, + temb_channels=temb_channels, + add_upsample=add_upsample, + resnet_eps=resnet_eps, + resnet_groups=resnet_groups, + ) + elif up_block_type == "CrossAttnUpBlockMultiDim": + if cross_attention_dim is None: + raise ValueError("cross_attention_dim must be specified for CrossAttnUpBlockMultiDim") + return CrossAttnUpBlockMultiDim( + num_layers=num_layers, + in_channels=in_channels, + out_channels=out_channels, + prev_output_channel=prev_output_channel, + temb_channels=temb_channels, + add_upsample=add_upsample, + resnet_eps=resnet_eps, + resnet_groups=resnet_groups, + cross_attention_dim=cross_attention_dim, + attn_num_head_channels=attn_num_head_channels, + ) + raise ValueError(f"{up_block_type} is not supported.") + + +# Copied from diffusers.schedulers.scheduling_ddpm.DDPMSchedulerOutput with DDPM->LMSDiscrete +class UNetMultiDimConditionModel(ModelMixin, ConfigMixin): + r""" + UNet2DConditionModel is a conditional 2D UNet model that takes in a noisy sample, conditional state, and a 
timestep + and returns sample shaped output. + + This model inherits from [`ModelMixin`]. Check the superclass documentation for the generic methods the library + implements for all the models (such as downloading or saving, etc.) + + Parameters: + sample_size (`int`, *optional*): The size of the input sample. + in_channels (`int`, *optional*, defaults to 4): The number of channels in the input sample. + out_channels (`int`, *optional*, defaults to 4): The number of channels in the output. + center_input_sample (`bool`, *optional*, defaults to `False`): Whether to center the input sample. + flip_sin_to_cos (`bool`, *optional*, defaults to `True`): + Whether to flip the sin to cos in the time embedding. + freq_shift (`int`, *optional*, defaults to 0): The frequency shift to apply to the time embedding. + down_block_types (`Tuple[str]`, *optional*, defaults to `("CrossAttnDownBlock2D", "CrossAttnDownBlock2D", "CrossAttnDownBlock2D", "DownBlock2D")`): + The tuple of downsample blocks to use. + up_block_types (`Tuple[str]`, *optional*, defaults to `("UpBlock2D", "CrossAttnUpBlock2D", "CrossAttnUpBlock2D", "CrossAttnUpBlock2D",)`): + The tuple of upsample blocks to use. + block_out_channels (`Tuple[int]`, *optional*, defaults to `(320, 640, 1280, 1280)`): + The tuple of output channels for each block. + layers_per_block (`int`, *optional*, defaults to 2): The number of layers per block. + downsample_padding (`int`, *optional*, defaults to 1): The padding to use for the downsampling convolution. + mid_block_scale_factor (`float`, *optional*, defaults to 1.0): The scale factor to use for the mid block. + act_fn (`str`, *optional*, defaults to `"silu"`): The activation function to use. + norm_num_groups (`int`, *optional*, defaults to 32): The number of groups to use for the normalization. + norm_eps (`float`, *optional*, defaults to 1e-5): The epsilon to use for the normalization. + cross_attention_dim (`int`, *optional*, defaults to 1280): The dimension of the cross attention features. + attention_head_dim (`int`, *optional*, defaults to 8): The dimension of the attention heads. 
+ """ + + _supports_gradient_checkpointing = True + + @register_to_config + def __init__( + self, + sample_size: Optional[int] = None, + in_channels: int = 4, + out_channels: int = 4, + center_input_sample: bool = False, + flip_sin_to_cos: bool = True, + freq_shift: int = 0, + down_block_types: Tuple[str] = ( + "CrossAttnDownBlockMultiDim", + "CrossAttnDownBlockMultiDim", + "CrossAttnDownBlockMultiDim", + "DownBlockMultiDim", + ), + up_block_types: Tuple[str] = ( + "UpBlockMultiDim", + "CrossAttnUpBlockMultiDim", + "CrossAttnUpBlockMultiDim", + "CrossAttnUpBlockMultiDim", + ), + block_out_channels: Tuple[int] = (320, 640, 1280, 1280), + block_second_dim: Tuple[int] = (4, 4, 4, 4), + layers_per_block: int = 2, + downsample_padding: int = 1, + mid_block_scale_factor: float = 1, + act_fn: str = "silu", + norm_num_groups: int = 32, + norm_eps: float = 1e-5, + cross_attention_dim: int = 1280, + attention_head_dim: int = 8, + ): + super().__init__() + + self.sample_size = sample_size + time_embed_dim = block_out_channels[0] * 4 + + # input + self.conv_in = LinearMultiDim([in_channels, 1, 1], [block_out_channels[0], block_second_dim[0], 1]) + + # time + self.time_proj = Timesteps(block_out_channels[0], flip_sin_to_cos, freq_shift) + timestep_input_dim = block_out_channels[0] + + self.time_embedding = TimestepEmbedding(timestep_input_dim, time_embed_dim) + + self.down_blocks = nn.ModuleList([]) + self.mid_block = None + self.up_blocks = nn.ModuleList([]) + + # down + output_channel = block_out_channels[0] + for i, down_block_type in enumerate(down_block_types): + input_channel = output_channel + output_channel = block_out_channels[i] + second_dim = block_second_dim[i] + is_final_block = i == len(block_out_channels) - 1 + + down_block = get_down_block_multi_dim( + down_block_type, + num_layers=layers_per_block, + in_channels=input_channel, + out_channels=[output_channel, second_dim, 1], + temb_channels=time_embed_dim, + add_downsample=not is_final_block, + resnet_eps=norm_eps, + resnet_groups=norm_num_groups, + cross_attention_dim=cross_attention_dim, + attn_num_head_channels=attention_head_dim, + ) + self.down_blocks.append(down_block) + + # mid + self.mid_block = UNetMidBlockMultiDimCrossAttn( + in_channels=block_out_channels[-1], + temb_channels=time_embed_dim, + resnet_eps=norm_eps, + resnet_act_fn=act_fn, + output_scale_factor=mid_block_scale_factor, + resnet_time_scale_shift="default", + cross_attention_dim=cross_attention_dim, + attn_num_head_channels=attention_head_dim, + resnet_groups=norm_num_groups, + ) + + # count how many layers upsample the images + self.num_upsamplers = 0 + + # up + reversed_block_out_channels = list(reversed(block_out_channels)) + output_channel = reversed_block_out_channels[0] + for i, up_block_type in enumerate(up_block_types): + is_final_block = i == len(block_out_channels) - 1 + + prev_output_channel = output_channel + output_channel = reversed_block_out_channels[i] + input_channel = reversed_block_out_channels[min(i + 1, len(block_out_channels) - 1)] + + # add upsample block for all BUT final layer + if not is_final_block: + add_upsample = True + self.num_upsamplers += 1 + else: + add_upsample = False + + up_block = get_up_block_multi_dim( + up_block_type, + num_layers=layers_per_block + 1, + in_channels=input_channel, + out_channels=output_channel, + prev_output_channel=prev_output_channel, + temb_channels=time_embed_dim, + add_upsample=add_upsample, + resnet_eps=norm_eps, + resnet_groups=norm_num_groups, + cross_attention_dim=cross_attention_dim, + 
attn_num_head_channels=attention_head_dim, + ) + self.up_blocks.append(up_block) + prev_output_channel = output_channel + + # out + self.conv_norm_out = nn.GroupNorm(num_channels=block_out_channels[0], num_groups=norm_num_groups, eps=norm_eps) + self.conv_act = nn.SiLU() + self.conv_out = LinearMultiDim(block_out_channels[0], [out_channels, 1, 1]) + + def set_attention_slice(self, slice_size): + if slice_size is not None and self.config.attention_head_dim % slice_size != 0: + raise ValueError( + f"Make sure slice_size {slice_size} is a divisor of " + f"the number of heads used in cross_attention {self.config.attention_head_dim}" + ) + if slice_size is not None and slice_size > self.config.attention_head_dim: + raise ValueError( + f"Chunk_size {slice_size} has to be smaller or equal to " + f"the number of heads used in cross_attention {self.config.attention_head_dim}" + ) + + for block in self.down_blocks: + if hasattr(block, "attentions") and block.attentions is not None: + block.set_attention_slice(slice_size) + + self.mid_block.set_attention_slice(slice_size) + + for block in self.up_blocks: + if hasattr(block, "attentions") and block.attentions is not None: + block.set_attention_slice(slice_size) + + def set_use_memory_efficient_attention_xformers(self, use_memory_efficient_attention_xformers: bool): + for block in self.down_blocks: + if hasattr(block, "attentions") and block.attentions is not None: + block.set_use_memory_efficient_attention_xformers(use_memory_efficient_attention_xformers) + + self.mid_block.set_use_memory_efficient_attention_xformers(use_memory_efficient_attention_xformers) + + for block in self.up_blocks: + if hasattr(block, "attentions") and block.attentions is not None: + block.set_use_memory_efficient_attention_xformers(use_memory_efficient_attention_xformers) + + def _set_gradient_checkpointing(self, module, value=False): + if isinstance( + module, (CrossAttnDownBlockMultiDim, DownBlockMultiDim, CrossAttnUpBlockMultiDim, UpBlockMultiDim) + ): + module.gradient_checkpointing = value + + def forward( + self, + sample: torch.FloatTensor, + timestep: Union[torch.Tensor, float, int], + encoder_hidden_states: torch.Tensor, + return_dict: bool = True, + ) -> Union[UNet2DConditionOutput, Tuple]: + r""" + Args: + sample (`torch.FloatTensor`): (batch, channel, height, width) noisy inputs tensor + timestep (`torch.FloatTensor` or `float` or `int`): (batch) timesteps + encoder_hidden_states (`torch.FloatTensor`): (batch, channel, height, width) encoder hidden states + return_dict (`bool`, *optional*, defaults to `True`): + Whether or not to return a [`models.unet_2d_condition.UNet2DConditionOutput`] instead of a plain tuple. + + Returns: + [`~models.unet_2d_condition.UNet2DConditionOutput`] or `tuple`: + [`~models.unet_2d_condition.UNet2DConditionOutput`] if `return_dict` is True, otherwise a `tuple`. When + returning a tuple, the first element is the sample tensor. + """ + # By default samples have to be AT least a multiple of the overall upsampling factor. + # The overall upsampling factor is equal to 2 ** (# num of upsampling layears). + # However, the upsampling interpolation output size can be forced to fit any upsampling size + # on the fly if necessary. 
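+        # For example, with 3 upsampling blocks the spatial size should be a multiple of 2**3 = 8;
+        # otherwise `forward_upsample_size` is set below and the skip-connection shapes are reused.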
+ default_overall_up_factor = 2**self.num_upsamplers + + # upsample size should be forwarded when sample is not a multiple of `default_overall_up_factor` + forward_upsample_size = False + upsample_size = None + + if any(s % default_overall_up_factor != 0 for s in sample.shape[-2:]): + logger.info("Forward upsample size to force interpolation output size.") + forward_upsample_size = True + + # 0. center input if necessary + if self.config.center_input_sample: + sample = 2 * sample - 1.0 + + # 1. time + timesteps = timestep + if not torch.is_tensor(timesteps): + # TODO: this requires sync between CPU and GPU. So try to pass timesteps as tensors if you can + timesteps = torch.tensor([timesteps], dtype=torch.long, device=sample.device) + elif torch.is_tensor(timesteps) and len(timesteps.shape) == 0: + timesteps = timesteps[None].to(sample.device) + + # broadcast to batch dimension in a way that's compatible with ONNX/Core ML + timesteps = timesteps.expand(sample.shape[0]) + + t_emb = self.time_proj(timesteps) + + # timesteps does not contain any weights and will always return f32 tensors + # but time_embedding might actually be running in fp16. so we need to cast here. + # there might be better ways to encapsulate this. + t_emb = t_emb.to(dtype=self.dtype) + emb = self.time_embedding(t_emb) + + # 2. pre-process + sample = self.conv_in(sample) + + # 3. down + down_block_res_samples = (sample,) + for downsample_block in self.down_blocks: + if hasattr(downsample_block, "attentions") and downsample_block.attentions is not None: + sample, res_samples = downsample_block( + hidden_states=sample, + temb=emb, + encoder_hidden_states=encoder_hidden_states, + ) + else: + sample, res_samples = downsample_block(hidden_states=sample, temb=emb) + + down_block_res_samples += res_samples + + # 4. mid + sample = self.mid_block(sample, emb, encoder_hidden_states=encoder_hidden_states) + + # 5. up + for i, upsample_block in enumerate(self.up_blocks): + is_final_block = i == len(self.up_blocks) - 1 + + res_samples = down_block_res_samples[-len(upsample_block.resnets) :] + down_block_res_samples = down_block_res_samples[: -len(upsample_block.resnets)] + + # if we have not reached the final block and need to forward the + # upsample size, we do it here + if not is_final_block and forward_upsample_size: + upsample_size = down_block_res_samples[-1].shape[2:] + + if hasattr(upsample_block, "attentions") and upsample_block.attentions is not None: + sample = upsample_block( + hidden_states=sample, + temb=emb, + res_hidden_states_tuple=res_samples, + encoder_hidden_states=encoder_hidden_states, + upsample_size=upsample_size, + ) + else: + sample = upsample_block( + hidden_states=sample, temb=emb, res_hidden_states_tuple=res_samples, upsample_size=upsample_size + ) + # 6. 
post-process + sample = self.conv_norm_out(sample) + sample = self.conv_act(sample) + sample = self.conv_out(sample) + + if not return_dict: + return (sample,) + + return UNet2DConditionOutput(sample=sample) + + +class LinearMultiDim(nn.Linear): + def __init__(self, in_features, out_features, second_dim=4, *args, **kwargs): + in_features = [in_features, second_dim, 1] if isinstance(in_features, int) else list(in_features) + out_features = [out_features, second_dim, 1] if isinstance(out_features, int) else list(out_features) + self.in_features_multidim = in_features + self.out_features_multidim = out_features + super().__init__(np.array(in_features).prod(), np.array(out_features).prod()) + + def forward(self, x): + shape = x.shape + n = len(self.in_features_multidim) + x = x.view(*shape[0:-n], self.in_features) + y = super().forward(x) + y = y.view(*shape[0:-n], *self.out_features_multidim) + return y + + +class ResnetBlockMultiDim(nn.Module): + def __init__( + self, + *, + in_channels, + out_channels=None, + dropout=0.0, + temb_channels=512, + groups=32, + groups_out=None, + pre_norm=True, + eps=1e-6, + time_embedding_norm="default", + use_in_shortcut=None, + ): + super().__init__() + self.pre_norm = pre_norm + self.pre_norm = True + + in_channels = [in_channels] if isinstance(in_channels, int) else list(in_channels) + in_channels_prod = np.array(in_channels).prod() + self.channels_multidim = in_channels + + if out_channels is not None: + out_channels = [out_channels] if isinstance(out_channels, int) else list(out_channels) + out_channels_prod = np.array(out_channels).prod() + self.out_channels_multidim = out_channels + else: + out_channels_prod = in_channels_prod + self.out_channels_multidim = self.channels_multidim + self.time_embedding_norm = time_embedding_norm + + if groups_out is None: + groups_out = groups + + self.norm1 = torch.nn.GroupNorm(num_groups=groups, num_channels=in_channels_prod, eps=eps, affine=True) + self.conv1 = torch.nn.Conv2d(in_channels_prod, out_channels_prod, kernel_size=1, padding=0) + + if temb_channels is not None: + self.time_emb_proj = torch.nn.Linear(temb_channels, out_channels_prod) + else: + self.time_emb_proj = None + + self.norm2 = torch.nn.GroupNorm(num_groups=groups_out, num_channels=out_channels_prod, eps=eps, affine=True) + self.dropout = torch.nn.Dropout(dropout) + self.conv2 = torch.nn.Conv2d(out_channels_prod, out_channels_prod, kernel_size=1, padding=0) + + self.nonlinearity = nn.SiLU() + + self.use_in_shortcut = in_channels_prod != out_channels_prod if use_in_shortcut is None else use_in_shortcut + + self.conv_shortcut = None + if self.use_in_shortcut: + self.conv_shortcut = torch.nn.Conv2d( + in_channels_prod, out_channels_prod, kernel_size=1, stride=1, padding=0 + ) + + def forward(self, input_tensor, temb): + hidden_states = input_tensor + + hidden_states = self.norm1(hidden_states) + hidden_states = self.nonlinearity(hidden_states) + hidden_states = self.conv1(hidden_states) + + if temb is not None: + temb = self.time_emb_proj(self.nonlinearity(temb))[:, :, None, None] + hidden_states = hidden_states + temb + + hidden_states = self.norm2(hidden_states) + hidden_states = self.nonlinearity(hidden_states) + + hidden_states = self.dropout(hidden_states) + hidden_states = self.conv2(hidden_states) + + if self.conv_shortcut is not None: + input_tensor = self.conv_shortcut(input_tensor) + + output_tensor = input_tensor + hidden_states + + return output_tensor + + +class DownBlockMultiDim(nn.Module): + def __init__( + self, + in_channels: int, + 
out_channels: int, + temb_channels: int, + dropout: float = 0.0, + num_layers: int = 1, + resnet_eps: float = 1e-6, + resnet_time_scale_shift: str = "default", + resnet_groups: int = 32, + resnet_pre_norm: bool = True, + add_downsample=True, + ): + super().__init__() + resnets = [] + + for i in range(num_layers): + in_channels = in_channels if i == 0 else out_channels + resnets.append( + ResnetBlockMultiDim( + in_channels=in_channels, + out_channels=out_channels, + temb_channels=temb_channels, + eps=resnet_eps, + groups=resnet_groups, + dropout=dropout, + time_embedding_norm=resnet_time_scale_shift, + pre_norm=resnet_pre_norm, + ) + ) + + self.resnets = nn.ModuleList(resnets) + + if add_downsample: + self.downsamplers = nn.ModuleList([LinearMultiDim(out_channels, out_channels)]) + else: + self.downsamplers = None + + self.gradient_checkpointing = False + + def forward(self, hidden_states, temb=None): + output_states = () + + for resnet in self.resnets: + if self.training and self.gradient_checkpointing: + + def create_custom_forward(module): + def custom_forward(*inputs): + return module(*inputs) + + return custom_forward + + hidden_states = torch.utils.checkpoint.checkpoint(create_custom_forward(resnet), hidden_states, temb) + else: + hidden_states = resnet(hidden_states, temb) + + output_states += (hidden_states,) + + if self.downsamplers is not None: + for downsampler in self.downsamplers: + hidden_states = downsampler(hidden_states) + + output_states += (hidden_states,) + + return hidden_states, output_states + + +class CrossAttnDownBlockMultiDim(nn.Module): + def __init__( + self, + in_channels: int, + out_channels: int, + temb_channels: int, + dropout: float = 0.0, + num_layers: int = 1, + resnet_eps: float = 1e-6, + resnet_time_scale_shift: str = "default", + resnet_groups: int = 32, + resnet_pre_norm: bool = True, + attn_num_head_channels=1, + cross_attention_dim=1280, + attention_type="default", + add_downsample=True, + ): + super().__init__() + resnets = [] + attentions = [] + + self.attention_type = attention_type + self.attn_num_head_channels = attn_num_head_channels + + for i in range(num_layers): + in_channels = in_channels if i == 0 else out_channels + resnets.append( + ResnetBlockMultiDim( + in_channels=in_channels, + out_channels=out_channels, + temb_channels=temb_channels, + eps=resnet_eps, + groups=resnet_groups, + dropout=dropout, + time_embedding_norm=resnet_time_scale_shift, + pre_norm=resnet_pre_norm, + ) + ) + attentions.append( + Transformer2DModel( + attn_num_head_channels, + out_channels[0] // attn_num_head_channels, + in_channels=out_channels[0], + num_layers=1, + cross_attention_dim=cross_attention_dim, + norm_num_groups=resnet_groups, + ) + ) + self.attentions = nn.ModuleList(attentions) + self.resnets = nn.ModuleList(resnets) + + if add_downsample: + self.downsamplers = nn.ModuleList([LinearMultiDim(out_channels, out_channels)]) + else: + self.downsamplers = None + + self.gradient_checkpointing = False + + def set_attention_slice(self, slice_size): + if slice_size is not None and self.attn_num_head_channels % slice_size != 0: + raise ValueError( + f"Make sure slice_size {slice_size} is a divisor of " + f"the number of heads used in cross_attention {self.attn_num_head_channels}" + ) + if slice_size is not None and slice_size > self.attn_num_head_channels: + raise ValueError( + f"Chunk_size {slice_size} has to be smaller or equal to " + f"the number of heads used in cross_attention {self.attn_num_head_channels}" + ) + + for attn in self.attentions: + 
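+            # each entry is a Transformer2DModel, which forwards the slice size to its attention layers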
attn._set_attention_slice(slice_size) + + def set_use_memory_efficient_attention_xformers(self, use_memory_efficient_attention_xformers: bool): + for attn in self.attentions: + attn._set_use_memory_efficient_attention_xformers(use_memory_efficient_attention_xformers) + + def forward(self, hidden_states, temb=None, encoder_hidden_states=None): + output_states = () + + for resnet, attn in zip(self.resnets, self.attentions): + if self.training and self.gradient_checkpointing: + + def create_custom_forward(module, return_dict=None): + def custom_forward(*inputs): + if return_dict is not None: + return module(*inputs, return_dict=return_dict) + else: + return module(*inputs) + + return custom_forward + + hidden_states = torch.utils.checkpoint.checkpoint(create_custom_forward(resnet), hidden_states, temb) + hidden_states = torch.utils.checkpoint.checkpoint( + create_custom_forward(attn, return_dict=False), hidden_states, encoder_hidden_states + )[0] + else: + hidden_states = resnet(hidden_states, temb) + hidden_states = attn(hidden_states, encoder_hidden_states=encoder_hidden_states).sample + + output_states += (hidden_states,) + + if self.downsamplers is not None: + for downsampler in self.downsamplers: + hidden_states = downsampler(hidden_states) + + output_states += (hidden_states,) + + return hidden_states, output_states + + +class UpBlockMultiDim(nn.Module): + def __init__( + self, + in_channels: int, + prev_output_channel: int, + out_channels: int, + temb_channels: int, + dropout: float = 0.0, + num_layers: int = 1, + resnet_eps: float = 1e-6, + resnet_time_scale_shift: str = "default", + resnet_groups: int = 32, + resnet_pre_norm: bool = True, + add_upsample=True, + ): + super().__init__() + resnets = [] + + for i in range(num_layers): + res_skip_channels = in_channels if (i == num_layers - 1) else out_channels + resnet_in_channels = prev_output_channel if i == 0 else out_channels + + resnets.append( + ResnetBlockMultiDim( + in_channels=resnet_in_channels + res_skip_channels, + out_channels=out_channels, + temb_channels=temb_channels, + eps=resnet_eps, + groups=resnet_groups, + dropout=dropout, + time_embedding_norm=resnet_time_scale_shift, + pre_norm=resnet_pre_norm, + ) + ) + + self.resnets = nn.ModuleList(resnets) + + if add_upsample: + self.upsamplers = nn.ModuleList([LinearMultiDim(out_channels, out_channels)]) + else: + self.upsamplers = None + + self.gradient_checkpointing = False + + def forward(self, hidden_states, res_hidden_states_tuple, temb=None, upsample_size=None): + for resnet in self.resnets: + # pop res hidden states + res_hidden_states = res_hidden_states_tuple[-1] + res_hidden_states_tuple = res_hidden_states_tuple[:-1] + hidden_states = torch.cat([hidden_states, res_hidden_states], dim=1) + + if self.training and self.gradient_checkpointing: + + def create_custom_forward(module): + def custom_forward(*inputs): + return module(*inputs) + + return custom_forward + + hidden_states = torch.utils.checkpoint.checkpoint(create_custom_forward(resnet), hidden_states, temb) + else: + hidden_states = resnet(hidden_states, temb) + + if self.upsamplers is not None: + for upsampler in self.upsamplers: + hidden_states = upsampler(hidden_states, upsample_size) + + return hidden_states + + +class CrossAttnUpBlockMultiDim(nn.Module): + def __init__( + self, + in_channels: int, + out_channels: int, + prev_output_channel: int, + temb_channels: int, + dropout: float = 0.0, + num_layers: int = 1, + resnet_eps: float = 1e-6, + resnet_time_scale_shift: str = "default", + resnet_groups: int = 
32, + resnet_pre_norm: bool = True, + attn_num_head_channels=1, + cross_attention_dim=1280, + attention_type="default", + add_upsample=True, + ): + super().__init__() + resnets = [] + attentions = [] + + self.attention_type = attention_type + self.attn_num_head_channels = attn_num_head_channels + + for i in range(num_layers): + res_skip_channels = in_channels if (i == num_layers - 1) else out_channels + resnet_in_channels = prev_output_channel if i == 0 else out_channels + + resnets.append( + ResnetBlockMultiDim( + in_channels=resnet_in_channels + res_skip_channels, + out_channels=out_channels, + temb_channels=temb_channels, + eps=resnet_eps, + groups=resnet_groups, + dropout=dropout, + time_embedding_norm=resnet_time_scale_shift, + pre_norm=resnet_pre_norm, + ) + ) + attentions.append( + Transformer2DModel( + attn_num_head_channels, + out_channels // attn_num_head_channels, + in_channels=out_channels, + num_layers=1, + cross_attention_dim=cross_attention_dim, + norm_num_groups=resnet_groups, + ) + ) + self.attentions = nn.ModuleList(attentions) + self.resnets = nn.ModuleList(resnets) + + if add_upsample: + self.upsamplers = nn.ModuleList([LinearMultiDim(out_channels, out_channels)]) + else: + self.upsamplers = None + + self.gradient_checkpointing = False + + def set_attention_slice(self, slice_size): + if slice_size is not None and self.attn_num_head_channels % slice_size != 0: + raise ValueError( + f"Make sure slice_size {slice_size} is a divisor of " + f"the number of heads used in cross_attention {self.attn_num_head_channels}" + ) + if slice_size is not None and slice_size > self.attn_num_head_channels: + raise ValueError( + f"Chunk_size {slice_size} has to be smaller or equal to " + f"the number of heads used in cross_attention {self.attn_num_head_channels}" + ) + + for attn in self.attentions: + attn._set_attention_slice(slice_size) + + self.gradient_checkpointing = False + + def set_use_memory_efficient_attention_xformers(self, use_memory_efficient_attention_xformers: bool): + for attn in self.attentions: + attn._set_use_memory_efficient_attention_xformers(use_memory_efficient_attention_xformers) + + def forward( + self, + hidden_states, + res_hidden_states_tuple, + temb=None, + encoder_hidden_states=None, + upsample_size=None, + ): + for resnet, attn in zip(self.resnets, self.attentions): + # pop res hidden states + res_hidden_states = res_hidden_states_tuple[-1] + res_hidden_states_tuple = res_hidden_states_tuple[:-1] + hidden_states = torch.cat([hidden_states, res_hidden_states], dim=1) + + if self.training and self.gradient_checkpointing: + + def create_custom_forward(module, return_dict=None): + def custom_forward(*inputs): + if return_dict is not None: + return module(*inputs, return_dict=return_dict) + else: + return module(*inputs) + + return custom_forward + + hidden_states = torch.utils.checkpoint.checkpoint(create_custom_forward(resnet), hidden_states, temb) + hidden_states = torch.utils.checkpoint.checkpoint( + create_custom_forward(attn, return_dict=False), hidden_states, encoder_hidden_states + )[0] + else: + hidden_states = resnet(hidden_states, temb) + hidden_states = attn(hidden_states, encoder_hidden_states=encoder_hidden_states).sample + + if self.upsamplers is not None: + for upsampler in self.upsamplers: + hidden_states = upsampler(hidden_states, upsample_size) + + return hidden_states + + +class UNetMidBlockMultiDimCrossAttn(nn.Module): + def __init__( + self, + in_channels: int, + temb_channels: int, + dropout: float = 0.0, + num_layers: int = 1, + resnet_eps: 
float = 1e-6, + resnet_time_scale_shift: str = "default", + resnet_groups: int = 32, + resnet_pre_norm: bool = True, + attn_num_head_channels=1, + attention_type="default", + cross_attention_dim=1280, + **kwargs, + ): + super().__init__() + + self.attention_type = attention_type + self.attn_num_head_channels = attn_num_head_channels + resnet_groups = resnet_groups if resnet_groups is not None else min(in_channels // 4, 32) + + # there is always at least one resnet + resnets = [ + ResnetBlockMultiDim( + in_channels=in_channels, + out_channels=in_channels, + temb_channels=temb_channels, + eps=resnet_eps, + groups=resnet_groups, + dropout=dropout, + time_embedding_norm=resnet_time_scale_shift, + pre_norm=resnet_pre_norm, + ) + ] + attentions = [] + + for _ in range(num_layers): + attentions.append( + Transformer2DModel( + attn_num_head_channels, + in_channels // attn_num_head_channels, + in_channels=in_channels, + num_layers=1, + cross_attention_dim=cross_attention_dim, + norm_num_groups=resnet_groups, + ) + ) + resnets.append( + ResnetBlockMultiDim( + in_channels=in_channels, + out_channels=in_channels, + temb_channels=temb_channels, + eps=resnet_eps, + groups=resnet_groups, + dropout=dropout, + time_embedding_norm=resnet_time_scale_shift, + pre_norm=resnet_pre_norm, + ) + ) + + self.attentions = nn.ModuleList(attentions) + self.resnets = nn.ModuleList(resnets) + + def set_attention_slice(self, slice_size): + if slice_size is not None and self.attn_num_head_channels % slice_size != 0: + raise ValueError( + f"Make sure slice_size {slice_size} is a divisor of " + f"the number of heads used in cross_attention {self.attn_num_head_channels}" + ) + if slice_size is not None and slice_size > self.attn_num_head_channels: + raise ValueError( + f"Chunk_size {slice_size} has to be smaller or equal to " + f"the number of heads used in cross_attention {self.attn_num_head_channels}" + ) + + for attn in self.attentions: + attn._set_attention_slice(slice_size) + + def set_use_memory_efficient_attention_xformers(self, use_memory_efficient_attention_xformers: bool): + for attn in self.attentions: + attn._set_use_memory_efficient_attention_xformers(use_memory_efficient_attention_xformers) + + def forward(self, hidden_states, temb=None, encoder_hidden_states=None): + hidden_states = self.resnets[0](hidden_states, temb) + for attn, resnet in zip(self.attentions, self.resnets[1:]): + hidden_states = attn(hidden_states, encoder_hidden_states).sample + hidden_states = resnet(hidden_states, temb) + + return hidden_states diff --git a/src/diffusers/pipelines/versatile_diffusion/pipeline_versatile_diffusion_image_to_text.py b/src/diffusers/pipelines/versatile_diffusion/pipeline_versatile_diffusion_image_to_text.py index 4d2441103b97..e69de29bb2d1 100644 --- a/src/diffusers/pipelines/versatile_diffusion/pipeline_versatile_diffusion_image_to_text.py +++ b/src/diffusers/pipelines/versatile_diffusion/pipeline_versatile_diffusion_image_to_text.py @@ -1,408 +0,0 @@ -from typing import Optional, Tuple, Union - -import numpy as np -import torch -import torch.nn as nn - -from ...configuration_utils import ConfigMixin, register_to_config -from ...modeling_utils import ModelMixin -from ...models.embeddings import TimestepEmbedding, Timesteps -from ...models.unet_2d_condition import UNet2DConditionOutput -from ...utils import logging - - -logger = logging.get_logger(__name__) # pylint: disable=invalid-name - - -class UNetMultiDimConditionModel(ModelMixin, ConfigMixin): - r""" - UNet2DConditionModel is a conditional 2D UNet model 
that takes in a noisy sample, conditional state, and a timestep - and returns sample shaped output. - - This model inherits from [`ModelMixin`]. Check the superclass documentation for the generic methods the library - implements for all the models (such as downloading or saving, etc.) - - Parameters: - sample_size (`int`, *optional*): The size of the input sample. - in_channels (`int`, *optional*, defaults to 4): The number of channels in the input sample. - out_channels (`int`, *optional*, defaults to 4): The number of channels in the output. - center_input_sample (`bool`, *optional*, defaults to `False`): Whether to center the input sample. - flip_sin_to_cos (`bool`, *optional*, defaults to `True`): - Whether to flip the sin to cos in the time embedding. - freq_shift (`int`, *optional*, defaults to 0): The frequency shift to apply to the time embedding. - down_block_types (`Tuple[str]`, *optional*, defaults to `("CrossAttnDownBlock2D", "CrossAttnDownBlock2D", "CrossAttnDownBlock2D", "DownBlock2D")`): - The tuple of downsample blocks to use. - up_block_types (`Tuple[str]`, *optional*, defaults to `("UpBlock2D", "CrossAttnUpBlock2D", "CrossAttnUpBlock2D", "CrossAttnUpBlock2D",)`): - The tuple of upsample blocks to use. - block_out_channels (`Tuple[int]`, *optional*, defaults to `(320, 640, 1280, 1280)`): - The tuple of output channels for each block. - layers_per_block (`int`, *optional*, defaults to 2): The number of layers per block. - downsample_padding (`int`, *optional*, defaults to 1): The padding to use for the downsampling convolution. - mid_block_scale_factor (`float`, *optional*, defaults to 1.0): The scale factor to use for the mid block. - act_fn (`str`, *optional*, defaults to `"silu"`): The activation function to use. - norm_num_groups (`int`, *optional*, defaults to 32): The number of groups to use for the normalization. - norm_eps (`float`, *optional*, defaults to 1e-5): The epsilon to use for the normalization. - cross_attention_dim (`int`, *optional*, defaults to 1280): The dimension of the cross attention features. - attention_head_dim (`int`, *optional*, defaults to 8): The dimension of the attention heads. 
- """ - - _supports_gradient_checkpointing = True - - @register_to_config - def __init__( - self, - sample_size: Optional[int] = None, - in_channels: int = 4, - out_channels: int = 4, - center_input_sample: bool = False, - flip_sin_to_cos: bool = True, - freq_shift: int = 0, - down_block_types: Tuple[str] = ( - "CrossAttnDownBlockMultiDim", - "CrossAttnDownBlockMultiDim", - "CrossAttnDownBlockMultiDim", - "DownBlockMultiDim", - ), - up_block_types: Tuple[str] = ( - "UpBlockMultiDim", - "CrossAttnUpBlockMultiDim", - "CrossAttnUpBlockMultiDim", - "CrossAttnUpBlockMultiDim", - ), - block_out_channels: Tuple[int] = (320, 640, 1280, 1280), - block_second_dim: Tuple[int] = (4, 4, 4, 4), - layers_per_block: int = 2, - downsample_padding: int = 1, - mid_block_scale_factor: float = 1, - act_fn: str = "silu", - norm_num_groups: int = 32, - norm_eps: float = 1e-5, - cross_attention_dim: int = 1280, - attention_head_dim: int = 8, - ): - super().__init__() - - self.sample_size = sample_size - time_embed_dim = block_out_channels[0] * 4 - - # input - self.conv_in = LinearMultiDim([in_channels, 1, 1], block_out_channels[0], kernel_size=3, padding=(1, 1)) - - # time - self.time_proj = Timesteps(block_out_channels[0], flip_sin_to_cos, freq_shift) - timestep_input_dim = block_out_channels[0] - - self.time_embedding = TimestepEmbedding(timestep_input_dim, time_embed_dim) - - self.down_blocks = nn.ModuleList([]) - self.mid_block = None - self.up_blocks = nn.ModuleList([]) - - # down - output_channel = block_out_channels[0] - for i, down_block_type in enumerate(down_block_types): - input_channel = output_channel - output_channel = block_out_channels[i] - is_final_block = i == len(block_out_channels) - 1 - - down_block = self.get_down_block( - down_block_type, - num_layers=layers_per_block, - in_channels=input_channel, - out_channels=output_channel, - temb_channels=time_embed_dim, - add_downsample=not is_final_block, - resnet_eps=norm_eps, - resnet_act_fn=act_fn, - resnet_groups=norm_num_groups, - cross_attention_dim=cross_attention_dim, - attn_num_head_channels=attention_head_dim, - downsample_padding=downsample_padding, - ) - self.down_blocks.append(down_block) - - # mid - self.mid_block = UNetMidBlockMultiDimCrossAttn( - in_channels=block_out_channels[-1], - temb_channels=time_embed_dim, - resnet_eps=norm_eps, - resnet_act_fn=act_fn, - output_scale_factor=mid_block_scale_factor, - resnet_time_scale_shift="default", - cross_attention_dim=cross_attention_dim, - attn_num_head_channels=attention_head_dim, - resnet_groups=norm_num_groups, - ) - - # count how many layers upsample the images - self.num_upsamplers = 0 - - # up - reversed_block_out_channels = list(reversed(block_out_channels)) - output_channel = reversed_block_out_channels[0] - for i, up_block_type in enumerate(up_block_types): - is_final_block = i == len(block_out_channels) - 1 - - prev_output_channel = output_channel - output_channel = reversed_block_out_channels[i] - input_channel = reversed_block_out_channels[min(i + 1, len(block_out_channels) - 1)] - - # add upsample block for all BUT final layer - if not is_final_block: - add_upsample = True - self.num_upsamplers += 1 - else: - add_upsample = False - - up_block = self.get_up_block( - up_block_type, - num_layers=layers_per_block + 1, - in_channels=input_channel, - out_channels=output_channel, - prev_output_channel=prev_output_channel, - temb_channels=time_embed_dim, - add_upsample=add_upsample, - resnet_eps=norm_eps, - resnet_act_fn=act_fn, - resnet_groups=norm_num_groups, - 
cross_attention_dim=cross_attention_dim, - attn_num_head_channels=attention_head_dim, - ) - self.up_blocks.append(up_block) - prev_output_channel = output_channel - - # out - self.conv_norm_out = nn.GroupNorm(num_channels=block_out_channels[0], num_groups=norm_num_groups, eps=norm_eps) - self.conv_act = nn.SiLU() - self.conv_out = nn.Conv2d(block_out_channels[0], out_channels, 3, padding=1) - - def get_down_block( - down_block_type, - num_layers, - in_channels, - out_channels, - temb_channels, - add_downsample, - resnet_eps, - resnet_act_fn, - attn_num_head_channels, - resnet_groups=None, - cross_attention_dim=None, - downsample_padding=None, - ): - down_block_type = down_block_type[7:] if down_block_type.startswith("UNetRes") else down_block_type - if down_block_type == "DownBlockMultiDim": - return DownBlockMultiDim( - num_layers=num_layers, - in_channels=in_channels, - out_channels=out_channels, - temb_channels=temb_channels, - add_downsample=add_downsample, - resnet_eps=resnet_eps, - resnet_act_fn=resnet_act_fn, - resnet_groups=resnet_groups, - downsample_padding=downsample_padding, - ) - elif down_block_type == "CrossAttnDownBlockMultiDim": - if cross_attention_dim is None: - raise ValueError("cross_attention_dim must be specified for CrossAttnDownBlock2D") - return CrossAttnDownBlockMultiDim( - num_layers=num_layers, - in_channels=in_channels, - out_channels=out_channels, - temb_channels=temb_channels, - add_downsample=add_downsample, - resnet_eps=resnet_eps, - resnet_act_fn=resnet_act_fn, - resnet_groups=resnet_groups, - downsample_padding=downsample_padding, - cross_attention_dim=cross_attention_dim, - attn_num_head_channels=attn_num_head_channels, - ) - - def set_attention_slice(self, slice_size): - if slice_size is not None and self.config.attention_head_dim % slice_size != 0: - raise ValueError( - f"Make sure slice_size {slice_size} is a divisor of " - f"the number of heads used in cross_attention {self.config.attention_head_dim}" - ) - if slice_size is not None and slice_size > self.config.attention_head_dim: - raise ValueError( - f"Chunk_size {slice_size} has to be smaller or equal to " - f"the number of heads used in cross_attention {self.config.attention_head_dim}" - ) - - for block in self.down_blocks: - if hasattr(block, "attentions") and block.attentions is not None: - block.set_attention_slice(slice_size) - - self.mid_block.set_attention_slice(slice_size) - - for block in self.up_blocks: - if hasattr(block, "attentions") and block.attentions is not None: - block.set_attention_slice(slice_size) - - def set_use_memory_efficient_attention_xformers(self, use_memory_efficient_attention_xformers: bool): - for block in self.down_blocks: - if hasattr(block, "attentions") and block.attentions is not None: - block.set_use_memory_efficient_attention_xformers(use_memory_efficient_attention_xformers) - - self.mid_block.set_use_memory_efficient_attention_xformers(use_memory_efficient_attention_xformers) - - for block in self.up_blocks: - if hasattr(block, "attentions") and block.attentions is not None: - block.set_use_memory_efficient_attention_xformers(use_memory_efficient_attention_xformers) - - def _set_gradient_checkpointing(self, module, value=False): - if isinstance( - module, (CrossAttnDownBlockMultiDim, DownBlockMultiDim, CrossAttnUpBlockMultiDim, UpBlockMultiDim) - ): - module.gradient_checkpointing = value - - def forward( - self, - sample: torch.FloatTensor, - timestep: Union[torch.Tensor, float, int], - encoder_hidden_states: torch.Tensor, - return_dict: bool = True, - ) 
-> Union[UNet2DConditionOutput, Tuple]: - r""" - Args: - sample (`torch.FloatTensor`): (batch, channel, height, width) noisy inputs tensor - timestep (`torch.FloatTensor` or `float` or `int`): (batch) timesteps - encoder_hidden_states (`torch.FloatTensor`): (batch, channel, height, width) encoder hidden states - return_dict (`bool`, *optional*, defaults to `True`): - Whether or not to return a [`models.unet_2d_condition.UNet2DConditionOutput`] instead of a plain tuple. - - Returns: - [`~models.unet_2d_condition.UNet2DConditionOutput`] or `tuple`: - [`~models.unet_2d_condition.UNet2DConditionOutput`] if `return_dict` is True, otherwise a `tuple`. When - returning a tuple, the first element is the sample tensor. - """ - # By default samples have to be AT least a multiple of the overall upsampling factor. - # The overall upsampling factor is equal to 2 ** (# num of upsampling layears). - # However, the upsampling interpolation output size can be forced to fit any upsampling size - # on the fly if necessary. - default_overall_up_factor = 2**self.num_upsamplers - - # upsample size should be forwarded when sample is not a multiple of `default_overall_up_factor` - forward_upsample_size = False - upsample_size = None - - if any(s % default_overall_up_factor != 0 for s in sample.shape[-2:]): - logger.info("Forward upsample size to force interpolation output size.") - forward_upsample_size = True - - # 0. center input if necessary - if self.config.center_input_sample: - sample = 2 * sample - 1.0 - - # 1. time - timesteps = timestep - if not torch.is_tensor(timesteps): - # TODO: this requires sync between CPU and GPU. So try to pass timesteps as tensors if you can - timesteps = torch.tensor([timesteps], dtype=torch.long, device=sample.device) - elif torch.is_tensor(timesteps) and len(timesteps.shape) == 0: - timesteps = timesteps[None].to(sample.device) - - # broadcast to batch dimension in a way that's compatible with ONNX/Core ML - timesteps = timesteps.expand(sample.shape[0]) - - t_emb = self.time_proj(timesteps) - - # timesteps does not contain any weights and will always return f32 tensors - # but time_embedding might actually be running in fp16. so we need to cast here. - # there might be better ways to encapsulate this. - t_emb = t_emb.to(dtype=self.dtype) - emb = self.time_embedding(t_emb) - - # 2. pre-process - sample = self.conv_in(sample) - - # 3. down - down_block_res_samples = (sample,) - for downsample_block in self.down_blocks: - if hasattr(downsample_block, "attentions") and downsample_block.attentions is not None: - sample, res_samples = downsample_block( - hidden_states=sample, - temb=emb, - encoder_hidden_states=encoder_hidden_states, - ) - else: - sample, res_samples = downsample_block(hidden_states=sample, temb=emb) - - down_block_res_samples += res_samples - - # 4. mid - sample = self.mid_block(sample, emb, encoder_hidden_states=encoder_hidden_states) - - # 5. 
up - for i, upsample_block in enumerate(self.up_blocks): - is_final_block = i == len(self.up_blocks) - 1 - - res_samples = down_block_res_samples[-len(upsample_block.resnets) :] - down_block_res_samples = down_block_res_samples[: -len(upsample_block.resnets)] - - # if we have not reached the final block and need to forward the - # upsample size, we do it here - if not is_final_block and forward_upsample_size: - upsample_size = down_block_res_samples[-1].shape[2:] - - if hasattr(upsample_block, "attentions") and upsample_block.attentions is not None: - sample = upsample_block( - hidden_states=sample, - temb=emb, - res_hidden_states_tuple=res_samples, - encoder_hidden_states=encoder_hidden_states, - upsample_size=upsample_size, - ) - else: - sample = upsample_block( - hidden_states=sample, temb=emb, res_hidden_states_tuple=res_samples, upsample_size=upsample_size - ) - # 6. post-process - sample = self.conv_norm_out(sample) - sample = self.conv_act(sample) - sample = self.conv_out(sample) - - if not return_dict: - return (sample,) - - return UNet2DConditionOutput(sample=sample) - - -class LinearMultiDim(nn.Linear): - def __init__(self, in_features, out_features, *args, **kwargs): - in_features = [in_features] if isinstance(in_features, int) else list(in_features) - out_features = [out_features] if isinstance(out_features, int) else list(out_features) - self.in_features_multidim = in_features - self.out_features_multidim = out_features - super().__init__(np.array(in_features).prod(), np.array(out_features).prod(), *args, **kwargs) - - def forward(self, x): - shape = x.shape - n = len(self.in_features_multidim) - x = x.view(*shape[0:-n], self.in_features) - y = super().forward(x) - y = y.view(*shape[0:-n], *self.out_features_multidim) - return y - - -class UNetMidBlockMultiDimCrossAttn(nn.Module): - pass - - -class DownBlockMultiDim(nn.Module): - pass - - -class CrossAttnDownBlockMultiDim(nn.Module): - pass - - -class UpBlockMultiDim(nn.Module): - pass - - -class CrossAttnUpBlockMultiDim(nn.Module): - pass From f706729d3cefc9ab02f74c09dee6b655a24ec750 Mon Sep 17 00:00:00 2001 From: anton-l Date: Mon, 21 Nov 2022 23:56:35 +0100 Subject: [PATCH 17/49] text unet end to end --- ...onvert_versatile_diffusion_to_diffusers.py | 52 +- src/diffusers/__init__.py | 1 + src/diffusers/models/unet_2d.py | 2 +- src/diffusers/models/unet_2d_condition.py | 2 +- src/diffusers/pipelines/__init__.py | 1 + .../pipelines/versatile_diffusion/__init__.py | 3 + .../versatile_diffusion/modeling_text_unet.py | 210 +++++--- .../pipeline_versatile_diffusion.py | 3 +- ...eline_versatile_diffusion_image_to_text.py | 461 ++++++++++++++++++ ...ine_versatile_diffusion_image_variation.py | 13 +- ...eline_versatile_diffusion_text_to_image.py | 9 +- .../dummy_torch_and_transformers_objects.py | 15 + .../test_versatile_diffusion_image_to_text.py | 56 +++ 13 files changed, 728 insertions(+), 100 deletions(-) create mode 100644 tests/pipelines/versatile_diffusion/test_versatile_diffusion_image_to_text.py diff --git a/scripts/convert_versatile_diffusion_to_diffusers.py b/scripts/convert_versatile_diffusion_to_diffusers.py index ca54f05f05d5..efa01a73b89e 100644 --- a/scripts/convert_versatile_diffusion_to_diffusers.py +++ b/scripts/convert_versatile_diffusion_to_diffusers.py @@ -31,13 +31,14 @@ UNet2DConditionModel, VersatileDiffusionPipeline, ) +from diffusers.pipelines.versatile_diffusion.modeling_text_unet import UNetFlatConditionModel +from diffusers.pipelines.versatile_diffusion.modeling_gpt2_optimus import 
GPT2OptimusForLatentConnector from transformers import ( CLIPFeatureExtractor, CLIPTextModelWithProjection, CLIPTokenizer, CLIPVisionModelWithProjection, ) -from diffusers.pipelines.versatile_diffusion.modeling_text_unet import UNetMultiDimConditionModel SCHEDULER_CONFIG = Namespace( @@ -241,7 +242,7 @@ def assign_to_checkpoint( # proj_attn.weight has to be converted from conv 1D to linear if "proj_attn.weight" in new_path: checkpoint[new_path] = old_checkpoint[path["old"]][:, :, 0] - else: + elif path["old"] in old_checkpoint: checkpoint[new_path] = old_checkpoint[path["old"]] @@ -306,14 +307,14 @@ def create_text_unet_diffusers_config(unet_params): down_block_types = [] resolution = 1 for i in range(len(block_out_channels)): - block_type = "CrossAttnDownBlockMultiDim" if unet_params.with_attn[i] else "DownBlockMultiDim" + block_type = "CrossAttnDownBlockFlat" if unet_params.with_attn[i] else "DownBlockFlat" down_block_types.append(block_type) if i != len(block_out_channels) - 1: resolution *= 2 up_block_types = [] for i in range(len(block_out_channels)): - block_type = "CrossAttnUpBlockMultiDim" if unet_params.with_attn[-i - 1] else "UpBlockMultiDim" + block_type = "CrossAttnUpBlockFlat" if unet_params.with_attn[-i - 1] else "UpBlockFlat" up_block_types.append(block_type) resolution //= 2 @@ -322,8 +323,8 @@ def create_text_unet_diffusers_config(unet_params): config = dict( sample_size=None, - in_channels=unet_params.input_channels, - out_channels=unet_params.output_channels, + in_channels=(unet_params.input_channels, 1, 1), + out_channels=(unet_params.output_channels, 1, 1), down_block_types=tuple(down_block_types), up_block_types=tuple(up_block_types), block_out_channels=tuple(block_out_channels), @@ -450,6 +451,17 @@ def convert_vd_unet_checkpoint(checkpoint, config, unet_key, extract_ema=False): new_checkpoint[f"down_blocks.{block_id}.downsamplers.0.conv.bias"] = unet_state_dict.pop( f"input_blocks.{i}.0.op.bias" ) + elif f"input_blocks.{i}.0.weight" in unet_state_dict: + # text_unet uses linear layers in place of downsamplers + shape = unet_state_dict[f"input_blocks.{i}.0.weight"].shape + if shape[0] != shape[1]: + continue + new_checkpoint[f"down_blocks.{block_id}.downsamplers.0.weight"] = unet_state_dict.pop( + f"input_blocks.{i}.0.weight" + ) + new_checkpoint[f"down_blocks.{block_id}.downsamplers.0.bias"] = unet_state_dict.pop( + f"input_blocks.{i}.0.bias" + ) paths = renew_resnet_paths(resnets) meta_path = {"old": f"input_blocks.{i}.0", "new": f"down_blocks.{block_id}.resnets.{layer_in_block_id}"} @@ -512,10 +524,34 @@ def convert_vd_unet_checkpoint(checkpoint, config, unet_key, extract_ema=False): new_checkpoint[f"up_blocks.{block_id}.upsamplers.0.conv.bias"] = unet_state_dict[ f"output_blocks.{i}.{index}.conv.bias" ] - # Clear attentions as they have been attributed above. if len(attentions) == 2: attentions = [] + elif f"output_blocks.{i}.1.weight" in unet_state_dict: + # text_unet uses linear layers in place of upsamplers + shape = unet_state_dict[f"output_blocks.{i}.1.weight"].shape + if shape[0] != shape[1]: + continue + new_checkpoint[f"up_blocks.{block_id}.upsamplers.0.weight"] = unet_state_dict.pop( + f"output_blocks.{i}.1.weight" + ) + new_checkpoint[f"up_blocks.{block_id}.upsamplers.0.bias"] = unet_state_dict.pop( + f"output_blocks.{i}.1.bias" + ) + # Clear attentions as they have been attributed above. 
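+            # (the two leftover entries are only the upsampler weight/bias key names popped above)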
+ if len(attentions) == 2: + attentions = [] + elif f"output_blocks.{i}.2.weight" in unet_state_dict: + # text_unet uses linear layers in place of upsamplers + shape = unet_state_dict[f"output_blocks.{i}.2.weight"].shape + if shape[0] != shape[1]: + continue + new_checkpoint[f"up_blocks.{block_id}.upsamplers.0.weight"] = unet_state_dict.pop( + f"output_blocks.{i}.2.weight" + ) + new_checkpoint[f"up_blocks.{block_id}.upsamplers.0.bias"] = unet_state_dict.pop( + f"output_blocks.{i}.2.bias" + ) if len(attentions): paths = renew_attention_paths(attentions) @@ -727,7 +763,7 @@ def convert_vd_vae_checkpoint(checkpoint, config): converted_text_unet_checkpoint = convert_vd_unet_checkpoint( checkpoint, text_unet_config, unet_key="model.diffusion_model.unet_text.", extract_ema=args.extract_ema ) - text_unet = UNetMultiDimConditionModel(**text_unet_config) + text_unet = UNetFlatConditionModel(**text_unet_config) text_unet.load_state_dict(converted_text_unet_checkpoint) # Convert the VAE model. diff --git a/src/diffusers/__init__.py b/src/diffusers/__init__.py index b6bd0790e076..bedf36d51665 100644 --- a/src/diffusers/__init__.py +++ b/src/diffusers/__init__.py @@ -76,6 +76,7 @@ VersatileDiffusionImageVariationPipeline, VersatileDiffusionPipeline, VersatileDiffusionTextToImagePipeline, + VersatileDiffusionImageToTextPipeline, VQDiffusionPipeline, ) else: diff --git a/src/diffusers/models/unet_2d.py b/src/diffusers/models/unet_2d.py index 04324057609e..cd7767d10e65 100644 --- a/src/diffusers/models/unet_2d.py +++ b/src/diffusers/models/unet_2d.py @@ -175,7 +175,7 @@ def __init__( num_groups_out = norm_num_groups if norm_num_groups is not None else min(block_out_channels[0] // 4, 32) self.conv_norm_out = nn.GroupNorm(num_channels=block_out_channels[0], num_groups=num_groups_out, eps=norm_eps) self.conv_act = nn.SiLU() - self.conv_out = nn.Conv2d(block_out_channels[0], out_channels, 3, padding=1) + self.conv_out = nn.Conv2d(block_out_channels[0], out_channels, kernel_size=3, padding=1) def forward( self, diff --git a/src/diffusers/models/unet_2d_condition.py b/src/diffusers/models/unet_2d_condition.py index c3f2fb87b60d..5a02a3ba1e7d 100644 --- a/src/diffusers/models/unet_2d_condition.py +++ b/src/diffusers/models/unet_2d_condition.py @@ -201,7 +201,7 @@ def __init__( # out self.conv_norm_out = nn.GroupNorm(num_channels=block_out_channels[0], num_groups=norm_num_groups, eps=norm_eps) self.conv_act = nn.SiLU() - self.conv_out = nn.Conv2d(block_out_channels[0], out_channels, 3, padding=1) + self.conv_out = nn.Conv2d(block_out_channels[0], out_channels, kernel_size=3, padding=1) def set_attention_slice(self, slice_size): if slice_size is not None and self.config.attention_head_dim % slice_size != 0: diff --git a/src/diffusers/pipelines/__init__.py b/src/diffusers/pipelines/__init__.py index 60cde85f7916..a87a94a9a52f 100644 --- a/src/diffusers/pipelines/__init__.py +++ b/src/diffusers/pipelines/__init__.py @@ -28,6 +28,7 @@ VersatileDiffusionImageVariationPipeline, VersatileDiffusionPipeline, VersatileDiffusionTextToImagePipeline, + VersatileDiffusionImageToTextPipeline, ) from .vq_diffusion import VQDiffusionPipeline diff --git a/src/diffusers/pipelines/versatile_diffusion/__init__.py b/src/diffusers/pipelines/versatile_diffusion/__init__.py index 60257f272829..58822a8f0318 100644 --- a/src/diffusers/pipelines/versatile_diffusion/__init__.py +++ b/src/diffusers/pipelines/versatile_diffusion/__init__.py @@ -3,6 +3,9 @@ if is_transformers_available() and is_torch_available(): from .modeling_gpt2_optimus 
import GPT2OptimusForLatentConnector + from .modeling_text_unet import UNetFlatConditionModel + from .pipeline_versatile_diffusion import VersatileDiffusionPipeline from .pipeline_versatile_diffusion_image_variation import VersatileDiffusionImageVariationPipeline from .pipeline_versatile_diffusion_text_to_image import VersatileDiffusionTextToImagePipeline + from .pipeline_versatile_diffusion_image_to_text import VersatileDiffusionImageToTextPipeline diff --git a/src/diffusers/pipelines/versatile_diffusion/modeling_text_unet.py b/src/diffusers/pipelines/versatile_diffusion/modeling_text_unet.py index 5c8aecfeb122..5fddb3dca943 100644 --- a/src/diffusers/pipelines/versatile_diffusion/modeling_text_unet.py +++ b/src/diffusers/pipelines/versatile_diffusion/modeling_text_unet.py @@ -6,8 +6,8 @@ from ...configuration_utils import ConfigMixin, register_to_config from ...modeling_utils import ModelMixin -from ...models.embeddings import TimestepEmbedding, Timesteps from ...models.attention import Transformer2DModel +from ...models.embeddings import TimestepEmbedding, Timesteps from ...models.unet_2d_condition import UNet2DConditionOutput from ...utils import logging @@ -15,7 +15,7 @@ logger = logging.get_logger(__name__) # pylint: disable=invalid-name -def get_down_block_multi_dim( +def get_down_block( down_block_type, num_layers, in_channels, @@ -23,38 +23,45 @@ def get_down_block_multi_dim( temb_channels, add_downsample, resnet_eps, + resnet_act_fn, attn_num_head_channels, resnet_groups=None, cross_attention_dim=None, + downsample_padding=None, ): down_block_type = down_block_type[7:] if down_block_type.startswith("UNetRes") else down_block_type - if down_block_type == "DownBlockMultiDim": - return DownBlockMultiDim( + if down_block_type == "DownBlockFlat": + return DownBlockFlat( num_layers=num_layers, in_channels=in_channels, out_channels=out_channels, temb_channels=temb_channels, add_downsample=add_downsample, resnet_eps=resnet_eps, + resnet_act_fn=resnet_act_fn, resnet_groups=resnet_groups, + downsample_padding=downsample_padding, ) - elif down_block_type == "CrossAttnDownBlockMultiDim": + elif down_block_type == "CrossAttnDownBlockFlat": if cross_attention_dim is None: - raise ValueError("cross_attention_dim must be specified for CrossAttnDownBlockMultiDim") - return CrossAttnDownBlockMultiDim( + raise ValueError("cross_attention_dim must be specified for CrossAttnDownBlockFlat") + return CrossAttnDownBlockFlat( num_layers=num_layers, in_channels=in_channels, out_channels=out_channels, temb_channels=temb_channels, add_downsample=add_downsample, resnet_eps=resnet_eps, + resnet_act_fn=resnet_act_fn, resnet_groups=resnet_groups, + downsample_padding=downsample_padding, cross_attention_dim=cross_attention_dim, attn_num_head_channels=attn_num_head_channels, ) raise ValueError(f"{down_block_type} is not supported.") -def get_up_block_multi_dim( + +def get_up_block( up_block_type, num_layers, in_channels, @@ -63,13 +70,14 @@ def get_up_block_multi_dim( temb_channels, add_upsample, resnet_eps, + resnet_act_fn, attn_num_head_channels, resnet_groups=None, cross_attention_dim=None, ): up_block_type = up_block_type[7:] if up_block_type.startswith("UNetRes") else up_block_type - if up_block_type == "UpBlockMultiDim": - return UpBlockMultiDim( + if up_block_type == "UpBlockFlat": + return UpBlockFlat( num_layers=num_layers, in_channels=in_channels, out_channels=out_channels, @@ -77,12 +85,13 @@ def get_up_block_multi_dim( temb_channels=temb_channels, add_upsample=add_upsample, resnet_eps=resnet_eps, + 
resnet_act_fn=resnet_act_fn, resnet_groups=resnet_groups, ) - elif up_block_type == "CrossAttnUpBlockMultiDim": + elif up_block_type == "CrossAttnUpBlockFlat": if cross_attention_dim is None: - raise ValueError("cross_attention_dim must be specified for CrossAttnUpBlockMultiDim") - return CrossAttnUpBlockMultiDim( + raise ValueError("cross_attention_dim must be specified for CrossAttnUpBlockFlat") + return CrossAttnUpBlockFlat( num_layers=num_layers, in_channels=in_channels, out_channels=out_channels, @@ -90,6 +99,7 @@ def get_up_block_multi_dim( temb_channels=temb_channels, add_upsample=add_upsample, resnet_eps=resnet_eps, + resnet_act_fn=resnet_act_fn, resnet_groups=resnet_groups, cross_attention_dim=cross_attention_dim, attn_num_head_channels=attn_num_head_channels, @@ -97,11 +107,11 @@ def get_up_block_multi_dim( raise ValueError(f"{up_block_type} is not supported.") -# Copied from diffusers.schedulers.scheduling_ddpm.DDPMSchedulerOutput with DDPM->LMSDiscrete -class UNetMultiDimConditionModel(ModelMixin, ConfigMixin): +# Copied from diffusers.models.unet_2d_condition.UNet2DConditionModel with UNet2DConditionModel->UNetFlatConditionModel, nn.Conv2d->LinearMultiDim, Block2D->BlockFlat +class UNetFlatConditionModel(ModelMixin, ConfigMixin): r""" - UNet2DConditionModel is a conditional 2D UNet model that takes in a noisy sample, conditional state, and a timestep - and returns sample shaped output. + UNetFlatConditionModel is a conditional 2D UNet model that takes in a noisy sample, conditional state, and a + timestep and returns sample shaped output. This model inherits from [`ModelMixin`]. Check the superclass documentation for the generic methods the library implements for all the models (such as downloading or saving, etc.) @@ -114,9 +124,9 @@ class UNetMultiDimConditionModel(ModelMixin, ConfigMixin): flip_sin_to_cos (`bool`, *optional*, defaults to `True`): Whether to flip the sin to cos in the time embedding. freq_shift (`int`, *optional*, defaults to 0): The frequency shift to apply to the time embedding. - down_block_types (`Tuple[str]`, *optional*, defaults to `("CrossAttnDownBlock2D", "CrossAttnDownBlock2D", "CrossAttnDownBlock2D", "DownBlock2D")`): + down_block_types (`Tuple[str]`, *optional*, defaults to `("CrossAttnDownBlockFlat", "CrossAttnDownBlockFlat", "CrossAttnDownBlockFlat", "DownBlockFlat")`): The tuple of downsample blocks to use. - up_block_types (`Tuple[str]`, *optional*, defaults to `("UpBlock2D", "CrossAttnUpBlock2D", "CrossAttnUpBlock2D", "CrossAttnUpBlock2D",)`): + up_block_types (`Tuple[str]`, *optional*, defaults to `("UpBlockFlat", "CrossAttnUpBlockFlat", "CrossAttnUpBlockFlat", "CrossAttnUpBlockFlat",)`): The tuple of upsample blocks to use. block_out_channels (`Tuple[int]`, *optional*, defaults to `(320, 640, 1280, 1280)`): The tuple of output channels for each block. 
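In the Flat variant above, LinearMultiDim stands in for the convolution and resampling layers: an
integer channel count is interpreted as a [channels, 4, 1] feature shape, flattened, passed through an
ordinary nn.Linear, and reshaped back, so the same block structure can run on non-spatial text latents.
A rough sketch of that behaviour (the shapes are purely illustrative and the import path assumes the
module as laid out in this patch):

    import torch

    from diffusers.pipelines.versatile_diffusion.modeling_text_unet import LinearMultiDim

    # integers are treated as [channels, 4, 1] feature shapes, i.e. a 1280 -> 2560 nn.Linear
    layer = LinearMultiDim(320, 640)

    # a batch of two "flat" latents shaped (batch, 320, 4, 1) instead of a spatial feature map
    sample = torch.randn(2, 320, 4, 1)

    # forward flattens to (2, 1280), applies the projection, and restores the multi-dim shape
    out = layer(sample)
    print(out.shape)  # torch.Size([2, 640, 4, 1])
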
@@ -142,19 +152,18 @@ def __init__( flip_sin_to_cos: bool = True, freq_shift: int = 0, down_block_types: Tuple[str] = ( - "CrossAttnDownBlockMultiDim", - "CrossAttnDownBlockMultiDim", - "CrossAttnDownBlockMultiDim", - "DownBlockMultiDim", + "CrossAttnDownBlockFlat", + "CrossAttnDownBlockFlat", + "CrossAttnDownBlockFlat", + "DownBlockFlat", ), up_block_types: Tuple[str] = ( - "UpBlockMultiDim", - "CrossAttnUpBlockMultiDim", - "CrossAttnUpBlockMultiDim", - "CrossAttnUpBlockMultiDim", + "UpBlockFlat", + "CrossAttnUpBlockFlat", + "CrossAttnUpBlockFlat", + "CrossAttnUpBlockFlat", ), block_out_channels: Tuple[int] = (320, 640, 1280, 1280), - block_second_dim: Tuple[int] = (4, 4, 4, 4), layers_per_block: int = 2, downsample_padding: int = 1, mid_block_scale_factor: float = 1, @@ -170,7 +179,7 @@ def __init__( time_embed_dim = block_out_channels[0] * 4 # input - self.conv_in = LinearMultiDim([in_channels, 1, 1], [block_out_channels[0], block_second_dim[0], 1]) + self.conv_in = LinearMultiDim(in_channels, block_out_channels[0], kernel_size=3, padding=(1, 1)) # time self.time_proj = Timesteps(block_out_channels[0], flip_sin_to_cos, freq_shift) @@ -187,25 +196,26 @@ def __init__( for i, down_block_type in enumerate(down_block_types): input_channel = output_channel output_channel = block_out_channels[i] - second_dim = block_second_dim[i] is_final_block = i == len(block_out_channels) - 1 - down_block = get_down_block_multi_dim( + down_block = get_down_block( down_block_type, num_layers=layers_per_block, in_channels=input_channel, - out_channels=[output_channel, second_dim, 1], + out_channels=output_channel, temb_channels=time_embed_dim, add_downsample=not is_final_block, resnet_eps=norm_eps, + resnet_act_fn=act_fn, resnet_groups=norm_num_groups, cross_attention_dim=cross_attention_dim, attn_num_head_channels=attention_head_dim, + downsample_padding=downsample_padding, ) self.down_blocks.append(down_block) # mid - self.mid_block = UNetMidBlockMultiDimCrossAttn( + self.mid_block = UNetMidBlockFlatCrossAttn( in_channels=block_out_channels[-1], temb_channels=time_embed_dim, resnet_eps=norm_eps, @@ -237,7 +247,7 @@ def __init__( else: add_upsample = False - up_block = get_up_block_multi_dim( + up_block = get_up_block( up_block_type, num_layers=layers_per_block + 1, in_channels=input_channel, @@ -246,6 +256,7 @@ def __init__( temb_channels=time_embed_dim, add_upsample=add_upsample, resnet_eps=norm_eps, + resnet_act_fn=act_fn, resnet_groups=norm_num_groups, cross_attention_dim=cross_attention_dim, attn_num_head_channels=attention_head_dim, @@ -256,7 +267,7 @@ def __init__( # out self.conv_norm_out = nn.GroupNorm(num_channels=block_out_channels[0], num_groups=norm_num_groups, eps=norm_eps) self.conv_act = nn.SiLU() - self.conv_out = LinearMultiDim(block_out_channels[0], [out_channels, 1, 1]) + self.conv_out = LinearMultiDim(block_out_channels[0], out_channels, kernel_size=3, padding=1) def set_attention_slice(self, slice_size): if slice_size is not None and self.config.attention_head_dim % slice_size != 0: @@ -292,9 +303,7 @@ def set_use_memory_efficient_attention_xformers(self, use_memory_efficient_atten block.set_use_memory_efficient_attention_xformers(use_memory_efficient_attention_xformers) def _set_gradient_checkpointing(self, module, value=False): - if isinstance( - module, (CrossAttnDownBlockMultiDim, DownBlockMultiDim, CrossAttnUpBlockMultiDim, UpBlockMultiDim) - ): + if isinstance(module, (CrossAttnDownBlockFlat, DownBlockFlat, CrossAttnUpBlockFlat, UpBlockFlat)): module.gradient_checkpointing = value 
def forward( @@ -308,7 +317,8 @@ def forward( Args: sample (`torch.FloatTensor`): (batch, channel, height, width) noisy inputs tensor timestep (`torch.FloatTensor` or `float` or `int`): (batch) timesteps - encoder_hidden_states (`torch.FloatTensor`): (batch, channel, height, width) encoder hidden states + encoder_hidden_states (`torch.FloatTensor`): + (batch_size, sequence_length, hidden_size) encoder hidden states return_dict (`bool`, *optional*, defaults to `True`): Whether or not to return a [`models.unet_2d_condition.UNet2DConditionOutput`] instead of a plain tuple. @@ -410,23 +420,25 @@ def forward( class LinearMultiDim(nn.Linear): - def __init__(self, in_features, out_features, second_dim=4, *args, **kwargs): + def __init__(self, in_features, out_features=None, second_dim=4, *args, **kwargs): in_features = [in_features, second_dim, 1] if isinstance(in_features, int) else list(in_features) + if out_features is None: + out_features = in_features out_features = [out_features, second_dim, 1] if isinstance(out_features, int) else list(out_features) self.in_features_multidim = in_features self.out_features_multidim = out_features super().__init__(np.array(in_features).prod(), np.array(out_features).prod()) - def forward(self, x): - shape = x.shape - n = len(self.in_features_multidim) - x = x.view(*shape[0:-n], self.in_features) - y = super().forward(x) - y = y.view(*shape[0:-n], *self.out_features_multidim) - return y + def forward(self, input_tensor, *args, **kwargs): + shape = input_tensor.shape + n_dim = len(self.in_features_multidim) + input_tensor = input_tensor.reshape(*shape[0:-n_dim], self.in_features) + output_tensor = super().forward(input_tensor) + output_tensor = output_tensor.view(*shape[0:-n_dim], *self.out_features_multidim) + return output_tensor -class ResnetBlockMultiDim(nn.Module): +class ResnetBlockFlat(nn.Module): def __init__( self, *, @@ -440,29 +452,31 @@ def __init__( eps=1e-6, time_embedding_norm="default", use_in_shortcut=None, + second_dim=4, + **kwargs, ): super().__init__() self.pre_norm = pre_norm self.pre_norm = True - in_channels = [in_channels] if isinstance(in_channels, int) else list(in_channels) - in_channels_prod = np.array(in_channels).prod() + in_channels = [in_channels, second_dim, 1] if isinstance(in_channels, int) else list(in_channels) + self.in_channels_prod = np.array(in_channels).prod() self.channels_multidim = in_channels if out_channels is not None: - out_channels = [out_channels] if isinstance(out_channels, int) else list(out_channels) + out_channels = [out_channels, second_dim, 1] if isinstance(out_channels, int) else list(out_channels) out_channels_prod = np.array(out_channels).prod() self.out_channels_multidim = out_channels else: - out_channels_prod = in_channels_prod + out_channels_prod = self.in_channels_prod self.out_channels_multidim = self.channels_multidim self.time_embedding_norm = time_embedding_norm if groups_out is None: groups_out = groups - self.norm1 = torch.nn.GroupNorm(num_groups=groups, num_channels=in_channels_prod, eps=eps, affine=True) - self.conv1 = torch.nn.Conv2d(in_channels_prod, out_channels_prod, kernel_size=1, padding=0) + self.norm1 = torch.nn.GroupNorm(num_groups=groups, num_channels=self.in_channels_prod, eps=eps, affine=True) + self.conv1 = torch.nn.Conv2d(self.in_channels_prod, out_channels_prod, kernel_size=1, padding=0) if temb_channels is not None: self.time_emb_proj = torch.nn.Linear(temb_channels, out_channels_prod) @@ -475,15 +489,20 @@ def __init__( self.nonlinearity = nn.SiLU() - 
self.use_in_shortcut = in_channels_prod != out_channels_prod if use_in_shortcut is None else use_in_shortcut + self.use_in_shortcut = self.in_channels_prod != out_channels_prod if use_in_shortcut is None else use_in_shortcut self.conv_shortcut = None if self.use_in_shortcut: self.conv_shortcut = torch.nn.Conv2d( - in_channels_prod, out_channels_prod, kernel_size=1, stride=1, padding=0 + self.in_channels_prod, out_channels_prod, kernel_size=1, stride=1, padding=0 ) def forward(self, input_tensor, temb): + shape = input_tensor.shape + n_dim = len(self.channels_multidim) + input_tensor = input_tensor.reshape(*shape[0:-n_dim], self.in_channels_prod, 1, 1) + input_tensor = input_tensor.view(-1, self.in_channels_prod, 1, 1) + hidden_states = input_tensor hidden_states = self.norm1(hidden_states) @@ -505,10 +524,15 @@ def forward(self, input_tensor, temb): output_tensor = input_tensor + hidden_states + output_tensor = output_tensor.view(*shape[0:-n_dim], -1) + output_tensor = output_tensor.view(*shape[0:-n_dim], *self.out_channels_multidim) + + print("resblock.output_tensor", output_tensor.abs().sum()) return output_tensor -class DownBlockMultiDim(nn.Module): +# Copied from diffusers.models.unet_2d_blocks.DownBlock2D with DownBlock2D->DownBlockFlat, ResnetBlock2D->ResnetBlockFlat, Downsample2D->LinearMultiDim +class DownBlockFlat(nn.Module): def __init__( self, in_channels: int, @@ -518,9 +542,12 @@ def __init__( num_layers: int = 1, resnet_eps: float = 1e-6, resnet_time_scale_shift: str = "default", + resnet_act_fn: str = "swish", resnet_groups: int = 32, resnet_pre_norm: bool = True, + output_scale_factor=1.0, add_downsample=True, + downsample_padding=1, ): super().__init__() resnets = [] @@ -528,7 +555,7 @@ def __init__( for i in range(num_layers): in_channels = in_channels if i == 0 else out_channels resnets.append( - ResnetBlockMultiDim( + ResnetBlockFlat( in_channels=in_channels, out_channels=out_channels, temb_channels=temb_channels, @@ -536,6 +563,8 @@ def __init__( groups=resnet_groups, dropout=dropout, time_embedding_norm=resnet_time_scale_shift, + non_linearity=resnet_act_fn, + output_scale_factor=output_scale_factor, pre_norm=resnet_pre_norm, ) ) @@ -543,7 +572,13 @@ def __init__( self.resnets = nn.ModuleList(resnets) if add_downsample: - self.downsamplers = nn.ModuleList([LinearMultiDim(out_channels, out_channels)]) + self.downsamplers = nn.ModuleList( + [ + LinearMultiDim( + out_channels, use_conv=True, out_channels=out_channels, padding=downsample_padding, name="op" + ) + ] + ) else: self.downsamplers = None @@ -576,7 +611,8 @@ def custom_forward(*inputs): return hidden_states, output_states -class CrossAttnDownBlockMultiDim(nn.Module): +# Copied from diffusers.models.unet_2d_blocks.CrossAttnDownBlock2D with CrossAttnDownBlock2D->CrossAttnDownBlockFlat, ResnetBlock2D->ResnetBlockFlat, Downsample2D->LinearMultiDim +class CrossAttnDownBlockFlat(nn.Module): def __init__( self, in_channels: int, @@ -586,11 +622,14 @@ def __init__( num_layers: int = 1, resnet_eps: float = 1e-6, resnet_time_scale_shift: str = "default", + resnet_act_fn: str = "swish", resnet_groups: int = 32, resnet_pre_norm: bool = True, attn_num_head_channels=1, cross_attention_dim=1280, attention_type="default", + output_scale_factor=1.0, + downsample_padding=1, add_downsample=True, ): super().__init__() @@ -603,7 +642,7 @@ def __init__( for i in range(num_layers): in_channels = in_channels if i == 0 else out_channels resnets.append( - ResnetBlockMultiDim( + ResnetBlockFlat( in_channels=in_channels, 
out_channels=out_channels, temb_channels=temb_channels, @@ -611,14 +650,16 @@ def __init__( groups=resnet_groups, dropout=dropout, time_embedding_norm=resnet_time_scale_shift, + non_linearity=resnet_act_fn, + output_scale_factor=output_scale_factor, pre_norm=resnet_pre_norm, ) ) attentions.append( Transformer2DModel( attn_num_head_channels, - out_channels[0] // attn_num_head_channels, - in_channels=out_channels[0], + out_channels // attn_num_head_channels, + in_channels=out_channels, num_layers=1, cross_attention_dim=cross_attention_dim, norm_num_groups=resnet_groups, @@ -628,7 +669,13 @@ def __init__( self.resnets = nn.ModuleList(resnets) if add_downsample: - self.downsamplers = nn.ModuleList([LinearMultiDim(out_channels, out_channels)]) + self.downsamplers = nn.ModuleList( + [ + LinearMultiDim( + out_channels, use_conv=True, out_channels=out_channels, padding=downsample_padding, name="op" + ) + ] + ) else: self.downsamplers = None @@ -687,7 +734,8 @@ def custom_forward(*inputs): return hidden_states, output_states -class UpBlockMultiDim(nn.Module): +# Copied from diffusers.models.unet_2d_blocks.UpBlock2D with UpBlock2D->UpBlockFlat, ResnetBlock2D->ResnetBlockFlat, Upsample2D->LinearMultiDim +class UpBlockFlat(nn.Module): def __init__( self, in_channels: int, @@ -698,8 +746,10 @@ def __init__( num_layers: int = 1, resnet_eps: float = 1e-6, resnet_time_scale_shift: str = "default", + resnet_act_fn: str = "swish", resnet_groups: int = 32, resnet_pre_norm: bool = True, + output_scale_factor=1.0, add_upsample=True, ): super().__init__() @@ -710,7 +760,7 @@ def __init__( resnet_in_channels = prev_output_channel if i == 0 else out_channels resnets.append( - ResnetBlockMultiDim( + ResnetBlockFlat( in_channels=resnet_in_channels + res_skip_channels, out_channels=out_channels, temb_channels=temb_channels, @@ -718,6 +768,8 @@ def __init__( groups=resnet_groups, dropout=dropout, time_embedding_norm=resnet_time_scale_shift, + non_linearity=resnet_act_fn, + output_scale_factor=output_scale_factor, pre_norm=resnet_pre_norm, ) ) @@ -725,7 +777,7 @@ def __init__( self.resnets = nn.ModuleList(resnets) if add_upsample: - self.upsamplers = nn.ModuleList([LinearMultiDim(out_channels, out_channels)]) + self.upsamplers = nn.ModuleList([LinearMultiDim(out_channels, use_conv=True, out_channels=out_channels)]) else: self.upsamplers = None @@ -757,7 +809,8 @@ def custom_forward(*inputs): return hidden_states -class CrossAttnUpBlockMultiDim(nn.Module): +# Copied from diffusers.models.unet_2d_blocks.CrossAttnUpBlock2D with CrossAttnUpBlock2D->CrossAttnUpBlockFlat, ResnetBlock2D->ResnetBlockFlat, Upsample2D->LinearMultiDim +class CrossAttnUpBlockFlat(nn.Module): def __init__( self, in_channels: int, @@ -768,11 +821,13 @@ def __init__( num_layers: int = 1, resnet_eps: float = 1e-6, resnet_time_scale_shift: str = "default", + resnet_act_fn: str = "swish", resnet_groups: int = 32, resnet_pre_norm: bool = True, attn_num_head_channels=1, cross_attention_dim=1280, attention_type="default", + output_scale_factor=1.0, add_upsample=True, ): super().__init__() @@ -787,7 +842,7 @@ def __init__( resnet_in_channels = prev_output_channel if i == 0 else out_channels resnets.append( - ResnetBlockMultiDim( + ResnetBlockFlat( in_channels=resnet_in_channels + res_skip_channels, out_channels=out_channels, temb_channels=temb_channels, @@ -795,6 +850,8 @@ def __init__( groups=resnet_groups, dropout=dropout, time_embedding_norm=resnet_time_scale_shift, + non_linearity=resnet_act_fn, + output_scale_factor=output_scale_factor, 
pre_norm=resnet_pre_norm, ) ) @@ -812,7 +869,7 @@ def __init__( self.resnets = nn.ModuleList(resnets) if add_upsample: - self.upsamplers = nn.ModuleList([LinearMultiDim(out_channels, out_channels)]) + self.upsamplers = nn.ModuleList([LinearMultiDim(out_channels, use_conv=True, out_channels=out_channels)]) else: self.upsamplers = None @@ -879,7 +936,8 @@ def custom_forward(*inputs): return hidden_states -class UNetMidBlockMultiDimCrossAttn(nn.Module): +# Copied from diffusers.models.unet_2d_blocks.UNetMidBlock2DCrossAttn with UNetMidBlock2DCrossAttn->UNetMidBlockFlatCrossAttn, ResnetBlock2D->ResnetBlockFlat +class UNetMidBlockFlatCrossAttn(nn.Module): def __init__( self, in_channels: int, @@ -888,10 +946,12 @@ def __init__( num_layers: int = 1, resnet_eps: float = 1e-6, resnet_time_scale_shift: str = "default", + resnet_act_fn: str = "swish", resnet_groups: int = 32, resnet_pre_norm: bool = True, attn_num_head_channels=1, attention_type="default", + output_scale_factor=1.0, cross_attention_dim=1280, **kwargs, ): @@ -903,7 +963,7 @@ def __init__( # there is always at least one resnet resnets = [ - ResnetBlockMultiDim( + ResnetBlockFlat( in_channels=in_channels, out_channels=in_channels, temb_channels=temb_channels, @@ -911,6 +971,8 @@ def __init__( groups=resnet_groups, dropout=dropout, time_embedding_norm=resnet_time_scale_shift, + non_linearity=resnet_act_fn, + output_scale_factor=output_scale_factor, pre_norm=resnet_pre_norm, ) ] @@ -928,7 +990,7 @@ def __init__( ) ) resnets.append( - ResnetBlockMultiDim( + ResnetBlockFlat( in_channels=in_channels, out_channels=in_channels, temb_channels=temb_channels, @@ -936,6 +998,8 @@ def __init__( groups=resnet_groups, dropout=dropout, time_embedding_norm=resnet_time_scale_shift, + non_linearity=resnet_act_fn, + output_scale_factor=output_scale_factor, pre_norm=resnet_pre_norm, ) ) diff --git a/src/diffusers/pipelines/versatile_diffusion/pipeline_versatile_diffusion.py b/src/diffusers/pipelines/versatile_diffusion/pipeline_versatile_diffusion.py index 8b8b59bc26cc..89453edcb16e 100644 --- a/src/diffusers/pipelines/versatile_diffusion/pipeline_versatile_diffusion.py +++ b/src/diffusers/pipelines/versatile_diffusion/pipeline_versatile_diffusion.py @@ -9,7 +9,8 @@ from ...pipeline_utils import DiffusionPipeline from ...schedulers import DDIMScheduler, LMSDiscreteScheduler, PNDMScheduler from ...utils import logging -from . import VersatileDiffusionImageVariationPipeline, VersatileDiffusionTextToImagePipeline +from .pipeline_versatile_diffusion_text_to_image import VersatileDiffusionTextToImagePipeline +from .pipeline_versatile_diffusion_image_variation import VersatileDiffusionImageVariationPipeline logger = logging.get_logger(__name__) # pylint: disable=invalid-name diff --git a/src/diffusers/pipelines/versatile_diffusion/pipeline_versatile_diffusion_image_to_text.py b/src/diffusers/pipelines/versatile_diffusion/pipeline_versatile_diffusion_image_to_text.py index e69de29bb2d1..5e49cab2205f 100644 --- a/src/diffusers/pipelines/versatile_diffusion/pipeline_versatile_diffusion_image_to_text.py +++ b/src/diffusers/pipelines/versatile_diffusion/pipeline_versatile_diffusion_image_to_text.py @@ -0,0 +1,461 @@ +# Copyright 2022 The HuggingFace Team. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import inspect +from dataclasses import dataclass +from typing import Callable, List, Optional, Union + +import numpy as np +import torch +import torch.utils.checkpoint + +import PIL +from transformers import CLIPFeatureExtractor, CLIPVisionModelWithProjection, GPT2Tokenizer + +from .modeling_text_unet import UNetFlatConditionModel +from .modeling_gpt2_optimus import GPT2OptimusForLatentConnector +from ...models import AutoencoderKL, UNet2DConditionModel +from ...models.attention import Transformer2DModel +from ...pipeline_utils import DiffusionPipeline, BaseOutput +from ...schedulers import DDIMScheduler, LMSDiscreteScheduler, PNDMScheduler +from ...utils import is_accelerate_available, logging + + +logger = logging.get_logger(__name__) # pylint: disable=invalid-name + + +@dataclass +class TextPipelineOutput(BaseOutput): + """ + Output class for text generation pipelines. + + Args: + text (`List[str]` or `np.ndarray`) + List of generated text of length `batch_size` or a numpy array of tokens of shape `(batch_size, num_tokens)`. + """ + + text: Union[List[str], np.ndarray] + + +class VersatileDiffusionImageToTextPipeline(DiffusionPipeline): + r""" + This model inherits from [`DiffusionPipeline`]. Check the superclass documentation for the generic methods the + library implements for all the pipelines (such as downloading or saving, running on a particular device, etc.) + + Parameters: + vqvae ([`VQModel`]): + Vector-quantized (VQ) Model to encode and decode images to and from latent representations. + bert ([`LDMBertModel`]): + Text-encoder model based on [BERT](https://huggingface.co/docs/transformers/model_doc/bert) architecture. + tokenizer (`transformers.BertTokenizer`): + Tokenizer of class + [BertTokenizer](https://huggingface.co/docs/transformers/model_doc/bert#transformers.BertTokenizer). + unet ([`UNet2DConditionModel`]): Conditional U-Net architecture to denoise the encoded image latents. + scheduler ([`SchedulerMixin`]): + A scheduler to be used in combination with `unet` to denoise the encoded image latents. Can be one of + [`DDIMScheduler`], [`LMSDiscreteScheduler`], or [`PNDMScheduler`]. 
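+
+    Examples:
+        A minimal, illustrative sketch (the local checkpoint path below is a placeholder for a locally
+        converted Versatile Diffusion checkpoint, not a published model id; the conversion script in this
+        PR produces such a directory):
+
+        ```py
+        >>> import requests
+        >>> import torch
+        >>> from PIL import Image
+        >>> from diffusers import VersatileDiffusionImageToTextPipeline
+
+        >>> pipe = VersatileDiffusionImageToTextPipeline.from_pretrained("scripts/vd_official")
+        >>> pipe = pipe.to("cuda")
+
+        >>> url = "https://raw.githubusercontent.com/SHI-Labs/Versatile-Diffusion/master/assets/benz.jpg"
+        >>> image = Image.open(requests.get(url, stream=True).raw)
+        >>> generator = torch.Generator(device="cuda").manual_seed(0)
+        >>> text = pipe(image=image, generator=generator, guidance_scale=7.5, output_type="str").text
+        ```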
+ """ + image_feature_extractor: CLIPFeatureExtractor + image_encoder: CLIPVisionModelWithProjection + image_unet: UNet2DConditionModel + text_unet: UNetFlatConditionModel + vae: AutoencoderKL + scheduler: Union[DDIMScheduler, PNDMScheduler, LMSDiscreteScheduler] + + def __init__( + self, + image_feature_extractor: CLIPFeatureExtractor, + image_encoder: CLIPVisionModelWithProjection, + image_unet: UNet2DConditionModel, + text_unet: UNetFlatConditionModel, + vae: AutoencoderKL, + scheduler: Union[DDIMScheduler, PNDMScheduler, LMSDiscreteScheduler], + ): + super().__init__() + self.register_modules( + image_feature_extractor=image_feature_extractor, + image_encoder=image_encoder, + image_unet=image_unet, + text_unet=text_unet, + vae=vae, + scheduler=scheduler, + ) + + self.text_vae_decoder = GPT2OptimusForLatentConnector.from_pretrained("fusing/gpt2_optimus") + self.text_vae_tokenizer = GPT2Tokenizer.from_pretrained("fusing/gpt2_optimus") + + def swap_unet_attention_blocks(self): + for name, module in self.image_unet.named_modules(): + if isinstance(module, Transformer2DModel): + parent_name, index = name.rsplit(".", 1) + index = int(index) + self.image_unet.get_submodule(parent_name)[index], self.text_unet.get_submodule(parent_name)[index] = ( + self.text_unet.get_submodule(parent_name)[index], + self.image_unet.get_submodule(parent_name)[index], + ) + + # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline.enable_xformers_memory_efficient_attention with unet->image_unet + def enable_xformers_memory_efficient_attention(self): + r""" + Enable memory efficient attention as implemented in xformers. + + When this option is enabled, you should observe lower GPU memory usage and a potential speed up at inference + time. Speed up at training time is not guaranteed. + + Warning: When Memory Efficient Attention and Sliced attention are both enabled, the Memory Efficient Attention + is used. + """ + self.image_unet.set_use_memory_efficient_attention_xformers(True) + + # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline.disable_xformers_memory_efficient_attention with unet->image_unet + def disable_xformers_memory_efficient_attention(self): + r""" + Disable memory efficient attention as implemented in xformers. + """ + self.image_unet.set_use_memory_efficient_attention_xformers(False) + + # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline.enable_attention_slicing with unet->image_unet + def enable_attention_slicing(self, slice_size: Optional[Union[str, int]] = "auto"): + r""" + Enable sliced attention computation. + + When this option is enabled, the attention module will split the input tensor in slices, to compute attention + in several steps. This is useful to save some memory in exchange for a small speed decrease. + + Args: + slice_size (`str` or `int`, *optional*, defaults to `"auto"`): + When `"auto"`, halves the input to the attention heads, so attention will be computed in two steps. If + a number is provided, uses as many slices as `attention_head_dim // slice_size`. In this case, + `attention_head_dim` must be a multiple of `slice_size`. 
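+
+        Example (illustrative, assuming `pipe` is an already loaded pipeline):
+
+        ```py
+        >>> pipe.enable_attention_slicing()  # "auto": compute attention in two steps
+        >>> pipe.enable_attention_slicing(4)  # or use `attention_head_dim // 4` slices
+        ```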
+ """ + if slice_size == "auto": + # half the attention head size is usually a good trade-off between + # speed and memory + slice_size = self.image_unet.config.attention_head_dim // 2 + self.image_unet.set_attention_slice(slice_size) + + # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline.disable_attention_slicing + def disable_attention_slicing(self): + r""" + Disable sliced attention computation. If `enable_attention_slicing` was previously invoked, this method will go + back to computing attention in one step. + """ + # set slice_size = `None` to disable `attention slicing` + self.enable_attention_slicing(None) + + def enable_sequential_cpu_offload(self, gpu_id=0): + r""" + Offloads all models to CPU using accelerate, significantly reducing memory usage. When called, unet, + text_encoder, vae and safety checker have their state dicts saved to CPU and then are moved to a + `torch.device('meta') and loaded to GPU only when their specific submodule has its `forward` method called. + """ + if is_accelerate_available(): + from accelerate import cpu_offload + else: + raise ImportError("Please install accelerate via `pip install accelerate`") + + device = torch.device(f"cuda:{gpu_id}") + + for cpu_offloaded_model in [self.image_unet, self.text_unet, self.text_encoder, self.vae]: + if cpu_offloaded_model is not None: + cpu_offload(cpu_offloaded_model, device) + + @property + # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline._execution_device with unet->image_unet + def _execution_device(self): + r""" + Returns the device on which the pipeline's models will be executed. After calling + `pipeline.enable_sequential_cpu_offload()` the execution device can only be inferred from Accelerate's module + hooks. + """ + if self.device != torch.device("meta") or not hasattr(self.image_unet, "_hf_hook"): + return self.device + for module in self.image_unet.modules(): + if ( + hasattr(module, "_hf_hook") + and hasattr(module._hf_hook, "execution_device") + and module._hf_hook.execution_device is not None + ): + return torch.device(module._hf_hook.execution_device) + return self.device + + def _encode_prompt(self, prompt, device, num_images_per_prompt, do_classifier_free_guidance, negative_prompt): + r""" + Encodes the prompt into text encoder hidden states. + + Args: + prompt (`str` or `list(int)`): + prompt to be encoded + device: (`torch.device`): + torch device + num_images_per_prompt (`int`): + number of images that should be generated per prompt + do_classifier_free_guidance (`bool`): + whether to use classifier free guidance or not + negative_prompt (`str` or `List[str]`): + The prompt or prompts not to guide the image generation. Ignored when not using guidance (i.e., ignored + if `guidance_scale` is less than `1`). 
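+
+        Note:
+            Despite its name, the "prompt" of this image-to-text pipeline is an image (or a list of
+            images): it is embedded with the CLIP vision encoder rather than a text encoder, and when no
+            `negative_prompt` is given an all-zero (black) image is used as the unconditional input for
+            classifier-free guidance.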
+ """ + + def normalize_embeddings(encoder_output): + embeds = self.image_encoder.vision_model.post_layernorm(encoder_output.last_hidden_state) + embeds = self.image_encoder.visual_projection(embeds) + embeds_pooled = embeds[:, 0:1] + embeds = embeds / torch.norm(embeds_pooled, dim=-1, keepdim=True) + return embeds + + batch_size = len(prompt) if isinstance(prompt, list) else 1 + + # get prompt text embeddings + image_input = self.image_feature_extractor(images=prompt, return_tensors="pt") + image_embeddings = self.image_encoder(image_input.pixel_values.to(self.device)) + image_embeddings = normalize_embeddings(image_embeddings) + + # duplicate image embeddings for each generation per prompt, using mps friendly method + bs_embed, seq_len, _ = image_embeddings.shape + image_embeddings = image_embeddings.repeat(1, num_images_per_prompt, 1) + image_embeddings = image_embeddings.view(bs_embed * num_images_per_prompt, seq_len, -1) + + # get unconditional embeddings for classifier free guidance + if do_classifier_free_guidance: + uncond_images: List[str] + if negative_prompt is None: + uncond_images = [np.zeros((512, 512, 3))] * batch_size + elif type(prompt) is not type(negative_prompt): + raise TypeError( + f"`negative_prompt` should be the same type to `prompt`, but got {type(negative_prompt)} !=" + f" {type(prompt)}." + ) + elif isinstance(negative_prompt, PIL.Image.Image): + uncond_images = [negative_prompt] + elif batch_size != len(negative_prompt): + raise ValueError( + f"`negative_prompt`: {negative_prompt} has batch size {len(negative_prompt)}, but `prompt`:" + f" {prompt} has batch size {batch_size}. Please make sure that passed `negative_prompt` matches" + " the batch size of `prompt`." + ) + else: + uncond_images = negative_prompt + + uncond_images = self.image_feature_extractor(images=uncond_images, return_tensors="pt") + uncond_embeddings = self.image_encoder(uncond_images.pixel_values.to(self.device)) + uncond_embeddings = normalize_embeddings(uncond_embeddings) + + # duplicate unconditional embeddings for each generation per prompt, using mps friendly method + seq_len = uncond_embeddings.shape[1] + uncond_embeddings = uncond_embeddings.repeat(1, num_images_per_prompt, 1) + uncond_embeddings = uncond_embeddings.view(batch_size * num_images_per_prompt, seq_len, -1) + + # For classifier free guidance, we need to do two forward passes. + # Here we concatenate the unconditional and conditional embeddings into a single batch + # to avoid doing two forward passes + image_embeddings = torch.cat([uncond_embeddings, image_embeddings]) + + return image_embeddings + + # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline.decode_latents + def decode_latents(self, latents): + latents = latents.reshape(latents.shape[:-2]) + self.text_vae_decoder = self.text_vae_decoder.to(self._execution_device) + bos_token = self.text_vae_tokenizer.bos_token_id + output = self.text_vae_decoder.generate(bos_token_id=bos_token, past=latents) + return output + + # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline.prepare_extra_step_kwargs + def prepare_extra_step_kwargs(self, generator, eta): + # prepare extra kwargs for the scheduler step, since not all schedulers have the same signature + # eta (η) is only used with the DDIMScheduler, it will be ignored for other schedulers. 
+ # eta corresponds to η in DDIM paper: https://arxiv.org/abs/2010.02502 + # and should be between [0, 1] + + accepts_eta = "eta" in set(inspect.signature(self.scheduler.step).parameters.keys()) + extra_step_kwargs = {} + if accepts_eta: + extra_step_kwargs["eta"] = eta + + # check if the scheduler accepts generator + accepts_generator = "generator" in set(inspect.signature(self.scheduler.step).parameters.keys()) + if accepts_generator: + extra_step_kwargs["generator"] = generator + return extra_step_kwargs + + def check_inputs(self, image, callback_steps): + if not isinstance(image, PIL.Image.Image) and not isinstance(image, torch.Tensor): + raise ValueError(f"`image` has to be of type `PIL.Image.Image` or `torch.Tensor` but is {type(image)}") + + if (callback_steps is None) or ( + callback_steps is not None and (not isinstance(callback_steps, int) or callback_steps <= 0) + ): + raise ValueError( + f"`callback_steps` has to be a positive integer but is {callback_steps} of type" + f" {type(callback_steps)}." + ) + + def prepare_latents(self, batch_size, num_channels_latents, dtype, device, generator, latents=None): + shape = (batch_size, num_channels_latents, 1, 1) + if latents is None: + if device.type == "mps": + # randn does not work reproducibly on mps + latents = torch.randn(shape, generator=generator, device="cpu", dtype=dtype).to(device) + else: + latents = torch.randn(shape, generator=generator, device=device, dtype=dtype) + else: + if latents.shape != shape: + raise ValueError(f"Unexpected latents shape, got {latents.shape}, expected {shape}") + latents = latents.to(device) + + # scale the initial noise by the standard deviation required by the scheduler + latents = latents * self.scheduler.init_noise_sigma + return latents + + @torch.no_grad() + def __call__( + self, + image: Union[PIL.Image.Image, List[PIL.Image.Image], torch.Tensor], + num_inference_steps: int = 50, + guidance_scale: float = 7.5, + negative_prompt: Optional[Union[str, List[str]]] = None, + num_images_per_prompt: Optional[int] = 1, + eta: float = 0.0, + generator: Optional[torch.Generator] = None, + latents: Optional[torch.FloatTensor] = None, + output_type: Optional[str] = "str", + return_dict: bool = True, + callback: Optional[Callable[[int, int, torch.FloatTensor], None]] = None, + callback_steps: Optional[int] = 1, + **kwargs, + ): + r""" + Function invoked when calling the pipeline for generation. + + Args: + image (`PIL.Image.Image`, `List[PIL.Image.Image]` or `torch.Tensor`): + The image prompt or prompts to guide the image generation. + num_inference_steps (`int`, *optional*, defaults to 50): + The number of denoising steps. More denoising steps usually lead to a higher quality image at the + expense of slower inference. + guidance_scale (`float`, *optional*, defaults to 7.5): + Guidance scale as defined in [Classifier-Free Diffusion Guidance](https://arxiv.org/abs/2207.12598). + `guidance_scale` is defined as `w` of equation 2. of [Imagen + Paper](https://arxiv.org/pdf/2205.11487.pdf). Guidance scale is enabled by setting `guidance_scale > + 1`. Higher guidance scale encourages to generate images that are closely linked to the text `prompt`, + usually at the expense of lower image quality. + negative_prompt (`str` or `List[str]`, *optional*): + The prompt or prompts not to guide the image generation. Ignored when not using guidance (i.e., ignored + if `guidance_scale` is less than `1`). + num_images_per_prompt (`int`, *optional*, defaults to 1): + The number of images to generate per prompt. 
+ eta (`float`, *optional*, defaults to 0.0): + Corresponds to parameter eta (η) in the DDIM paper: https://arxiv.org/abs/2010.02502. Only applies to + [`schedulers.DDIMScheduler`], will be ignored for others. + generator (`torch.Generator`, *optional*): + A [torch generator](https://pytorch.org/docs/stable/generated/torch.Generator.html) to make generation + deterministic. + latents (`torch.FloatTensor`, *optional*): + Pre-generated noisy latents, sampled from a Gaussian distribution, to be used as inputs for image + generation. Can be used to tweak the same generation with different prompts. If not provided, a latents + tensor will ge generated by sampling using the supplied random `generator`. + output_type (`str`, *optional*, defaults to `"pil"`): + The output format of the generate image. Choose between + [PIL](https://pillow.readthedocs.io/en/stable/): `PIL.Image.Image` or `np.array`. + return_dict (`bool`, *optional*, defaults to `True`): + Whether or not to return a [`~pipelines.stable_diffusion.StableDiffusionPipelineOutput`] instead of a + plain tuple. + callback (`Callable`, *optional*): + A function that will be called every `callback_steps` steps during inference. The function will be + called with the following arguments: `callback(step: int, timestep: int, latents: torch.FloatTensor)`. + callback_steps (`int`, *optional*, defaults to 1): + The frequency at which the `callback` function will be called. If not specified, the callback will be + called at every step. + + Returns: + [`~pipelines.stable_diffusion.StableDiffusionPipelineOutput`] or `tuple`: + [`~pipelines.stable_diffusion.StableDiffusionPipelineOutput`] if `return_dict` is True, otherwise a `tuple. + When returning a tuple, the first element is a list with the generated images, and the second element is a + list of `bool`s denoting whether the corresponding generated image likely represents "not-safe-for-work" + (nsfw) content, according to the `safety_checker`. + """ + + # 1. Check inputs. Raise error if not correct + self.check_inputs(image, callback_steps) + + # 2. Define call parameters + batch_size = 1 if isinstance(image, PIL.Image.Image) else len(image) + device = self._execution_device + # here `guidance_scale` is defined analog to the guidance weight `w` of equation (2) + # of the Imagen paper: https://arxiv.org/pdf/2205.11487.pdf . `guidance_scale = 1` + # corresponds to doing no classifier free guidance. + do_classifier_free_guidance = guidance_scale > 1.0 + + # 3. Encode input prompt + image_embeddings = self._encode_prompt( + image, device, num_images_per_prompt, do_classifier_free_guidance, negative_prompt + ) + + # 4. Prepare timesteps + self.scheduler.set_timesteps(num_inference_steps, device=device) + timesteps = self.scheduler.timesteps + + # 5. Prepare latent variables + num_channels_latents = self.text_unet.in_channels[0] + latents = self.prepare_latents( + batch_size * num_images_per_prompt, + num_channels_latents, + image_embeddings.dtype, + device, + generator, + latents, + ) + + # 6. Prepare extra step kwargs. + extra_step_kwargs = self.prepare_extra_step_kwargs(generator, eta) + + # 7. Swap the attention blocks between the image and text UNets + self.swap_unet_attention_blocks() + + # 8. 
Denoising loop + for i, t in enumerate(self.progress_bar(timesteps)): + # expand the latents if we are doing classifier free guidance + latent_model_input = torch.cat([latents] * 2) if do_classifier_free_guidance else latents + latent_model_input = self.scheduler.scale_model_input(latent_model_input, t) + + # predict the noise residual + noise_pred = self.text_unet(latent_model_input, t, encoder_hidden_states=image_embeddings).sample + + # perform guidance + if do_classifier_free_guidance: + noise_pred_uncond, noise_pred_text = noise_pred.chunk(2) + noise_pred = noise_pred_uncond + guidance_scale * (noise_pred_text - noise_pred_uncond) + + # compute the previous noisy sample x_t -> x_t-1 + latents = self.scheduler.step(noise_pred, t, latents, **extra_step_kwargs).prev_sample + + # call the callback, if provided + if callback is not None and i % callback_steps == 0: + callback(i, t, latents) + + # 9. Swap the attention blocks backs in case the UNets are reused in another pipeline + self.swap_unet_attention_blocks() + + # 10. Post-processing + text = self.decode_latents(latents) + + # 11. Convert to strings + if output_type == "str": + text = self.text_vae_tokenizer.decode(text) + + if not return_dict: + return (text,) + + return TextPipelineOutput(text=text) diff --git a/src/diffusers/pipelines/versatile_diffusion/pipeline_versatile_diffusion_image_variation.py b/src/diffusers/pipelines/versatile_diffusion/pipeline_versatile_diffusion_image_variation.py index e37010d1f811..bf764f47ae6c 100644 --- a/src/diffusers/pipelines/versatile_diffusion/pipeline_versatile_diffusion_image_variation.py +++ b/src/diffusers/pipelines/versatile_diffusion/pipeline_versatile_diffusion_image_variation.py @@ -22,8 +22,7 @@ import PIL from transformers import CLIPFeatureExtractor, CLIPVisionModelWithProjection -from ...models import AutoencoderKL, UNet2DConditionModel, VQModel -from ...models.attention import Transformer2DModel +from ...models import AutoencoderKL, UNet2DConditionModel from ...pipeline_utils import DiffusionPipeline, ImagePipelineOutput from ...schedulers import DDIMScheduler, LMSDiscreteScheduler, PNDMScheduler from ...utils import is_accelerate_available, logging @@ -73,16 +72,6 @@ def __init__( scheduler=scheduler, ) - def swap_unet_attention_blocks(self): - for name, module in self.image_unet.named_modules(): - if isinstance(module, Transformer2DModel): - parent_name, index = name.rsplit(".", 1) - index = int(index) - self.image_unet.get_submodule(parent_name)[index], self.text_unet.get_submodule(parent_name)[index] = ( - self.text_unet.get_submodule(parent_name)[index], - self.image_unet.get_submodule(parent_name)[index], - ) - # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline.enable_xformers_memory_efficient_attention with unet->image_unet def enable_xformers_memory_efficient_attention(self): r""" diff --git a/src/diffusers/pipelines/versatile_diffusion/pipeline_versatile_diffusion_text_to_image.py b/src/diffusers/pipelines/versatile_diffusion/pipeline_versatile_diffusion_text_to_image.py index 991b58c357b0..d28c88cb2915 100644 --- a/src/diffusers/pipelines/versatile_diffusion/pipeline_versatile_diffusion_text_to_image.py +++ b/src/diffusers/pipelines/versatile_diffusion/pipeline_versatile_diffusion_text_to_image.py @@ -20,7 +20,8 @@ from transformers import CLIPFeatureExtractor, CLIPTextModelWithProjection, CLIPTokenizer -from ...models import AutoencoderKL, UNet2DConditionModel, VQModel +from .modeling_text_unet import 
UNetFlatConditionModel +from ...models import UNet2DConditionModel, AutoencoderKL from ...models.attention import Transformer2DModel from ...pipeline_utils import DiffusionPipeline, ImagePipelineOutput from ...schedulers import DDIMScheduler, LMSDiscreteScheduler, PNDMScheduler @@ -52,7 +53,7 @@ class VersatileDiffusionTextToImagePipeline(DiffusionPipeline): image_feature_extractor: CLIPFeatureExtractor text_encoder: CLIPTextModelWithProjection image_unet: UNet2DConditionModel - text_unet: UNet2DConditionModel + text_unet: UNetFlatConditionModel vae: AutoencoderKL scheduler: Union[DDIMScheduler, PNDMScheduler, LMSDiscreteScheduler] @@ -61,8 +62,8 @@ def __init__( tokenizer: CLIPTokenizer, text_encoder: CLIPTextModelWithProjection, image_unet: UNet2DConditionModel, - text_unet: UNet2DConditionModel, - vae: Union[VQModel, AutoencoderKL], + text_unet: UNetFlatConditionModel, + vae: AutoencoderKL, scheduler: Union[DDIMScheduler, PNDMScheduler, LMSDiscreteScheduler], ): super().__init__() diff --git a/src/diffusers/utils/dummy_torch_and_transformers_objects.py b/src/diffusers/utils/dummy_torch_and_transformers_objects.py index 2ad0ead4403f..7aa12e46b4fd 100644 --- a/src/diffusers/utils/dummy_torch_and_transformers_objects.py +++ b/src/diffusers/utils/dummy_torch_and_transformers_objects.py @@ -139,6 +139,21 @@ def from_pretrained(cls, *args, **kwargs): requires_backends(cls, ["torch", "transformers"]) +class VersatileDiffusionPipeline(metaclass=DummyObject): + _backends = ["torch", "transformers"] + + def __init__(self, *args, **kwargs): + requires_backends(self, ["torch", "transformers"]) + + @classmethod + def from_config(cls, *args, **kwargs): + requires_backends(cls, ["torch", "transformers"]) + + @classmethod + def from_pretrained(cls, *args, **kwargs): + requires_backends(cls, ["torch", "transformers"]) + + class VersatileDiffusionTextToImagePipeline(metaclass=DummyObject): _backends = ["torch", "transformers"] diff --git a/tests/pipelines/versatile_diffusion/test_versatile_diffusion_image_to_text.py b/tests/pipelines/versatile_diffusion/test_versatile_diffusion_image_to_text.py new file mode 100644 index 000000000000..f8ec184c7756 --- /dev/null +++ b/tests/pipelines/versatile_diffusion/test_versatile_diffusion_image_to_text.py @@ -0,0 +1,56 @@ +# coding=utf-8 +# Copyright 2022 HuggingFace Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
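+
+# NOTE: these tests exercise the image-to-text ("image captioning") flow of Versatile Diffusion. The
+# fast test class below is a placeholder that only inherits the shared `PipelineTesterMixin`; the slow
+# test loads a locally converted checkpoint and runs the full denoising loop plus GPT-2 Optimus decoding.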
+ +import unittest + +import numpy as np +import torch + +from diffusers import VersatileDiffusionImageToTextPipeline +from diffusers.utils.testing_utils import load_image, require_torch_gpu, slow, torch_device + +from ...test_pipelines_common import PipelineTesterMixin + + +torch.backends.cuda.matmul.allow_tf32 = False + + +class VersatileDiffusionImageVariationPipelineFastTests(PipelineTesterMixin, unittest.TestCase): + pass + + +@slow +@require_torch_gpu +class VersatileDiffusionImageVariationPipelineIntegrationTests(unittest.TestCase): + def test_inference_image_to_text(self): + pipe = VersatileDiffusionImageToTextPipeline.from_pretrained("scripts/vd_official") + pipe.to(torch_device) + pipe.set_progress_bar_config(disable=None) + + image_prompt = load_image( + "https://raw.githubusercontent.com/SHI-Labs/Versatile-Diffusion/master/assets/benz.jpg" + ) + generator = torch.Generator(device=torch_device).manual_seed(0) + tokens = pipe( + image=image_prompt, + generator=generator, + guidance_scale=7.5, + num_inference_steps=50, + output_type="numpy", + ).text + + assert tokens.shape == (1, 30) + expected_tokens = np.array([0, 1, 2, 3, 4, 5, 6, 7]) + assert self.assertItemsEqual(tokens[0] , expected_tokens) From bf8f2fb2c9a2f66f2ad650a0480ad50f19190d48 Mon Sep 17 00:00:00 2001 From: anton-l Date: Tue, 22 Nov 2022 01:47:08 +0100 Subject: [PATCH 18/49] update tests --- scripts/convert_versatile_diffusion_to_diffusers.py | 2 -- src/diffusers/__init__.py | 2 +- src/diffusers/pipelines/__init__.py | 2 +- src/diffusers/pipelines/versatile_diffusion/__init__.py | 3 +-- .../pipelines/versatile_diffusion/modeling_text_unet.py | 5 +++-- .../versatile_diffusion/pipeline_versatile_diffusion.py | 2 +- .../pipeline_versatile_diffusion_image_to_text.py | 9 +++++---- .../pipeline_versatile_diffusion_text_to_image.py | 4 ++-- .../test_versatile_diffusion_image_to_text.py | 8 ++++---- 9 files changed, 18 insertions(+), 19 deletions(-) diff --git a/scripts/convert_versatile_diffusion_to_diffusers.py b/scripts/convert_versatile_diffusion_to_diffusers.py index efa01a73b89e..86fb0e7b4c97 100644 --- a/scripts/convert_versatile_diffusion_to_diffusers.py +++ b/scripts/convert_versatile_diffusion_to_diffusers.py @@ -15,7 +15,6 @@ """ Conversion script for the Versatile Stable Diffusion checkpoints. 
""" import argparse -import os from argparse import Namespace import torch @@ -32,7 +31,6 @@ VersatileDiffusionPipeline, ) from diffusers.pipelines.versatile_diffusion.modeling_text_unet import UNetFlatConditionModel -from diffusers.pipelines.versatile_diffusion.modeling_gpt2_optimus import GPT2OptimusForLatentConnector from transformers import ( CLIPFeatureExtractor, CLIPTextModelWithProjection, diff --git a/src/diffusers/__init__.py b/src/diffusers/__init__.py index bedf36d51665..920fa9a348ac 100644 --- a/src/diffusers/__init__.py +++ b/src/diffusers/__init__.py @@ -73,10 +73,10 @@ StableDiffusionInpaintPipeline, StableDiffusionInpaintPipelineLegacy, StableDiffusionPipeline, + VersatileDiffusionImageToTextPipeline, VersatileDiffusionImageVariationPipeline, VersatileDiffusionPipeline, VersatileDiffusionTextToImagePipeline, - VersatileDiffusionImageToTextPipeline, VQDiffusionPipeline, ) else: diff --git a/src/diffusers/pipelines/__init__.py b/src/diffusers/pipelines/__init__.py index a87a94a9a52f..957e6991d5e1 100644 --- a/src/diffusers/pipelines/__init__.py +++ b/src/diffusers/pipelines/__init__.py @@ -25,10 +25,10 @@ StableDiffusionPipeline, ) from .versatile_diffusion import ( + VersatileDiffusionImageToTextPipeline, VersatileDiffusionImageVariationPipeline, VersatileDiffusionPipeline, VersatileDiffusionTextToImagePipeline, - VersatileDiffusionImageToTextPipeline, ) from .vq_diffusion import VQDiffusionPipeline diff --git a/src/diffusers/pipelines/versatile_diffusion/__init__.py b/src/diffusers/pipelines/versatile_diffusion/__init__.py index 58822a8f0318..864756998d2e 100644 --- a/src/diffusers/pipelines/versatile_diffusion/__init__.py +++ b/src/diffusers/pipelines/versatile_diffusion/__init__.py @@ -4,8 +4,7 @@ if is_transformers_available() and is_torch_available(): from .modeling_gpt2_optimus import GPT2OptimusForLatentConnector from .modeling_text_unet import UNetFlatConditionModel - from .pipeline_versatile_diffusion import VersatileDiffusionPipeline + from .pipeline_versatile_diffusion_image_to_text import VersatileDiffusionImageToTextPipeline from .pipeline_versatile_diffusion_image_variation import VersatileDiffusionImageVariationPipeline from .pipeline_versatile_diffusion_text_to_image import VersatileDiffusionTextToImagePipeline - from .pipeline_versatile_diffusion_image_to_text import VersatileDiffusionImageToTextPipeline diff --git a/src/diffusers/pipelines/versatile_diffusion/modeling_text_unet.py b/src/diffusers/pipelines/versatile_diffusion/modeling_text_unet.py index 5fddb3dca943..a1129a4c7c45 100644 --- a/src/diffusers/pipelines/versatile_diffusion/modeling_text_unet.py +++ b/src/diffusers/pipelines/versatile_diffusion/modeling_text_unet.py @@ -489,7 +489,9 @@ def __init__( self.nonlinearity = nn.SiLU() - self.use_in_shortcut = self.in_channels_prod != out_channels_prod if use_in_shortcut is None else use_in_shortcut + self.use_in_shortcut = ( + self.in_channels_prod != out_channels_prod if use_in_shortcut is None else use_in_shortcut + ) self.conv_shortcut = None if self.use_in_shortcut: @@ -527,7 +529,6 @@ def forward(self, input_tensor, temb): output_tensor = output_tensor.view(*shape[0:-n_dim], -1) output_tensor = output_tensor.view(*shape[0:-n_dim], *self.out_channels_multidim) - print("resblock.output_tensor", output_tensor.abs().sum()) return output_tensor diff --git a/src/diffusers/pipelines/versatile_diffusion/pipeline_versatile_diffusion.py b/src/diffusers/pipelines/versatile_diffusion/pipeline_versatile_diffusion.py index 89453edcb16e..88800417b14b 100644 --- 
a/src/diffusers/pipelines/versatile_diffusion/pipeline_versatile_diffusion.py +++ b/src/diffusers/pipelines/versatile_diffusion/pipeline_versatile_diffusion.py @@ -9,8 +9,8 @@ from ...pipeline_utils import DiffusionPipeline from ...schedulers import DDIMScheduler, LMSDiscreteScheduler, PNDMScheduler from ...utils import logging -from .pipeline_versatile_diffusion_text_to_image import VersatileDiffusionTextToImagePipeline from .pipeline_versatile_diffusion_image_variation import VersatileDiffusionImageVariationPipeline +from .pipeline_versatile_diffusion_text_to_image import VersatileDiffusionTextToImagePipeline logger = logging.get_logger(__name__) # pylint: disable=invalid-name diff --git a/src/diffusers/pipelines/versatile_diffusion/pipeline_versatile_diffusion_image_to_text.py b/src/diffusers/pipelines/versatile_diffusion/pipeline_versatile_diffusion_image_to_text.py index 5e49cab2205f..ac384d8fb3a2 100644 --- a/src/diffusers/pipelines/versatile_diffusion/pipeline_versatile_diffusion_image_to_text.py +++ b/src/diffusers/pipelines/versatile_diffusion/pipeline_versatile_diffusion_image_to_text.py @@ -23,13 +23,13 @@ import PIL from transformers import CLIPFeatureExtractor, CLIPVisionModelWithProjection, GPT2Tokenizer -from .modeling_text_unet import UNetFlatConditionModel -from .modeling_gpt2_optimus import GPT2OptimusForLatentConnector from ...models import AutoencoderKL, UNet2DConditionModel from ...models.attention import Transformer2DModel -from ...pipeline_utils import DiffusionPipeline, BaseOutput +from ...pipeline_utils import BaseOutput, DiffusionPipeline from ...schedulers import DDIMScheduler, LMSDiscreteScheduler, PNDMScheduler from ...utils import is_accelerate_available, logging +from .modeling_gpt2_optimus import GPT2OptimusForLatentConnector +from .modeling_text_unet import UNetFlatConditionModel logger = logging.get_logger(__name__) # pylint: disable=invalid-name @@ -42,7 +42,8 @@ class TextPipelineOutput(BaseOutput): Args: text (`List[str]` or `np.ndarray`) - List of generated text of length `batch_size` or a numpy array of tokens of shape `(batch_size, num_tokens)`. + List of generated text of length `batch_size` or a numpy array of tokens of shape `(batch_size, + num_tokens)`. 
""" text: Union[List[str], np.ndarray] diff --git a/src/diffusers/pipelines/versatile_diffusion/pipeline_versatile_diffusion_text_to_image.py b/src/diffusers/pipelines/versatile_diffusion/pipeline_versatile_diffusion_text_to_image.py index d28c88cb2915..5880dd6d76ba 100644 --- a/src/diffusers/pipelines/versatile_diffusion/pipeline_versatile_diffusion_text_to_image.py +++ b/src/diffusers/pipelines/versatile_diffusion/pipeline_versatile_diffusion_text_to_image.py @@ -20,12 +20,12 @@ from transformers import CLIPFeatureExtractor, CLIPTextModelWithProjection, CLIPTokenizer -from .modeling_text_unet import UNetFlatConditionModel -from ...models import UNet2DConditionModel, AutoencoderKL +from ...models import AutoencoderKL, UNet2DConditionModel from ...models.attention import Transformer2DModel from ...pipeline_utils import DiffusionPipeline, ImagePipelineOutput from ...schedulers import DDIMScheduler, LMSDiscreteScheduler, PNDMScheduler from ...utils import is_accelerate_available, logging +from .modeling_text_unet import UNetFlatConditionModel logger = logging.get_logger(__name__) # pylint: disable=invalid-name diff --git a/tests/pipelines/versatile_diffusion/test_versatile_diffusion_image_to_text.py b/tests/pipelines/versatile_diffusion/test_versatile_diffusion_image_to_text.py index f8ec184c7756..7e5cb92536f6 100644 --- a/tests/pipelines/versatile_diffusion/test_versatile_diffusion_image_to_text.py +++ b/tests/pipelines/versatile_diffusion/test_versatile_diffusion_image_to_text.py @@ -27,15 +27,15 @@ torch.backends.cuda.matmul.allow_tf32 = False -class VersatileDiffusionImageVariationPipelineFastTests(PipelineTesterMixin, unittest.TestCase): +class VersatileDiffusionImageToTextPipelineFastTests(PipelineTesterMixin, unittest.TestCase): pass @slow @require_torch_gpu -class VersatileDiffusionImageVariationPipelineIntegrationTests(unittest.TestCase): +class VersatileDiffusionImageToTextPipelineIntegrationTests(unittest.TestCase): def test_inference_image_to_text(self): - pipe = VersatileDiffusionImageToTextPipeline.from_pretrained("scripts/vd_official") + pipe = VersatileDiffusionImageToTextPipeline.from_pretrained("diffusers/vd-official-test") pipe.to(torch_device) pipe.set_progress_bar_config(disable=None) @@ -53,4 +53,4 @@ def test_inference_image_to_text(self): assert tokens.shape == (1, 30) expected_tokens = np.array([0, 1, 2, 3, 4, 5, 6, 7]) - assert self.assertItemsEqual(tokens[0] , expected_tokens) + assert self.assertItemsEqual(tokens[0], expected_tokens) From e4728c2086e841e13c15106760cd2675dc836143 Mon Sep 17 00:00:00 2001 From: anton-l Date: Tue, 22 Nov 2022 02:05:39 +0100 Subject: [PATCH 19/49] reshape --- .../pipeline_versatile_diffusion_image_to_text.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/diffusers/pipelines/versatile_diffusion/pipeline_versatile_diffusion_image_to_text.py b/src/diffusers/pipelines/versatile_diffusion/pipeline_versatile_diffusion_image_to_text.py index ac384d8fb3a2..216ed9efe6f9 100644 --- a/src/diffusers/pipelines/versatile_diffusion/pipeline_versatile_diffusion_image_to_text.py +++ b/src/diffusers/pipelines/versatile_diffusion/pipeline_versatile_diffusion_image_to_text.py @@ -267,7 +267,7 @@ def normalize_embeddings(encoder_output): # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline.decode_latents def decode_latents(self, latents): - latents = latents.reshape(latents.shape[:-2]) + latents = latents.reshape(latents.shape[:-2]).unsqueeze(1) self.text_vae_decoder = 
self.text_vae_decoder.to(self._execution_device) bos_token = self.text_vae_tokenizer.bos_token_id output = self.text_vae_decoder.generate(bos_token_id=bos_token, past=latents) From 2b7cd87694e320661d5914db8d2dab5e7a02fc1f Mon Sep 17 00:00:00 2001 From: Patrick von Platen Date: Tue, 22 Nov 2022 13:09:26 +0000 Subject: [PATCH 20/49] fix image to text --- .../modeling_gpt2_optimus.py | 130 ++++++++++++++++-- ...eline_versatile_diffusion_image_to_text.py | 5 +- .../test_versatile_diffusion_image_to_text.py | 8 +- 3 files changed, 122 insertions(+), 21 deletions(-) diff --git a/src/diffusers/pipelines/versatile_diffusion/modeling_gpt2_optimus.py b/src/diffusers/pipelines/versatile_diffusion/modeling_gpt2_optimus.py index 02a0ba822c67..647eb841fc80 100644 --- a/src/diffusers/pipelines/versatile_diffusion/modeling_gpt2_optimus.py +++ b/src/diffusers/pipelines/versatile_diffusion/modeling_gpt2_optimus.py @@ -1,8 +1,118 @@ +import math + import torch from torch import nn from transformers.modeling_outputs import CausalLMOutputWithCrossAttentions -from transformers.models.gpt2.modeling_gpt2 import GPT2Block, GPT2PreTrainedModel +from transformers.models.gpt2.modeling_gpt2 import GPT2MLP, GPT2PreTrainedModel +from transformers.pytorch_utils import Conv1D + + +class GPT2OptimusAttention(nn.Module): + def __init__(self, nx, n_ctx, config, scale=False): + super().__init__() + self.output_attentions = config.output_attentions + + n_state = nx # in Attention: n_state=768 (nx=n_embd) + # [switch nx => n_state from Block to Attention to keep identical to TF implem] + assert n_state % config.n_head == 0 + self.register_buffer("bias", torch.tril(torch.ones(n_ctx, n_ctx)).view(1, 1, n_ctx, n_ctx)) + self.n_head = config.n_head + self.split_size = n_state + self.scale = scale + + self.c_attn = Conv1D(n_state * 3, nx) + self.c_proj = Conv1D(n_state, nx) + self.attn_dropout = nn.Dropout(config.attn_pdrop) + self.resid_dropout = nn.Dropout(config.resid_pdrop) + self.pruned_heads = set() + + def _attn(self, q, k, v, attention_mask=None, head_mask=None): + w = torch.matmul(q, k) + if self.scale: + w = w / math.sqrt(v.size(-1)) + nd, ns = w.size(-2), w.size(-1) + b = self.bias[:, :, ns - nd : ns, :ns] + w = w * b - 1e4 * (1 - b) + + if attention_mask is not None: + # Apply the attention mask + w = w + attention_mask + + w = nn.Softmax(dim=-1)(w) + w = self.attn_dropout(w) + + # Mask heads if we want to + if head_mask is not None: + w = w * head_mask + + outputs = [torch.matmul(w, v)] + if self.output_attentions: + outputs.append(w) + return outputs + + def merge_heads(self, x): + x = x.permute(0, 2, 1, 3).contiguous() + new_x_shape = x.size()[:-2] + (x.size(-2) * x.size(-1),) + return x.view(*new_x_shape) # in Tensorflow implem: fct merge_states + + def split_heads(self, x, k=False): + new_x_shape = x.size()[:-1] + (self.n_head, x.size(-1) // self.n_head) + x = x.view(*new_x_shape) # in Tensorflow implem: fct split_states + if k: + return x.permute(0, 2, 3, 1) # (batch, head, head_features, seq_length) + else: + return x.permute(0, 2, 1, 3) # (batch, head, seq_length, head_features) + + def forward(self, x, layer_past=None, attention_mask=None, head_mask=None): + x = self.c_attn(x) + query, key, value = x.split(self.split_size, dim=2) + query = self.split_heads(query) + key = self.split_heads(key, k=True) + value = self.split_heads(value) + + if layer_past is not None: + past_key, past_value = layer_past[0], layer_past[1] # transpose back cf below + + past_key = self.split_heads(past_key, k=True) + past_value = 
self.split_heads(past_value) + # pdb.set_trace() + key = torch.cat((past_key, key), dim=-1) + value = torch.cat((past_value, value), dim=-2) + present = torch.stack((key.transpose(-2, -1), value)) # transpose to have same shapes for stacking + + attn_outputs = self._attn(query, key, value, attention_mask, head_mask) + a = attn_outputs[0] + + a = self.merge_heads(a) + a = self.c_proj(a) + a = self.resid_dropout(a) + + outputs = [a, present] + attn_outputs[1:] + return outputs # a, present, (attentions) + + +class GPT2OptimusBlock(nn.Module): + def __init__(self, config): + super().__init__() + nx = config.n_embd + self.ln_1 = nn.LayerNorm(nx, eps=config.layer_norm_epsilon) + self.attn = GPT2OptimusAttention(nx, config.n_ctx, config, scale=True) + self.ln_2 = nn.LayerNorm(nx, eps=config.layer_norm_epsilon) + self.mlp = GPT2MLP(4 * nx, config) + + def forward(self, x, layer_past=None, attention_mask=None, head_mask=None): + output_attn = self.attn( + self.ln_1(x), layer_past=layer_past, attention_mask=attention_mask, head_mask=head_mask + ) + a = output_attn[0] # output_attn: a, present, (attentions) + + x = x + a + m = self.mlp(self.ln_2(x)) + x = x + m + + outputs = [x] + output_attn[1:] + return outputs # x, present, (attentions) class GPT2OptimusModel(GPT2PreTrainedModel): @@ -17,7 +127,7 @@ def __init__(self, config, latent_as_gpt_emb, latent_as_gpt_memory, latent_size) self.wte = nn.Embedding(config.vocab_size, config.n_embd) self.wpe = nn.Embedding(config.n_positions, config.n_embd) self.drop = nn.Dropout(config.embd_pdrop) - self.h = nn.ModuleList([GPT2Block(config, i) for i in range(config.n_layer)]) + self.h = nn.ModuleList([GPT2OptimusBlock(config) for i in range(config.n_layer)]) self.ln_f = nn.LayerNorm(config.n_embd, eps=config.layer_norm_epsilon) self.linear = nn.Linear( @@ -48,17 +158,11 @@ def forward( if self.latent_as_gpt_memory: past = self.linear(past) - share_latent = False - if share_latent: - # the same latent vector shared by all layers - past = [past.unsqueeze(-2), past.unsqueeze(-2)] # query, key - past = [past] * len(self.h) - past_length = past[0][0].size(-2) - else: - # different latent vectors for each layer - past_split = torch.split(past.unsqueeze(1), self.config.hidden_size, dim=2) - past = list(zip(past_split, past_split)) - past_length = 1 # past[0][0].size(-2) + + # different latent vectors for each layer + past_split = torch.split(past.unsqueeze(1), self.config.hidden_size, dim=2) + past = list(zip(past_split, past_split)) + past_length = 1 # past[0][0].size(-2) else: past_length = 0 past = [None] * len(self.h) diff --git a/src/diffusers/pipelines/versatile_diffusion/pipeline_versatile_diffusion_image_to_text.py b/src/diffusers/pipelines/versatile_diffusion/pipeline_versatile_diffusion_image_to_text.py index 216ed9efe6f9..129134a47944 100644 --- a/src/diffusers/pipelines/versatile_diffusion/pipeline_versatile_diffusion_image_to_text.py +++ b/src/diffusers/pipelines/versatile_diffusion/pipeline_versatile_diffusion_image_to_text.py @@ -265,9 +265,8 @@ def normalize_embeddings(encoder_output): return image_embeddings - # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline.decode_latents def decode_latents(self, latents): - latents = latents.reshape(latents.shape[:-2]).unsqueeze(1) + latents = latents.reshape(latents.shape[:-2]) self.text_vae_decoder = self.text_vae_decoder.to(self._execution_device) bos_token = self.text_vae_tokenizer.bos_token_id output = self.text_vae_decoder.generate(bos_token_id=bos_token, 
past=latents) @@ -454,7 +453,7 @@ def __call__( # 11. Convert to strings if output_type == "str": - text = self.text_vae_tokenizer.decode(text) + text = self.text_vae_tokenizer.batch_decode(text) if not return_dict: return (text,) diff --git a/tests/pipelines/versatile_diffusion/test_versatile_diffusion_image_to_text.py b/tests/pipelines/versatile_diffusion/test_versatile_diffusion_image_to_text.py index 7e5cb92536f6..648ef96758c9 100644 --- a/tests/pipelines/versatile_diffusion/test_versatile_diffusion_image_to_text.py +++ b/tests/pipelines/versatile_diffusion/test_versatile_diffusion_image_to_text.py @@ -43,14 +43,12 @@ def test_inference_image_to_text(self): "https://raw.githubusercontent.com/SHI-Labs/Versatile-Diffusion/master/assets/benz.jpg" ) generator = torch.Generator(device=torch_device).manual_seed(0) - tokens = pipe( + text = pipe( image=image_prompt, generator=generator, guidance_scale=7.5, num_inference_steps=50, - output_type="numpy", + output_type="str", ).text - assert tokens.shape == (1, 30) - expected_tokens = np.array([0, 1, 2, 3, 4, 5, 6, 7]) - assert self.assertItemsEqual(tokens[0], expected_tokens) + assert text == "Corret me" From 7c999fe9640f5035be766b0cd67d935c088056d0 Mon Sep 17 00:00:00 2001 From: Patrick von Platen Date: Tue, 22 Nov 2022 16:50:06 +0000 Subject: [PATCH 21/49] add some first docs --- docs/source/_toctree.yml | 2 + .../api/pipelines/versatile_diffusion.mdx | 82 +++++++++++++++++++ 2 files changed, 84 insertions(+) create mode 100644 docs/source/api/pipelines/versatile_diffusion.mdx diff --git a/docs/source/_toctree.yml b/docs/source/_toctree.yml index c143dab9f5b7..dbd413ac65ae 100644 --- a/docs/source/_toctree.yml +++ b/docs/source/_toctree.yml @@ -110,6 +110,8 @@ title: "Stochastic Karras VE" - local: api/pipelines/dance_diffusion title: "Dance Diffusion" + - local: api/pipelines/versatile_diffusion + title: "Versatile Diffusion" - local: api/pipelines/vq_diffusion title: "VQ Diffusion" - local: api/pipelines/repaint diff --git a/docs/source/api/pipelines/versatile_diffusion.mdx b/docs/source/api/pipelines/versatile_diffusion.mdx new file mode 100644 index 000000000000..e589d9e1aed0 --- /dev/null +++ b/docs/source/api/pipelines/versatile_diffusion.mdx @@ -0,0 +1,82 @@ + + +# VersatileDiffusion + +VersatileDiffusion was proposed in [Versatile Diffusion: Text, Images and Variations All in One Diffusion Model](https://arxiv.org/abs/2211.08332) by Xingqian Xu, Zhangyang Wang, Eric Zhang, Kai Wang, Humphrey Shi . + +The abstract of the paper is the following: + +*The recent advances in diffusion models have set an impressive milestone in many generation tasks. Trending works such as DALL-E2, Imagen, and Stable Diffusion have attracted great interest in academia and industry. Despite the rapid landscape changes, recent new approaches focus on extensions and performance rather than capacity, thus requiring separate models for separate tasks. In this work, we expand the existing single-flow diffusion pipeline into a multi-flow network, dubbed Versatile Diffusion (VD), that handles text-to-image, image-to-text, image-variation, and text-variation in one unified model. Moreover, we generalize VD to a unified multi-flow multimodal diffusion framework with grouped layers, swappable streams, and other propositions that can process modalities beyond images and text. 
Through our experiments, we demonstrate that VD and its underlying framework have the following merits: a) VD handles all subtasks with competitive quality; b) VD initiates novel extensions and applications such as disentanglement of style and semantic, image-text dual-guided generation, etc.; c) Through these experiments and applications, VD provides more semantic insights of the generated outputs.*
+
+*Overview*:
+
+| Pipeline | Tasks | Colab | Demo
+|---|---|:---:|:---:|
+| [pipeline_versatile_diffusion_text_to_image.py](https://github.com/huggingface/diffusers/blob/main/src/diffusers/pipelines/versatile_diffusion/pipeline_versatile_diffusion_text_to_image.py) | *Text-to-Image Generation* | - | -
+| [pipeline_versatile_diffusion_image_variation.py](https://github.com/huggingface/diffusers/blob/main/src/diffusers/pipelines/versatile_diffusion/pipeline_versatile_diffusion_image_variation.py) | *Image Variation Generation* | - |-
+| [pipeline_versatile_diffusion_image_to_text.py](https://github.com/huggingface/diffusers/blob/main/src/diffusers/pipelines/versatile_diffusion/pipeline_versatile_diffusion_image_to_text.py) | *Image-to-Text Generation* | - |-
+
+## Tips
+
+- VersatileDiffusion is conceptually very similar to [Stable Diffusion](./api/pipelines/stable_diffusion), but instead of providing just an image data stream conditioned on text, VersatileDiffusion provides both an image and a text data stream and can be conditioned on both text and images.
+
+- *Run VersatileDiffusion*
+
+All tasks of VersatileDiffusion can be tested very easily with the [`VersatileDiffusionPipeline`], [`VersatileDiffusionImageVariationPipeline`] and the `"BAAI/VersatileDiffusion-m9"` checkpoint, in exactly the same way as shown in the [Conditional Image Generation Guide](./using-diffusers/conditional_image_generation) and the [Image-to-Image Generation Guide](./using-diffusers/img2img).
+
+- *How to load and use different schedulers.*
+
+The Versatile Diffusion pipelines use the [`DDIMScheduler`] scheduler by default. But `diffusers` provides many other schedulers that can be used with them, such as [`PNDMScheduler`], [`LMSDiscreteScheduler`], [`EulerDiscreteScheduler`], [`EulerAncestralDiscreteScheduler`] etc.
+To use a different scheduler, you can either change it via the [`ConfigMixin.from_config`] method or pass the `scheduler` argument to the `from_pretrained` method of the pipeline. For example, to use the [`EulerDiscreteScheduler`], you can do the following:
+
+```python
+>>> from diffusers import VersatileDiffusionPipeline, EulerDiscreteScheduler
+
+>>> pipeline = VersatileDiffusionPipeline.from_pretrained("BAAI/VersatileDiffusion-m9")
+>>> pipeline.scheduler = EulerDiscreteScheduler.from_config(pipeline.scheduler.config)
+
+>>> # or
+>>> euler_scheduler = EulerDiscreteScheduler.from_pretrained("BAAI/VersatileDiffusion-m9", subfolder="scheduler")
+>>> pipeline = VersatileDiffusionPipeline.from_pretrained("BAAI/VersatileDiffusion-m9", scheduler=euler_scheduler)
+```
+
+
+- *How to cover all use cases with multiple or a single pipeline*
+
+If you want to use all possible use cases in a single `DiffusionPipeline`, we recommend using the `components` functionality to instantiate all components in the most memory-efficient way:
+
+```python
+>>> from diffusers import (
+...     VersatileDiffusionImageVariationPipeline,
+...     VersatileDiffusionPipeline,
+... )
+
+>>> text2img = VersatileDiffusionPipeline.from_pretrained("BAAI/VersatileDiffusion-m9")
+>>> img_variation = VersatileDiffusionImageVariationPipeline(**text2img.components)
+
+>>> # now you can use text2img(...) and img_variation(...) 
just like the call methods of each respective pipeline +``` + +## VersatileDiffusionPipelineOutput +[[autodoc]] pipelines.alt_diffusion.VersatileDiffusionPipelineOutput + +## VersatileDiffusionPipeline +[[autodoc]] VersatileDiffusionPipeline + - __call__ + - enable_attention_slicing + - disable_attention_slicing + +## VersatileDiffusionImg2ImgPipeline +[[autodoc]] VersatileDiffusionImg2ImgPipeline + - __call__ + - enable_attention_slicing + - disable_attention_slicing From 02254cbb2224a5a44e6a95e8a8654042351929f8 Mon Sep 17 00:00:00 2001 From: anton-l Date: Tue, 22 Nov 2022 18:19:47 +0100 Subject: [PATCH 22/49] dual guided pipeline --- src/diffusers/__init__.py | 1 + src/diffusers/pipelines/__init__.py | 1 + .../pipelines/versatile_diffusion/__init__.py | 1 + ...ipeline_versatile_diffusion_dual_guided.py | 607 ++++++++++++++++++ ...ine_versatile_diffusion_image_variation.py | 4 +- .../test_versatile_diffusion_dual_guided.py | 61 ++ 6 files changed, 673 insertions(+), 2 deletions(-) create mode 100644 src/diffusers/pipelines/versatile_diffusion/pipeline_versatile_diffusion_dual_guided.py create mode 100644 tests/pipelines/versatile_diffusion/test_versatile_diffusion_dual_guided.py diff --git a/src/diffusers/__init__.py b/src/diffusers/__init__.py index 920fa9a348ac..1b8a6689cee5 100644 --- a/src/diffusers/__init__.py +++ b/src/diffusers/__init__.py @@ -73,6 +73,7 @@ StableDiffusionInpaintPipeline, StableDiffusionInpaintPipelineLegacy, StableDiffusionPipeline, + VersatileDiffusionDualGuidedPipeline, VersatileDiffusionImageToTextPipeline, VersatileDiffusionImageVariationPipeline, VersatileDiffusionPipeline, diff --git a/src/diffusers/pipelines/__init__.py b/src/diffusers/pipelines/__init__.py index 957e6991d5e1..a236740edc64 100644 --- a/src/diffusers/pipelines/__init__.py +++ b/src/diffusers/pipelines/__init__.py @@ -25,6 +25,7 @@ StableDiffusionPipeline, ) from .versatile_diffusion import ( + VersatileDiffusionDualGuidedPipeline, VersatileDiffusionImageToTextPipeline, VersatileDiffusionImageVariationPipeline, VersatileDiffusionPipeline, diff --git a/src/diffusers/pipelines/versatile_diffusion/__init__.py b/src/diffusers/pipelines/versatile_diffusion/__init__.py index 864756998d2e..d2ea09aaad7d 100644 --- a/src/diffusers/pipelines/versatile_diffusion/__init__.py +++ b/src/diffusers/pipelines/versatile_diffusion/__init__.py @@ -5,6 +5,7 @@ from .modeling_gpt2_optimus import GPT2OptimusForLatentConnector from .modeling_text_unet import UNetFlatConditionModel from .pipeline_versatile_diffusion import VersatileDiffusionPipeline + from .pipeline_versatile_diffusion_dual_guided import VersatileDiffusionDualGuidedPipeline from .pipeline_versatile_diffusion_image_to_text import VersatileDiffusionImageToTextPipeline from .pipeline_versatile_diffusion_image_variation import VersatileDiffusionImageVariationPipeline from .pipeline_versatile_diffusion_text_to_image import VersatileDiffusionTextToImagePipeline diff --git a/src/diffusers/pipelines/versatile_diffusion/pipeline_versatile_diffusion_dual_guided.py b/src/diffusers/pipelines/versatile_diffusion/pipeline_versatile_diffusion_dual_guided.py new file mode 100644 index 000000000000..9c185e5ed933 --- /dev/null +++ b/src/diffusers/pipelines/versatile_diffusion/pipeline_versatile_diffusion_dual_guided.py @@ -0,0 +1,607 @@ +# Copyright 2022 The HuggingFace Team. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
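The pipeline defined below in this file temporarily rewires `image_unet`: every `Transformer2DModel` is swapped for a dual-attention wrapper and restored again after the denoising loop (see `convert_to_dual_attention` / `remove_dual_attention` further down). A stripped-down sketch of that named-submodule swap, with a hypothetical `wrap` callable standing in for the dual wrapper and assuming the matched modules live inside `nn.ModuleList` containers, might look like:

```python
import torch.nn as nn


def swap_submodules(model: nn.Module, target_cls, wrap):
    # Walk all named submodules; for every match, replace it inside its parent container.
    for name, module in model.named_modules():
        if isinstance(module, target_cls):
            parent_name, index = name.rsplit(".", 1)
            # The parent is an nn.ModuleList here, so the child is addressed by its integer index.
            model.get_submodule(parent_name)[int(index)] = wrap(module)
```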
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import inspect
+from typing import Callable, List, Optional, Union
+
+import numpy as np
+import torch
+import torch.nn as nn
+import torch.utils.checkpoint
+
+import PIL
+from transformers import (
+    CLIPFeatureExtractor,
+    CLIPTextModelWithProjection,
+    CLIPTokenizer,
+    CLIPVisionModelWithProjection,
+)
+
+from ...models import AutoencoderKL, UNet2DConditionModel
+from ...models.attention import Transformer2DModel, Transformer2DModelOutput
+from ...pipeline_utils import DiffusionPipeline, ImagePipelineOutput
+from ...schedulers import DDIMScheduler, LMSDiscreteScheduler, PNDMScheduler
+from ...utils import is_accelerate_available, logging
+from .modeling_text_unet import UNetFlatConditionModel
+
+
+logger = logging.get_logger(__name__)  # pylint: disable=invalid-name
+
+
+class VersatileDiffusionDualGuidedPipeline(DiffusionPipeline):
+    r"""
+    This model inherits from [`DiffusionPipeline`]. Check the superclass documentation for the generic methods the
+    library implements for all the pipelines (such as downloading or saving, running on a particular device, etc.)
+
+    Parameters:
+        tokenizer ([`transformers.CLIPTokenizer`]):
+            Tokenizer that turns text prompts into token ids for the text encoder.
+        image_feature_extractor ([`transformers.CLIPFeatureExtractor`]):
+            Feature extractor that prepares image prompts for the image encoder.
+        text_encoder ([`transformers.CLIPTextModelWithProjection`]):
+            Frozen CLIP text encoder used to embed text prompts.
+        image_encoder ([`transformers.CLIPVisionModelWithProjection`]):
+            Frozen CLIP vision encoder used to embed image prompts.
+        image_unet ([`UNet2DConditionModel`]): Conditional U-Net architecture to denoise the encoded image latents.
+        text_unet ([`UNetFlatConditionModel`]):
+            U-Net for the text data stream; its attention blocks are combined with those of `image_unet` for
+            dual-guided generation.
+        vae ([`AutoencoderKL`]):
+            Variational Auto-Encoder (VAE) Model to encode and decode images to and from latent representations.
+        scheduler ([`SchedulerMixin`]):
+            A scheduler to be used in combination with `image_unet` to denoise the encoded image latents. Can be one of
+            [`DDIMScheduler`], [`LMSDiscreteScheduler`], or [`PNDMScheduler`]. 
+ """ + tokenizer: CLIPTokenizer + image_feature_extractor: CLIPFeatureExtractor + text_encoder: CLIPTextModelWithProjection + image_encoder: CLIPVisionModelWithProjection + image_unet: UNet2DConditionModel + text_unet: UNetFlatConditionModel + vae: AutoencoderKL + scheduler: Union[DDIMScheduler, PNDMScheduler, LMSDiscreteScheduler] + + def __init__( + self, + tokenizer: CLIPTokenizer, + image_feature_extractor: CLIPFeatureExtractor, + text_encoder: CLIPTextModelWithProjection, + image_encoder: CLIPVisionModelWithProjection, + image_unet: UNet2DConditionModel, + text_unet: UNetFlatConditionModel, + vae: AutoencoderKL, + scheduler: Union[DDIMScheduler, PNDMScheduler, LMSDiscreteScheduler], + ): + super().__init__() + self.register_modules( + tokenizer=tokenizer, + image_feature_extractor=image_feature_extractor, + text_encoder=text_encoder, + image_encoder=image_encoder, + image_unet=image_unet, + text_unet=text_unet, + vae=vae, + scheduler=scheduler, + ) + + def convert_to_dual_attention(self, mix_ratio=0.5, condition_types=("image", "text")): + for name, module in self.image_unet.named_modules(): + if isinstance(module, Transformer2DModel): + parent_name, index = name.rsplit(".", 1) + index = int(index) + image_transformer = self.image_unet.get_submodule(parent_name)[index] + text_transformer = self.text_unet.get_submodule(parent_name)[index] + + dual_transformer = DualTransformer2DModel( + image_transformer, text_transformer, mix_ratio=mix_ratio, condition_types=condition_types + ) + self.image_unet.get_submodule(parent_name)[index] = dual_transformer + + def remove_dual_attention(self): + for name, module in self.image_unet.named_modules(): + if isinstance(module, DualTransformer2DModel): + parent_name, index = name.rsplit(".", 1) + index = int(index) + self.image_unet.get_submodule(parent_name)[index] = module.image_transformer + + # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline.enable_xformers_memory_efficient_attention with unet->image_unet + def enable_xformers_memory_efficient_attention(self): + r""" + Enable memory efficient attention as implemented in xformers. + + When this option is enabled, you should observe lower GPU memory usage and a potential speed up at inference + time. Speed up at training time is not guaranteed. + + Warning: When Memory Efficient Attention and Sliced attention are both enabled, the Memory Efficient Attention + is used. + """ + self.image_unet.set_use_memory_efficient_attention_xformers(True) + + # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline.disable_xformers_memory_efficient_attention with unet->image_unet + def disable_xformers_memory_efficient_attention(self): + r""" + Disable memory efficient attention as implemented in xformers. + """ + self.image_unet.set_use_memory_efficient_attention_xformers(False) + + # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline.enable_attention_slicing with unet->image_unet + def enable_attention_slicing(self, slice_size: Optional[Union[str, int]] = "auto"): + r""" + Enable sliced attention computation. + + When this option is enabled, the attention module will split the input tensor in slices, to compute attention + in several steps. This is useful to save some memory in exchange for a small speed decrease. 
+ + Args: + slice_size (`str` or `int`, *optional*, defaults to `"auto"`): + When `"auto"`, halves the input to the attention heads, so attention will be computed in two steps. If + a number is provided, uses as many slices as `attention_head_dim // slice_size`. In this case, + `attention_head_dim` must be a multiple of `slice_size`. + """ + if slice_size == "auto": + # half the attention head size is usually a good trade-off between + # speed and memory + slice_size = self.image_unet.config.attention_head_dim // 2 + self.image_unet.set_attention_slice(slice_size) + + # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline.disable_attention_slicing + def disable_attention_slicing(self): + r""" + Disable sliced attention computation. If `enable_attention_slicing` was previously invoked, this method will go + back to computing attention in one step. + """ + # set slice_size = `None` to disable `attention slicing` + self.enable_attention_slicing(None) + + def enable_sequential_cpu_offload(self, gpu_id=0): + r""" + Offloads all models to CPU using accelerate, significantly reducing memory usage. When called, unet, + text_encoder, vae and safety checker have their state dicts saved to CPU and then are moved to a + `torch.device('meta') and loaded to GPU only when their specific submodule has its `forward` method called. + """ + if is_accelerate_available(): + from accelerate import cpu_offload + else: + raise ImportError("Please install accelerate via `pip install accelerate`") + + device = torch.device(f"cuda:{gpu_id}") + + for cpu_offloaded_model in [self.image_unet, self.text_unet, self.text_encoder, self.vae]: + if cpu_offloaded_model is not None: + cpu_offload(cpu_offloaded_model, device) + + @property + # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline._execution_device with unet->image_unet + def _execution_device(self): + r""" + Returns the device on which the pipeline's models will be executed. After calling + `pipeline.enable_sequential_cpu_offload()` the execution device can only be inferred from Accelerate's module + hooks. + """ + if self.device != torch.device("meta") or not hasattr(self.image_unet, "_hf_hook"): + return self.device + for module in self.image_unet.modules(): + if ( + hasattr(module, "_hf_hook") + and hasattr(module._hf_hook, "execution_device") + and module._hf_hook.execution_device is not None + ): + return torch.device(module._hf_hook.execution_device) + return self.device + + def _encode_text_prompt(self, prompt, device, num_images_per_prompt, do_classifier_free_guidance): + r""" + Encodes the prompt into text encoder hidden states. 
+ + Args: + prompt (`str` or `list(int)`): + prompt to be encoded + device: (`torch.device`): + torch device + num_images_per_prompt (`int`): + number of images that should be generated per prompt + do_classifier_free_guidance (`bool`): + whether to use classifier free guidance or not + """ + + def normalize_embeddings(encoder_output): + embeds = self.text_encoder.text_projection(encoder_output.last_hidden_state) + embeds_pooled = encoder_output.text_embeds + embeds = embeds / torch.norm(embeds_pooled.unsqueeze(1), dim=-1, keepdim=True) + return embeds + + batch_size = len(prompt) + + text_inputs = self.tokenizer( + prompt, + padding="max_length", + max_length=self.tokenizer.model_max_length, + truncation=True, + return_tensors="pt", + ) + text_input_ids = text_inputs.input_ids + untruncated_ids = self.tokenizer(prompt, padding="max_length", return_tensors="pt").input_ids + + if not torch.equal(text_input_ids, untruncated_ids): + removed_text = self.tokenizer.batch_decode(untruncated_ids[:, self.tokenizer.model_max_length - 1 : -1]) + logger.warning( + "The following part of your input was truncated because CLIP can only handle sequences up to" + f" {self.tokenizer.model_max_length} tokens: {removed_text}" + ) + + if hasattr(self.text_encoder.config, "use_attention_mask") and self.text_encoder.config.use_attention_mask: + attention_mask = text_inputs.attention_mask.to(device) + else: + attention_mask = None + + text_embeddings = self.text_encoder( + text_input_ids.to(device), + attention_mask=attention_mask, + ) + text_embeddings = normalize_embeddings(text_embeddings) + + # duplicate text embeddings for each generation per prompt, using mps friendly method + bs_embed, seq_len, _ = text_embeddings.shape + text_embeddings = text_embeddings.repeat(1, num_images_per_prompt, 1) + text_embeddings = text_embeddings.view(bs_embed * num_images_per_prompt, seq_len, -1) + + # get unconditional embeddings for classifier free guidance + if do_classifier_free_guidance: + uncond_tokens = [""] * batch_size + max_length = text_input_ids.shape[-1] + uncond_input = self.tokenizer( + uncond_tokens, + padding="max_length", + max_length=max_length, + truncation=True, + return_tensors="pt", + ) + + if hasattr(self.text_encoder.config, "use_attention_mask") and self.text_encoder.config.use_attention_mask: + attention_mask = uncond_input.attention_mask.to(device) + else: + attention_mask = None + + uncond_embeddings = self.text_encoder( + uncond_input.input_ids.to(device), + attention_mask=attention_mask, + ) + uncond_embeddings = normalize_embeddings(uncond_embeddings) + + # duplicate unconditional embeddings for each generation per prompt, using mps friendly method + seq_len = uncond_embeddings.shape[1] + uncond_embeddings = uncond_embeddings.repeat(1, num_images_per_prompt, 1) + uncond_embeddings = uncond_embeddings.view(batch_size * num_images_per_prompt, seq_len, -1) + + # For classifier free guidance, we need to do two forward passes. + # Here we concatenate the unconditional and text embeddings into a single batch + # to avoid doing two forward passes + text_embeddings = torch.cat([uncond_embeddings, text_embeddings]) + + return text_embeddings + + def _encode_image_prompt(self, prompt, device, num_images_per_prompt, do_classifier_free_guidance): + r""" + Encodes the prompt into text encoder hidden states. 
+ + Args: + prompt (`str` or `list(int)`): + prompt to be encoded + device: (`torch.device`): + torch device + num_images_per_prompt (`int`): + number of images that should be generated per prompt + do_classifier_free_guidance (`bool`): + whether to use classifier free guidance or not + """ + + def normalize_embeddings(encoder_output): + embeds = self.image_encoder.vision_model.post_layernorm(encoder_output.last_hidden_state) + embeds = self.image_encoder.visual_projection(embeds) + embeds_pooled = embeds[:, 0:1] + embeds = embeds / torch.norm(embeds_pooled, dim=-1, keepdim=True) + return embeds + + batch_size = len(prompt) if isinstance(prompt, list) else 1 + + # get prompt text embeddings + image_input = self.image_feature_extractor(images=prompt, return_tensors="pt") + image_embeddings = self.image_encoder(image_input.pixel_values.to(device)) + image_embeddings = normalize_embeddings(image_embeddings) + + # duplicate image embeddings for each generation per prompt, using mps friendly method + bs_embed, seq_len, _ = image_embeddings.shape + image_embeddings = image_embeddings.repeat(1, num_images_per_prompt, 1) + image_embeddings = image_embeddings.view(bs_embed * num_images_per_prompt, seq_len, -1) + + # get unconditional embeddings for classifier free guidance + if do_classifier_free_guidance: + uncond_images = [np.zeros((512, 512, 3))] * batch_size + uncond_images = self.image_feature_extractor(images=uncond_images, return_tensors="pt") + uncond_embeddings = self.image_encoder(uncond_images.pixel_values.to(device)) + uncond_embeddings = normalize_embeddings(uncond_embeddings) + + # duplicate unconditional embeddings for each generation per prompt, using mps friendly method + seq_len = uncond_embeddings.shape[1] + uncond_embeddings = uncond_embeddings.repeat(1, num_images_per_prompt, 1) + uncond_embeddings = uncond_embeddings.view(batch_size * num_images_per_prompt, seq_len, -1) + + # For classifier free guidance, we need to do two forward passes. + # Here we concatenate the unconditional and conditional embeddings into a single batch + # to avoid doing two forward passes + image_embeddings = torch.cat([uncond_embeddings, image_embeddings]) + + return image_embeddings + + # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline.decode_latents + def decode_latents(self, latents): + latents = 1 / 0.18215 * latents + image = self.vae.decode(latents).sample + image = (image / 2 + 0.5).clamp(0, 1) + # we always cast to float32 as this does not cause significant overhead and is compatible with bfloa16 + image = image.cpu().permute(0, 2, 3, 1).float().numpy() + return image + + # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline.prepare_extra_step_kwargs + def prepare_extra_step_kwargs(self, generator, eta): + # prepare extra kwargs for the scheduler step, since not all schedulers have the same signature + # eta (η) is only used with the DDIMScheduler, it will be ignored for other schedulers. 
+ # eta corresponds to η in DDIM paper: https://arxiv.org/abs/2010.02502 + # and should be between [0, 1] + + accepts_eta = "eta" in set(inspect.signature(self.scheduler.step).parameters.keys()) + extra_step_kwargs = {} + if accepts_eta: + extra_step_kwargs["eta"] = eta + + # check if the scheduler accepts generator + accepts_generator = "generator" in set(inspect.signature(self.scheduler.step).parameters.keys()) + if accepts_generator: + extra_step_kwargs["generator"] = generator + return extra_step_kwargs + + def check_inputs(self, first_prompt, second_prompt, height, width, callback_steps): + if ( + not isinstance(first_prompt, str) + and not isinstance(first_prompt, PIL.Image.Image) + and not isinstance(first_prompt, list) + ): + raise ValueError( + f"`first_prompt` has to be of type `str` `PIL.Image` or `list` but is {type(first_prompt)}" + ) + if ( + not isinstance(second_prompt, str) + and not isinstance(second_prompt, PIL.Image.Image) + and not isinstance(second_prompt, list) + ): + raise ValueError( + f"`second_prompt` has to be of type `str` `PIL.Image` or `list` but is {type(second_prompt)}" + ) + + if height % 8 != 0 or width % 8 != 0: + raise ValueError(f"`height` and `width` have to be divisible by 8 but are {height} and {width}.") + + if (callback_steps is None) or ( + callback_steps is not None and (not isinstance(callback_steps, int) or callback_steps <= 0) + ): + raise ValueError( + f"`callback_steps` has to be a positive integer but is {callback_steps} of type" + f" {type(callback_steps)}." + ) + + # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline.prepare_latents + def prepare_latents(self, batch_size, num_channels_latents, height, width, dtype, device, generator, latents=None): + shape = (batch_size, num_channels_latents, height // 8, width // 8) + if latents is None: + if device.type == "mps": + # randn does not work reproducibly on mps + latents = torch.randn(shape, generator=generator, device="cpu", dtype=dtype).to(device) + else: + latents = torch.randn(shape, generator=generator, device=device, dtype=dtype) + else: + if latents.shape != shape: + raise ValueError(f"Unexpected latents shape, got {latents.shape}, expected {shape}") + latents = latents.to(device) + + # scale the initial noise by the standard deviation required by the scheduler + latents = latents * self.scheduler.init_noise_sigma + return latents + + @torch.no_grad() + def __call__( + self, + first_prompt: Union[str, List[str], PIL.Image.Image, List[PIL.Image.Image]], + second_prompt: Union[str, List[str], PIL.Image.Image, List[PIL.Image.Image]], + prompt_mix_ratio: float = 0.5, + height: int = 512, + width: int = 512, + num_inference_steps: int = 50, + guidance_scale: float = 7.5, + num_images_per_prompt: Optional[int] = 1, + eta: float = 0.0, + generator: Optional[torch.Generator] = None, + latents: Optional[torch.FloatTensor] = None, + output_type: Optional[str] = "pil", + return_dict: bool = True, + callback: Optional[Callable[[int, int, torch.FloatTensor], None]] = None, + callback_steps: Optional[int] = 1, + **kwargs, + ): + r""" + Function invoked when calling the pipeline for generation. + + Args: + prompt (`str` or `List[str]`): + The prompt or prompts to guide the image generation. + height (`int`, *optional*, defaults to 512): + The height in pixels of the generated image. + width (`int`, *optional*, defaults to 512): + The width in pixels of the generated image. 
+ num_inference_steps (`int`, *optional*, defaults to 50): + The number of denoising steps. More denoising steps usually lead to a higher quality image at the + expense of slower inference. + guidance_scale (`float`, *optional*, defaults to 7.5): + Guidance scale as defined in [Classifier-Free Diffusion Guidance](https://arxiv.org/abs/2207.12598). + `guidance_scale` is defined as `w` of equation 2. of [Imagen + Paper](https://arxiv.org/pdf/2205.11487.pdf). Guidance scale is enabled by setting `guidance_scale > + 1`. Higher guidance scale encourages to generate images that are closely linked to the text `prompt`, + usually at the expense of lower image quality. + negative_prompt (`str` or `List[str]`, *optional*): + The prompt or prompts not to guide the image generation. Ignored when not using guidance (i.e., ignored + if `guidance_scale` is less than `1`). + num_images_per_prompt (`int`, *optional*, defaults to 1): + The number of images to generate per prompt. + eta (`float`, *optional*, defaults to 0.0): + Corresponds to parameter eta (η) in the DDIM paper: https://arxiv.org/abs/2010.02502. Only applies to + [`schedulers.DDIMScheduler`], will be ignored for others. + generator (`torch.Generator`, *optional*): + A [torch generator](https://pytorch.org/docs/stable/generated/torch.Generator.html) to make generation + deterministic. + latents (`torch.FloatTensor`, *optional*): + Pre-generated noisy latents, sampled from a Gaussian distribution, to be used as inputs for image + generation. Can be used to tweak the same generation with different prompts. If not provided, a latents + tensor will ge generated by sampling using the supplied random `generator`. + output_type (`str`, *optional*, defaults to `"pil"`): + The output format of the generate image. Choose between + [PIL](https://pillow.readthedocs.io/en/stable/): `PIL.Image.Image` or `np.array`. + return_dict (`bool`, *optional*, defaults to `True`): + Whether or not to return a [`~pipelines.stable_diffusion.StableDiffusionPipelineOutput`] instead of a + plain tuple. + callback (`Callable`, *optional*): + A function that will be called every `callback_steps` steps during inference. The function will be + called with the following arguments: `callback(step: int, timestep: int, latents: torch.FloatTensor)`. + callback_steps (`int`, *optional*, defaults to 1): + The frequency at which the `callback` function will be called. If not specified, the callback will be + called at every step. + + Returns: + [`~pipelines.stable_diffusion.StableDiffusionPipelineOutput`] or `tuple`: + [`~pipelines.stable_diffusion.StableDiffusionPipelineOutput`] if `return_dict` is True, otherwise a `tuple. + When returning a tuple, the first element is a list with the generated images, and the second element is a + list of `bool`s denoting whether the corresponding generated image likely represents "not-safe-for-work" + (nsfw) content, according to the `safety_checker`. + """ + + # 1. Check inputs. Raise error if not correct + self.check_inputs(first_prompt, second_prompt, height, width, callback_steps) + + # 2. Define call parameters + first_prompt = [first_prompt] if not isinstance(first_prompt, list) else first_prompt + second_prompt = [second_prompt] if not isinstance(second_prompt, list) else second_prompt + batch_size = len(first_prompt) + device = self._execution_device + # here `guidance_scale` is defined analog to the guidance weight `w` of equation (2) + # of the Imagen paper: https://arxiv.org/pdf/2205.11487.pdf . 
`guidance_scale = 1` + # corresponds to doing no classifier free guidance. + do_classifier_free_guidance = guidance_scale > 1.0 + + # 3. Encode input prompts + dual_prompt_embeddings = [] + prompt_types = [] + for prompt in [first_prompt, second_prompt]: + if isinstance(prompt[0], str): + embeddings = self._encode_text_prompt( + prompt, device, num_images_per_prompt, do_classifier_free_guidance + ) + prompt_types.append("text") + else: + embeddings = self._encode_image_prompt( + prompt, device, num_images_per_prompt, do_classifier_free_guidance + ) + prompt_types.append("image") + dual_prompt_embeddings.append(embeddings) + dual_prompt_embeddings = torch.cat(dual_prompt_embeddings, dim=1) + + # 4. Prepare timesteps + self.scheduler.set_timesteps(num_inference_steps, device=device) + timesteps = self.scheduler.timesteps + + # 5. Prepare latent variables + num_channels_latents = self.image_unet.in_channels + latents = self.prepare_latents( + batch_size * num_images_per_prompt, + num_channels_latents, + height, + width, + dual_prompt_embeddings.dtype, + device, + generator, + latents, + ) + + # 6. Prepare extra step kwargs. + extra_step_kwargs = self.prepare_extra_step_kwargs(generator, eta) + + # 7. Combine the attention blocks of the image and text UNets + self.convert_to_dual_attention(prompt_mix_ratio, prompt_types) + + # 8. Denoising loop + for i, t in enumerate(self.progress_bar(timesteps)): + # expand the latents if we are doing classifier free guidance + latent_model_input = torch.cat([latents] * 2) if do_classifier_free_guidance else latents + latent_model_input = self.scheduler.scale_model_input(latent_model_input, t) + + # predict the noise residual + noise_pred = self.image_unet(latent_model_input, t, encoder_hidden_states=dual_prompt_embeddings).sample + + # perform guidance + if do_classifier_free_guidance: + noise_pred_uncond, noise_pred_text = noise_pred.chunk(2) + noise_pred = noise_pred_uncond + guidance_scale * (noise_pred_text - noise_pred_uncond) + + # compute the previous noisy sample x_t -> x_t-1 + latents = self.scheduler.step(noise_pred, t, latents, **extra_step_kwargs).prev_sample + + # call the callback, if provided + if callback is not None and i % callback_steps == 0: + callback(i, t, latents) + + # 9. Return the image unet to its original state + self.remove_dual_attention() + + # 10. Post-processing + image = self.decode_latents(latents) + + # 11. 
Convert to PIL + if output_type == "pil": + image = self.numpy_to_pil(image) + + if not return_dict: + return (image,) + + return ImagePipelineOutput(images=image) + + +class DualTransformer2DModel(nn.Module): + def __init__(self, image_transformer, text_transformer, mix_ratio=0.5, condition_types=("text", "image")): + super().__init__() + self.image_transformer = image_transformer + self.text_transformer = text_transformer + self.mix_ratio = mix_ratio + self.condition_types = condition_types + + def forward(self, input_states, encoder_hidden_states, timestep=None, return_dict: bool = True): + condition_states = encoder_hidden_states.chunk(2, dim=1) + + encoded_states = [] + for i in range(2): + if self.condition_types[i] == "image": + image_output = self.image_transformer(input_states, condition_states[i], timestep, return_dict) + encoded_states.append(image_output[0]) + else: + text_output = self.text_transformer(input_states, condition_states[i], timestep, return_dict) + encoded_states.append(text_output[0]) + encoded_states[i] = encoded_states[i] - input_states + + output_states = encoded_states[0] * self.mix_ratio + encoded_states[1] * (1 - self.mix_ratio) + output_states = output_states + input_states + + if not return_dict: + return (output_states,) + + return Transformer2DModelOutput(sample=output_states) diff --git a/src/diffusers/pipelines/versatile_diffusion/pipeline_versatile_diffusion_image_variation.py b/src/diffusers/pipelines/versatile_diffusion/pipeline_versatile_diffusion_image_variation.py index bf764f47ae6c..d1c3156431db 100644 --- a/src/diffusers/pipelines/versatile_diffusion/pipeline_versatile_diffusion_image_variation.py +++ b/src/diffusers/pipelines/versatile_diffusion/pipeline_versatile_diffusion_image_variation.py @@ -186,7 +186,7 @@ def normalize_embeddings(encoder_output): # get prompt text embeddings image_input = self.image_feature_extractor(images=prompt, return_tensors="pt") - image_embeddings = self.image_encoder(image_input.pixel_values.to(self.device)) + image_embeddings = self.image_encoder(image_input.pixel_values.to(device)) image_embeddings = normalize_embeddings(image_embeddings) # duplicate image embeddings for each generation per prompt, using mps friendly method @@ -216,7 +216,7 @@ def normalize_embeddings(encoder_output): uncond_images = negative_prompt uncond_images = self.image_feature_extractor(images=uncond_images, return_tensors="pt") - uncond_embeddings = self.image_encoder(uncond_images.pixel_values.to(self.device)) + uncond_embeddings = self.image_encoder(uncond_images.pixel_values.to(device)) uncond_embeddings = normalize_embeddings(uncond_embeddings) # duplicate unconditional embeddings for each generation per prompt, using mps friendly method diff --git a/tests/pipelines/versatile_diffusion/test_versatile_diffusion_dual_guided.py b/tests/pipelines/versatile_diffusion/test_versatile_diffusion_dual_guided.py new file mode 100644 index 000000000000..2ff7572b74a1 --- /dev/null +++ b/tests/pipelines/versatile_diffusion/test_versatile_diffusion_dual_guided.py @@ -0,0 +1,61 @@ +# coding=utf-8 +# Copyright 2022 HuggingFace Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
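The `DualTransformer2DModel` defined above combines the two conditioned branches as residuals around the shared input rather than averaging the raw outputs. A minimal numeric sketch of that combination, with random tensors of illustrative shapes standing in for the two transformer outputs, is:

```python
import torch

# Stand-ins for the shared input and the outputs produced for the first and second condition.
hidden_states = torch.randn(2, 320, 8, 8)
first_out = torch.randn_like(hidden_states)
second_out = torch.randn_like(hidden_states)

mix_ratio = 0.5
# Each branch contributes its residual (output - input); the residuals are blended with
# mix_ratio and the shared input is added back, mirroring the arithmetic in forward().
blended = (first_out - hidden_states) * mix_ratio + (second_out - hidden_states) * (1 - mix_ratio)
output = blended + hidden_states
```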
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import unittest + +import numpy as np +import torch + +from diffusers import VersatileDiffusionDualGuidedPipeline +from diffusers.utils.testing_utils import load_image, require_torch_gpu, slow, torch_device + +from ...test_pipelines_common import PipelineTesterMixin + + +torch.backends.cuda.matmul.allow_tf32 = False + + +class VersatileDiffusionDualGuidedPipelineFastTests(PipelineTesterMixin, unittest.TestCase): + pass + + +@slow +@require_torch_gpu +class VersatileDiffusionDualGuidedPipelineIntegrationTests(unittest.TestCase): + def test_inference_image_variations(self): + pipe = VersatileDiffusionDualGuidedPipeline.from_pretrained("diffusers/vd-official-test") + pipe.to(torch_device) + pipe.set_progress_bar_config(disable=None) + + first_prompt = "cyberpunk 2077" + second_prompt = load_image( + "https://raw.githubusercontent.com/SHI-Labs/Versatile-Diffusion/master/assets/benz.jpg" + ) + generator = torch.Generator(device=torch_device).manual_seed(22) + image = pipe( + first_prompt=first_prompt, + second_prompt=second_prompt, + prompt_mix_ratio=0.75, + generator=generator, + guidance_scale=7.5, + num_inference_steps=50, + output_type="numpy", + ).images + + image_slice = image[0, 253:256, 253:256, -1] + + assert image.shape == (1, 512, 512, 3) + expected_slice = np.array([0.1811, 0.0430, 0.0433, 0.1082, 0.0144, 0.0306, 0.0683, 0.0248, 0.0876]) + assert np.abs(image_slice.flatten() - expected_slice).max() < 1e-2 From 95e37119e943ca812a4d41c45c3e247f9a8b192b Mon Sep 17 00:00:00 2001 From: anton-l Date: Tue, 22 Nov 2022 19:01:15 +0100 Subject: [PATCH 23/49] fix token ratio --- .../pipeline_versatile_diffusion_dual_guided.py | 13 ++++++++----- .../test_versatile_diffusion_dual_guided.py | 2 +- 2 files changed, 9 insertions(+), 6 deletions(-) diff --git a/src/diffusers/pipelines/versatile_diffusion/pipeline_versatile_diffusion_dual_guided.py b/src/diffusers/pipelines/versatile_diffusion/pipeline_versatile_diffusion_dual_guided.py index 9c185e5ed933..a6e12dfc17b4 100644 --- a/src/diffusers/pipelines/versatile_diffusion/pipeline_versatile_diffusion_dual_guided.py +++ b/src/diffusers/pipelines/versatile_diffusion/pipeline_versatile_diffusion_dual_guided.py @@ -586,16 +586,19 @@ def __init__(self, image_transformer, text_transformer, mix_ratio=0.5, condition self.condition_types = condition_types def forward(self, input_states, encoder_hidden_states, timestep=None, return_dict: bool = True): - condition_states = encoder_hidden_states.chunk(2, dim=1) + if self.condition_types[0] == "text": + condition_states = [encoder_hidden_states[:, :77], encoder_hidden_states[:, 77:]] + else: + condition_states = [encoder_hidden_states[:, :257], encoder_hidden_states[:, 257:]] encoded_states = [] for i in range(2): - if self.condition_types[i] == "image": - image_output = self.image_transformer(input_states, condition_states[i], timestep, return_dict) - encoded_states.append(image_output[0]) - else: + if self.condition_types[i] == "text": text_output = self.text_transformer(input_states, condition_states[i], timestep, return_dict) encoded_states.append(text_output[0]) + else: + image_output = 
self.image_transformer(input_states, condition_states[i], timestep, return_dict) + encoded_states.append(image_output[0]) encoded_states[i] = encoded_states[i] - input_states output_states = encoded_states[0] * self.mix_ratio + encoded_states[1] * (1 - self.mix_ratio) diff --git a/tests/pipelines/versatile_diffusion/test_versatile_diffusion_dual_guided.py b/tests/pipelines/versatile_diffusion/test_versatile_diffusion_dual_guided.py index 2ff7572b74a1..57f0b5544677 100644 --- a/tests/pipelines/versatile_diffusion/test_versatile_diffusion_dual_guided.py +++ b/tests/pipelines/versatile_diffusion/test_versatile_diffusion_dual_guided.py @@ -43,7 +43,7 @@ def test_inference_image_variations(self): second_prompt = load_image( "https://raw.githubusercontent.com/SHI-Labs/Versatile-Diffusion/master/assets/benz.jpg" ) - generator = torch.Generator(device=torch_device).manual_seed(22) + generator = torch.Generator(device=torch_device).manual_seed(0) image = pipe( first_prompt=first_prompt, second_prompt=second_prompt, From 22c6b32672e79aa2110c8d4e0dd451d9ee72e73d Mon Sep 17 00:00:00 2001 From: Patrick von Platen Date: Wed, 23 Nov 2022 08:43:17 +0000 Subject: [PATCH 24/49] propose change --- src/diffusers/models/unet_2d_blocks.py | 35 ++++++++++++++----- src/diffusers/models/unet_2d_condition.py | 4 +++ .../test_versatile_diffusion_image_to_text.py | 2 +- 3 files changed, 31 insertions(+), 10 deletions(-) diff --git a/src/diffusers/models/unet_2d_blocks.py b/src/diffusers/models/unet_2d_blocks.py index 770043f053b2..c645f9f607e6 100644 --- a/src/diffusers/models/unet_2d_blocks.py +++ b/src/diffusers/models/unet_2d_blocks.py @@ -32,6 +32,7 @@ def get_down_block( resnet_groups=None, cross_attention_dim=None, downsample_padding=None, + dual_cross_attention=False, ): down_block_type = down_block_type[7:] if down_block_type.startswith("UNetRes") else down_block_type if down_block_type == "DownBlock2D": @@ -74,6 +75,7 @@ def get_down_block( downsample_padding=downsample_padding, cross_attention_dim=cross_attention_dim, attn_num_head_channels=attn_num_head_channels, + dual_cross_attention=dual_cross_attention, ) elif down_block_type == "SkipDownBlock2D": return SkipDownBlock2D( @@ -137,6 +139,7 @@ def get_up_block( attn_num_head_channels, resnet_groups=None, cross_attention_dim=None, + dual_cross_attention=False, ): up_block_type = up_block_type[7:] if up_block_type.startswith("UNetRes") else up_block_type if up_block_type == "UpBlock2D": @@ -322,6 +325,7 @@ def __init__( attention_type="default", output_scale_factor=1.0, cross_attention_dim=1280, + dual_cross_attention=False, **kwargs, ): super().__init__() @@ -505,6 +509,7 @@ def __init__( output_scale_factor=1.0, downsample_padding=1, add_downsample=True, + dual_cross_attention=False, ): super().__init__() resnets = [] @@ -529,16 +534,28 @@ def __init__( pre_norm=resnet_pre_norm, ) ) - attentions.append( - Transformer2DModel( - attn_num_head_channels, - out_channels // attn_num_head_channels, - in_channels=out_channels, - num_layers=1, - cross_attention_dim=cross_attention_dim, - norm_num_groups=resnet_groups, + if dual_cross_attention is False: + attentions.append( + Transformer2DModel( + attn_num_head_channels, + out_channels // attn_num_head_channels, + in_channels=out_channels, + num_layers=1, + cross_attention_dim=cross_attention_dim, + norm_num_groups=resnet_groups, + ) + ) + else: + attentions.append( + DualTransformer2DModel( + attn_num_head_channels, + out_channels // attn_num_head_channels, + in_channels=out_channels, + num_layers=1, + 
cross_attention_dim=cross_attention_dim, + norm_num_groups=resnet_groups, + ) ) - ) self.attentions = nn.ModuleList(attentions) self.resnets = nn.ModuleList(resnets) diff --git a/src/diffusers/models/unet_2d_condition.py b/src/diffusers/models/unet_2d_condition.py index 5a02a3ba1e7d..49ccb66e4a07 100644 --- a/src/diffusers/models/unet_2d_condition.py +++ b/src/diffusers/models/unet_2d_condition.py @@ -106,6 +106,7 @@ def __init__( norm_eps: float = 1e-5, cross_attention_dim: int = 1280, attention_head_dim: int = 8, + dual_cross_attention: bool = False, ): super().__init__() @@ -145,6 +146,7 @@ def __init__( cross_attention_dim=cross_attention_dim, attn_num_head_channels=attention_head_dim, downsample_padding=downsample_padding, + dual_cross_attention=dual_cross_attention, ) self.down_blocks.append(down_block) @@ -159,6 +161,7 @@ def __init__( cross_attention_dim=cross_attention_dim, attn_num_head_channels=attention_head_dim, resnet_groups=norm_num_groups, + dual_cross_attention=dual_cross_attention, ) # count how many layers upsample the images @@ -194,6 +197,7 @@ def __init__( resnet_groups=norm_num_groups, cross_attention_dim=cross_attention_dim, attn_num_head_channels=attention_head_dim, + dual_cross_attention=dual_cross_attention, ) self.up_blocks.append(up_block) prev_output_channel = output_channel diff --git a/tests/pipelines/versatile_diffusion/test_versatile_diffusion_image_to_text.py b/tests/pipelines/versatile_diffusion/test_versatile_diffusion_image_to_text.py index 648ef96758c9..dbaaeeb262dc 100644 --- a/tests/pipelines/versatile_diffusion/test_versatile_diffusion_image_to_text.py +++ b/tests/pipelines/versatile_diffusion/test_versatile_diffusion_image_to_text.py @@ -40,7 +40,7 @@ def test_inference_image_to_text(self): pipe.set_progress_bar_config(disable=None) image_prompt = load_image( - "https://raw.githubusercontent.com/SHI-Labs/Versatile-Diffusion/master/assets/benz.jpg" + "https://raw.githubusercontent.com/SHI-Labs/Versatile-Diffusion/master/assets/boy_and_girl.jpg" ) generator = torch.Generator(device=torch_device).manual_seed(0) text = pipe( From 8f5f372573cef2c60c60159ee05df9677986ab3f Mon Sep 17 00:00:00 2001 From: anton-l Date: Wed, 23 Nov 2022 11:58:08 +0100 Subject: [PATCH 25/49] dual transformer as a native module --- src/diffusers/models/attention.py | 122 +++++++++++++++++- ...ipeline_versatile_diffusion_dual_guided.py | 56 +++----- .../test_versatile_diffusion_dual_guided.py | 43 ++++++ 3 files changed, 181 insertions(+), 40 deletions(-) diff --git a/src/diffusers/models/attention.py b/src/diffusers/models/attention.py index be9203b4d699..3a281c1594df 100644 --- a/src/diffusers/models/attention.py +++ b/src/diffusers/models/attention.py @@ -22,7 +22,7 @@ from ..configuration_utils import ConfigMixin, register_to_config from ..modeling_utils import ModelMixin from ..models.embeddings import ImagePositionalEmbeddings -from ..utils import BaseOutput +from ..utils import CONFIG_NAME, BaseOutput from ..utils.import_utils import is_xformers_available @@ -666,3 +666,123 @@ def forward(self, x, timestep): scale, shift = torch.chunk(emb, 2) x = self.norm(x) * (1 + scale) + shift return x + + +class DualTransformer2DModel(nn.Module, ConfigMixin): + """ + Dual transformer wrapper that combines two `Transformer2DModel`s for mixed inference. + + Parameters: + num_attention_heads (`int`, *optional*, defaults to 16): The number of heads to use for multi-head attention. + attention_head_dim (`int`, *optional*, defaults to 88): The number of channels in each head. 
+ in_channels (`int`, *optional*): + Pass if the input is continuous. The number of channels in the input and output. + num_layers (`int`, *optional*, defaults to 1): The number of layers of Transformer blocks to use. + dropout (`float`, *optional*, defaults to 0.1): The dropout probability to use. + cross_attention_dim (`int`, *optional*): The number of context dimensions to use. + sample_size (`int`, *optional*): Pass if the input is discrete. The width of the latent images. + Note that this is fixed at training time as it is used for learning a number of position embeddings. See + `ImagePositionalEmbeddings`. + num_vector_embeds (`int`, *optional*): + Pass if the input is discrete. The number of classes of the vector embeddings of the latent pixels. + Includes the class for the masked latent pixel. + activation_fn (`str`, *optional*, defaults to `"geglu"`): Activation function to be used in feed-forward. + num_embeds_ada_norm ( `int`, *optional*): Pass if at least one of the norm_layers is `AdaLayerNorm`. + The number of diffusion steps used during training. Note that this is fixed at training time as it is used + to learn a number of embeddings that are added to the hidden states. During inference, you can denoise for + up to but not more than steps than `num_embeds_ada_norm`. + attention_bias (`bool`, *optional*): + Configure if the TransformerBlocks' attention should contain a bias parameter. + """ + + config_name = CONFIG_NAME + + @register_to_config + def __init__( + self, + num_attention_heads: int = 16, + attention_head_dim: int = 88, + in_channels: Optional[int] = None, + num_layers: int = 1, + dropout: float = 0.0, + norm_num_groups: int = 32, + cross_attention_dim: Optional[int] = None, + attention_bias: bool = False, + sample_size: Optional[int] = None, + num_vector_embeds: Optional[int] = None, + activation_fn: str = "geglu", + num_embeds_ada_norm: Optional[int] = None, + ): + super().__init__() + self.transformers = nn.ModuleList( + [ + Transformer2DModel( + num_attention_heads=num_attention_heads, + attention_head_dim=attention_head_dim, + in_channels=in_channels, + num_layers=num_layers, + dropout=dropout, + norm_num_groups=norm_num_groups, + cross_attention_dim=cross_attention_dim, + attention_bias=attention_bias, + sample_size=sample_size, + num_vector_embeds=num_vector_embeds, + activation_fn=activation_fn, + num_embeds_ada_norm=num_embeds_ada_norm, + ) + for _ in range(2) + ] + ) + + # The ratio of transformer1 to transformer2's output states to be combined during inference + self.mix_ratio = 0.5 + + # The shape of `encoder_hidden_states` is expected to be + # `(batch_size, num_condition_tokens[0]+num_condition_tokens[1], num_features)` + self.num_condition_tokens = (77, 257) + + def forward(self, hidden_states, encoder_hidden_states, timestep=None, return_dict: bool = True): + """ + Args: + hidden_states ( When discrete, `torch.LongTensor` of shape `(batch size, num latent pixels)`. + When continuous, `torch.FloatTensor` of shape `(batch size, channel, height, width)`): Input + hidden_states + encoder_hidden_states ( `torch.LongTensor` of shape `(batch size, context dim)`, *optional*): + Conditional embeddings for cross attention layer. If not given, cross-attention defaults to + self-attention. + timestep ( `torch.long`, *optional*): + Optional timestep to be applied as an embedding in AdaLayerNorm's. Used to indicate denoising step. 
+ return_dict (`bool`, *optional*, defaults to `True`): + Whether or not to return a [`models.unet_2d_condition.UNet2DConditionOutput`] instead of a plain tuple. + + Returns: + [`~models.attention.Transformer2DModelOutput`] or `tuple`: [`~models.attention.Transformer2DModelOutput`] + if `return_dict` is True, otherwise a `tuple`. When returning a tuple, the first element is the sample + tensor. + """ + input_states = hidden_states + + encoded_states = [] + tokens_start = 0 + for i in range(2): + # for each of the two transformers, pass the corresponding condition tokens + condition_state = encoder_hidden_states[:, tokens_start : tokens_start + self.num_condition_tokens[i]] + encoded_state = self.transformers[i](input_states, condition_state, timestep, return_dict)[0] + encoded_states.append(encoded_state - input_states) + tokens_start += self.num_condition_tokens[i] + + output_states = encoded_states[0] * self.mix_ratio + encoded_states[1] * (1 - self.mix_ratio) + output_states = output_states + input_states + + if not return_dict: + return (output_states,) + + return Transformer2DModelOutput(sample=output_states) + + def _set_attention_slice(self, slice_size): + for transformer in self.transformers: + transformer._set_attention_slice(slice_size) + + def _set_use_memory_efficient_attention_xformers(self, use_memory_efficient_attention_xformers: bool): + for transformer in self.transformers: + transformer._set_use_memory_efficient_attention_xformers(use_memory_efficient_attention_xformers) diff --git a/src/diffusers/pipelines/versatile_diffusion/pipeline_versatile_diffusion_dual_guided.py b/src/diffusers/pipelines/versatile_diffusion/pipeline_versatile_diffusion_dual_guided.py index a6e12dfc17b4..106caf5c1bf3 100644 --- a/src/diffusers/pipelines/versatile_diffusion/pipeline_versatile_diffusion_dual_guided.py +++ b/src/diffusers/pipelines/versatile_diffusion/pipeline_versatile_diffusion_dual_guided.py @@ -17,7 +17,6 @@ import numpy as np import torch -import torch.nn as nn import torch.utils.checkpoint import PIL @@ -29,7 +28,7 @@ ) from ...models import AutoencoderKL, UNet2DConditionModel -from ...models.attention import Transformer2DModel, Transformer2DModelOutput +from ...models.attention import DualTransformer2DModel, Transformer2DModel from ...pipeline_utils import DiffusionPipeline, ImagePipelineOutput from ...schedulers import DDIMScheduler, LMSDiscreteScheduler, PNDMScheduler from ...utils import is_accelerate_available, logging @@ -94,12 +93,18 @@ def convert_to_dual_attention(self, mix_ratio=0.5, condition_types=("image", "te if isinstance(module, Transformer2DModel): parent_name, index = name.rsplit(".", 1) index = int(index) + image_transformer = self.image_unet.get_submodule(parent_name)[index] text_transformer = self.text_unet.get_submodule(parent_name)[index] - dual_transformer = DualTransformer2DModel( - image_transformer, text_transformer, mix_ratio=mix_ratio, condition_types=condition_types - ) + dual_transformer = DualTransformer2DModel(**image_transformer.config) + for i, type in enumerate(condition_types): + if type == "image": + dual_transformer.transformers[i] = image_transformer + else: + dual_transformer.transformers[i] = text_transformer + + dual_transformer.mix_ratio = mix_ratio self.image_unet.get_submodule(parent_name)[index] = dual_transformer def remove_dual_attention(self): @@ -107,7 +112,7 @@ def remove_dual_attention(self): if isinstance(module, DualTransformer2DModel): parent_name, index = name.rsplit(".", 1) index = int(index) - 
self.image_unet.get_submodule(parent_name)[index] = module.image_transformer + self.image_unet.get_submodule(parent_name)[index] = module.transformers[0] # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline.enable_xformers_memory_efficient_attention with unet->image_unet def enable_xformers_memory_efficient_attention(self): @@ -412,6 +417,11 @@ def prepare_latents(self, batch_size, num_channels_latents, height, width, dtype latents = latents * self.scheduler.init_noise_sigma return latents + def set_mix_ratio(self, mix_ratio): + for name, module in self.image_unet.named_modules(): + if isinstance(module, DualTransformer2DModel): + module.mix_ratio = mix_ratio + @torch.no_grad() def __call__( self, @@ -539,6 +549,7 @@ def __call__( # 7. Combine the attention blocks of the image and text UNets self.convert_to_dual_attention(prompt_mix_ratio, prompt_types) + self.set_mix_ratio(prompt_mix_ratio) # 8. Denoising loop for i, t in enumerate(self.progress_bar(timesteps)): @@ -575,36 +586,3 @@ def __call__( return (image,) return ImagePipelineOutput(images=image) - - -class DualTransformer2DModel(nn.Module): - def __init__(self, image_transformer, text_transformer, mix_ratio=0.5, condition_types=("text", "image")): - super().__init__() - self.image_transformer = image_transformer - self.text_transformer = text_transformer - self.mix_ratio = mix_ratio - self.condition_types = condition_types - - def forward(self, input_states, encoder_hidden_states, timestep=None, return_dict: bool = True): - if self.condition_types[0] == "text": - condition_states = [encoder_hidden_states[:, :77], encoder_hidden_states[:, 77:]] - else: - condition_states = [encoder_hidden_states[:, :257], encoder_hidden_states[:, 257:]] - - encoded_states = [] - for i in range(2): - if self.condition_types[i] == "text": - text_output = self.text_transformer(input_states, condition_states[i], timestep, return_dict) - encoded_states.append(text_output[0]) - else: - image_output = self.image_transformer(input_states, condition_states[i], timestep, return_dict) - encoded_states.append(image_output[0]) - encoded_states[i] = encoded_states[i] - input_states - - output_states = encoded_states[0] * self.mix_ratio + encoded_states[1] * (1 - self.mix_ratio) - output_states = output_states + input_states - - if not return_dict: - return (output_states,) - - return Transformer2DModelOutput(sample=output_states) diff --git a/tests/pipelines/versatile_diffusion/test_versatile_diffusion_dual_guided.py b/tests/pipelines/versatile_diffusion/test_versatile_diffusion_dual_guided.py index 57f0b5544677..568f674338f0 100644 --- a/tests/pipelines/versatile_diffusion/test_versatile_diffusion_dual_guided.py +++ b/tests/pipelines/versatile_diffusion/test_versatile_diffusion_dual_guided.py @@ -13,6 +13,8 @@ # See the License for the specific language governing permissions and # limitations under the License. 
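For orientation, a minimal end-to-end call of the new dual-guided pipeline, mirroring the integration test below, might look like the sketch that follows. It assumes a CUDA device; `"diffusers/vd-official-test"` is the placeholder checkpoint id used by these tests, and `"benz.jpg"` stands in for a locally saved copy of the test image. The loop sweeps `prompt_mix_ratio`, which weights the first prompt against the second.

```python
import PIL.Image
import torch

from diffusers import VersatileDiffusionDualGuidedPipeline

pipe = VersatileDiffusionDualGuidedPipeline.from_pretrained("diffusers/vd-official-test")
pipe.to("cuda")

image_prompt = PIL.Image.open("benz.jpg")  # placeholder path for the conditioning image

for mix_ratio in (0.25, 0.5, 0.75):
    # Re-seed so the only difference between outputs is the mix ratio.
    generator = torch.Generator(device="cuda").manual_seed(0)
    image = pipe(
        first_prompt="cyberpunk 2077",
        second_prompt=image_prompt,
        prompt_mix_ratio=mix_ratio,  # higher values weight the first (text) prompt more heavily
        generator=generator,
        guidance_scale=7.5,
        num_inference_steps=50,
    ).images[0]
    image.save(f"dual_guided_{mix_ratio}.png")
```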
+import gc +import tempfile import unittest import numpy as np @@ -34,6 +36,47 @@ class VersatileDiffusionDualGuidedPipelineFastTests(PipelineTesterMixin, unittes @slow @require_torch_gpu class VersatileDiffusionDualGuidedPipelineIntegrationTests(unittest.TestCase): + def tearDown(self): + # clean up the VRAM after each test + super().tearDown() + gc.collect() + torch.cuda.empty_cache() + + def test_from_pretrained_save_pretrained(self): + pipe = VersatileDiffusionDualGuidedPipeline.from_pretrained("diffusers/vd-official-test") + pipe.to(torch_device) + pipe.set_progress_bar_config(disable=None) + + generator = torch.Generator(device=torch_device).manual_seed(0) + image = pipe( + first_prompt="first prompt", + second_prompt="second prompt", + prompt_mix_ratio=0.75, + generator=generator, + guidance_scale=7.5, + num_inference_steps=2, + output_type="numpy", + ).images + + with tempfile.TemporaryDirectory() as tmpdirname: + pipe.save_pretrained(tmpdirname) + pipe = VersatileDiffusionDualGuidedPipeline.from_pretrained(tmpdirname) + pipe.to(torch_device) + pipe.set_progress_bar_config(disable=None) + + generator = generator.manual_seed(0) + new_image = pipe( + first_prompt="first prompt", + second_prompt="second prompt", + prompt_mix_ratio=0.75, + generator=generator, + guidance_scale=7.5, + num_inference_steps=2, + output_type="numpy", + ).images + + assert np.abs(image - new_image).sum() < 1e-5, "Models don't have the same forward pass" + def test_inference_image_variations(self): pipe = VersatileDiffusionDualGuidedPipeline.from_pretrained("diffusers/vd-official-test") pipe.to(torch_device) From f5e8ec6179f09ac765ee37b4a46bd7a9503c1d78 Mon Sep 17 00:00:00 2001 From: anton-l Date: Wed, 23 Nov 2022 12:16:41 +0100 Subject: [PATCH 26/49] DualTransformer(nn.Module) --- src/diffusers/models/attention.py | 5 +---- .../pipeline_versatile_diffusion_dual_guided.py | 16 +++++++++++++++- 2 files changed, 16 insertions(+), 5 deletions(-) diff --git a/src/diffusers/models/attention.py b/src/diffusers/models/attention.py index 3a281c1594df..729bce548ec9 100644 --- a/src/diffusers/models/attention.py +++ b/src/diffusers/models/attention.py @@ -668,7 +668,7 @@ def forward(self, x, timestep): return x -class DualTransformer2DModel(nn.Module, ConfigMixin): +class DualTransformer2DModel(nn.Module): """ Dual transformer wrapper that combines two `Transformer2DModel`s for mixed inference. @@ -695,9 +695,6 @@ class DualTransformer2DModel(nn.Module, ConfigMixin): Configure if the TransformerBlocks' attention should contain a bias parameter. 
""" - config_name = CONFIG_NAME - - @register_to_config def __init__( self, num_attention_heads: int = 16, diff --git a/src/diffusers/pipelines/versatile_diffusion/pipeline_versatile_diffusion_dual_guided.py b/src/diffusers/pipelines/versatile_diffusion/pipeline_versatile_diffusion_dual_guided.py index 106caf5c1bf3..93ac157b2c2c 100644 --- a/src/diffusers/pipelines/versatile_diffusion/pipeline_versatile_diffusion_dual_guided.py +++ b/src/diffusers/pipelines/versatile_diffusion/pipeline_versatile_diffusion_dual_guided.py @@ -97,7 +97,21 @@ def convert_to_dual_attention(self, mix_ratio=0.5, condition_types=("image", "te image_transformer = self.image_unet.get_submodule(parent_name)[index] text_transformer = self.text_unet.get_submodule(parent_name)[index] - dual_transformer = DualTransformer2DModel(**image_transformer.config) + config = image_transformer.config + dual_transformer = DualTransformer2DModel( + num_attention_heads=config.num_attention_heads, + attention_head_dim=config.attention_head_dim, + in_channels=config.in_channels, + num_layers=config.num_layers, + dropout=config.dropout, + norm_num_groups=config.norm_num_groups, + cross_attention_dim=config.cross_attention_dim, + attention_bias=config.attention_bias, + sample_size=config.sample_size, + num_vector_embeds=config.num_vector_embeds, + activation_fn=config.activation_fn, + num_embeds_ada_norm=config.num_embeds_ada_norm, + ) for i, type in enumerate(condition_types): if type == "image": dual_transformer.transformers[i] = image_transformer From 914942feb3db68f9602b852d1c2b717cb3da3802 Mon Sep 17 00:00:00 2001 From: anton-l Date: Wed, 23 Nov 2022 12:21:01 +0100 Subject: [PATCH 27/49] DualTransformer(nn.Module) --- src/diffusers/models/unet_2d_blocks.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/diffusers/models/unet_2d_blocks.py b/src/diffusers/models/unet_2d_blocks.py index c645f9f607e6..0988cbb0ab9c 100644 --- a/src/diffusers/models/unet_2d_blocks.py +++ b/src/diffusers/models/unet_2d_blocks.py @@ -15,7 +15,7 @@ import torch from torch import nn -from .attention import AttentionBlock, Transformer2DModel +from .attention import AttentionBlock, Transformer2DModel, DualTransformer2DModel from .resnet import Downsample2D, FirDownsample2D, FirUpsample2D, ResnetBlock2D, Upsample2D From 8d4207ddd2ab04914fca4599c5cfc29fc2d4106d Mon Sep 17 00:00:00 2001 From: Patrick von Platen Date: Wed, 23 Nov 2022 12:08:46 +0000 Subject: [PATCH 28/49] correct unconditional image --- .../pipeline_versatile_diffusion_image_to_text.py | 11 +++++++++-- .../test_versatile_diffusion_image_to_text.py | 9 ++++++--- 2 files changed, 15 insertions(+), 5 deletions(-) diff --git a/src/diffusers/pipelines/versatile_diffusion/pipeline_versatile_diffusion_image_to_text.py b/src/diffusers/pipelines/versatile_diffusion/pipeline_versatile_diffusion_image_to_text.py index 129134a47944..40c432f44f98 100644 --- a/src/diffusers/pipelines/versatile_diffusion/pipeline_versatile_diffusion_image_to_text.py +++ b/src/diffusers/pipelines/versatile_diffusion/pipeline_versatile_diffusion_image_to_text.py @@ -219,6 +219,7 @@ def normalize_embeddings(encoder_output): batch_size = len(prompt) if isinstance(prompt, list) else 1 # get prompt text embeddings + # prompt = [(np.asarray(prompt) / 255)] image_input = self.image_feature_extractor(images=prompt, return_tensors="pt") image_embeddings = self.image_encoder(image_input.pixel_values.to(self.device)) image_embeddings = normalize_embeddings(image_embeddings) @@ -232,7 +233,7 @@ def 
normalize_embeddings(encoder_output): if do_classifier_free_guidance: uncond_images: List[str] if negative_prompt is None: - uncond_images = [np.zeros((512, 512, 3))] * batch_size + uncond_images = [np.zeros((512, 512, 3)) + 0.5] * batch_size elif type(prompt) is not type(negative_prompt): raise TypeError( f"`negative_prompt` should be the same type to `prompt`, but got {type(negative_prompt)} !=" @@ -430,16 +431,22 @@ def __call__( latent_model_input = torch.cat([latents] * 2) if do_classifier_free_guidance else latents latent_model_input = self.scheduler.scale_model_input(latent_model_input, t) + print("latent_model_input", latent_model_input.abs().sum()) + print("timestep", t) + # predict the noise residual noise_pred = self.text_unet(latent_model_input, t, encoder_hidden_states=image_embeddings).sample - # perform guidance if do_classifier_free_guidance: noise_pred_uncond, noise_pred_text = noise_pred.chunk(2) noise_pred = noise_pred_uncond + guidance_scale * (noise_pred_text - noise_pred_uncond) + print("e_t", noise_pred.abs().sum()) + print("e_t[3,3]", noise_pred[0, :5, 0, 0]) + # compute the previous noisy sample x_t -> x_t-1 latents = self.scheduler.step(noise_pred, t, latents, **extra_step_kwargs).prev_sample + print("latents", latents.abs().sum()) # call the callback, if provided if callback is not None and i % callback_steps == 0: diff --git a/tests/pipelines/versatile_diffusion/test_versatile_diffusion_image_to_text.py b/tests/pipelines/versatile_diffusion/test_versatile_diffusion_image_to_text.py index dbaaeeb262dc..f03535692e01 100644 --- a/tests/pipelines/versatile_diffusion/test_versatile_diffusion_image_to_text.py +++ b/tests/pipelines/versatile_diffusion/test_versatile_diffusion_image_to_text.py @@ -18,7 +18,7 @@ import numpy as np import torch -from diffusers import VersatileDiffusionImageToTextPipeline +from diffusers import VersatileDiffusionImageToTextPipeline, DDIMScheduler from diffusers.utils.testing_utils import load_image, require_torch_gpu, slow, torch_device from ...test_pipelines_common import PipelineTesterMixin @@ -42,10 +42,13 @@ def test_inference_image_to_text(self): image_prompt = load_image( "https://raw.githubusercontent.com/SHI-Labs/Versatile-Diffusion/master/assets/boy_and_girl.jpg" ) - generator = torch.Generator(device=torch_device).manual_seed(0) + # generator = torch.Generator(device=torch_device).manual_seed(0) + np.random.seed(8) + torch.manual_seed(108) + pipe.scheduler = DDIMScheduler.from_config(pipe.scheduler.config) text = pipe( image=image_prompt, - generator=generator, + # generator=generator, guidance_scale=7.5, num_inference_steps=50, output_type="str", From 5ab90f6d5c71747a5193d30e78833192094bc724 Mon Sep 17 00:00:00 2001 From: anton-l Date: Wed, 23 Nov 2022 14:16:09 +0100 Subject: [PATCH 29/49] save-load with mega pipeline --- src/diffusers/models/attention.py | 19 ++- src/diffusers/models/unet_2d_blocks.py | 66 +++++++--- .../pipeline_versatile_diffusion.py | 57 ++++++++- ...ipeline_versatile_diffusion_dual_guided.py | 51 +++++--- .../test_versatile_diffusion_dual_guided.py | 4 +- .../test_versatile_diffusion_mega.py | 116 ++++++++++++++++++ 6 files changed, 263 insertions(+), 50 deletions(-) create mode 100644 tests/pipelines/versatile_diffusion/test_versatile_diffusion_mega.py diff --git a/src/diffusers/models/attention.py b/src/diffusers/models/attention.py index 729bce548ec9..f47327fdbeb8 100644 --- a/src/diffusers/models/attention.py +++ b/src/diffusers/models/attention.py @@ -731,12 +731,18 @@ def __init__( ] ) + # Variables 
that can be set by a pipeline: + # The ratio of transformer1 to transformer2's output states to be combined during inference self.mix_ratio = 0.5 # The shape of `encoder_hidden_states` is expected to be - # `(batch_size, num_condition_tokens[0]+num_condition_tokens[1], num_features)` - self.num_condition_tokens = (77, 257) + # `(batch_size, condition_lengths[0]+condition_lengths[1], num_features)` + self.condition_lengths = [77, 257] + + # Which transformer to use to encode which condition. + # E.g. `(1, 0)` means that we'll use `transformers[1](conditions[0])` and `transformers[0](conditions[1])` + self.transformer_index_for_condition = [1, 0] def forward(self, hidden_states, encoder_hidden_states, timestep=None, return_dict: bool = True): """ @@ -763,10 +769,13 @@ def forward(self, hidden_states, encoder_hidden_states, timestep=None, return_di tokens_start = 0 for i in range(2): # for each of the two transformers, pass the corresponding condition tokens - condition_state = encoder_hidden_states[:, tokens_start : tokens_start + self.num_condition_tokens[i]] - encoded_state = self.transformers[i](input_states, condition_state, timestep, return_dict)[0] + condition_state = encoder_hidden_states[:, tokens_start : tokens_start + self.condition_lengths[i]] + transformer_index = self.transformer_index_for_condition[i] + encoded_state = self.transformers[transformer_index](input_states, condition_state, timestep, return_dict)[ + 0 + ] encoded_states.append(encoded_state - input_states) - tokens_start += self.num_condition_tokens[i] + tokens_start += self.condition_lengths[i] output_states = encoded_states[0] * self.mix_ratio + encoded_states[1] * (1 - self.mix_ratio) output_states = output_states + input_states diff --git a/src/diffusers/models/unet_2d_blocks.py b/src/diffusers/models/unet_2d_blocks.py index 0988cbb0ab9c..4dd15845e04f 100644 --- a/src/diffusers/models/unet_2d_blocks.py +++ b/src/diffusers/models/unet_2d_blocks.py @@ -15,7 +15,7 @@ import torch from torch import nn -from .attention import AttentionBlock, Transformer2DModel, DualTransformer2DModel +from .attention import AttentionBlock, DualTransformer2DModel, Transformer2DModel from .resnet import Downsample2D, FirDownsample2D, FirUpsample2D, ResnetBlock2D, Upsample2D @@ -169,6 +169,7 @@ def get_up_block( resnet_groups=resnet_groups, cross_attention_dim=cross_attention_dim, attn_num_head_channels=attn_num_head_channels, + dual_cross_attention=dual_cross_attention, ) elif up_block_type == "AttnUpBlock2D": return AttnUpBlock2D( @@ -352,16 +353,28 @@ def __init__( attentions = [] for _ in range(num_layers): - attentions.append( - Transformer2DModel( - attn_num_head_channels, - in_channels // attn_num_head_channels, - in_channels=in_channels, - num_layers=1, - cross_attention_dim=cross_attention_dim, - norm_num_groups=resnet_groups, + if not dual_cross_attention: + attentions.append( + Transformer2DModel( + attn_num_head_channels, + in_channels // attn_num_head_channels, + in_channels=in_channels, + num_layers=1, + cross_attention_dim=cross_attention_dim, + norm_num_groups=resnet_groups, + ) + ) + else: + attentions.append( + DualTransformer2DModel( + attn_num_head_channels, + in_channels // attn_num_head_channels, + in_channels=in_channels, + num_layers=1, + cross_attention_dim=cross_attention_dim, + norm_num_groups=resnet_groups, + ) ) - ) resnets.append( ResnetBlock2D( in_channels=in_channels, @@ -534,7 +547,7 @@ def __init__( pre_norm=resnet_pre_norm, ) ) - if dual_cross_attention is False: + if not dual_cross_attention: 
attentions.append( Transformer2DModel( attn_num_head_channels, @@ -1106,6 +1119,7 @@ def __init__( attention_type="default", output_scale_factor=1.0, add_upsample=True, + dual_cross_attention=False, ): super().__init__() resnets = [] @@ -1132,16 +1146,28 @@ def __init__( pre_norm=resnet_pre_norm, ) ) - attentions.append( - Transformer2DModel( - attn_num_head_channels, - out_channels // attn_num_head_channels, - in_channels=out_channels, - num_layers=1, - cross_attention_dim=cross_attention_dim, - norm_num_groups=resnet_groups, + if not dual_cross_attention: + attentions.append( + Transformer2DModel( + attn_num_head_channels, + out_channels // attn_num_head_channels, + in_channels=out_channels, + num_layers=1, + cross_attention_dim=cross_attention_dim, + norm_num_groups=resnet_groups, + ) + ) + else: + attentions.append( + DualTransformer2DModel( + attn_num_head_channels, + out_channels // attn_num_head_channels, + in_channels=out_channels, + num_layers=1, + cross_attention_dim=cross_attention_dim, + norm_num_groups=resnet_groups, + ) ) - ) self.attentions = nn.ModuleList(attentions) self.resnets = nn.ModuleList(resnets) diff --git a/src/diffusers/pipelines/versatile_diffusion/pipeline_versatile_diffusion.py b/src/diffusers/pipelines/versatile_diffusion/pipeline_versatile_diffusion.py index 88800417b14b..50192233de7a 100644 --- a/src/diffusers/pipelines/versatile_diffusion/pipeline_versatile_diffusion.py +++ b/src/diffusers/pipelines/versatile_diffusion/pipeline_versatile_diffusion.py @@ -1,3 +1,4 @@ +import inspect from typing import Any, Callable, Dict, List, Optional, Union import torch @@ -9,6 +10,7 @@ from ...pipeline_utils import DiffusionPipeline from ...schedulers import DDIMScheduler, LMSDiscreteScheduler, PNDMScheduler from ...utils import logging +from .pipeline_versatile_diffusion_dual_guided import VersatileDiffusionDualGuidedPipeline from .pipeline_versatile_diffusion_image_variation import VersatileDiffusionImageVariationPipeline from .pipeline_versatile_diffusion_text_to_image import VersatileDiffusionTextToImagePipeline @@ -77,10 +79,6 @@ def __init__( scheduler=scheduler, ) - @property - def components(self) -> Dict[str, Any]: - return {k: getattr(self, k) for k in self.config.keys() if not k.startswith("_")} - def enable_attention_slicing(self, slice_size: Optional[Union[str, int]] = "auto"): r""" Enable sliced attention computation. 
@@ -127,7 +125,9 @@ def image_variation( callback: Optional[Callable[[int, int, torch.FloatTensor], None]] = None, callback_steps: Optional[int] = 1, ): - return VersatileDiffusionImageVariationPipeline(**self.components)( + expected_components = inspect.signature(VersatileDiffusionImageVariationPipeline.__init__).parameters.keys() + components = {name: component for name, component in self.components.items() if name in expected_components} + return VersatileDiffusionImageVariationPipeline(**components)( image=image, height=height, width=width, @@ -162,7 +162,9 @@ def text_to_image( callback: Optional[Callable[[int, int, torch.FloatTensor], None]] = None, callback_steps: Optional[int] = 1, ): - return VersatileDiffusionTextToImagePipeline(**self.components)( + expected_components = inspect.signature(VersatileDiffusionTextToImagePipeline.__init__).parameters.keys() + components = {name: component for name, component in self.components.items() if name in expected_components} + return VersatileDiffusionTextToImagePipeline(**components)( prompt=prompt, height=height, width=width, @@ -178,3 +180,46 @@ def text_to_image( callback=callback, callback_steps=callback_steps, ) + + @torch.no_grad() + def dual_guided( + self, + first_prompt: Union[str, List[str], PIL.Image.Image, List[PIL.Image.Image]], + second_prompt: Union[str, List[str], PIL.Image.Image, List[PIL.Image.Image]], + prompt_mix_ratio: float = 0.5, + height: int = 512, + width: int = 512, + num_inference_steps: int = 50, + guidance_scale: float = 7.5, + num_images_per_prompt: Optional[int] = 1, + eta: float = 0.0, + generator: Optional[torch.Generator] = None, + latents: Optional[torch.FloatTensor] = None, + output_type: Optional[str] = "pil", + return_dict: bool = True, + callback: Optional[Callable[[int, int, torch.FloatTensor], None]] = None, + callback_steps: Optional[int] = 1, + ): + expected_components = inspect.signature(VersatileDiffusionDualGuidedPipeline.__init__).parameters.keys() + components = {name: component for name, component in self.components.items() if name in expected_components} + temp_pipeline = VersatileDiffusionDualGuidedPipeline(**components) + output = temp_pipeline( + first_prompt=first_prompt, + second_prompt=second_prompt, + prompt_mix_ratio=prompt_mix_ratio, + height=height, + width=width, + num_inference_steps=num_inference_steps, + guidance_scale=guidance_scale, + num_images_per_prompt=num_images_per_prompt, + eta=eta, + generator=generator, + latents=latents, + output_type=output_type, + return_dict=return_dict, + callback=callback, + callback_steps=callback_steps, + ) + temp_pipeline._revert_dual_attention() + + return output diff --git a/src/diffusers/pipelines/versatile_diffusion/pipeline_versatile_diffusion_dual_guided.py b/src/diffusers/pipelines/versatile_diffusion/pipeline_versatile_diffusion_dual_guided.py index 93ac157b2c2c..cb03877ef97e 100644 --- a/src/diffusers/pipelines/versatile_diffusion/pipeline_versatile_diffusion_dual_guided.py +++ b/src/diffusers/pipelines/versatile_diffusion/pipeline_versatile_diffusion_dual_guided.py @@ -13,7 +13,7 @@ # limitations under the License. 
import inspect -from typing import Callable, List, Optional, Union +from typing import Callable, List, Optional, Tuple, Union import numpy as np import torch @@ -29,6 +29,7 @@ from ...models import AutoencoderKL, UNet2DConditionModel from ...models.attention import DualTransformer2DModel, Transformer2DModel +from ...models.unet_2d_blocks import CrossAttnDownBlock2D, CrossAttnUpBlock2D, UNetMidBlock2DCrossAttn from ...pipeline_utils import DiffusionPipeline, ImagePipelineOutput from ...schedulers import DDIMScheduler, LMSDiscreteScheduler, PNDMScheduler from ...utils import is_accelerate_available, logging @@ -88,7 +89,18 @@ def __init__( scheduler=scheduler, ) - def convert_to_dual_attention(self, mix_ratio=0.5, condition_types=("image", "text")): + if "dual_cross_attention" not in self.image_unet.config or not self.image_unet.config.dual_cross_attention: + # if loading from a universal checkpoint rather than a saved dual-guided pipeline + self._convert_to_dual_attention() + if self.text_unet is not None: + # release the memory taken up by `text_unet` + self.register_modules(text_unet=None) + + def _convert_to_dual_attention(self): + """ + Replace image_unet's `Transformer2DModel` blocks with `DualTransformer2DModel` that contains transformer blocks + from both `image_unet` and `text_unet` + """ for name, module in self.image_unet.named_modules(): if isinstance(module, Transformer2DModel): parent_name, index = name.rsplit(".", 1) @@ -112,21 +124,22 @@ def convert_to_dual_attention(self, mix_ratio=0.5, condition_types=("image", "te activation_fn=config.activation_fn, num_embeds_ada_norm=config.num_embeds_ada_norm, ) - for i, type in enumerate(condition_types): - if type == "image": - dual_transformer.transformers[i] = image_transformer - else: - dual_transformer.transformers[i] = text_transformer + dual_transformer.transformers[0] = image_transformer + dual_transformer.transformers[1] = text_transformer - dual_transformer.mix_ratio = mix_ratio self.image_unet.get_submodule(parent_name)[index] = dual_transformer - def remove_dual_attention(self): + def _revert_dual_attention(self): + """ + Revert the image_unet `DualTransformer2DModel` blocks back to `Transformer2DModel` with image_unet weights Call + this function if you reuse `image_unet` in another pipeline, e.g. 
`VersatileDiffusionPipeline` + """ for name, module in self.image_unet.named_modules(): if isinstance(module, DualTransformer2DModel): parent_name, index = name.rsplit(".", 1) index = int(index) self.image_unet.get_submodule(parent_name)[index] = module.transformers[0] + self.image_unet.config.dual_cross_attention = False # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline.enable_xformers_memory_efficient_attention with unet->image_unet def enable_xformers_memory_efficient_attention(self): @@ -431,11 +444,19 @@ def prepare_latents(self, batch_size, num_channels_latents, height, width, dtype latents = latents * self.scheduler.init_noise_sigma return latents - def set_mix_ratio(self, mix_ratio): + def set_transformer_params(self, mix_ratio: float = 0.5, condition_types: Tuple = ("text", "image")): for name, module in self.image_unet.named_modules(): if isinstance(module, DualTransformer2DModel): module.mix_ratio = mix_ratio + for i, type in enumerate(condition_types): + if type == "text": + module.condition_lengths[i] = self.text_encoder.config.max_position_embeddings + module.transformer_index_for_condition[i] = 1 # use the second (text) transformer + else: + module.condition_lengths[i] = 257 + module.transformer_index_for_condition[i] = 0 # use the first (image) transformer + @torch.no_grad() def __call__( self, @@ -562,8 +583,7 @@ def __call__( extra_step_kwargs = self.prepare_extra_step_kwargs(generator, eta) # 7. Combine the attention blocks of the image and text UNets - self.convert_to_dual_attention(prompt_mix_ratio, prompt_types) - self.set_mix_ratio(prompt_mix_ratio) + self.set_transformer_params(prompt_mix_ratio, prompt_types) # 8. Denoising loop for i, t in enumerate(self.progress_bar(timesteps)): @@ -586,13 +606,10 @@ def __call__( if callback is not None and i % callback_steps == 0: callback(i, t, latents) - # 9. Return the image unet to its original state - self.remove_dual_attention() - - # 10. Post-processing + # 9. Post-processing image = self.decode_latents(latents) - # 11. Convert to PIL + # 10. 
Convert to PIL if output_type == "pil": image = self.numpy_to_pil(image) diff --git a/tests/pipelines/versatile_diffusion/test_versatile_diffusion_dual_guided.py b/tests/pipelines/versatile_diffusion/test_versatile_diffusion_dual_guided.py index 568f674338f0..a315c68b2768 100644 --- a/tests/pipelines/versatile_diffusion/test_versatile_diffusion_dual_guided.py +++ b/tests/pipelines/versatile_diffusion/test_versatile_diffusion_dual_guided.py @@ -77,7 +77,7 @@ def test_from_pretrained_save_pretrained(self): assert np.abs(image - new_image).sum() < 1e-5, "Models don't have the same forward pass" - def test_inference_image_variations(self): + def test_inference_dual_guided(self): pipe = VersatileDiffusionDualGuidedPipeline.from_pretrained("diffusers/vd-official-test") pipe.to(torch_device) pipe.set_progress_bar_config(disable=None) @@ -100,5 +100,5 @@ def test_inference_image_variations(self): image_slice = image[0, 253:256, 253:256, -1] assert image.shape == (1, 512, 512, 3) - expected_slice = np.array([0.1811, 0.0430, 0.0433, 0.1082, 0.0144, 0.0306, 0.0683, 0.0248, 0.0876]) + expected_slice = np.array([0.5727, 0.5625, 0.5617, 0.5703, 0.5530, 0.5620, 0.5864, 0.5742, 0.5665]) assert np.abs(image_slice.flatten() - expected_slice).max() < 1e-2 diff --git a/tests/pipelines/versatile_diffusion/test_versatile_diffusion_mega.py b/tests/pipelines/versatile_diffusion/test_versatile_diffusion_mega.py new file mode 100644 index 000000000000..df45266f4b33 --- /dev/null +++ b/tests/pipelines/versatile_diffusion/test_versatile_diffusion_mega.py @@ -0,0 +1,116 @@ +# coding=utf-8 +# Copyright 2022 HuggingFace Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import gc +import tempfile +import unittest + +import numpy as np +import torch + +from diffusers import VersatileDiffusionPipeline +from diffusers.utils.testing_utils import load_image, require_torch_gpu, slow, torch_device + +from ...test_pipelines_common import PipelineTesterMixin + + +torch.backends.cuda.matmul.allow_tf32 = False + + +class VersatileDiffusionMegaPipelineFastTests(PipelineTesterMixin, unittest.TestCase): + pass + + +@slow +@require_torch_gpu +class VersatileDiffusionMegaPipelineIntegrationTests(unittest.TestCase): + def tearDown(self): + # clean up the VRAM after each test + super().tearDown() + gc.collect() + torch.cuda.empty_cache() + + def test_from_pretrained_save_pretrained(self): + pipe = VersatileDiffusionPipeline.from_pretrained("diffusers/vd-official-test") + pipe.to(torch_device) + pipe.set_progress_bar_config(disable=None) + + generator = torch.Generator(device=torch_device).manual_seed(0) + image = pipe.dual_guided( + first_prompt="first prompt", + second_prompt="second prompt", + prompt_mix_ratio=0.75, + generator=generator, + guidance_scale=7.5, + num_inference_steps=2, + output_type="numpy", + ).images + + with tempfile.TemporaryDirectory() as tmpdirname: + pipe.save_pretrained(tmpdirname) + pipe = VersatileDiffusionPipeline.from_pretrained(tmpdirname) + pipe.to(torch_device) + pipe.set_progress_bar_config(disable=None) + + generator = generator.manual_seed(0) + new_image = pipe.dual_guided( + first_prompt="first prompt", + second_prompt="second prompt", + prompt_mix_ratio=0.75, + generator=generator, + guidance_scale=7.5, + num_inference_steps=2, + output_type="numpy", + ).images + + assert np.abs(image - new_image).sum() < 1e-5, "Models don't have the same forward pass" + + def test_inference_dual_guided_then_text_to_image(self): + pipe = VersatileDiffusionPipeline.from_pretrained("diffusers/vd-official-test") + pipe.to(torch_device) + pipe.set_progress_bar_config(disable=None) + + first_prompt = "cyberpunk 2077" + second_prompt = load_image( + "https://raw.githubusercontent.com/SHI-Labs/Versatile-Diffusion/master/assets/benz.jpg" + ) + generator = torch.Generator(device=torch_device).manual_seed(0) + image = pipe.dual_guided( + first_prompt=first_prompt, + second_prompt=second_prompt, + prompt_mix_ratio=0.75, + generator=generator, + guidance_scale=7.5, + num_inference_steps=50, + output_type="numpy", + ).images + + image_slice = image[0, 253:256, 253:256, -1] + + assert image.shape == (1, 512, 512, 3) + expected_slice = np.array([0.5727, 0.5625, 0.5617, 0.5703, 0.5530, 0.5620, 0.5864, 0.5742, 0.5665]) + assert np.abs(image_slice.flatten() - expected_slice).max() < 1e-2 + + prompt = "A painting of a squirrel eating a burger " + generator = torch.Generator(device=torch_device).manual_seed(0) + image = pipe.text_to_image( + prompt=prompt, generator=generator, guidance_scale=7.5, num_inference_steps=50, output_type="numpy" + ).images + + image_slice = image[0, 253:256, 253:256, -1] + + assert image.shape == (1, 512, 512, 3) + expected_slice = np.array([0.0657, 0.0529, 0.0455, 0.0802, 0.0570, 0.0179, 0.0267, 0.0483, 0.0769]) + assert np.abs(image_slice.flatten() - expected_slice).max() < 1e-2 From 008af3a95261e7f9a490175bc6b3de3e69849b70 Mon Sep 17 00:00:00 2001 From: Patrick von Platen Date: Wed, 23 Nov 2022 13:40:57 +0000 Subject: [PATCH 30/49] remove image to text --- ...eline_versatile_diffusion_image_to_text.py | 468 ------------------ 1 file changed, 468 deletions(-) delete mode 100644 
src/diffusers/pipelines/versatile_diffusion/pipeline_versatile_diffusion_image_to_text.py diff --git a/src/diffusers/pipelines/versatile_diffusion/pipeline_versatile_diffusion_image_to_text.py b/src/diffusers/pipelines/versatile_diffusion/pipeline_versatile_diffusion_image_to_text.py deleted file mode 100644 index 40c432f44f98..000000000000 --- a/src/diffusers/pipelines/versatile_diffusion/pipeline_versatile_diffusion_image_to_text.py +++ /dev/null @@ -1,468 +0,0 @@ -# Copyright 2022 The HuggingFace Team. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import inspect -from dataclasses import dataclass -from typing import Callable, List, Optional, Union - -import numpy as np -import torch -import torch.utils.checkpoint - -import PIL -from transformers import CLIPFeatureExtractor, CLIPVisionModelWithProjection, GPT2Tokenizer - -from ...models import AutoencoderKL, UNet2DConditionModel -from ...models.attention import Transformer2DModel -from ...pipeline_utils import BaseOutput, DiffusionPipeline -from ...schedulers import DDIMScheduler, LMSDiscreteScheduler, PNDMScheduler -from ...utils import is_accelerate_available, logging -from .modeling_gpt2_optimus import GPT2OptimusForLatentConnector -from .modeling_text_unet import UNetFlatConditionModel - - -logger = logging.get_logger(__name__) # pylint: disable=invalid-name - - -@dataclass -class TextPipelineOutput(BaseOutput): - """ - Output class for text generation pipelines. - - Args: - text (`List[str]` or `np.ndarray`) - List of generated text of length `batch_size` or a numpy array of tokens of shape `(batch_size, - num_tokens)`. - """ - - text: Union[List[str], np.ndarray] - - -class VersatileDiffusionImageToTextPipeline(DiffusionPipeline): - r""" - This model inherits from [`DiffusionPipeline`]. Check the superclass documentation for the generic methods the - library implements for all the pipelines (such as downloading or saving, running on a particular device, etc.) - - Parameters: - vqvae ([`VQModel`]): - Vector-quantized (VQ) Model to encode and decode images to and from latent representations. - bert ([`LDMBertModel`]): - Text-encoder model based on [BERT](https://huggingface.co/docs/transformers/model_doc/bert) architecture. - tokenizer (`transformers.BertTokenizer`): - Tokenizer of class - [BertTokenizer](https://huggingface.co/docs/transformers/model_doc/bert#transformers.BertTokenizer). - unet ([`UNet2DConditionModel`]): Conditional U-Net architecture to denoise the encoded image latents. - scheduler ([`SchedulerMixin`]): - A scheduler to be used in combination with `unet` to denoise the encoded image latents. Can be one of - [`DDIMScheduler`], [`LMSDiscreteScheduler`], or [`PNDMScheduler`]. 
- """ - image_feature_extractor: CLIPFeatureExtractor - image_encoder: CLIPVisionModelWithProjection - image_unet: UNet2DConditionModel - text_unet: UNetFlatConditionModel - vae: AutoencoderKL - scheduler: Union[DDIMScheduler, PNDMScheduler, LMSDiscreteScheduler] - - def __init__( - self, - image_feature_extractor: CLIPFeatureExtractor, - image_encoder: CLIPVisionModelWithProjection, - image_unet: UNet2DConditionModel, - text_unet: UNetFlatConditionModel, - vae: AutoencoderKL, - scheduler: Union[DDIMScheduler, PNDMScheduler, LMSDiscreteScheduler], - ): - super().__init__() - self.register_modules( - image_feature_extractor=image_feature_extractor, - image_encoder=image_encoder, - image_unet=image_unet, - text_unet=text_unet, - vae=vae, - scheduler=scheduler, - ) - - self.text_vae_decoder = GPT2OptimusForLatentConnector.from_pretrained("fusing/gpt2_optimus") - self.text_vae_tokenizer = GPT2Tokenizer.from_pretrained("fusing/gpt2_optimus") - - def swap_unet_attention_blocks(self): - for name, module in self.image_unet.named_modules(): - if isinstance(module, Transformer2DModel): - parent_name, index = name.rsplit(".", 1) - index = int(index) - self.image_unet.get_submodule(parent_name)[index], self.text_unet.get_submodule(parent_name)[index] = ( - self.text_unet.get_submodule(parent_name)[index], - self.image_unet.get_submodule(parent_name)[index], - ) - - # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline.enable_xformers_memory_efficient_attention with unet->image_unet - def enable_xformers_memory_efficient_attention(self): - r""" - Enable memory efficient attention as implemented in xformers. - - When this option is enabled, you should observe lower GPU memory usage and a potential speed up at inference - time. Speed up at training time is not guaranteed. - - Warning: When Memory Efficient Attention and Sliced attention are both enabled, the Memory Efficient Attention - is used. - """ - self.image_unet.set_use_memory_efficient_attention_xformers(True) - - # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline.disable_xformers_memory_efficient_attention with unet->image_unet - def disable_xformers_memory_efficient_attention(self): - r""" - Disable memory efficient attention as implemented in xformers. - """ - self.image_unet.set_use_memory_efficient_attention_xformers(False) - - # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline.enable_attention_slicing with unet->image_unet - def enable_attention_slicing(self, slice_size: Optional[Union[str, int]] = "auto"): - r""" - Enable sliced attention computation. - - When this option is enabled, the attention module will split the input tensor in slices, to compute attention - in several steps. This is useful to save some memory in exchange for a small speed decrease. - - Args: - slice_size (`str` or `int`, *optional*, defaults to `"auto"`): - When `"auto"`, halves the input to the attention heads, so attention will be computed in two steps. If - a number is provided, uses as many slices as `attention_head_dim // slice_size`. In this case, - `attention_head_dim` must be a multiple of `slice_size`. 
- """ - if slice_size == "auto": - # half the attention head size is usually a good trade-off between - # speed and memory - slice_size = self.image_unet.config.attention_head_dim // 2 - self.image_unet.set_attention_slice(slice_size) - - # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline.disable_attention_slicing - def disable_attention_slicing(self): - r""" - Disable sliced attention computation. If `enable_attention_slicing` was previously invoked, this method will go - back to computing attention in one step. - """ - # set slice_size = `None` to disable `attention slicing` - self.enable_attention_slicing(None) - - def enable_sequential_cpu_offload(self, gpu_id=0): - r""" - Offloads all models to CPU using accelerate, significantly reducing memory usage. When called, unet, - text_encoder, vae and safety checker have their state dicts saved to CPU and then are moved to a - `torch.device('meta') and loaded to GPU only when their specific submodule has its `forward` method called. - """ - if is_accelerate_available(): - from accelerate import cpu_offload - else: - raise ImportError("Please install accelerate via `pip install accelerate`") - - device = torch.device(f"cuda:{gpu_id}") - - for cpu_offloaded_model in [self.image_unet, self.text_unet, self.text_encoder, self.vae]: - if cpu_offloaded_model is not None: - cpu_offload(cpu_offloaded_model, device) - - @property - # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline._execution_device with unet->image_unet - def _execution_device(self): - r""" - Returns the device on which the pipeline's models will be executed. After calling - `pipeline.enable_sequential_cpu_offload()` the execution device can only be inferred from Accelerate's module - hooks. - """ - if self.device != torch.device("meta") or not hasattr(self.image_unet, "_hf_hook"): - return self.device - for module in self.image_unet.modules(): - if ( - hasattr(module, "_hf_hook") - and hasattr(module._hf_hook, "execution_device") - and module._hf_hook.execution_device is not None - ): - return torch.device(module._hf_hook.execution_device) - return self.device - - def _encode_prompt(self, prompt, device, num_images_per_prompt, do_classifier_free_guidance, negative_prompt): - r""" - Encodes the prompt into text encoder hidden states. - - Args: - prompt (`str` or `list(int)`): - prompt to be encoded - device: (`torch.device`): - torch device - num_images_per_prompt (`int`): - number of images that should be generated per prompt - do_classifier_free_guidance (`bool`): - whether to use classifier free guidance or not - negative_prompt (`str` or `List[str]`): - The prompt or prompts not to guide the image generation. Ignored when not using guidance (i.e., ignored - if `guidance_scale` is less than `1`). 
- """ - - def normalize_embeddings(encoder_output): - embeds = self.image_encoder.vision_model.post_layernorm(encoder_output.last_hidden_state) - embeds = self.image_encoder.visual_projection(embeds) - embeds_pooled = embeds[:, 0:1] - embeds = embeds / torch.norm(embeds_pooled, dim=-1, keepdim=True) - return embeds - - batch_size = len(prompt) if isinstance(prompt, list) else 1 - - # get prompt text embeddings - # prompt = [(np.asarray(prompt) / 255)] - image_input = self.image_feature_extractor(images=prompt, return_tensors="pt") - image_embeddings = self.image_encoder(image_input.pixel_values.to(self.device)) - image_embeddings = normalize_embeddings(image_embeddings) - - # duplicate image embeddings for each generation per prompt, using mps friendly method - bs_embed, seq_len, _ = image_embeddings.shape - image_embeddings = image_embeddings.repeat(1, num_images_per_prompt, 1) - image_embeddings = image_embeddings.view(bs_embed * num_images_per_prompt, seq_len, -1) - - # get unconditional embeddings for classifier free guidance - if do_classifier_free_guidance: - uncond_images: List[str] - if negative_prompt is None: - uncond_images = [np.zeros((512, 512, 3)) + 0.5] * batch_size - elif type(prompt) is not type(negative_prompt): - raise TypeError( - f"`negative_prompt` should be the same type to `prompt`, but got {type(negative_prompt)} !=" - f" {type(prompt)}." - ) - elif isinstance(negative_prompt, PIL.Image.Image): - uncond_images = [negative_prompt] - elif batch_size != len(negative_prompt): - raise ValueError( - f"`negative_prompt`: {negative_prompt} has batch size {len(negative_prompt)}, but `prompt`:" - f" {prompt} has batch size {batch_size}. Please make sure that passed `negative_prompt` matches" - " the batch size of `prompt`." - ) - else: - uncond_images = negative_prompt - - uncond_images = self.image_feature_extractor(images=uncond_images, return_tensors="pt") - uncond_embeddings = self.image_encoder(uncond_images.pixel_values.to(self.device)) - uncond_embeddings = normalize_embeddings(uncond_embeddings) - - # duplicate unconditional embeddings for each generation per prompt, using mps friendly method - seq_len = uncond_embeddings.shape[1] - uncond_embeddings = uncond_embeddings.repeat(1, num_images_per_prompt, 1) - uncond_embeddings = uncond_embeddings.view(batch_size * num_images_per_prompt, seq_len, -1) - - # For classifier free guidance, we need to do two forward passes. - # Here we concatenate the unconditional and conditional embeddings into a single batch - # to avoid doing two forward passes - image_embeddings = torch.cat([uncond_embeddings, image_embeddings]) - - return image_embeddings - - def decode_latents(self, latents): - latents = latents.reshape(latents.shape[:-2]) - self.text_vae_decoder = self.text_vae_decoder.to(self._execution_device) - bos_token = self.text_vae_tokenizer.bos_token_id - output = self.text_vae_decoder.generate(bos_token_id=bos_token, past=latents) - return output - - # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline.prepare_extra_step_kwargs - def prepare_extra_step_kwargs(self, generator, eta): - # prepare extra kwargs for the scheduler step, since not all schedulers have the same signature - # eta (η) is only used with the DDIMScheduler, it will be ignored for other schedulers. 
- # eta corresponds to η in DDIM paper: https://arxiv.org/abs/2010.02502 - # and should be between [0, 1] - - accepts_eta = "eta" in set(inspect.signature(self.scheduler.step).parameters.keys()) - extra_step_kwargs = {} - if accepts_eta: - extra_step_kwargs["eta"] = eta - - # check if the scheduler accepts generator - accepts_generator = "generator" in set(inspect.signature(self.scheduler.step).parameters.keys()) - if accepts_generator: - extra_step_kwargs["generator"] = generator - return extra_step_kwargs - - def check_inputs(self, image, callback_steps): - if not isinstance(image, PIL.Image.Image) and not isinstance(image, torch.Tensor): - raise ValueError(f"`image` has to be of type `PIL.Image.Image` or `torch.Tensor` but is {type(image)}") - - if (callback_steps is None) or ( - callback_steps is not None and (not isinstance(callback_steps, int) or callback_steps <= 0) - ): - raise ValueError( - f"`callback_steps` has to be a positive integer but is {callback_steps} of type" - f" {type(callback_steps)}." - ) - - def prepare_latents(self, batch_size, num_channels_latents, dtype, device, generator, latents=None): - shape = (batch_size, num_channels_latents, 1, 1) - if latents is None: - if device.type == "mps": - # randn does not work reproducibly on mps - latents = torch.randn(shape, generator=generator, device="cpu", dtype=dtype).to(device) - else: - latents = torch.randn(shape, generator=generator, device=device, dtype=dtype) - else: - if latents.shape != shape: - raise ValueError(f"Unexpected latents shape, got {latents.shape}, expected {shape}") - latents = latents.to(device) - - # scale the initial noise by the standard deviation required by the scheduler - latents = latents * self.scheduler.init_noise_sigma - return latents - - @torch.no_grad() - def __call__( - self, - image: Union[PIL.Image.Image, List[PIL.Image.Image], torch.Tensor], - num_inference_steps: int = 50, - guidance_scale: float = 7.5, - negative_prompt: Optional[Union[str, List[str]]] = None, - num_images_per_prompt: Optional[int] = 1, - eta: float = 0.0, - generator: Optional[torch.Generator] = None, - latents: Optional[torch.FloatTensor] = None, - output_type: Optional[str] = "str", - return_dict: bool = True, - callback: Optional[Callable[[int, int, torch.FloatTensor], None]] = None, - callback_steps: Optional[int] = 1, - **kwargs, - ): - r""" - Function invoked when calling the pipeline for generation. - - Args: - image (`PIL.Image.Image`, `List[PIL.Image.Image]` or `torch.Tensor`): - The image prompt or prompts to guide the image generation. - num_inference_steps (`int`, *optional*, defaults to 50): - The number of denoising steps. More denoising steps usually lead to a higher quality image at the - expense of slower inference. - guidance_scale (`float`, *optional*, defaults to 7.5): - Guidance scale as defined in [Classifier-Free Diffusion Guidance](https://arxiv.org/abs/2207.12598). - `guidance_scale` is defined as `w` of equation 2. of [Imagen - Paper](https://arxiv.org/pdf/2205.11487.pdf). Guidance scale is enabled by setting `guidance_scale > - 1`. Higher guidance scale encourages to generate images that are closely linked to the text `prompt`, - usually at the expense of lower image quality. - negative_prompt (`str` or `List[str]`, *optional*): - The prompt or prompts not to guide the image generation. Ignored when not using guidance (i.e., ignored - if `guidance_scale` is less than `1`). - num_images_per_prompt (`int`, *optional*, defaults to 1): - The number of images to generate per prompt. 
- eta (`float`, *optional*, defaults to 0.0): - Corresponds to parameter eta (η) in the DDIM paper: https://arxiv.org/abs/2010.02502. Only applies to - [`schedulers.DDIMScheduler`], will be ignored for others. - generator (`torch.Generator`, *optional*): - A [torch generator](https://pytorch.org/docs/stable/generated/torch.Generator.html) to make generation - deterministic. - latents (`torch.FloatTensor`, *optional*): - Pre-generated noisy latents, sampled from a Gaussian distribution, to be used as inputs for image - generation. Can be used to tweak the same generation with different prompts. If not provided, a latents - tensor will ge generated by sampling using the supplied random `generator`. - output_type (`str`, *optional*, defaults to `"pil"`): - The output format of the generate image. Choose between - [PIL](https://pillow.readthedocs.io/en/stable/): `PIL.Image.Image` or `np.array`. - return_dict (`bool`, *optional*, defaults to `True`): - Whether or not to return a [`~pipelines.stable_diffusion.StableDiffusionPipelineOutput`] instead of a - plain tuple. - callback (`Callable`, *optional*): - A function that will be called every `callback_steps` steps during inference. The function will be - called with the following arguments: `callback(step: int, timestep: int, latents: torch.FloatTensor)`. - callback_steps (`int`, *optional*, defaults to 1): - The frequency at which the `callback` function will be called. If not specified, the callback will be - called at every step. - - Returns: - [`~pipelines.stable_diffusion.StableDiffusionPipelineOutput`] or `tuple`: - [`~pipelines.stable_diffusion.StableDiffusionPipelineOutput`] if `return_dict` is True, otherwise a `tuple. - When returning a tuple, the first element is a list with the generated images, and the second element is a - list of `bool`s denoting whether the corresponding generated image likely represents "not-safe-for-work" - (nsfw) content, according to the `safety_checker`. - """ - - # 1. Check inputs. Raise error if not correct - self.check_inputs(image, callback_steps) - - # 2. Define call parameters - batch_size = 1 if isinstance(image, PIL.Image.Image) else len(image) - device = self._execution_device - # here `guidance_scale` is defined analog to the guidance weight `w` of equation (2) - # of the Imagen paper: https://arxiv.org/pdf/2205.11487.pdf . `guidance_scale = 1` - # corresponds to doing no classifier free guidance. - do_classifier_free_guidance = guidance_scale > 1.0 - - # 3. Encode input prompt - image_embeddings = self._encode_prompt( - image, device, num_images_per_prompt, do_classifier_free_guidance, negative_prompt - ) - - # 4. Prepare timesteps - self.scheduler.set_timesteps(num_inference_steps, device=device) - timesteps = self.scheduler.timesteps - - # 5. Prepare latent variables - num_channels_latents = self.text_unet.in_channels[0] - latents = self.prepare_latents( - batch_size * num_images_per_prompt, - num_channels_latents, - image_embeddings.dtype, - device, - generator, - latents, - ) - - # 6. Prepare extra step kwargs. - extra_step_kwargs = self.prepare_extra_step_kwargs(generator, eta) - - # 7. Swap the attention blocks between the image and text UNets - self.swap_unet_attention_blocks() - - # 8. 
Denoising loop - for i, t in enumerate(self.progress_bar(timesteps)): - # expand the latents if we are doing classifier free guidance - latent_model_input = torch.cat([latents] * 2) if do_classifier_free_guidance else latents - latent_model_input = self.scheduler.scale_model_input(latent_model_input, t) - - print("latent_model_input", latent_model_input.abs().sum()) - print("timestep", t) - - # predict the noise residual - noise_pred = self.text_unet(latent_model_input, t, encoder_hidden_states=image_embeddings).sample - # perform guidance - if do_classifier_free_guidance: - noise_pred_uncond, noise_pred_text = noise_pred.chunk(2) - noise_pred = noise_pred_uncond + guidance_scale * (noise_pred_text - noise_pred_uncond) - - print("e_t", noise_pred.abs().sum()) - print("e_t[3,3]", noise_pred[0, :5, 0, 0]) - - # compute the previous noisy sample x_t -> x_t-1 - latents = self.scheduler.step(noise_pred, t, latents, **extra_step_kwargs).prev_sample - print("latents", latents.abs().sum()) - - # call the callback, if provided - if callback is not None and i % callback_steps == 0: - callback(i, t, latents) - - # 9. Swap the attention blocks backs in case the UNets are reused in another pipeline - self.swap_unet_attention_blocks() - - # 10. Post-processing - text = self.decode_latents(latents) - - # 11. Convert to strings - if output_type == "str": - text = self.text_vae_tokenizer.batch_decode(text) - - if not return_dict: - return (text,) - - return TextPipelineOutput(text=text) From ff8188a686965afd6c6308e870e4173271281720 Mon Sep 17 00:00:00 2001 From: Patrick von Platen Date: Wed, 23 Nov 2022 13:48:53 +0000 Subject: [PATCH 31/49] up --- .../pipelines/versatile_diffusion/__init__.py | 1 - .../test_versatile_diffusion_image_to_text.py | 57 ------------------- 2 files changed, 58 deletions(-) delete mode 100644 tests/pipelines/versatile_diffusion/test_versatile_diffusion_image_to_text.py diff --git a/src/diffusers/pipelines/versatile_diffusion/__init__.py b/src/diffusers/pipelines/versatile_diffusion/__init__.py index d2ea09aaad7d..972422f13d96 100644 --- a/src/diffusers/pipelines/versatile_diffusion/__init__.py +++ b/src/diffusers/pipelines/versatile_diffusion/__init__.py @@ -8,4 +8,3 @@ from .pipeline_versatile_diffusion_dual_guided import VersatileDiffusionDualGuidedPipeline from .pipeline_versatile_diffusion_image_to_text import VersatileDiffusionImageToTextPipeline from .pipeline_versatile_diffusion_image_variation import VersatileDiffusionImageVariationPipeline - from .pipeline_versatile_diffusion_text_to_image import VersatileDiffusionTextToImagePipeline diff --git a/tests/pipelines/versatile_diffusion/test_versatile_diffusion_image_to_text.py b/tests/pipelines/versatile_diffusion/test_versatile_diffusion_image_to_text.py deleted file mode 100644 index f03535692e01..000000000000 --- a/tests/pipelines/versatile_diffusion/test_versatile_diffusion_image_to_text.py +++ /dev/null @@ -1,57 +0,0 @@ -# coding=utf-8 -# Copyright 2022 HuggingFace Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -import unittest - -import numpy as np -import torch - -from diffusers import VersatileDiffusionImageToTextPipeline, DDIMScheduler -from diffusers.utils.testing_utils import load_image, require_torch_gpu, slow, torch_device - -from ...test_pipelines_common import PipelineTesterMixin - - -torch.backends.cuda.matmul.allow_tf32 = False - - -class VersatileDiffusionImageToTextPipelineFastTests(PipelineTesterMixin, unittest.TestCase): - pass - - -@slow -@require_torch_gpu -class VersatileDiffusionImageToTextPipelineIntegrationTests(unittest.TestCase): - def test_inference_image_to_text(self): - pipe = VersatileDiffusionImageToTextPipeline.from_pretrained("diffusers/vd-official-test") - pipe.to(torch_device) - pipe.set_progress_bar_config(disable=None) - - image_prompt = load_image( - "https://raw.githubusercontent.com/SHI-Labs/Versatile-Diffusion/master/assets/boy_and_girl.jpg" - ) - # generator = torch.Generator(device=torch_device).manual_seed(0) - np.random.seed(8) - torch.manual_seed(108) - pipe.scheduler = DDIMScheduler.from_config(pipe.scheduler.config) - text = pipe( - image=image_prompt, - # generator=generator, - guidance_scale=7.5, - num_inference_steps=50, - output_type="str", - ).text - - assert text == "Corret me" From 1bded5a31bc11a7a95857610b8403218db9578a0 Mon Sep 17 00:00:00 2001 From: Patrick von Platen Date: Wed, 23 Nov 2022 13:50:40 +0000 Subject: [PATCH 32/49] uP --- .../modeling_gpt2_optimus.py | 345 ------ .../versatile_diffusion/modeling_text_unet.py | 1036 ----------------- 2 files changed, 1381 deletions(-) delete mode 100644 src/diffusers/pipelines/versatile_diffusion/modeling_gpt2_optimus.py delete mode 100644 src/diffusers/pipelines/versatile_diffusion/modeling_text_unet.py diff --git a/src/diffusers/pipelines/versatile_diffusion/modeling_gpt2_optimus.py b/src/diffusers/pipelines/versatile_diffusion/modeling_gpt2_optimus.py deleted file mode 100644 index 647eb841fc80..000000000000 --- a/src/diffusers/pipelines/versatile_diffusion/modeling_gpt2_optimus.py +++ /dev/null @@ -1,345 +0,0 @@ -import math - -import torch -from torch import nn - -from transformers.modeling_outputs import CausalLMOutputWithCrossAttentions -from transformers.models.gpt2.modeling_gpt2 import GPT2MLP, GPT2PreTrainedModel -from transformers.pytorch_utils import Conv1D - - -class GPT2OptimusAttention(nn.Module): - def __init__(self, nx, n_ctx, config, scale=False): - super().__init__() - self.output_attentions = config.output_attentions - - n_state = nx # in Attention: n_state=768 (nx=n_embd) - # [switch nx => n_state from Block to Attention to keep identical to TF implem] - assert n_state % config.n_head == 0 - self.register_buffer("bias", torch.tril(torch.ones(n_ctx, n_ctx)).view(1, 1, n_ctx, n_ctx)) - self.n_head = config.n_head - self.split_size = n_state - self.scale = scale - - self.c_attn = Conv1D(n_state * 3, nx) - self.c_proj = Conv1D(n_state, nx) - self.attn_dropout = nn.Dropout(config.attn_pdrop) - self.resid_dropout = nn.Dropout(config.resid_pdrop) - self.pruned_heads = set() - - def _attn(self, q, k, v, attention_mask=None, head_mask=None): - w = torch.matmul(q, k) - if self.scale: - w = w / math.sqrt(v.size(-1)) - nd, ns = w.size(-2), w.size(-1) - b = self.bias[:, :, ns - nd : ns, :ns] - w = w * b - 1e4 * (1 - b) - - if attention_mask is not None: - # Apply the attention mask - w = w + attention_mask - - w = nn.Softmax(dim=-1)(w) - w = self.attn_dropout(w) - - # Mask heads if we want to - if head_mask is not None: - w = w * head_mask - - outputs = [torch.matmul(w, v)] - if 
self.output_attentions: - outputs.append(w) - return outputs - - def merge_heads(self, x): - x = x.permute(0, 2, 1, 3).contiguous() - new_x_shape = x.size()[:-2] + (x.size(-2) * x.size(-1),) - return x.view(*new_x_shape) # in Tensorflow implem: fct merge_states - - def split_heads(self, x, k=False): - new_x_shape = x.size()[:-1] + (self.n_head, x.size(-1) // self.n_head) - x = x.view(*new_x_shape) # in Tensorflow implem: fct split_states - if k: - return x.permute(0, 2, 3, 1) # (batch, head, head_features, seq_length) - else: - return x.permute(0, 2, 1, 3) # (batch, head, seq_length, head_features) - - def forward(self, x, layer_past=None, attention_mask=None, head_mask=None): - x = self.c_attn(x) - query, key, value = x.split(self.split_size, dim=2) - query = self.split_heads(query) - key = self.split_heads(key, k=True) - value = self.split_heads(value) - - if layer_past is not None: - past_key, past_value = layer_past[0], layer_past[1] # transpose back cf below - - past_key = self.split_heads(past_key, k=True) - past_value = self.split_heads(past_value) - # pdb.set_trace() - key = torch.cat((past_key, key), dim=-1) - value = torch.cat((past_value, value), dim=-2) - present = torch.stack((key.transpose(-2, -1), value)) # transpose to have same shapes for stacking - - attn_outputs = self._attn(query, key, value, attention_mask, head_mask) - a = attn_outputs[0] - - a = self.merge_heads(a) - a = self.c_proj(a) - a = self.resid_dropout(a) - - outputs = [a, present] + attn_outputs[1:] - return outputs # a, present, (attentions) - - -class GPT2OptimusBlock(nn.Module): - def __init__(self, config): - super().__init__() - nx = config.n_embd - self.ln_1 = nn.LayerNorm(nx, eps=config.layer_norm_epsilon) - self.attn = GPT2OptimusAttention(nx, config.n_ctx, config, scale=True) - self.ln_2 = nn.LayerNorm(nx, eps=config.layer_norm_epsilon) - self.mlp = GPT2MLP(4 * nx, config) - - def forward(self, x, layer_past=None, attention_mask=None, head_mask=None): - output_attn = self.attn( - self.ln_1(x), layer_past=layer_past, attention_mask=attention_mask, head_mask=head_mask - ) - a = output_attn[0] # output_attn: a, present, (attentions) - - x = x + a - m = self.mlp(self.ln_2(x)) - x = x + m - - outputs = [x] + output_attn[1:] - return outputs # x, present, (attentions) - - -class GPT2OptimusModel(GPT2PreTrainedModel): - def __init__(self, config, latent_as_gpt_emb, latent_as_gpt_memory, latent_size): - super().__init__(config) - self.latent_as_gpt_emb = latent_as_gpt_emb - self.latent_as_gpt_memory = latent_as_gpt_memory - self.latent_size = latent_size - self.output_hidden_states = config.output_hidden_states - self.output_attentions = config.output_attentions - - self.wte = nn.Embedding(config.vocab_size, config.n_embd) - self.wpe = nn.Embedding(config.n_positions, config.n_embd) - self.drop = nn.Dropout(config.embd_pdrop) - self.h = nn.ModuleList([GPT2OptimusBlock(config) for i in range(config.n_layer)]) - self.ln_f = nn.LayerNorm(config.n_embd, eps=config.layer_norm_epsilon) - - self.linear = nn.Linear( - self.latent_size, config.hidden_size * config.n_layer, bias=False - ) # different latent vector for each layer - self.linear_emb = nn.Linear( - self.latent_size, config.hidden_size, bias=False - ) # share the same latent vector as the embeddings - - # Initialize weights and apply final processing - self.post_init() - - def forward( - self, - input_ids, - past=None, - attention_mask=None, - token_type_ids=None, - position_ids=None, - head_mask=None, - ): - if past is None: - past_length = 0 - past = 
[None] * len(self.h) - else: - if self.latent_as_gpt_emb: - past_emb = self.linear_emb(past) # used as embeddings to add on other three embeddings - - if self.latent_as_gpt_memory: - past = self.linear(past) - - # different latent vectors for each layer - past_split = torch.split(past.unsqueeze(1), self.config.hidden_size, dim=2) - past = list(zip(past_split, past_split)) - past_length = 1 # past[0][0].size(-2) - else: - past_length = 0 - past = [None] * len(self.h) - - if position_ids is None: - position_ids = torch.arange( - past_length, input_ids.size(-1) + past_length, dtype=torch.long, device=input_ids.device - ) - position_ids = position_ids.unsqueeze(0).expand_as(input_ids) - - # Attention mask. - if attention_mask is not None: - # We create a 3D attention mask from a 2D tensor mask. - # Sizes are [batch_size, 1, 1, to_seq_length] - # So we can broadcast to [batch_size, num_heads, from_seq_length, to_seq_length] - # this attention mask is more simple than the triangular masking of causal attention - # used in OpenAI GPT, we just need to prepare the broadcast dimension here. - attention_mask = attention_mask.unsqueeze(1).unsqueeze(2) - - # Since attention_mask is 1.0 for positions we want to attend and 0.0 for - # masked positions, this operation will create a tensor which is 0.0 for - # positions we want to attend and -10000.0 for masked positions. - # Since we are adding it to the raw scores before the softmax, this is - # effectively the same as removing these entirely. - attention_mask = attention_mask.to(dtype=next(self.parameters()).dtype) # fp16 compatibility - attention_mask = (1.0 - attention_mask) * -10000.0 - - # Prepare head mask if needed - # 1.0 in head_mask indicate we keep the head - # attention_probs has shape bsz x n_heads x N x N - # head_mask has shape n_layer x batch x n_heads x N x N - if head_mask is not None: - if head_mask.dim() == 1: - head_mask = head_mask.unsqueeze(0).unsqueeze(0).unsqueeze(-1).unsqueeze(-1) - head_mask = head_mask.expand(self.config.n_layer, -1, -1, -1, -1) - elif head_mask.dim() == 2: - head_mask = ( - head_mask.unsqueeze(1).unsqueeze(-1).unsqueeze(-1) - ) # We can specify head_mask for each layer - head_mask = head_mask.to( - dtype=next(self.parameters()).dtype - ) # switch to fload if need + fp16 compatibility - else: - head_mask = [None] * self.config.n_layer - - input_shape = input_ids.size() - input_ids = input_ids.view(-1, input_ids.size(-1)) - position_ids = position_ids.view(-1, position_ids.size(-1)) - - inputs_embeds = self.wte(input_ids) - position_embeds = self.wpe(position_ids) - if token_type_ids is not None: - token_type_ids = token_type_ids.view(-1, token_type_ids.size(-1)) - token_type_embeds = self.wte(token_type_ids) - else: - token_type_embeds = 0 - - hidden_states = inputs_embeds + position_embeds + token_type_embeds - if self.latent_as_gpt_emb: - hidden_states = hidden_states + past_emb.unsqueeze(1) - - hidden_states = self.drop(hidden_states) - - output_shape = input_shape + (hidden_states.size(-1),) - - presents = () - all_attentions = [] - all_hidden_states = () - for i, (block, layer_past) in enumerate(zip(self.h, past)): - if self.output_hidden_states: - all_hidden_states = all_hidden_states + (hidden_states.view(*output_shape),) - - outputs = block( - hidden_states, layer_past=layer_past, attention_mask=attention_mask, head_mask=head_mask[i] - ) - - hidden_states, present = outputs[:2] - presents = presents + (present,) - - if self.output_attentions: - all_attentions.append(outputs[2]) - - hidden_states = 
self.ln_f(hidden_states) - - hidden_states = hidden_states.view(*output_shape) - # Add last hidden state - if self.output_hidden_states: - all_hidden_states = all_hidden_states + (hidden_states,) - - outputs = (hidden_states, presents) - if self.output_hidden_states: - outputs = outputs + (all_hidden_states,) - if self.output_attentions: - # let the number of heads free (-1) so we can extract attention even after head pruning - attention_output_shape = input_shape[:-1] + (-1,) + all_attentions[0].shape[-2:] - all_attentions = tuple(t.view(*attention_output_shape) for t in all_attentions) - outputs = outputs + (all_attentions,) - - return outputs # last hidden state, presents, (all hidden_states), (attentions) - - -class GPT2OptimusForLatentConnector(GPT2PreTrainedModel): - def __init__(self, config): - super().__init__(config) - self.latent_as_gpt_emb = True - self.latent_as_gpt_memory = True - self.latent_size = getattr(config, "latent_size", 32) - self.transformer = GPT2OptimusModel( - config, - latent_as_gpt_emb=self.latent_as_gpt_emb, - latent_as_gpt_memory=self.latent_as_gpt_memory, - latent_size=self.latent_size, - ) - self.lm_head = nn.Linear(config.n_embd, config.vocab_size, bias=False) - self.init_weights() - self.tie_weights() - - # Initialize weights and apply final processing - self.post_init() - self.tie_weights() - - def _tie_or_clone_weights(self, first_module, second_module): - """Tie or clone module weights depending of weither we are using TorchScript or not""" - if self.config.torchscript: - first_module.weight = nn.Parameter(second_module.weight.clone()) - else: - first_module.weight = second_module.weight - - if hasattr(first_module, "bias") and first_module.bias is not None: - first_module.bias.data = torch.nn.functional.pad( - first_module.bias.data, - (0, first_module.weight.shape[0] - first_module.bias.shape[0]), - "constant", - 0, - ) - - def tie_weights(self): - """Make sure we are sharing the input and output embeddings. - Export to TorchScript can't handle parameter sharing so we are cloning them instead. 
- """ - self._tie_or_clone_weights(self.lm_head, self.transformer.wte) - - def forward( - self, - input_ids, - past_key_values=None, - attention_mask=None, - token_type_ids=None, - position_ids=None, - head_mask=None, - output_attentions=None, - output_hidden_states=None, - return_dict=True, - ): - transformer_outputs = self.transformer( - input_ids, - past=past_key_values, - attention_mask=attention_mask, - token_type_ids=token_type_ids, - position_ids=position_ids, - head_mask=head_mask, - ) - hidden_states = transformer_outputs[0] - - lm_logits = self.lm_head(hidden_states) - - return CausalLMOutputWithCrossAttentions( - loss=None, - logits=lm_logits, - past_key_values=past_key_values, - hidden_states=None, - attentions=None, - cross_attentions=None, - ) - - def prepare_inputs_for_generation(self, input_ids, past, **kwargs): - return { - "input_ids": input_ids, - "past_key_values": past, - } diff --git a/src/diffusers/pipelines/versatile_diffusion/modeling_text_unet.py b/src/diffusers/pipelines/versatile_diffusion/modeling_text_unet.py deleted file mode 100644 index a1129a4c7c45..000000000000 --- a/src/diffusers/pipelines/versatile_diffusion/modeling_text_unet.py +++ /dev/null @@ -1,1036 +0,0 @@ -from typing import Optional, Tuple, Union - -import numpy as np -import torch -import torch.nn as nn - -from ...configuration_utils import ConfigMixin, register_to_config -from ...modeling_utils import ModelMixin -from ...models.attention import Transformer2DModel -from ...models.embeddings import TimestepEmbedding, Timesteps -from ...models.unet_2d_condition import UNet2DConditionOutput -from ...utils import logging - - -logger = logging.get_logger(__name__) # pylint: disable=invalid-name - - -def get_down_block( - down_block_type, - num_layers, - in_channels, - out_channels, - temb_channels, - add_downsample, - resnet_eps, - resnet_act_fn, - attn_num_head_channels, - resnet_groups=None, - cross_attention_dim=None, - downsample_padding=None, -): - down_block_type = down_block_type[7:] if down_block_type.startswith("UNetRes") else down_block_type - if down_block_type == "DownBlockFlat": - return DownBlockFlat( - num_layers=num_layers, - in_channels=in_channels, - out_channels=out_channels, - temb_channels=temb_channels, - add_downsample=add_downsample, - resnet_eps=resnet_eps, - resnet_act_fn=resnet_act_fn, - resnet_groups=resnet_groups, - downsample_padding=downsample_padding, - ) - elif down_block_type == "CrossAttnDownBlockFlat": - if cross_attention_dim is None: - raise ValueError("cross_attention_dim must be specified for CrossAttnDownBlockFlat") - return CrossAttnDownBlockFlat( - num_layers=num_layers, - in_channels=in_channels, - out_channels=out_channels, - temb_channels=temb_channels, - add_downsample=add_downsample, - resnet_eps=resnet_eps, - resnet_act_fn=resnet_act_fn, - resnet_groups=resnet_groups, - downsample_padding=downsample_padding, - cross_attention_dim=cross_attention_dim, - attn_num_head_channels=attn_num_head_channels, - ) - raise ValueError(f"{down_block_type} is not supported.") - - -def get_up_block( - up_block_type, - num_layers, - in_channels, - out_channels, - prev_output_channel, - temb_channels, - add_upsample, - resnet_eps, - resnet_act_fn, - attn_num_head_channels, - resnet_groups=None, - cross_attention_dim=None, -): - up_block_type = up_block_type[7:] if up_block_type.startswith("UNetRes") else up_block_type - if up_block_type == "UpBlockFlat": - return UpBlockFlat( - num_layers=num_layers, - in_channels=in_channels, - out_channels=out_channels, - 
prev_output_channel=prev_output_channel, - temb_channels=temb_channels, - add_upsample=add_upsample, - resnet_eps=resnet_eps, - resnet_act_fn=resnet_act_fn, - resnet_groups=resnet_groups, - ) - elif up_block_type == "CrossAttnUpBlockFlat": - if cross_attention_dim is None: - raise ValueError("cross_attention_dim must be specified for CrossAttnUpBlockFlat") - return CrossAttnUpBlockFlat( - num_layers=num_layers, - in_channels=in_channels, - out_channels=out_channels, - prev_output_channel=prev_output_channel, - temb_channels=temb_channels, - add_upsample=add_upsample, - resnet_eps=resnet_eps, - resnet_act_fn=resnet_act_fn, - resnet_groups=resnet_groups, - cross_attention_dim=cross_attention_dim, - attn_num_head_channels=attn_num_head_channels, - ) - raise ValueError(f"{up_block_type} is not supported.") - - -# Copied from diffusers.models.unet_2d_condition.UNet2DConditionModel with UNet2DConditionModel->UNetFlatConditionModel, nn.Conv2d->LinearMultiDim, Block2D->BlockFlat -class UNetFlatConditionModel(ModelMixin, ConfigMixin): - r""" - UNetFlatConditionModel is a conditional 2D UNet model that takes in a noisy sample, conditional state, and a - timestep and returns sample shaped output. - - This model inherits from [`ModelMixin`]. Check the superclass documentation for the generic methods the library - implements for all the models (such as downloading or saving, etc.) - - Parameters: - sample_size (`int`, *optional*): The size of the input sample. - in_channels (`int`, *optional*, defaults to 4): The number of channels in the input sample. - out_channels (`int`, *optional*, defaults to 4): The number of channels in the output. - center_input_sample (`bool`, *optional*, defaults to `False`): Whether to center the input sample. - flip_sin_to_cos (`bool`, *optional*, defaults to `True`): - Whether to flip the sin to cos in the time embedding. - freq_shift (`int`, *optional*, defaults to 0): The frequency shift to apply to the time embedding. - down_block_types (`Tuple[str]`, *optional*, defaults to `("CrossAttnDownBlockFlat", "CrossAttnDownBlockFlat", "CrossAttnDownBlockFlat", "DownBlockFlat")`): - The tuple of downsample blocks to use. - up_block_types (`Tuple[str]`, *optional*, defaults to `("UpBlockFlat", "CrossAttnUpBlockFlat", "CrossAttnUpBlockFlat", "CrossAttnUpBlockFlat",)`): - The tuple of upsample blocks to use. - block_out_channels (`Tuple[int]`, *optional*, defaults to `(320, 640, 1280, 1280)`): - The tuple of output channels for each block. - layers_per_block (`int`, *optional*, defaults to 2): The number of layers per block. - downsample_padding (`int`, *optional*, defaults to 1): The padding to use for the downsampling convolution. - mid_block_scale_factor (`float`, *optional*, defaults to 1.0): The scale factor to use for the mid block. - act_fn (`str`, *optional*, defaults to `"silu"`): The activation function to use. - norm_num_groups (`int`, *optional*, defaults to 32): The number of groups to use for the normalization. - norm_eps (`float`, *optional*, defaults to 1e-5): The epsilon to use for the normalization. - cross_attention_dim (`int`, *optional*, defaults to 1280): The dimension of the cross attention features. - attention_head_dim (`int`, *optional*, defaults to 8): The dimension of the attention heads. 
- """ - - _supports_gradient_checkpointing = True - - @register_to_config - def __init__( - self, - sample_size: Optional[int] = None, - in_channels: int = 4, - out_channels: int = 4, - center_input_sample: bool = False, - flip_sin_to_cos: bool = True, - freq_shift: int = 0, - down_block_types: Tuple[str] = ( - "CrossAttnDownBlockFlat", - "CrossAttnDownBlockFlat", - "CrossAttnDownBlockFlat", - "DownBlockFlat", - ), - up_block_types: Tuple[str] = ( - "UpBlockFlat", - "CrossAttnUpBlockFlat", - "CrossAttnUpBlockFlat", - "CrossAttnUpBlockFlat", - ), - block_out_channels: Tuple[int] = (320, 640, 1280, 1280), - layers_per_block: int = 2, - downsample_padding: int = 1, - mid_block_scale_factor: float = 1, - act_fn: str = "silu", - norm_num_groups: int = 32, - norm_eps: float = 1e-5, - cross_attention_dim: int = 1280, - attention_head_dim: int = 8, - ): - super().__init__() - - self.sample_size = sample_size - time_embed_dim = block_out_channels[0] * 4 - - # input - self.conv_in = LinearMultiDim(in_channels, block_out_channels[0], kernel_size=3, padding=(1, 1)) - - # time - self.time_proj = Timesteps(block_out_channels[0], flip_sin_to_cos, freq_shift) - timestep_input_dim = block_out_channels[0] - - self.time_embedding = TimestepEmbedding(timestep_input_dim, time_embed_dim) - - self.down_blocks = nn.ModuleList([]) - self.mid_block = None - self.up_blocks = nn.ModuleList([]) - - # down - output_channel = block_out_channels[0] - for i, down_block_type in enumerate(down_block_types): - input_channel = output_channel - output_channel = block_out_channels[i] - is_final_block = i == len(block_out_channels) - 1 - - down_block = get_down_block( - down_block_type, - num_layers=layers_per_block, - in_channels=input_channel, - out_channels=output_channel, - temb_channels=time_embed_dim, - add_downsample=not is_final_block, - resnet_eps=norm_eps, - resnet_act_fn=act_fn, - resnet_groups=norm_num_groups, - cross_attention_dim=cross_attention_dim, - attn_num_head_channels=attention_head_dim, - downsample_padding=downsample_padding, - ) - self.down_blocks.append(down_block) - - # mid - self.mid_block = UNetMidBlockFlatCrossAttn( - in_channels=block_out_channels[-1], - temb_channels=time_embed_dim, - resnet_eps=norm_eps, - resnet_act_fn=act_fn, - output_scale_factor=mid_block_scale_factor, - resnet_time_scale_shift="default", - cross_attention_dim=cross_attention_dim, - attn_num_head_channels=attention_head_dim, - resnet_groups=norm_num_groups, - ) - - # count how many layers upsample the images - self.num_upsamplers = 0 - - # up - reversed_block_out_channels = list(reversed(block_out_channels)) - output_channel = reversed_block_out_channels[0] - for i, up_block_type in enumerate(up_block_types): - is_final_block = i == len(block_out_channels) - 1 - - prev_output_channel = output_channel - output_channel = reversed_block_out_channels[i] - input_channel = reversed_block_out_channels[min(i + 1, len(block_out_channels) - 1)] - - # add upsample block for all BUT final layer - if not is_final_block: - add_upsample = True - self.num_upsamplers += 1 - else: - add_upsample = False - - up_block = get_up_block( - up_block_type, - num_layers=layers_per_block + 1, - in_channels=input_channel, - out_channels=output_channel, - prev_output_channel=prev_output_channel, - temb_channels=time_embed_dim, - add_upsample=add_upsample, - resnet_eps=norm_eps, - resnet_act_fn=act_fn, - resnet_groups=norm_num_groups, - cross_attention_dim=cross_attention_dim, - attn_num_head_channels=attention_head_dim, - ) - 
self.up_blocks.append(up_block) - prev_output_channel = output_channel - - # out - self.conv_norm_out = nn.GroupNorm(num_channels=block_out_channels[0], num_groups=norm_num_groups, eps=norm_eps) - self.conv_act = nn.SiLU() - self.conv_out = LinearMultiDim(block_out_channels[0], out_channels, kernel_size=3, padding=1) - - def set_attention_slice(self, slice_size): - if slice_size is not None and self.config.attention_head_dim % slice_size != 0: - raise ValueError( - f"Make sure slice_size {slice_size} is a divisor of " - f"the number of heads used in cross_attention {self.config.attention_head_dim}" - ) - if slice_size is not None and slice_size > self.config.attention_head_dim: - raise ValueError( - f"Chunk_size {slice_size} has to be smaller or equal to " - f"the number of heads used in cross_attention {self.config.attention_head_dim}" - ) - - for block in self.down_blocks: - if hasattr(block, "attentions") and block.attentions is not None: - block.set_attention_slice(slice_size) - - self.mid_block.set_attention_slice(slice_size) - - for block in self.up_blocks: - if hasattr(block, "attentions") and block.attentions is not None: - block.set_attention_slice(slice_size) - - def set_use_memory_efficient_attention_xformers(self, use_memory_efficient_attention_xformers: bool): - for block in self.down_blocks: - if hasattr(block, "attentions") and block.attentions is not None: - block.set_use_memory_efficient_attention_xformers(use_memory_efficient_attention_xformers) - - self.mid_block.set_use_memory_efficient_attention_xformers(use_memory_efficient_attention_xformers) - - for block in self.up_blocks: - if hasattr(block, "attentions") and block.attentions is not None: - block.set_use_memory_efficient_attention_xformers(use_memory_efficient_attention_xformers) - - def _set_gradient_checkpointing(self, module, value=False): - if isinstance(module, (CrossAttnDownBlockFlat, DownBlockFlat, CrossAttnUpBlockFlat, UpBlockFlat)): - module.gradient_checkpointing = value - - def forward( - self, - sample: torch.FloatTensor, - timestep: Union[torch.Tensor, float, int], - encoder_hidden_states: torch.Tensor, - return_dict: bool = True, - ) -> Union[UNet2DConditionOutput, Tuple]: - r""" - Args: - sample (`torch.FloatTensor`): (batch, channel, height, width) noisy inputs tensor - timestep (`torch.FloatTensor` or `float` or `int`): (batch) timesteps - encoder_hidden_states (`torch.FloatTensor`): - (batch_size, sequence_length, hidden_size) encoder hidden states - return_dict (`bool`, *optional*, defaults to `True`): - Whether or not to return a [`models.unet_2d_condition.UNet2DConditionOutput`] instead of a plain tuple. - - Returns: - [`~models.unet_2d_condition.UNet2DConditionOutput`] or `tuple`: - [`~models.unet_2d_condition.UNet2DConditionOutput`] if `return_dict` is True, otherwise a `tuple`. When - returning a tuple, the first element is the sample tensor. - """ - # By default samples have to be AT least a multiple of the overall upsampling factor. - # The overall upsampling factor is equal to 2 ** (# num of upsampling layears). - # However, the upsampling interpolation output size can be forced to fit any upsampling size - # on the fly if necessary. 
- default_overall_up_factor = 2**self.num_upsamplers - - # upsample size should be forwarded when sample is not a multiple of `default_overall_up_factor` - forward_upsample_size = False - upsample_size = None - - if any(s % default_overall_up_factor != 0 for s in sample.shape[-2:]): - logger.info("Forward upsample size to force interpolation output size.") - forward_upsample_size = True - - # 0. center input if necessary - if self.config.center_input_sample: - sample = 2 * sample - 1.0 - - # 1. time - timesteps = timestep - if not torch.is_tensor(timesteps): - # TODO: this requires sync between CPU and GPU. So try to pass timesteps as tensors if you can - timesteps = torch.tensor([timesteps], dtype=torch.long, device=sample.device) - elif torch.is_tensor(timesteps) and len(timesteps.shape) == 0: - timesteps = timesteps[None].to(sample.device) - - # broadcast to batch dimension in a way that's compatible with ONNX/Core ML - timesteps = timesteps.expand(sample.shape[0]) - - t_emb = self.time_proj(timesteps) - - # timesteps does not contain any weights and will always return f32 tensors - # but time_embedding might actually be running in fp16. so we need to cast here. - # there might be better ways to encapsulate this. - t_emb = t_emb.to(dtype=self.dtype) - emb = self.time_embedding(t_emb) - - # 2. pre-process - sample = self.conv_in(sample) - - # 3. down - down_block_res_samples = (sample,) - for downsample_block in self.down_blocks: - if hasattr(downsample_block, "attentions") and downsample_block.attentions is not None: - sample, res_samples = downsample_block( - hidden_states=sample, - temb=emb, - encoder_hidden_states=encoder_hidden_states, - ) - else: - sample, res_samples = downsample_block(hidden_states=sample, temb=emb) - - down_block_res_samples += res_samples - - # 4. mid - sample = self.mid_block(sample, emb, encoder_hidden_states=encoder_hidden_states) - - # 5. up - for i, upsample_block in enumerate(self.up_blocks): - is_final_block = i == len(self.up_blocks) - 1 - - res_samples = down_block_res_samples[-len(upsample_block.resnets) :] - down_block_res_samples = down_block_res_samples[: -len(upsample_block.resnets)] - - # if we have not reached the final block and need to forward the - # upsample size, we do it here - if not is_final_block and forward_upsample_size: - upsample_size = down_block_res_samples[-1].shape[2:] - - if hasattr(upsample_block, "attentions") and upsample_block.attentions is not None: - sample = upsample_block( - hidden_states=sample, - temb=emb, - res_hidden_states_tuple=res_samples, - encoder_hidden_states=encoder_hidden_states, - upsample_size=upsample_size, - ) - else: - sample = upsample_block( - hidden_states=sample, temb=emb, res_hidden_states_tuple=res_samples, upsample_size=upsample_size - ) - # 6. 
post-process - sample = self.conv_norm_out(sample) - sample = self.conv_act(sample) - sample = self.conv_out(sample) - - if not return_dict: - return (sample,) - - return UNet2DConditionOutput(sample=sample) - - -class LinearMultiDim(nn.Linear): - def __init__(self, in_features, out_features=None, second_dim=4, *args, **kwargs): - in_features = [in_features, second_dim, 1] if isinstance(in_features, int) else list(in_features) - if out_features is None: - out_features = in_features - out_features = [out_features, second_dim, 1] if isinstance(out_features, int) else list(out_features) - self.in_features_multidim = in_features - self.out_features_multidim = out_features - super().__init__(np.array(in_features).prod(), np.array(out_features).prod()) - - def forward(self, input_tensor, *args, **kwargs): - shape = input_tensor.shape - n_dim = len(self.in_features_multidim) - input_tensor = input_tensor.reshape(*shape[0:-n_dim], self.in_features) - output_tensor = super().forward(input_tensor) - output_tensor = output_tensor.view(*shape[0:-n_dim], *self.out_features_multidim) - return output_tensor - - -class ResnetBlockFlat(nn.Module): - def __init__( - self, - *, - in_channels, - out_channels=None, - dropout=0.0, - temb_channels=512, - groups=32, - groups_out=None, - pre_norm=True, - eps=1e-6, - time_embedding_norm="default", - use_in_shortcut=None, - second_dim=4, - **kwargs, - ): - super().__init__() - self.pre_norm = pre_norm - self.pre_norm = True - - in_channels = [in_channels, second_dim, 1] if isinstance(in_channels, int) else list(in_channels) - self.in_channels_prod = np.array(in_channels).prod() - self.channels_multidim = in_channels - - if out_channels is not None: - out_channels = [out_channels, second_dim, 1] if isinstance(out_channels, int) else list(out_channels) - out_channels_prod = np.array(out_channels).prod() - self.out_channels_multidim = out_channels - else: - out_channels_prod = self.in_channels_prod - self.out_channels_multidim = self.channels_multidim - self.time_embedding_norm = time_embedding_norm - - if groups_out is None: - groups_out = groups - - self.norm1 = torch.nn.GroupNorm(num_groups=groups, num_channels=self.in_channels_prod, eps=eps, affine=True) - self.conv1 = torch.nn.Conv2d(self.in_channels_prod, out_channels_prod, kernel_size=1, padding=0) - - if temb_channels is not None: - self.time_emb_proj = torch.nn.Linear(temb_channels, out_channels_prod) - else: - self.time_emb_proj = None - - self.norm2 = torch.nn.GroupNorm(num_groups=groups_out, num_channels=out_channels_prod, eps=eps, affine=True) - self.dropout = torch.nn.Dropout(dropout) - self.conv2 = torch.nn.Conv2d(out_channels_prod, out_channels_prod, kernel_size=1, padding=0) - - self.nonlinearity = nn.SiLU() - - self.use_in_shortcut = ( - self.in_channels_prod != out_channels_prod if use_in_shortcut is None else use_in_shortcut - ) - - self.conv_shortcut = None - if self.use_in_shortcut: - self.conv_shortcut = torch.nn.Conv2d( - self.in_channels_prod, out_channels_prod, kernel_size=1, stride=1, padding=0 - ) - - def forward(self, input_tensor, temb): - shape = input_tensor.shape - n_dim = len(self.channels_multidim) - input_tensor = input_tensor.reshape(*shape[0:-n_dim], self.in_channels_prod, 1, 1) - input_tensor = input_tensor.view(-1, self.in_channels_prod, 1, 1) - - hidden_states = input_tensor - - hidden_states = self.norm1(hidden_states) - hidden_states = self.nonlinearity(hidden_states) - hidden_states = self.conv1(hidden_states) - - if temb is not None: - temb = 
self.time_emb_proj(self.nonlinearity(temb))[:, :, None, None] - hidden_states = hidden_states + temb - - hidden_states = self.norm2(hidden_states) - hidden_states = self.nonlinearity(hidden_states) - - hidden_states = self.dropout(hidden_states) - hidden_states = self.conv2(hidden_states) - - if self.conv_shortcut is not None: - input_tensor = self.conv_shortcut(input_tensor) - - output_tensor = input_tensor + hidden_states - - output_tensor = output_tensor.view(*shape[0:-n_dim], -1) - output_tensor = output_tensor.view(*shape[0:-n_dim], *self.out_channels_multidim) - - return output_tensor - - -# Copied from diffusers.models.unet_2d_blocks.DownBlock2D with DownBlock2D->DownBlockFlat, ResnetBlock2D->ResnetBlockFlat, Downsample2D->LinearMultiDim -class DownBlockFlat(nn.Module): - def __init__( - self, - in_channels: int, - out_channels: int, - temb_channels: int, - dropout: float = 0.0, - num_layers: int = 1, - resnet_eps: float = 1e-6, - resnet_time_scale_shift: str = "default", - resnet_act_fn: str = "swish", - resnet_groups: int = 32, - resnet_pre_norm: bool = True, - output_scale_factor=1.0, - add_downsample=True, - downsample_padding=1, - ): - super().__init__() - resnets = [] - - for i in range(num_layers): - in_channels = in_channels if i == 0 else out_channels - resnets.append( - ResnetBlockFlat( - in_channels=in_channels, - out_channels=out_channels, - temb_channels=temb_channels, - eps=resnet_eps, - groups=resnet_groups, - dropout=dropout, - time_embedding_norm=resnet_time_scale_shift, - non_linearity=resnet_act_fn, - output_scale_factor=output_scale_factor, - pre_norm=resnet_pre_norm, - ) - ) - - self.resnets = nn.ModuleList(resnets) - - if add_downsample: - self.downsamplers = nn.ModuleList( - [ - LinearMultiDim( - out_channels, use_conv=True, out_channels=out_channels, padding=downsample_padding, name="op" - ) - ] - ) - else: - self.downsamplers = None - - self.gradient_checkpointing = False - - def forward(self, hidden_states, temb=None): - output_states = () - - for resnet in self.resnets: - if self.training and self.gradient_checkpointing: - - def create_custom_forward(module): - def custom_forward(*inputs): - return module(*inputs) - - return custom_forward - - hidden_states = torch.utils.checkpoint.checkpoint(create_custom_forward(resnet), hidden_states, temb) - else: - hidden_states = resnet(hidden_states, temb) - - output_states += (hidden_states,) - - if self.downsamplers is not None: - for downsampler in self.downsamplers: - hidden_states = downsampler(hidden_states) - - output_states += (hidden_states,) - - return hidden_states, output_states - - -# Copied from diffusers.models.unet_2d_blocks.CrossAttnDownBlock2D with CrossAttnDownBlock2D->CrossAttnDownBlockFlat, ResnetBlock2D->ResnetBlockFlat, Downsample2D->LinearMultiDim -class CrossAttnDownBlockFlat(nn.Module): - def __init__( - self, - in_channels: int, - out_channels: int, - temb_channels: int, - dropout: float = 0.0, - num_layers: int = 1, - resnet_eps: float = 1e-6, - resnet_time_scale_shift: str = "default", - resnet_act_fn: str = "swish", - resnet_groups: int = 32, - resnet_pre_norm: bool = True, - attn_num_head_channels=1, - cross_attention_dim=1280, - attention_type="default", - output_scale_factor=1.0, - downsample_padding=1, - add_downsample=True, - ): - super().__init__() - resnets = [] - attentions = [] - - self.attention_type = attention_type - self.attn_num_head_channels = attn_num_head_channels - - for i in range(num_layers): - in_channels = in_channels if i == 0 else out_channels - resnets.append( - 
ResnetBlockFlat( - in_channels=in_channels, - out_channels=out_channels, - temb_channels=temb_channels, - eps=resnet_eps, - groups=resnet_groups, - dropout=dropout, - time_embedding_norm=resnet_time_scale_shift, - non_linearity=resnet_act_fn, - output_scale_factor=output_scale_factor, - pre_norm=resnet_pre_norm, - ) - ) - attentions.append( - Transformer2DModel( - attn_num_head_channels, - out_channels // attn_num_head_channels, - in_channels=out_channels, - num_layers=1, - cross_attention_dim=cross_attention_dim, - norm_num_groups=resnet_groups, - ) - ) - self.attentions = nn.ModuleList(attentions) - self.resnets = nn.ModuleList(resnets) - - if add_downsample: - self.downsamplers = nn.ModuleList( - [ - LinearMultiDim( - out_channels, use_conv=True, out_channels=out_channels, padding=downsample_padding, name="op" - ) - ] - ) - else: - self.downsamplers = None - - self.gradient_checkpointing = False - - def set_attention_slice(self, slice_size): - if slice_size is not None and self.attn_num_head_channels % slice_size != 0: - raise ValueError( - f"Make sure slice_size {slice_size} is a divisor of " - f"the number of heads used in cross_attention {self.attn_num_head_channels}" - ) - if slice_size is not None and slice_size > self.attn_num_head_channels: - raise ValueError( - f"Chunk_size {slice_size} has to be smaller or equal to " - f"the number of heads used in cross_attention {self.attn_num_head_channels}" - ) - - for attn in self.attentions: - attn._set_attention_slice(slice_size) - - def set_use_memory_efficient_attention_xformers(self, use_memory_efficient_attention_xformers: bool): - for attn in self.attentions: - attn._set_use_memory_efficient_attention_xformers(use_memory_efficient_attention_xformers) - - def forward(self, hidden_states, temb=None, encoder_hidden_states=None): - output_states = () - - for resnet, attn in zip(self.resnets, self.attentions): - if self.training and self.gradient_checkpointing: - - def create_custom_forward(module, return_dict=None): - def custom_forward(*inputs): - if return_dict is not None: - return module(*inputs, return_dict=return_dict) - else: - return module(*inputs) - - return custom_forward - - hidden_states = torch.utils.checkpoint.checkpoint(create_custom_forward(resnet), hidden_states, temb) - hidden_states = torch.utils.checkpoint.checkpoint( - create_custom_forward(attn, return_dict=False), hidden_states, encoder_hidden_states - )[0] - else: - hidden_states = resnet(hidden_states, temb) - hidden_states = attn(hidden_states, encoder_hidden_states=encoder_hidden_states).sample - - output_states += (hidden_states,) - - if self.downsamplers is not None: - for downsampler in self.downsamplers: - hidden_states = downsampler(hidden_states) - - output_states += (hidden_states,) - - return hidden_states, output_states - - -# Copied from diffusers.models.unet_2d_blocks.UpBlock2D with UpBlock2D->UpBlockFlat, ResnetBlock2D->ResnetBlockFlat, Upsample2D->LinearMultiDim -class UpBlockFlat(nn.Module): - def __init__( - self, - in_channels: int, - prev_output_channel: int, - out_channels: int, - temb_channels: int, - dropout: float = 0.0, - num_layers: int = 1, - resnet_eps: float = 1e-6, - resnet_time_scale_shift: str = "default", - resnet_act_fn: str = "swish", - resnet_groups: int = 32, - resnet_pre_norm: bool = True, - output_scale_factor=1.0, - add_upsample=True, - ): - super().__init__() - resnets = [] - - for i in range(num_layers): - res_skip_channels = in_channels if (i == num_layers - 1) else out_channels - resnet_in_channels = 
prev_output_channel if i == 0 else out_channels - - resnets.append( - ResnetBlockFlat( - in_channels=resnet_in_channels + res_skip_channels, - out_channels=out_channels, - temb_channels=temb_channels, - eps=resnet_eps, - groups=resnet_groups, - dropout=dropout, - time_embedding_norm=resnet_time_scale_shift, - non_linearity=resnet_act_fn, - output_scale_factor=output_scale_factor, - pre_norm=resnet_pre_norm, - ) - ) - - self.resnets = nn.ModuleList(resnets) - - if add_upsample: - self.upsamplers = nn.ModuleList([LinearMultiDim(out_channels, use_conv=True, out_channels=out_channels)]) - else: - self.upsamplers = None - - self.gradient_checkpointing = False - - def forward(self, hidden_states, res_hidden_states_tuple, temb=None, upsample_size=None): - for resnet in self.resnets: - # pop res hidden states - res_hidden_states = res_hidden_states_tuple[-1] - res_hidden_states_tuple = res_hidden_states_tuple[:-1] - hidden_states = torch.cat([hidden_states, res_hidden_states], dim=1) - - if self.training and self.gradient_checkpointing: - - def create_custom_forward(module): - def custom_forward(*inputs): - return module(*inputs) - - return custom_forward - - hidden_states = torch.utils.checkpoint.checkpoint(create_custom_forward(resnet), hidden_states, temb) - else: - hidden_states = resnet(hidden_states, temb) - - if self.upsamplers is not None: - for upsampler in self.upsamplers: - hidden_states = upsampler(hidden_states, upsample_size) - - return hidden_states - - -# Copied from diffusers.models.unet_2d_blocks.CrossAttnUpBlock2D with CrossAttnUpBlock2D->CrossAttnUpBlockFlat, ResnetBlock2D->ResnetBlockFlat, Upsample2D->LinearMultiDim -class CrossAttnUpBlockFlat(nn.Module): - def __init__( - self, - in_channels: int, - out_channels: int, - prev_output_channel: int, - temb_channels: int, - dropout: float = 0.0, - num_layers: int = 1, - resnet_eps: float = 1e-6, - resnet_time_scale_shift: str = "default", - resnet_act_fn: str = "swish", - resnet_groups: int = 32, - resnet_pre_norm: bool = True, - attn_num_head_channels=1, - cross_attention_dim=1280, - attention_type="default", - output_scale_factor=1.0, - add_upsample=True, - ): - super().__init__() - resnets = [] - attentions = [] - - self.attention_type = attention_type - self.attn_num_head_channels = attn_num_head_channels - - for i in range(num_layers): - res_skip_channels = in_channels if (i == num_layers - 1) else out_channels - resnet_in_channels = prev_output_channel if i == 0 else out_channels - - resnets.append( - ResnetBlockFlat( - in_channels=resnet_in_channels + res_skip_channels, - out_channels=out_channels, - temb_channels=temb_channels, - eps=resnet_eps, - groups=resnet_groups, - dropout=dropout, - time_embedding_norm=resnet_time_scale_shift, - non_linearity=resnet_act_fn, - output_scale_factor=output_scale_factor, - pre_norm=resnet_pre_norm, - ) - ) - attentions.append( - Transformer2DModel( - attn_num_head_channels, - out_channels // attn_num_head_channels, - in_channels=out_channels, - num_layers=1, - cross_attention_dim=cross_attention_dim, - norm_num_groups=resnet_groups, - ) - ) - self.attentions = nn.ModuleList(attentions) - self.resnets = nn.ModuleList(resnets) - - if add_upsample: - self.upsamplers = nn.ModuleList([LinearMultiDim(out_channels, use_conv=True, out_channels=out_channels)]) - else: - self.upsamplers = None - - self.gradient_checkpointing = False - - def set_attention_slice(self, slice_size): - if slice_size is not None and self.attn_num_head_channels % slice_size != 0: - raise ValueError( - f"Make sure 
slice_size {slice_size} is a divisor of " - f"the number of heads used in cross_attention {self.attn_num_head_channels}" - ) - if slice_size is not None and slice_size > self.attn_num_head_channels: - raise ValueError( - f"Chunk_size {slice_size} has to be smaller or equal to " - f"the number of heads used in cross_attention {self.attn_num_head_channels}" - ) - - for attn in self.attentions: - attn._set_attention_slice(slice_size) - - self.gradient_checkpointing = False - - def set_use_memory_efficient_attention_xformers(self, use_memory_efficient_attention_xformers: bool): - for attn in self.attentions: - attn._set_use_memory_efficient_attention_xformers(use_memory_efficient_attention_xformers) - - def forward( - self, - hidden_states, - res_hidden_states_tuple, - temb=None, - encoder_hidden_states=None, - upsample_size=None, - ): - for resnet, attn in zip(self.resnets, self.attentions): - # pop res hidden states - res_hidden_states = res_hidden_states_tuple[-1] - res_hidden_states_tuple = res_hidden_states_tuple[:-1] - hidden_states = torch.cat([hidden_states, res_hidden_states], dim=1) - - if self.training and self.gradient_checkpointing: - - def create_custom_forward(module, return_dict=None): - def custom_forward(*inputs): - if return_dict is not None: - return module(*inputs, return_dict=return_dict) - else: - return module(*inputs) - - return custom_forward - - hidden_states = torch.utils.checkpoint.checkpoint(create_custom_forward(resnet), hidden_states, temb) - hidden_states = torch.utils.checkpoint.checkpoint( - create_custom_forward(attn, return_dict=False), hidden_states, encoder_hidden_states - )[0] - else: - hidden_states = resnet(hidden_states, temb) - hidden_states = attn(hidden_states, encoder_hidden_states=encoder_hidden_states).sample - - if self.upsamplers is not None: - for upsampler in self.upsamplers: - hidden_states = upsampler(hidden_states, upsample_size) - - return hidden_states - - -# Copied from diffusers.models.unet_2d_blocks.UNetMidBlock2DCrossAttn with UNetMidBlock2DCrossAttn->UNetMidBlockFlatCrossAttn, ResnetBlock2D->ResnetBlockFlat -class UNetMidBlockFlatCrossAttn(nn.Module): - def __init__( - self, - in_channels: int, - temb_channels: int, - dropout: float = 0.0, - num_layers: int = 1, - resnet_eps: float = 1e-6, - resnet_time_scale_shift: str = "default", - resnet_act_fn: str = "swish", - resnet_groups: int = 32, - resnet_pre_norm: bool = True, - attn_num_head_channels=1, - attention_type="default", - output_scale_factor=1.0, - cross_attention_dim=1280, - **kwargs, - ): - super().__init__() - - self.attention_type = attention_type - self.attn_num_head_channels = attn_num_head_channels - resnet_groups = resnet_groups if resnet_groups is not None else min(in_channels // 4, 32) - - # there is always at least one resnet - resnets = [ - ResnetBlockFlat( - in_channels=in_channels, - out_channels=in_channels, - temb_channels=temb_channels, - eps=resnet_eps, - groups=resnet_groups, - dropout=dropout, - time_embedding_norm=resnet_time_scale_shift, - non_linearity=resnet_act_fn, - output_scale_factor=output_scale_factor, - pre_norm=resnet_pre_norm, - ) - ] - attentions = [] - - for _ in range(num_layers): - attentions.append( - Transformer2DModel( - attn_num_head_channels, - in_channels // attn_num_head_channels, - in_channels=in_channels, - num_layers=1, - cross_attention_dim=cross_attention_dim, - norm_num_groups=resnet_groups, - ) - ) - resnets.append( - ResnetBlockFlat( - in_channels=in_channels, - out_channels=in_channels, - temb_channels=temb_channels, - 
eps=resnet_eps, - groups=resnet_groups, - dropout=dropout, - time_embedding_norm=resnet_time_scale_shift, - non_linearity=resnet_act_fn, - output_scale_factor=output_scale_factor, - pre_norm=resnet_pre_norm, - ) - ) - - self.attentions = nn.ModuleList(attentions) - self.resnets = nn.ModuleList(resnets) - - def set_attention_slice(self, slice_size): - if slice_size is not None and self.attn_num_head_channels % slice_size != 0: - raise ValueError( - f"Make sure slice_size {slice_size} is a divisor of " - f"the number of heads used in cross_attention {self.attn_num_head_channels}" - ) - if slice_size is not None and slice_size > self.attn_num_head_channels: - raise ValueError( - f"Chunk_size {slice_size} has to be smaller or equal to " - f"the number of heads used in cross_attention {self.attn_num_head_channels}" - ) - - for attn in self.attentions: - attn._set_attention_slice(slice_size) - - def set_use_memory_efficient_attention_xformers(self, use_memory_efficient_attention_xformers: bool): - for attn in self.attentions: - attn._set_use_memory_efficient_attention_xformers(use_memory_efficient_attention_xformers) - - def forward(self, hidden_states, temb=None, encoder_hidden_states=None): - hidden_states = self.resnets[0](hidden_states, temb) - for attn, resnet in zip(self.attentions, self.resnets[1:]): - hidden_states = attn(hidden_states, encoder_hidden_states).sample - hidden_states = resnet(hidden_states, temb) - - return hidden_states From af8a378d4f0a6950d0e463fccd14aa717a7bc5b4 Mon Sep 17 00:00:00 2001 From: Patrick von Platen Date: Wed, 23 Nov 2022 14:02:27 +0000 Subject: [PATCH 33/49] fix --- src/diffusers/pipelines/versatile_diffusion/__init__.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/src/diffusers/pipelines/versatile_diffusion/__init__.py b/src/diffusers/pipelines/versatile_diffusion/__init__.py index 972422f13d96..c3c39ff37e36 100644 --- a/src/diffusers/pipelines/versatile_diffusion/__init__.py +++ b/src/diffusers/pipelines/versatile_diffusion/__init__.py @@ -2,9 +2,7 @@ if is_transformers_available() and is_torch_available(): - from .modeling_gpt2_optimus import GPT2OptimusForLatentConnector - from .modeling_text_unet import UNetFlatConditionModel from .pipeline_versatile_diffusion import VersatileDiffusionPipeline from .pipeline_versatile_diffusion_dual_guided import VersatileDiffusionDualGuidedPipeline - from .pipeline_versatile_diffusion_image_to_text import VersatileDiffusionImageToTextPipeline + from .pipeline_versatile_diffusion_text_image import VersatileDiffusionTextToImagePipeline from .pipeline_versatile_diffusion_image_variation import VersatileDiffusionImageVariationPipeline From 7bf2d4dc568083c90f65acfbad8aaee27f8e3018 Mon Sep 17 00:00:00 2001 From: Patrick von Platen Date: Wed, 23 Nov 2022 14:04:34 +0000 Subject: [PATCH 34/49] up --- .../pipelines/versatile_diffusion/__init__.py | 1 + .../versatile_diffusion/modeling_text_unet.py | 1036 +++++++++++++++++ 2 files changed, 1037 insertions(+) create mode 100644 src/diffusers/pipelines/versatile_diffusion/modeling_text_unet.py diff --git a/src/diffusers/pipelines/versatile_diffusion/__init__.py b/src/diffusers/pipelines/versatile_diffusion/__init__.py index c3c39ff37e36..5de973f0cc93 100644 --- a/src/diffusers/pipelines/versatile_diffusion/__init__.py +++ b/src/diffusers/pipelines/versatile_diffusion/__init__.py @@ -6,3 +6,4 @@ from .pipeline_versatile_diffusion_dual_guided import VersatileDiffusionDualGuidedPipeline from .pipeline_versatile_diffusion_text_image import 
VersatileDiffusionTextToImagePipeline from .pipeline_versatile_diffusion_image_variation import VersatileDiffusionImageVariationPipeline + from .modeling_text_unet import UNetFlatConditionModel diff --git a/src/diffusers/pipelines/versatile_diffusion/modeling_text_unet.py b/src/diffusers/pipelines/versatile_diffusion/modeling_text_unet.py new file mode 100644 index 000000000000..a1129a4c7c45 --- /dev/null +++ b/src/diffusers/pipelines/versatile_diffusion/modeling_text_unet.py @@ -0,0 +1,1036 @@ +from typing import Optional, Tuple, Union + +import numpy as np +import torch +import torch.nn as nn + +from ...configuration_utils import ConfigMixin, register_to_config +from ...modeling_utils import ModelMixin +from ...models.attention import Transformer2DModel +from ...models.embeddings import TimestepEmbedding, Timesteps +from ...models.unet_2d_condition import UNet2DConditionOutput +from ...utils import logging + + +logger = logging.get_logger(__name__) # pylint: disable=invalid-name + + +def get_down_block( + down_block_type, + num_layers, + in_channels, + out_channels, + temb_channels, + add_downsample, + resnet_eps, + resnet_act_fn, + attn_num_head_channels, + resnet_groups=None, + cross_attention_dim=None, + downsample_padding=None, +): + down_block_type = down_block_type[7:] if down_block_type.startswith("UNetRes") else down_block_type + if down_block_type == "DownBlockFlat": + return DownBlockFlat( + num_layers=num_layers, + in_channels=in_channels, + out_channels=out_channels, + temb_channels=temb_channels, + add_downsample=add_downsample, + resnet_eps=resnet_eps, + resnet_act_fn=resnet_act_fn, + resnet_groups=resnet_groups, + downsample_padding=downsample_padding, + ) + elif down_block_type == "CrossAttnDownBlockFlat": + if cross_attention_dim is None: + raise ValueError("cross_attention_dim must be specified for CrossAttnDownBlockFlat") + return CrossAttnDownBlockFlat( + num_layers=num_layers, + in_channels=in_channels, + out_channels=out_channels, + temb_channels=temb_channels, + add_downsample=add_downsample, + resnet_eps=resnet_eps, + resnet_act_fn=resnet_act_fn, + resnet_groups=resnet_groups, + downsample_padding=downsample_padding, + cross_attention_dim=cross_attention_dim, + attn_num_head_channels=attn_num_head_channels, + ) + raise ValueError(f"{down_block_type} is not supported.") + + +def get_up_block( + up_block_type, + num_layers, + in_channels, + out_channels, + prev_output_channel, + temb_channels, + add_upsample, + resnet_eps, + resnet_act_fn, + attn_num_head_channels, + resnet_groups=None, + cross_attention_dim=None, +): + up_block_type = up_block_type[7:] if up_block_type.startswith("UNetRes") else up_block_type + if up_block_type == "UpBlockFlat": + return UpBlockFlat( + num_layers=num_layers, + in_channels=in_channels, + out_channels=out_channels, + prev_output_channel=prev_output_channel, + temb_channels=temb_channels, + add_upsample=add_upsample, + resnet_eps=resnet_eps, + resnet_act_fn=resnet_act_fn, + resnet_groups=resnet_groups, + ) + elif up_block_type == "CrossAttnUpBlockFlat": + if cross_attention_dim is None: + raise ValueError("cross_attention_dim must be specified for CrossAttnUpBlockFlat") + return CrossAttnUpBlockFlat( + num_layers=num_layers, + in_channels=in_channels, + out_channels=out_channels, + prev_output_channel=prev_output_channel, + temb_channels=temb_channels, + add_upsample=add_upsample, + resnet_eps=resnet_eps, + resnet_act_fn=resnet_act_fn, + resnet_groups=resnet_groups, + cross_attention_dim=cross_attention_dim, + 
attn_num_head_channels=attn_num_head_channels, + ) + raise ValueError(f"{up_block_type} is not supported.") + + +# Copied from diffusers.models.unet_2d_condition.UNet2DConditionModel with UNet2DConditionModel->UNetFlatConditionModel, nn.Conv2d->LinearMultiDim, Block2D->BlockFlat +class UNetFlatConditionModel(ModelMixin, ConfigMixin): + r""" + UNetFlatConditionModel is a conditional 2D UNet model that takes in a noisy sample, conditional state, and a + timestep and returns sample shaped output. + + This model inherits from [`ModelMixin`]. Check the superclass documentation for the generic methods the library + implements for all the models (such as downloading or saving, etc.) + + Parameters: + sample_size (`int`, *optional*): The size of the input sample. + in_channels (`int`, *optional*, defaults to 4): The number of channels in the input sample. + out_channels (`int`, *optional*, defaults to 4): The number of channels in the output. + center_input_sample (`bool`, *optional*, defaults to `False`): Whether to center the input sample. + flip_sin_to_cos (`bool`, *optional*, defaults to `True`): + Whether to flip the sin to cos in the time embedding. + freq_shift (`int`, *optional*, defaults to 0): The frequency shift to apply to the time embedding. + down_block_types (`Tuple[str]`, *optional*, defaults to `("CrossAttnDownBlockFlat", "CrossAttnDownBlockFlat", "CrossAttnDownBlockFlat", "DownBlockFlat")`): + The tuple of downsample blocks to use. + up_block_types (`Tuple[str]`, *optional*, defaults to `("UpBlockFlat", "CrossAttnUpBlockFlat", "CrossAttnUpBlockFlat", "CrossAttnUpBlockFlat",)`): + The tuple of upsample blocks to use. + block_out_channels (`Tuple[int]`, *optional*, defaults to `(320, 640, 1280, 1280)`): + The tuple of output channels for each block. + layers_per_block (`int`, *optional*, defaults to 2): The number of layers per block. + downsample_padding (`int`, *optional*, defaults to 1): The padding to use for the downsampling convolution. + mid_block_scale_factor (`float`, *optional*, defaults to 1.0): The scale factor to use for the mid block. + act_fn (`str`, *optional*, defaults to `"silu"`): The activation function to use. + norm_num_groups (`int`, *optional*, defaults to 32): The number of groups to use for the normalization. + norm_eps (`float`, *optional*, defaults to 1e-5): The epsilon to use for the normalization. + cross_attention_dim (`int`, *optional*, defaults to 1280): The dimension of the cross attention features. + attention_head_dim (`int`, *optional*, defaults to 8): The dimension of the attention heads. 
+ """ + + _supports_gradient_checkpointing = True + + @register_to_config + def __init__( + self, + sample_size: Optional[int] = None, + in_channels: int = 4, + out_channels: int = 4, + center_input_sample: bool = False, + flip_sin_to_cos: bool = True, + freq_shift: int = 0, + down_block_types: Tuple[str] = ( + "CrossAttnDownBlockFlat", + "CrossAttnDownBlockFlat", + "CrossAttnDownBlockFlat", + "DownBlockFlat", + ), + up_block_types: Tuple[str] = ( + "UpBlockFlat", + "CrossAttnUpBlockFlat", + "CrossAttnUpBlockFlat", + "CrossAttnUpBlockFlat", + ), + block_out_channels: Tuple[int] = (320, 640, 1280, 1280), + layers_per_block: int = 2, + downsample_padding: int = 1, + mid_block_scale_factor: float = 1, + act_fn: str = "silu", + norm_num_groups: int = 32, + norm_eps: float = 1e-5, + cross_attention_dim: int = 1280, + attention_head_dim: int = 8, + ): + super().__init__() + + self.sample_size = sample_size + time_embed_dim = block_out_channels[0] * 4 + + # input + self.conv_in = LinearMultiDim(in_channels, block_out_channels[0], kernel_size=3, padding=(1, 1)) + + # time + self.time_proj = Timesteps(block_out_channels[0], flip_sin_to_cos, freq_shift) + timestep_input_dim = block_out_channels[0] + + self.time_embedding = TimestepEmbedding(timestep_input_dim, time_embed_dim) + + self.down_blocks = nn.ModuleList([]) + self.mid_block = None + self.up_blocks = nn.ModuleList([]) + + # down + output_channel = block_out_channels[0] + for i, down_block_type in enumerate(down_block_types): + input_channel = output_channel + output_channel = block_out_channels[i] + is_final_block = i == len(block_out_channels) - 1 + + down_block = get_down_block( + down_block_type, + num_layers=layers_per_block, + in_channels=input_channel, + out_channels=output_channel, + temb_channels=time_embed_dim, + add_downsample=not is_final_block, + resnet_eps=norm_eps, + resnet_act_fn=act_fn, + resnet_groups=norm_num_groups, + cross_attention_dim=cross_attention_dim, + attn_num_head_channels=attention_head_dim, + downsample_padding=downsample_padding, + ) + self.down_blocks.append(down_block) + + # mid + self.mid_block = UNetMidBlockFlatCrossAttn( + in_channels=block_out_channels[-1], + temb_channels=time_embed_dim, + resnet_eps=norm_eps, + resnet_act_fn=act_fn, + output_scale_factor=mid_block_scale_factor, + resnet_time_scale_shift="default", + cross_attention_dim=cross_attention_dim, + attn_num_head_channels=attention_head_dim, + resnet_groups=norm_num_groups, + ) + + # count how many layers upsample the images + self.num_upsamplers = 0 + + # up + reversed_block_out_channels = list(reversed(block_out_channels)) + output_channel = reversed_block_out_channels[0] + for i, up_block_type in enumerate(up_block_types): + is_final_block = i == len(block_out_channels) - 1 + + prev_output_channel = output_channel + output_channel = reversed_block_out_channels[i] + input_channel = reversed_block_out_channels[min(i + 1, len(block_out_channels) - 1)] + + # add upsample block for all BUT final layer + if not is_final_block: + add_upsample = True + self.num_upsamplers += 1 + else: + add_upsample = False + + up_block = get_up_block( + up_block_type, + num_layers=layers_per_block + 1, + in_channels=input_channel, + out_channels=output_channel, + prev_output_channel=prev_output_channel, + temb_channels=time_embed_dim, + add_upsample=add_upsample, + resnet_eps=norm_eps, + resnet_act_fn=act_fn, + resnet_groups=norm_num_groups, + cross_attention_dim=cross_attention_dim, + attn_num_head_channels=attention_head_dim, + ) + 
self.up_blocks.append(up_block) + prev_output_channel = output_channel + + # out + self.conv_norm_out = nn.GroupNorm(num_channels=block_out_channels[0], num_groups=norm_num_groups, eps=norm_eps) + self.conv_act = nn.SiLU() + self.conv_out = LinearMultiDim(block_out_channels[0], out_channels, kernel_size=3, padding=1) + + def set_attention_slice(self, slice_size): + if slice_size is not None and self.config.attention_head_dim % slice_size != 0: + raise ValueError( + f"Make sure slice_size {slice_size} is a divisor of " + f"the number of heads used in cross_attention {self.config.attention_head_dim}" + ) + if slice_size is not None and slice_size > self.config.attention_head_dim: + raise ValueError( + f"Chunk_size {slice_size} has to be smaller or equal to " + f"the number of heads used in cross_attention {self.config.attention_head_dim}" + ) + + for block in self.down_blocks: + if hasattr(block, "attentions") and block.attentions is not None: + block.set_attention_slice(slice_size) + + self.mid_block.set_attention_slice(slice_size) + + for block in self.up_blocks: + if hasattr(block, "attentions") and block.attentions is not None: + block.set_attention_slice(slice_size) + + def set_use_memory_efficient_attention_xformers(self, use_memory_efficient_attention_xformers: bool): + for block in self.down_blocks: + if hasattr(block, "attentions") and block.attentions is not None: + block.set_use_memory_efficient_attention_xformers(use_memory_efficient_attention_xformers) + + self.mid_block.set_use_memory_efficient_attention_xformers(use_memory_efficient_attention_xformers) + + for block in self.up_blocks: + if hasattr(block, "attentions") and block.attentions is not None: + block.set_use_memory_efficient_attention_xformers(use_memory_efficient_attention_xformers) + + def _set_gradient_checkpointing(self, module, value=False): + if isinstance(module, (CrossAttnDownBlockFlat, DownBlockFlat, CrossAttnUpBlockFlat, UpBlockFlat)): + module.gradient_checkpointing = value + + def forward( + self, + sample: torch.FloatTensor, + timestep: Union[torch.Tensor, float, int], + encoder_hidden_states: torch.Tensor, + return_dict: bool = True, + ) -> Union[UNet2DConditionOutput, Tuple]: + r""" + Args: + sample (`torch.FloatTensor`): (batch, channel, height, width) noisy inputs tensor + timestep (`torch.FloatTensor` or `float` or `int`): (batch) timesteps + encoder_hidden_states (`torch.FloatTensor`): + (batch_size, sequence_length, hidden_size) encoder hidden states + return_dict (`bool`, *optional*, defaults to `True`): + Whether or not to return a [`models.unet_2d_condition.UNet2DConditionOutput`] instead of a plain tuple. + + Returns: + [`~models.unet_2d_condition.UNet2DConditionOutput`] or `tuple`: + [`~models.unet_2d_condition.UNet2DConditionOutput`] if `return_dict` is True, otherwise a `tuple`. When + returning a tuple, the first element is the sample tensor. + """ + # By default samples have to be AT least a multiple of the overall upsampling factor. + # The overall upsampling factor is equal to 2 ** (# num of upsampling layears). + # However, the upsampling interpolation output size can be forced to fit any upsampling size + # on the fly if necessary. 
+ default_overall_up_factor = 2**self.num_upsamplers + + # upsample size should be forwarded when sample is not a multiple of `default_overall_up_factor` + forward_upsample_size = False + upsample_size = None + + if any(s % default_overall_up_factor != 0 for s in sample.shape[-2:]): + logger.info("Forward upsample size to force interpolation output size.") + forward_upsample_size = True + + # 0. center input if necessary + if self.config.center_input_sample: + sample = 2 * sample - 1.0 + + # 1. time + timesteps = timestep + if not torch.is_tensor(timesteps): + # TODO: this requires sync between CPU and GPU. So try to pass timesteps as tensors if you can + timesteps = torch.tensor([timesteps], dtype=torch.long, device=sample.device) + elif torch.is_tensor(timesteps) and len(timesteps.shape) == 0: + timesteps = timesteps[None].to(sample.device) + + # broadcast to batch dimension in a way that's compatible with ONNX/Core ML + timesteps = timesteps.expand(sample.shape[0]) + + t_emb = self.time_proj(timesteps) + + # timesteps does not contain any weights and will always return f32 tensors + # but time_embedding might actually be running in fp16. so we need to cast here. + # there might be better ways to encapsulate this. + t_emb = t_emb.to(dtype=self.dtype) + emb = self.time_embedding(t_emb) + + # 2. pre-process + sample = self.conv_in(sample) + + # 3. down + down_block_res_samples = (sample,) + for downsample_block in self.down_blocks: + if hasattr(downsample_block, "attentions") and downsample_block.attentions is not None: + sample, res_samples = downsample_block( + hidden_states=sample, + temb=emb, + encoder_hidden_states=encoder_hidden_states, + ) + else: + sample, res_samples = downsample_block(hidden_states=sample, temb=emb) + + down_block_res_samples += res_samples + + # 4. mid + sample = self.mid_block(sample, emb, encoder_hidden_states=encoder_hidden_states) + + # 5. up + for i, upsample_block in enumerate(self.up_blocks): + is_final_block = i == len(self.up_blocks) - 1 + + res_samples = down_block_res_samples[-len(upsample_block.resnets) :] + down_block_res_samples = down_block_res_samples[: -len(upsample_block.resnets)] + + # if we have not reached the final block and need to forward the + # upsample size, we do it here + if not is_final_block and forward_upsample_size: + upsample_size = down_block_res_samples[-1].shape[2:] + + if hasattr(upsample_block, "attentions") and upsample_block.attentions is not None: + sample = upsample_block( + hidden_states=sample, + temb=emb, + res_hidden_states_tuple=res_samples, + encoder_hidden_states=encoder_hidden_states, + upsample_size=upsample_size, + ) + else: + sample = upsample_block( + hidden_states=sample, temb=emb, res_hidden_states_tuple=res_samples, upsample_size=upsample_size + ) + # 6. 
post-process + sample = self.conv_norm_out(sample) + sample = self.conv_act(sample) + sample = self.conv_out(sample) + + if not return_dict: + return (sample,) + + return UNet2DConditionOutput(sample=sample) + + +class LinearMultiDim(nn.Linear): + def __init__(self, in_features, out_features=None, second_dim=4, *args, **kwargs): + in_features = [in_features, second_dim, 1] if isinstance(in_features, int) else list(in_features) + if out_features is None: + out_features = in_features + out_features = [out_features, second_dim, 1] if isinstance(out_features, int) else list(out_features) + self.in_features_multidim = in_features + self.out_features_multidim = out_features + super().__init__(np.array(in_features).prod(), np.array(out_features).prod()) + + def forward(self, input_tensor, *args, **kwargs): + shape = input_tensor.shape + n_dim = len(self.in_features_multidim) + input_tensor = input_tensor.reshape(*shape[0:-n_dim], self.in_features) + output_tensor = super().forward(input_tensor) + output_tensor = output_tensor.view(*shape[0:-n_dim], *self.out_features_multidim) + return output_tensor + + +class ResnetBlockFlat(nn.Module): + def __init__( + self, + *, + in_channels, + out_channels=None, + dropout=0.0, + temb_channels=512, + groups=32, + groups_out=None, + pre_norm=True, + eps=1e-6, + time_embedding_norm="default", + use_in_shortcut=None, + second_dim=4, + **kwargs, + ): + super().__init__() + self.pre_norm = pre_norm + self.pre_norm = True + + in_channels = [in_channels, second_dim, 1] if isinstance(in_channels, int) else list(in_channels) + self.in_channels_prod = np.array(in_channels).prod() + self.channels_multidim = in_channels + + if out_channels is not None: + out_channels = [out_channels, second_dim, 1] if isinstance(out_channels, int) else list(out_channels) + out_channels_prod = np.array(out_channels).prod() + self.out_channels_multidim = out_channels + else: + out_channels_prod = self.in_channels_prod + self.out_channels_multidim = self.channels_multidim + self.time_embedding_norm = time_embedding_norm + + if groups_out is None: + groups_out = groups + + self.norm1 = torch.nn.GroupNorm(num_groups=groups, num_channels=self.in_channels_prod, eps=eps, affine=True) + self.conv1 = torch.nn.Conv2d(self.in_channels_prod, out_channels_prod, kernel_size=1, padding=0) + + if temb_channels is not None: + self.time_emb_proj = torch.nn.Linear(temb_channels, out_channels_prod) + else: + self.time_emb_proj = None + + self.norm2 = torch.nn.GroupNorm(num_groups=groups_out, num_channels=out_channels_prod, eps=eps, affine=True) + self.dropout = torch.nn.Dropout(dropout) + self.conv2 = torch.nn.Conv2d(out_channels_prod, out_channels_prod, kernel_size=1, padding=0) + + self.nonlinearity = nn.SiLU() + + self.use_in_shortcut = ( + self.in_channels_prod != out_channels_prod if use_in_shortcut is None else use_in_shortcut + ) + + self.conv_shortcut = None + if self.use_in_shortcut: + self.conv_shortcut = torch.nn.Conv2d( + self.in_channels_prod, out_channels_prod, kernel_size=1, stride=1, padding=0 + ) + + def forward(self, input_tensor, temb): + shape = input_tensor.shape + n_dim = len(self.channels_multidim) + input_tensor = input_tensor.reshape(*shape[0:-n_dim], self.in_channels_prod, 1, 1) + input_tensor = input_tensor.view(-1, self.in_channels_prod, 1, 1) + + hidden_states = input_tensor + + hidden_states = self.norm1(hidden_states) + hidden_states = self.nonlinearity(hidden_states) + hidden_states = self.conv1(hidden_states) + + if temb is not None: + temb = 
self.time_emb_proj(self.nonlinearity(temb))[:, :, None, None] + hidden_states = hidden_states + temb + + hidden_states = self.norm2(hidden_states) + hidden_states = self.nonlinearity(hidden_states) + + hidden_states = self.dropout(hidden_states) + hidden_states = self.conv2(hidden_states) + + if self.conv_shortcut is not None: + input_tensor = self.conv_shortcut(input_tensor) + + output_tensor = input_tensor + hidden_states + + output_tensor = output_tensor.view(*shape[0:-n_dim], -1) + output_tensor = output_tensor.view(*shape[0:-n_dim], *self.out_channels_multidim) + + return output_tensor + + +# Copied from diffusers.models.unet_2d_blocks.DownBlock2D with DownBlock2D->DownBlockFlat, ResnetBlock2D->ResnetBlockFlat, Downsample2D->LinearMultiDim +class DownBlockFlat(nn.Module): + def __init__( + self, + in_channels: int, + out_channels: int, + temb_channels: int, + dropout: float = 0.0, + num_layers: int = 1, + resnet_eps: float = 1e-6, + resnet_time_scale_shift: str = "default", + resnet_act_fn: str = "swish", + resnet_groups: int = 32, + resnet_pre_norm: bool = True, + output_scale_factor=1.0, + add_downsample=True, + downsample_padding=1, + ): + super().__init__() + resnets = [] + + for i in range(num_layers): + in_channels = in_channels if i == 0 else out_channels + resnets.append( + ResnetBlockFlat( + in_channels=in_channels, + out_channels=out_channels, + temb_channels=temb_channels, + eps=resnet_eps, + groups=resnet_groups, + dropout=dropout, + time_embedding_norm=resnet_time_scale_shift, + non_linearity=resnet_act_fn, + output_scale_factor=output_scale_factor, + pre_norm=resnet_pre_norm, + ) + ) + + self.resnets = nn.ModuleList(resnets) + + if add_downsample: + self.downsamplers = nn.ModuleList( + [ + LinearMultiDim( + out_channels, use_conv=True, out_channels=out_channels, padding=downsample_padding, name="op" + ) + ] + ) + else: + self.downsamplers = None + + self.gradient_checkpointing = False + + def forward(self, hidden_states, temb=None): + output_states = () + + for resnet in self.resnets: + if self.training and self.gradient_checkpointing: + + def create_custom_forward(module): + def custom_forward(*inputs): + return module(*inputs) + + return custom_forward + + hidden_states = torch.utils.checkpoint.checkpoint(create_custom_forward(resnet), hidden_states, temb) + else: + hidden_states = resnet(hidden_states, temb) + + output_states += (hidden_states,) + + if self.downsamplers is not None: + for downsampler in self.downsamplers: + hidden_states = downsampler(hidden_states) + + output_states += (hidden_states,) + + return hidden_states, output_states + + +# Copied from diffusers.models.unet_2d_blocks.CrossAttnDownBlock2D with CrossAttnDownBlock2D->CrossAttnDownBlockFlat, ResnetBlock2D->ResnetBlockFlat, Downsample2D->LinearMultiDim +class CrossAttnDownBlockFlat(nn.Module): + def __init__( + self, + in_channels: int, + out_channels: int, + temb_channels: int, + dropout: float = 0.0, + num_layers: int = 1, + resnet_eps: float = 1e-6, + resnet_time_scale_shift: str = "default", + resnet_act_fn: str = "swish", + resnet_groups: int = 32, + resnet_pre_norm: bool = True, + attn_num_head_channels=1, + cross_attention_dim=1280, + attention_type="default", + output_scale_factor=1.0, + downsample_padding=1, + add_downsample=True, + ): + super().__init__() + resnets = [] + attentions = [] + + self.attention_type = attention_type + self.attn_num_head_channels = attn_num_head_channels + + for i in range(num_layers): + in_channels = in_channels if i == 0 else out_channels + resnets.append( + 
ResnetBlockFlat( + in_channels=in_channels, + out_channels=out_channels, + temb_channels=temb_channels, + eps=resnet_eps, + groups=resnet_groups, + dropout=dropout, + time_embedding_norm=resnet_time_scale_shift, + non_linearity=resnet_act_fn, + output_scale_factor=output_scale_factor, + pre_norm=resnet_pre_norm, + ) + ) + attentions.append( + Transformer2DModel( + attn_num_head_channels, + out_channels // attn_num_head_channels, + in_channels=out_channels, + num_layers=1, + cross_attention_dim=cross_attention_dim, + norm_num_groups=resnet_groups, + ) + ) + self.attentions = nn.ModuleList(attentions) + self.resnets = nn.ModuleList(resnets) + + if add_downsample: + self.downsamplers = nn.ModuleList( + [ + LinearMultiDim( + out_channels, use_conv=True, out_channels=out_channels, padding=downsample_padding, name="op" + ) + ] + ) + else: + self.downsamplers = None + + self.gradient_checkpointing = False + + def set_attention_slice(self, slice_size): + if slice_size is not None and self.attn_num_head_channels % slice_size != 0: + raise ValueError( + f"Make sure slice_size {slice_size} is a divisor of " + f"the number of heads used in cross_attention {self.attn_num_head_channels}" + ) + if slice_size is not None and slice_size > self.attn_num_head_channels: + raise ValueError( + f"Chunk_size {slice_size} has to be smaller or equal to " + f"the number of heads used in cross_attention {self.attn_num_head_channels}" + ) + + for attn in self.attentions: + attn._set_attention_slice(slice_size) + + def set_use_memory_efficient_attention_xformers(self, use_memory_efficient_attention_xformers: bool): + for attn in self.attentions: + attn._set_use_memory_efficient_attention_xformers(use_memory_efficient_attention_xformers) + + def forward(self, hidden_states, temb=None, encoder_hidden_states=None): + output_states = () + + for resnet, attn in zip(self.resnets, self.attentions): + if self.training and self.gradient_checkpointing: + + def create_custom_forward(module, return_dict=None): + def custom_forward(*inputs): + if return_dict is not None: + return module(*inputs, return_dict=return_dict) + else: + return module(*inputs) + + return custom_forward + + hidden_states = torch.utils.checkpoint.checkpoint(create_custom_forward(resnet), hidden_states, temb) + hidden_states = torch.utils.checkpoint.checkpoint( + create_custom_forward(attn, return_dict=False), hidden_states, encoder_hidden_states + )[0] + else: + hidden_states = resnet(hidden_states, temb) + hidden_states = attn(hidden_states, encoder_hidden_states=encoder_hidden_states).sample + + output_states += (hidden_states,) + + if self.downsamplers is not None: + for downsampler in self.downsamplers: + hidden_states = downsampler(hidden_states) + + output_states += (hidden_states,) + + return hidden_states, output_states + + +# Copied from diffusers.models.unet_2d_blocks.UpBlock2D with UpBlock2D->UpBlockFlat, ResnetBlock2D->ResnetBlockFlat, Upsample2D->LinearMultiDim +class UpBlockFlat(nn.Module): + def __init__( + self, + in_channels: int, + prev_output_channel: int, + out_channels: int, + temb_channels: int, + dropout: float = 0.0, + num_layers: int = 1, + resnet_eps: float = 1e-6, + resnet_time_scale_shift: str = "default", + resnet_act_fn: str = "swish", + resnet_groups: int = 32, + resnet_pre_norm: bool = True, + output_scale_factor=1.0, + add_upsample=True, + ): + super().__init__() + resnets = [] + + for i in range(num_layers): + res_skip_channels = in_channels if (i == num_layers - 1) else out_channels + resnet_in_channels = 
prev_output_channel if i == 0 else out_channels + + resnets.append( + ResnetBlockFlat( + in_channels=resnet_in_channels + res_skip_channels, + out_channels=out_channels, + temb_channels=temb_channels, + eps=resnet_eps, + groups=resnet_groups, + dropout=dropout, + time_embedding_norm=resnet_time_scale_shift, + non_linearity=resnet_act_fn, + output_scale_factor=output_scale_factor, + pre_norm=resnet_pre_norm, + ) + ) + + self.resnets = nn.ModuleList(resnets) + + if add_upsample: + self.upsamplers = nn.ModuleList([LinearMultiDim(out_channels, use_conv=True, out_channels=out_channels)]) + else: + self.upsamplers = None + + self.gradient_checkpointing = False + + def forward(self, hidden_states, res_hidden_states_tuple, temb=None, upsample_size=None): + for resnet in self.resnets: + # pop res hidden states + res_hidden_states = res_hidden_states_tuple[-1] + res_hidden_states_tuple = res_hidden_states_tuple[:-1] + hidden_states = torch.cat([hidden_states, res_hidden_states], dim=1) + + if self.training and self.gradient_checkpointing: + + def create_custom_forward(module): + def custom_forward(*inputs): + return module(*inputs) + + return custom_forward + + hidden_states = torch.utils.checkpoint.checkpoint(create_custom_forward(resnet), hidden_states, temb) + else: + hidden_states = resnet(hidden_states, temb) + + if self.upsamplers is not None: + for upsampler in self.upsamplers: + hidden_states = upsampler(hidden_states, upsample_size) + + return hidden_states + + +# Copied from diffusers.models.unet_2d_blocks.CrossAttnUpBlock2D with CrossAttnUpBlock2D->CrossAttnUpBlockFlat, ResnetBlock2D->ResnetBlockFlat, Upsample2D->LinearMultiDim +class CrossAttnUpBlockFlat(nn.Module): + def __init__( + self, + in_channels: int, + out_channels: int, + prev_output_channel: int, + temb_channels: int, + dropout: float = 0.0, + num_layers: int = 1, + resnet_eps: float = 1e-6, + resnet_time_scale_shift: str = "default", + resnet_act_fn: str = "swish", + resnet_groups: int = 32, + resnet_pre_norm: bool = True, + attn_num_head_channels=1, + cross_attention_dim=1280, + attention_type="default", + output_scale_factor=1.0, + add_upsample=True, + ): + super().__init__() + resnets = [] + attentions = [] + + self.attention_type = attention_type + self.attn_num_head_channels = attn_num_head_channels + + for i in range(num_layers): + res_skip_channels = in_channels if (i == num_layers - 1) else out_channels + resnet_in_channels = prev_output_channel if i == 0 else out_channels + + resnets.append( + ResnetBlockFlat( + in_channels=resnet_in_channels + res_skip_channels, + out_channels=out_channels, + temb_channels=temb_channels, + eps=resnet_eps, + groups=resnet_groups, + dropout=dropout, + time_embedding_norm=resnet_time_scale_shift, + non_linearity=resnet_act_fn, + output_scale_factor=output_scale_factor, + pre_norm=resnet_pre_norm, + ) + ) + attentions.append( + Transformer2DModel( + attn_num_head_channels, + out_channels // attn_num_head_channels, + in_channels=out_channels, + num_layers=1, + cross_attention_dim=cross_attention_dim, + norm_num_groups=resnet_groups, + ) + ) + self.attentions = nn.ModuleList(attentions) + self.resnets = nn.ModuleList(resnets) + + if add_upsample: + self.upsamplers = nn.ModuleList([LinearMultiDim(out_channels, use_conv=True, out_channels=out_channels)]) + else: + self.upsamplers = None + + self.gradient_checkpointing = False + + def set_attention_slice(self, slice_size): + if slice_size is not None and self.attn_num_head_channels % slice_size != 0: + raise ValueError( + f"Make sure 
slice_size {slice_size} is a divisor of " + f"the number of heads used in cross_attention {self.attn_num_head_channels}" + ) + if slice_size is not None and slice_size > self.attn_num_head_channels: + raise ValueError( + f"Chunk_size {slice_size} has to be smaller or equal to " + f"the number of heads used in cross_attention {self.attn_num_head_channels}" + ) + + for attn in self.attentions: + attn._set_attention_slice(slice_size) + + self.gradient_checkpointing = False + + def set_use_memory_efficient_attention_xformers(self, use_memory_efficient_attention_xformers: bool): + for attn in self.attentions: + attn._set_use_memory_efficient_attention_xformers(use_memory_efficient_attention_xformers) + + def forward( + self, + hidden_states, + res_hidden_states_tuple, + temb=None, + encoder_hidden_states=None, + upsample_size=None, + ): + for resnet, attn in zip(self.resnets, self.attentions): + # pop res hidden states + res_hidden_states = res_hidden_states_tuple[-1] + res_hidden_states_tuple = res_hidden_states_tuple[:-1] + hidden_states = torch.cat([hidden_states, res_hidden_states], dim=1) + + if self.training and self.gradient_checkpointing: + + def create_custom_forward(module, return_dict=None): + def custom_forward(*inputs): + if return_dict is not None: + return module(*inputs, return_dict=return_dict) + else: + return module(*inputs) + + return custom_forward + + hidden_states = torch.utils.checkpoint.checkpoint(create_custom_forward(resnet), hidden_states, temb) + hidden_states = torch.utils.checkpoint.checkpoint( + create_custom_forward(attn, return_dict=False), hidden_states, encoder_hidden_states + )[0] + else: + hidden_states = resnet(hidden_states, temb) + hidden_states = attn(hidden_states, encoder_hidden_states=encoder_hidden_states).sample + + if self.upsamplers is not None: + for upsampler in self.upsamplers: + hidden_states = upsampler(hidden_states, upsample_size) + + return hidden_states + + +# Copied from diffusers.models.unet_2d_blocks.UNetMidBlock2DCrossAttn with UNetMidBlock2DCrossAttn->UNetMidBlockFlatCrossAttn, ResnetBlock2D->ResnetBlockFlat +class UNetMidBlockFlatCrossAttn(nn.Module): + def __init__( + self, + in_channels: int, + temb_channels: int, + dropout: float = 0.0, + num_layers: int = 1, + resnet_eps: float = 1e-6, + resnet_time_scale_shift: str = "default", + resnet_act_fn: str = "swish", + resnet_groups: int = 32, + resnet_pre_norm: bool = True, + attn_num_head_channels=1, + attention_type="default", + output_scale_factor=1.0, + cross_attention_dim=1280, + **kwargs, + ): + super().__init__() + + self.attention_type = attention_type + self.attn_num_head_channels = attn_num_head_channels + resnet_groups = resnet_groups if resnet_groups is not None else min(in_channels // 4, 32) + + # there is always at least one resnet + resnets = [ + ResnetBlockFlat( + in_channels=in_channels, + out_channels=in_channels, + temb_channels=temb_channels, + eps=resnet_eps, + groups=resnet_groups, + dropout=dropout, + time_embedding_norm=resnet_time_scale_shift, + non_linearity=resnet_act_fn, + output_scale_factor=output_scale_factor, + pre_norm=resnet_pre_norm, + ) + ] + attentions = [] + + for _ in range(num_layers): + attentions.append( + Transformer2DModel( + attn_num_head_channels, + in_channels // attn_num_head_channels, + in_channels=in_channels, + num_layers=1, + cross_attention_dim=cross_attention_dim, + norm_num_groups=resnet_groups, + ) + ) + resnets.append( + ResnetBlockFlat( + in_channels=in_channels, + out_channels=in_channels, + temb_channels=temb_channels, + 
eps=resnet_eps, + groups=resnet_groups, + dropout=dropout, + time_embedding_norm=resnet_time_scale_shift, + non_linearity=resnet_act_fn, + output_scale_factor=output_scale_factor, + pre_norm=resnet_pre_norm, + ) + ) + + self.attentions = nn.ModuleList(attentions) + self.resnets = nn.ModuleList(resnets) + + def set_attention_slice(self, slice_size): + if slice_size is not None and self.attn_num_head_channels % slice_size != 0: + raise ValueError( + f"Make sure slice_size {slice_size} is a divisor of " + f"the number of heads used in cross_attention {self.attn_num_head_channels}" + ) + if slice_size is not None and slice_size > self.attn_num_head_channels: + raise ValueError( + f"Chunk_size {slice_size} has to be smaller or equal to " + f"the number of heads used in cross_attention {self.attn_num_head_channels}" + ) + + for attn in self.attentions: + attn._set_attention_slice(slice_size) + + def set_use_memory_efficient_attention_xformers(self, use_memory_efficient_attention_xformers: bool): + for attn in self.attentions: + attn._set_use_memory_efficient_attention_xformers(use_memory_efficient_attention_xformers) + + def forward(self, hidden_states, temb=None, encoder_hidden_states=None): + hidden_states = self.resnets[0](hidden_states, temb) + for attn, resnet in zip(self.attentions, self.resnets[1:]): + hidden_states = attn(hidden_states, encoder_hidden_states).sample + hidden_states = resnet(hidden_states, temb) + + return hidden_states From a32c942ed2434554f6722ea93aa3e11e26b4bad1 Mon Sep 17 00:00:00 2001 From: Patrick von Platen Date: Wed, 23 Nov 2022 14:06:17 +0000 Subject: [PATCH 35/49] final fix --- src/diffusers/__init__.py | 1 - src/diffusers/pipelines/__init__.py | 1 - src/diffusers/pipelines/versatile_diffusion/__init__.py | 2 +- 3 files changed, 1 insertion(+), 3 deletions(-) diff --git a/src/diffusers/__init__.py b/src/diffusers/__init__.py index 1b8a6689cee5..5787cfe67f5b 100644 --- a/src/diffusers/__init__.py +++ b/src/diffusers/__init__.py @@ -74,7 +74,6 @@ StableDiffusionInpaintPipelineLegacy, StableDiffusionPipeline, VersatileDiffusionDualGuidedPipeline, - VersatileDiffusionImageToTextPipeline, VersatileDiffusionImageVariationPipeline, VersatileDiffusionPipeline, VersatileDiffusionTextToImagePipeline, diff --git a/src/diffusers/pipelines/__init__.py b/src/diffusers/pipelines/__init__.py index a236740edc64..138dadf5e31c 100644 --- a/src/diffusers/pipelines/__init__.py +++ b/src/diffusers/pipelines/__init__.py @@ -26,7 +26,6 @@ ) from .versatile_diffusion import ( VersatileDiffusionDualGuidedPipeline, - VersatileDiffusionImageToTextPipeline, VersatileDiffusionImageVariationPipeline, VersatileDiffusionPipeline, VersatileDiffusionTextToImagePipeline, diff --git a/src/diffusers/pipelines/versatile_diffusion/__init__.py b/src/diffusers/pipelines/versatile_diffusion/__init__.py index 5de973f0cc93..2018d7501722 100644 --- a/src/diffusers/pipelines/versatile_diffusion/__init__.py +++ b/src/diffusers/pipelines/versatile_diffusion/__init__.py @@ -4,6 +4,6 @@ if is_transformers_available() and is_torch_available(): from .pipeline_versatile_diffusion import VersatileDiffusionPipeline from .pipeline_versatile_diffusion_dual_guided import VersatileDiffusionDualGuidedPipeline - from .pipeline_versatile_diffusion_text_image import VersatileDiffusionTextToImagePipeline + from .pipeline_versatile_diffusion_text_to_image import VersatileDiffusionTextToImagePipeline from .pipeline_versatile_diffusion_image_variation import VersatileDiffusionImageVariationPipeline from .modeling_text_unet 
import UNetFlatConditionModel From 447780d7489693420cafa72935034bdcae26fb8a Mon Sep 17 00:00:00 2001 From: anton-l Date: Wed, 23 Nov 2022 15:32:08 +0100 Subject: [PATCH 36/49] remove_unused_weights --- .../pipelines/versatile_diffusion/__init__.py | 4 +-- .../pipeline_versatile_diffusion.py | 7 +++- ...ipeline_versatile_diffusion_dual_guided.py | 2 +- ...ine_versatile_diffusion_image_variation.py | 2 +- ...eline_versatile_diffusion_text_to_image.py | 23 +++++++------ .../test_versatile_diffusion_text_to_image.py | 34 +++++++++++++++++++ 6 files changed, 57 insertions(+), 15 deletions(-) diff --git a/src/diffusers/pipelines/versatile_diffusion/__init__.py b/src/diffusers/pipelines/versatile_diffusion/__init__.py index 2018d7501722..65bc1b72007a 100644 --- a/src/diffusers/pipelines/versatile_diffusion/__init__.py +++ b/src/diffusers/pipelines/versatile_diffusion/__init__.py @@ -2,8 +2,8 @@ if is_transformers_available() and is_torch_available(): + from .modeling_text_unet import UNetFlatConditionModel from .pipeline_versatile_diffusion import VersatileDiffusionPipeline from .pipeline_versatile_diffusion_dual_guided import VersatileDiffusionDualGuidedPipeline - from .pipeline_versatile_diffusion_text_to_image import VersatileDiffusionTextToImagePipeline from .pipeline_versatile_diffusion_image_variation import VersatileDiffusionImageVariationPipeline - from .modeling_text_unet import UNetFlatConditionModel + from .pipeline_versatile_diffusion_text_to_image import VersatileDiffusionTextToImagePipeline diff --git a/src/diffusers/pipelines/versatile_diffusion/pipeline_versatile_diffusion.py b/src/diffusers/pipelines/versatile_diffusion/pipeline_versatile_diffusion.py index 50192233de7a..23ab9ba8f4dd 100644 --- a/src/diffusers/pipelines/versatile_diffusion/pipeline_versatile_diffusion.py +++ b/src/diffusers/pipelines/versatile_diffusion/pipeline_versatile_diffusion.py @@ -164,7 +164,8 @@ def text_to_image( ): expected_components = inspect.signature(VersatileDiffusionTextToImagePipeline.__init__).parameters.keys() components = {name: component for name, component in self.components.items() if name in expected_components} - return VersatileDiffusionTextToImagePipeline(**components)( + temp_pipeline = VersatileDiffusionTextToImagePipeline(**components) + output = temp_pipeline( prompt=prompt, height=height, width=width, @@ -180,6 +181,10 @@ def text_to_image( callback=callback, callback_steps=callback_steps, ) + # swap the attention blocks back to the original state + temp_pipeline._swap_unet_attention_blocks() + + return output @torch.no_grad() def dual_guided( diff --git a/src/diffusers/pipelines/versatile_diffusion/pipeline_versatile_diffusion_dual_guided.py b/src/diffusers/pipelines/versatile_diffusion/pipeline_versatile_diffusion_dual_guided.py index cb03877ef97e..6b1a7a9575fc 100644 --- a/src/diffusers/pipelines/versatile_diffusion/pipeline_versatile_diffusion_dual_guided.py +++ b/src/diffusers/pipelines/versatile_diffusion/pipeline_versatile_diffusion_dual_guided.py @@ -353,7 +353,7 @@ def normalize_embeddings(encoder_output): # get unconditional embeddings for classifier free guidance if do_classifier_free_guidance: - uncond_images = [np.zeros((512, 512, 3))] * batch_size + uncond_images = [np.zeros((512, 512, 3)) + 0.5] * batch_size uncond_images = self.image_feature_extractor(images=uncond_images, return_tensors="pt") uncond_embeddings = self.image_encoder(uncond_images.pixel_values.to(device)) uncond_embeddings = normalize_embeddings(uncond_embeddings) diff --git 
a/src/diffusers/pipelines/versatile_diffusion/pipeline_versatile_diffusion_image_variation.py b/src/diffusers/pipelines/versatile_diffusion/pipeline_versatile_diffusion_image_variation.py index d1c3156431db..27b017801984 100644 --- a/src/diffusers/pipelines/versatile_diffusion/pipeline_versatile_diffusion_image_variation.py +++ b/src/diffusers/pipelines/versatile_diffusion/pipeline_versatile_diffusion_image_variation.py @@ -198,7 +198,7 @@ def normalize_embeddings(encoder_output): if do_classifier_free_guidance: uncond_images: List[str] if negative_prompt is None: - uncond_images = [np.zeros((512, 512, 3))] * batch_size + uncond_images = [np.zeros((512, 512, 3)) + 0.5] * batch_size elif type(prompt) is not type(negative_prompt): raise TypeError( f"`negative_prompt` should be the same type to `prompt`, but got {type(negative_prompt)} !=" diff --git a/src/diffusers/pipelines/versatile_diffusion/pipeline_versatile_diffusion_text_to_image.py b/src/diffusers/pipelines/versatile_diffusion/pipeline_versatile_diffusion_text_to_image.py index 5880dd6d76ba..f2fa2c0d53f2 100644 --- a/src/diffusers/pipelines/versatile_diffusion/pipeline_versatile_diffusion_text_to_image.py +++ b/src/diffusers/pipelines/versatile_diffusion/pipeline_versatile_diffusion_text_to_image.py @@ -76,7 +76,13 @@ def __init__( scheduler=scheduler, ) - def swap_unet_attention_blocks(self): + if self.text_unet is not None: + self._swap_unet_attention_blocks() + + def _swap_unet_attention_blocks(self): + """ + Swap the `Transformer2DModel` blocks between the image and text UNets + """ for name, module in self.image_unet.named_modules(): if isinstance(module, Transformer2DModel): parent_name, index = name.rsplit(".", 1) @@ -86,6 +92,9 @@ def swap_unet_attention_blocks(self): self.image_unet.get_submodule(parent_name)[index], ) + def remove_unused_weights(self): + self.register_modules(text_unet=None) + # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline.enable_xformers_memory_efficient_attention with unet->image_unet def enable_xformers_memory_efficient_attention(self): r""" @@ -454,10 +463,7 @@ def __call__( # 6. Prepare extra step kwargs. extra_step_kwargs = self.prepare_extra_step_kwargs(generator, eta) - # 7. Swap the attention blocks between the image and text UNets - self.swap_unet_attention_blocks() - - # 8. Denoising loop + # 7. Denoising loop for i, t in enumerate(self.progress_bar(timesteps)): # expand the latents if we are doing classifier free guidance latent_model_input = torch.cat([latents] * 2) if do_classifier_free_guidance else latents @@ -478,13 +484,10 @@ def __call__( if callback is not None and i % callback_steps == 0: callback(i, t, latents) - # 9. Swap the attention blocks backs in case the UNets are reused in another pipeline - self.swap_unet_attention_blocks() - - # 10. Post-processing + # 9. Post-processing image = self.decode_latents(latents) - # 11. Convert to PIL + # 10. Convert to PIL if output_type == "pil": image = self.numpy_to_pil(image) diff --git a/tests/pipelines/versatile_diffusion/test_versatile_diffusion_text_to_image.py b/tests/pipelines/versatile_diffusion/test_versatile_diffusion_text_to_image.py index 3ba275df766f..392b83597b2f 100644 --- a/tests/pipelines/versatile_diffusion/test_versatile_diffusion_text_to_image.py +++ b/tests/pipelines/versatile_diffusion/test_versatile_diffusion_text_to_image.py @@ -13,6 +13,8 @@ # See the License for the specific language governing permissions and # limitations under the License. 
+import gc +import tempfile import unittest import numpy as np @@ -34,6 +36,38 @@ class VersatileDiffusionTextToImagePipelineFastTests(PipelineTesterMixin, unitte @slow @require_torch_gpu class VersatileDiffusionTextToImagePipelineIntegrationTests(unittest.TestCase): + def tearDown(self): + # clean up the VRAM after each test + super().tearDown() + gc.collect() + torch.cuda.empty_cache() + + def test_remove_unused_weights_save_load(self): + pipe = VersatileDiffusionTextToImagePipeline.from_pretrained("diffusers/vd-official-test") + pipe.to(torch_device) + pipe.set_progress_bar_config(disable=None) + # remove text_unet + pipe.remove_unused_weights() + + prompt = "A painting of a squirrel eating a burger " + generator = torch.Generator(device=torch_device).manual_seed(0) + image = pipe( + prompt=prompt, generator=generator, guidance_scale=7.5, num_inference_steps=2, output_type="numpy" + ).images + + with tempfile.TemporaryDirectory() as tmpdirname: + pipe.save_pretrained(tmpdirname) + pipe = VersatileDiffusionTextToImagePipeline.from_pretrained(tmpdirname) + pipe.to(torch_device) + pipe.set_progress_bar_config(disable=None) + + generator = generator.manual_seed(0) + new_image = pipe( + prompt=prompt, generator=generator, guidance_scale=7.5, num_inference_steps=2, output_type="numpy" + ).images + + assert np.abs(image - new_image).sum() < 1e-5, "Models don't have the same forward pass" + def test_inference_text2img(self): pipe = VersatileDiffusionTextToImagePipeline.from_pretrained("diffusers/vd-official-test") pipe.to(torch_device) From 1b85e343f177a3cd6c638743beb1512584b8566b Mon Sep 17 00:00:00 2001 From: anton-l Date: Wed, 23 Nov 2022 15:51:13 +0100 Subject: [PATCH 37/49] test updates --- .../pipeline_versatile_diffusion_dual_guided.py | 11 ++++++----- .../test_versatile_diffusion_dual_guided.py | 6 ++++-- .../test_versatile_diffusion_text_to_image.py | 4 ++-- 3 files changed, 12 insertions(+), 9 deletions(-) diff --git a/src/diffusers/pipelines/versatile_diffusion/pipeline_versatile_diffusion_dual_guided.py b/src/diffusers/pipelines/versatile_diffusion/pipeline_versatile_diffusion_dual_guided.py index 6b1a7a9575fc..92c624d59ffd 100644 --- a/src/diffusers/pipelines/versatile_diffusion/pipeline_versatile_diffusion_dual_guided.py +++ b/src/diffusers/pipelines/versatile_diffusion/pipeline_versatile_diffusion_dual_guided.py @@ -89,12 +89,14 @@ def __init__( scheduler=scheduler, ) - if "dual_cross_attention" not in self.image_unet.config or not self.image_unet.config.dual_cross_attention: + if self.text_unet is not None and ( + "dual_cross_attention" not in self.image_unet.config or not self.image_unet.config.dual_cross_attention + ): # if loading from a universal checkpoint rather than a saved dual-guided pipeline self._convert_to_dual_attention() - if self.text_unet is not None: - # release the memory taken up by `text_unet` - self.register_modules(text_unet=None) + + def remove_unused_weights(self): + self.register_modules(text_unet=None) def _convert_to_dual_attention(self): """ @@ -139,7 +141,6 @@ def _revert_dual_attention(self): parent_name, index = name.rsplit(".", 1) index = int(index) self.image_unet.get_submodule(parent_name)[index] = module.transformers[0] - self.image_unet.config.dual_cross_attention = False # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline.enable_xformers_memory_efficient_attention with unet->image_unet def enable_xformers_memory_efficient_attention(self): diff --git 
a/tests/pipelines/versatile_diffusion/test_versatile_diffusion_dual_guided.py b/tests/pipelines/versatile_diffusion/test_versatile_diffusion_dual_guided.py index a315c68b2768..46f9127ba8d3 100644 --- a/tests/pipelines/versatile_diffusion/test_versatile_diffusion_dual_guided.py +++ b/tests/pipelines/versatile_diffusion/test_versatile_diffusion_dual_guided.py @@ -42,8 +42,10 @@ def tearDown(self): gc.collect() torch.cuda.empty_cache() - def test_from_pretrained_save_pretrained(self): + def test_remove_unused_weights_save_load(self): pipe = VersatileDiffusionDualGuidedPipeline.from_pretrained("diffusers/vd-official-test") + # remove text_unet + pipe.remove_unused_weights() pipe.to(torch_device) pipe.set_progress_bar_config(disable=None) @@ -100,5 +102,5 @@ def test_inference_dual_guided(self): image_slice = image[0, 253:256, 253:256, -1] assert image.shape == (1, 512, 512, 3) - expected_slice = np.array([0.5727, 0.5625, 0.5617, 0.5703, 0.5530, 0.5620, 0.5864, 0.5742, 0.5665]) + expected_slice = np.array([0.0607, 0.0695, 0.0750, 0.0650, 0.0703, 0.0796, 0.0726, 0.0768, 0.0858]) assert np.abs(image_slice.flatten() - expected_slice).max() < 1e-2 diff --git a/tests/pipelines/versatile_diffusion/test_versatile_diffusion_text_to_image.py b/tests/pipelines/versatile_diffusion/test_versatile_diffusion_text_to_image.py index 392b83597b2f..241df51c06a1 100644 --- a/tests/pipelines/versatile_diffusion/test_versatile_diffusion_text_to_image.py +++ b/tests/pipelines/versatile_diffusion/test_versatile_diffusion_text_to_image.py @@ -44,10 +44,10 @@ def tearDown(self): def test_remove_unused_weights_save_load(self): pipe = VersatileDiffusionTextToImagePipeline.from_pretrained("diffusers/vd-official-test") - pipe.to(torch_device) - pipe.set_progress_bar_config(disable=None) # remove text_unet pipe.remove_unused_weights() + pipe.to(torch_device) + pipe.set_progress_bar_config(disable=None) prompt = "A painting of a squirrel eating a burger " generator = torch.Generator(device=torch_device).manual_seed(0) From e9501998c1c51e7d8ba7b06d25bf2cce86d0896a Mon Sep 17 00:00:00 2001 From: Patrick von Platen Date: Wed, 23 Nov 2022 15:11:44 +0000 Subject: [PATCH 38/49] save progress --- ...ipeline_versatile_diffusion_dual_guided.py | 57 +++++++++---------- ...ine_versatile_diffusion_image_variation.py | 30 +++++++++- ...eline_versatile_diffusion_text_to_image.py | 15 +++++ 3 files changed, 71 insertions(+), 31 deletions(-) diff --git a/src/diffusers/pipelines/versatile_diffusion/pipeline_versatile_diffusion_dual_guided.py b/src/diffusers/pipelines/versatile_diffusion/pipeline_versatile_diffusion_dual_guided.py index cb03877ef97e..a2c67626c020 100644 --- a/src/diffusers/pipelines/versatile_diffusion/pipeline_versatile_diffusion_dual_guided.py +++ b/src/diffusers/pipelines/versatile_diffusion/pipeline_versatile_diffusion_dual_guided.py @@ -343,7 +343,8 @@ def normalize_embeddings(encoder_output): # get prompt text embeddings image_input = self.image_feature_extractor(images=prompt, return_tensors="pt") - image_embeddings = self.image_encoder(image_input.pixel_values.to(device)) + pixel_values = image_input.pixel_values.to(device).to(self.image_encoder.dtype) + image_embeddings = self.image_encoder(pixel_values) image_embeddings = normalize_embeddings(image_embeddings) # duplicate image embeddings for each generation per prompt, using mps friendly method @@ -355,7 +356,8 @@ def normalize_embeddings(encoder_output): if do_classifier_free_guidance: uncond_images = [np.zeros((512, 512, 3))] * batch_size uncond_images = 
self.image_feature_extractor(images=uncond_images, return_tensors="pt") - uncond_embeddings = self.image_encoder(uncond_images.pixel_values.to(device)) + pixel_values = uncond_images.pixel_values.to(device).to(self.image_encoder.dtype) + uncond_embeddings = self.image_encoder(pixel_values) uncond_embeddings = normalize_embeddings(uncond_embeddings) # duplicate unconditional embeddings for each generation per prompt, using mps friendly method @@ -397,22 +399,22 @@ def prepare_extra_step_kwargs(self, generator, eta): extra_step_kwargs["generator"] = generator return extra_step_kwargs - def check_inputs(self, first_prompt, second_prompt, height, width, callback_steps): + def check_inputs(self, prompt, image, height, width, callback_steps): if ( - not isinstance(first_prompt, str) - and not isinstance(first_prompt, PIL.Image.Image) - and not isinstance(first_prompt, list) + not isinstance(prompt, str) + and not isinstance(prompt, PIL.Image.Image) + and not isinstance(prompt, list) ): raise ValueError( - f"`first_prompt` has to be of type `str` `PIL.Image` or `list` but is {type(first_prompt)}" + f"`prompt` has to be of type `str` `PIL.Image` or `list` but is {type(prompt)}" ) if ( - not isinstance(second_prompt, str) - and not isinstance(second_prompt, PIL.Image.Image) - and not isinstance(second_prompt, list) + not isinstance(image, str) + and not isinstance(image, PIL.Image.Image) + and not isinstance(image, list) ): raise ValueError( - f"`second_prompt` has to be of type `str` `PIL.Image` or `list` but is {type(second_prompt)}" + f"`image` has to be of type `str` `PIL.Image` or `list` but is {type(image)}" ) if height % 8 != 0 or width % 8 != 0: @@ -460,8 +462,8 @@ def set_transformer_params(self, mix_ratio: float = 0.5, condition_types: Tuple @torch.no_grad() def __call__( self, - first_prompt: Union[str, List[str], PIL.Image.Image, List[PIL.Image.Image]], - second_prompt: Union[str, List[str], PIL.Image.Image, List[PIL.Image.Image]], + image: Union[str, List[str]], + prompt: Union[PIL.Image.Image, List[PIL.Image.Image]], prompt_mix_ratio: float = 0.5, height: int = 512, width: int = 512, @@ -533,12 +535,12 @@ def __call__( """ # 1. Check inputs. Raise error if not correct - self.check_inputs(first_prompt, second_prompt, height, width, callback_steps) + self.check_inputs(prompt, image, height, width, callback_steps) # 2. Define call parameters - first_prompt = [first_prompt] if not isinstance(first_prompt, list) else first_prompt - second_prompt = [second_prompt] if not isinstance(second_prompt, list) else second_prompt - batch_size = len(first_prompt) + prompt = [prompt] if not isinstance(prompt, list) else prompt + image = [image] if not isinstance(image, list) else image + batch_size = len(prompt) device = self._execution_device # here `guidance_scale` is defined analog to the guidance weight `w` of equation (2) # of the Imagen paper: https://arxiv.org/pdf/2205.11487.pdf . `guidance_scale = 1` @@ -548,18 +550,15 @@ def __call__( # 3. 
Encode input prompts dual_prompt_embeddings = [] prompt_types = [] - for prompt in [first_prompt, second_prompt]: - if isinstance(prompt[0], str): - embeddings = self._encode_text_prompt( - prompt, device, num_images_per_prompt, do_classifier_free_guidance - ) - prompt_types.append("text") - else: - embeddings = self._encode_image_prompt( - prompt, device, num_images_per_prompt, do_classifier_free_guidance - ) - prompt_types.append("image") - dual_prompt_embeddings.append(embeddings) + embeddings = self._encode_text_prompt( + prompt, device, num_images_per_prompt, do_classifier_free_guidance + ) + prompt_types.append("text") + embeddings = self._encode_image_prompt( + image, device, num_images_per_prompt, do_classifier_free_guidance + ) + prompt_types.append("image") + dual_prompt_embeddings.append(embeddings) dual_prompt_embeddings = torch.cat(dual_prompt_embeddings, dim=1) # 4. Prepare timesteps diff --git a/src/diffusers/pipelines/versatile_diffusion/pipeline_versatile_diffusion_image_variation.py b/src/diffusers/pipelines/versatile_diffusion/pipeline_versatile_diffusion_image_variation.py index d1c3156431db..0da99fafcebc 100644 --- a/src/diffusers/pipelines/versatile_diffusion/pipeline_versatile_diffusion_image_variation.py +++ b/src/diffusers/pipelines/versatile_diffusion/pipeline_versatile_diffusion_image_variation.py @@ -186,7 +186,8 @@ def normalize_embeddings(encoder_output): # get prompt text embeddings image_input = self.image_feature_extractor(images=prompt, return_tensors="pt") - image_embeddings = self.image_encoder(image_input.pixel_values.to(device)) + pixel_values = image_input.pixel_values.to(device).to(self.image_encoder.dtype) + image_embeddings = self.image_encoder(pixel_values) image_embeddings = normalize_embeddings(image_embeddings) # duplicate image embeddings for each generation per prompt, using mps friendly method @@ -216,7 +217,8 @@ def normalize_embeddings(encoder_output): uncond_images = negative_prompt uncond_images = self.image_feature_extractor(images=uncond_images, return_tensors="pt") - uncond_embeddings = self.image_encoder(uncond_images.pixel_values.to(device)) + pixel_values = uncond_images.pixel_values.to(device).to(self.image_encoder.dtype) + uncond_embeddings = self.image_encoder(pixel_values) uncond_embeddings = normalize_embeddings(uncond_embeddings) # duplicate unconditional embeddings for each generation per prompt, using mps friendly method @@ -357,6 +359,30 @@ def __call__( The frequency at which the `callback` function will be called. If not specified, the callback will be called at every step. 
+ Examples: + + ```py + >>> from diffusers import VersatileDiffusionImageVariationPipeline + >>> import torch + >>> import requests + >>> from io import BytesIO + >>> from PIL import Image + + >>> # let's download an initial image + >>> url = "https://huggingface.co/datasets/diffusers/images/resolve/main/benz.jpg" + + >>> response = requests.get(url) + >>> image = Image.open(BytesIO(response.content)).convert("RGB") + + >>> pipe = VersatileDiffusionImageVariationPipeline.from_pretrained("diffusers/vd-official-test", torch_dtype=torch.float16) + >>> # pipe.remove_unused_weights() + >>> pipe = pipe.to("cuda") + + >>> generator = torch.Generator(device="cuda").manual_seed(0) + >>> image = pipe(image, generator=generator).images[0] + >>> image.save("./car_variation.png") + ``` + Returns: [`~pipelines.stable_diffusion.StableDiffusionPipelineOutput`] or `tuple`: [`~pipelines.stable_diffusion.StableDiffusionPipelineOutput`] if `return_dict` is True, otherwise a `tuple. diff --git a/src/diffusers/pipelines/versatile_diffusion/pipeline_versatile_diffusion_text_to_image.py b/src/diffusers/pipelines/versatile_diffusion/pipeline_versatile_diffusion_text_to_image.py index 5880dd6d76ba..1a33dffe5951 100644 --- a/src/diffusers/pipelines/versatile_diffusion/pipeline_versatile_diffusion_text_to_image.py +++ b/src/diffusers/pipelines/versatile_diffusion/pipeline_versatile_diffusion_text_to_image.py @@ -410,6 +410,21 @@ def __call__( The frequency at which the `callback` function will be called. If not specified, the callback will be called at every step. + Examples: + + ```py + >>> from diffusers import VersatileDiffusionTextToImagePipeline + >>> import torch + + >>> pipe = VersatileDiffusionTextToImagePipeline.from_pretrained("diffusers/vd-official-test", torch_dtype=torch.float16) + >>> # pipe.remove_unused_weights() + >>> pipe = pipe.to("cuda") + + >>> generator = torch.Generator(device="cuda").manual_seed(0) + >>> image = pipe("an astronaut riding on a horse on mars", generator=generator).images[0] + >>> image.save("./astronaut.png") + ``` + Returns: [`~pipelines.stable_diffusion.StableDiffusionPipelineOutput`] or `tuple`: [`~pipelines.stable_diffusion.StableDiffusionPipelineOutput`] if `return_dict` is True, otherwise a `tuple. From 2e2df188cb608f40aa156a19087580ec8877ce62 Mon Sep 17 00:00:00 2001 From: Patrick von Platen Date: Wed, 23 Nov 2022 15:26:49 +0000 Subject: [PATCH 39/49] uP --- ...ipeline_versatile_diffusion_dual_guided.py | 29 +++++++++++++++++-- 1 file changed, 27 insertions(+), 2 deletions(-) diff --git a/src/diffusers/pipelines/versatile_diffusion/pipeline_versatile_diffusion_dual_guided.py b/src/diffusers/pipelines/versatile_diffusion/pipeline_versatile_diffusion_dual_guided.py index 3d961fd70a59..6bda0a4edb60 100644 --- a/src/diffusers/pipelines/versatile_diffusion/pipeline_versatile_diffusion_dual_guided.py +++ b/src/diffusers/pipelines/versatile_diffusion/pipeline_versatile_diffusion_dual_guided.py @@ -465,7 +465,7 @@ def __call__( self, image: Union[str, List[str]], prompt: Union[PIL.Image.Image, List[PIL.Image.Image]], - prompt_mix_ratio: float = 0.5, + text_to_image_strength: float = 0.5, height: int = 512, width: int = 512, num_inference_steps: int = 50, @@ -527,6 +527,31 @@ def __call__( The frequency at which the `callback` function will be called. If not specified, the callback will be called at every step. 
+ ```py + >>> from diffusers import VersatileDiffusionImageVariationPipeline + >>> import torch + >>> import requests + >>> from io import BytesIO + >>> from PIL import Image + + >>> # let's download an initial image + >>> url = "https://huggingface.co/datasets/diffusers/images/resolve/main/benz.jpg" + + >>> response = requests.get(url) + >>> image = Image.open(BytesIO(response.content)).convert("RGB") + >>> text = "a painting, mosaic style" + + >>> pipe = VersatileDiffusionImageVariationPipeline.from_pretrained("diffusers/vd-official-test", torch_dtype=torch.float16) + >>> # pipe.remove_unused_weights() + >>> pipe = pipe.to("cuda") + + >>> generator = torch.Generator(device="cuda").manual_seed(0) + >>> text_to_image_strength = 0.5 + + >>> image = pipe(image=image, text=text, text_to_image_strength=text_to_image_strength, generator=generator).images[0] + >>> image.save("./car_variation.png") + ``` + Returns: [`~pipelines.stable_diffusion.StableDiffusionPipelineOutput`] or `tuple`: [`~pipelines.stable_diffusion.StableDiffusionPipelineOutput`] if `return_dict` is True, otherwise a `tuple. @@ -583,7 +608,7 @@ def __call__( extra_step_kwargs = self.prepare_extra_step_kwargs(generator, eta) # 7. Combine the attention blocks of the image and text UNets - self.set_transformer_params(prompt_mix_ratio, prompt_types) + self.set_transformer_params(text_to_image_strength, prompt_types) # 8. Denoising loop for i, t in enumerate(self.progress_bar(timesteps)): From dd9dce58f0f7023751b7eabad72bc0e296bb6521 Mon Sep 17 00:00:00 2001 From: anton-l Date: Wed, 23 Nov 2022 16:45:15 +0100 Subject: [PATCH 40/49] fix dual prompts --- ...ipeline_versatile_diffusion_dual_guided.py | 28 ++++++++----------- .../test_versatile_diffusion_dual_guided.py | 6 ++-- ...est_versatile_diffusion_image_variation.py | 2 +- .../test_versatile_diffusion_text_to_image.py | 2 +- 4 files changed, 16 insertions(+), 22 deletions(-) diff --git a/src/diffusers/pipelines/versatile_diffusion/pipeline_versatile_diffusion_dual_guided.py b/src/diffusers/pipelines/versatile_diffusion/pipeline_versatile_diffusion_dual_guided.py index 6bda0a4edb60..e7402a357d4b 100644 --- a/src/diffusers/pipelines/versatile_diffusion/pipeline_versatile_diffusion_dual_guided.py +++ b/src/diffusers/pipelines/versatile_diffusion/pipeline_versatile_diffusion_dual_guided.py @@ -463,8 +463,8 @@ def set_transformer_params(self, mix_ratio: float = 0.5, condition_types: Tuple @torch.no_grad() def __call__( self, - image: Union[str, List[str]], prompt: Union[PIL.Image.Image, List[PIL.Image.Image]], + image: Union[str, List[str]], text_to_image_strength: float = 0.5, height: int = 512, width: int = 512, @@ -542,22 +542,20 @@ def __call__( >>> text = "a painting, mosaic style" >>> pipe = VersatileDiffusionImageVariationPipeline.from_pretrained("diffusers/vd-official-test", torch_dtype=torch.float16) - >>> # pipe.remove_unused_weights() + >>> pipe.remove_unused_weights() >>> pipe = pipe.to("cuda") >>> generator = torch.Generator(device="cuda").manual_seed(0) - >>> text_to_image_strength = 0.5 + >>> text_to_image_strength = 0.75 - >>> image = pipe(image=image, text=text, text_to_image_strength=text_to_image_strength, generator=generator).images[0] + >>> image = pipe(prompt=text, image=image, text_to_image_strength=text_to_image_strength, generator=generator).images[0] >>> image.save("./car_variation.png") ``` Returns: - [`~pipelines.stable_diffusion.StableDiffusionPipelineOutput`] or `tuple`: - [`~pipelines.stable_diffusion.StableDiffusionPipelineOutput`] if `return_dict` is 
True, otherwise a `tuple. - When returning a tuple, the first element is a list with the generated images, and the second element is a - list of `bool`s denoting whether the corresponding generated image likely represents "not-safe-for-work" - (nsfw) content, according to the `safety_checker`. + [`~pipelines.stable_diffusion.ImagePipelineOutput`] or `tuple`: + [`~pipelines.stable_diffusion.ImagePipelineOutput`] if `return_dict` is True, otherwise a `tuple. + When returning a tuple, the first element is a list with the generated images. """ # 1. Check inputs. Raise error if not correct @@ -574,18 +572,14 @@ def __call__( do_classifier_free_guidance = guidance_scale > 1.0 # 3. Encode input prompts - dual_prompt_embeddings = [] - prompt_types = [] - embeddings = self._encode_text_prompt( + text_embeddings = self._encode_text_prompt( prompt, device, num_images_per_prompt, do_classifier_free_guidance ) - prompt_types.append("text") - embeddings = self._encode_image_prompt( + image_embeddings = self._encode_image_prompt( image, device, num_images_per_prompt, do_classifier_free_guidance ) - prompt_types.append("image") - dual_prompt_embeddings.append(embeddings) - dual_prompt_embeddings = torch.cat(dual_prompt_embeddings, dim=1) + dual_prompt_embeddings = torch.cat([text_embeddings, image_embeddings], dim=1) + prompt_types = ("text", "image") # 4. Prepare timesteps self.scheduler.set_timesteps(num_inference_steps, device=device) diff --git a/tests/pipelines/versatile_diffusion/test_versatile_diffusion_dual_guided.py b/tests/pipelines/versatile_diffusion/test_versatile_diffusion_dual_guided.py index 46f9127ba8d3..91739f14bdb0 100644 --- a/tests/pipelines/versatile_diffusion/test_versatile_diffusion_dual_guided.py +++ b/tests/pipelines/versatile_diffusion/test_versatile_diffusion_dual_guided.py @@ -90,9 +90,9 @@ def test_inference_dual_guided(self): ) generator = torch.Generator(device=torch_device).manual_seed(0) image = pipe( - first_prompt=first_prompt, - second_prompt=second_prompt, - prompt_mix_ratio=0.75, + prompt=first_prompt, + image=second_prompt, + text_to_image_strength=0.75, generator=generator, guidance_scale=7.5, num_inference_steps=50, diff --git a/tests/pipelines/versatile_diffusion/test_versatile_diffusion_image_variation.py b/tests/pipelines/versatile_diffusion/test_versatile_diffusion_image_variation.py index b8297ea63cf8..2bb2a6bfadac 100644 --- a/tests/pipelines/versatile_diffusion/test_versatile_diffusion_image_variation.py +++ b/tests/pipelines/versatile_diffusion/test_versatile_diffusion_image_variation.py @@ -54,5 +54,5 @@ def test_inference_image_variations(self): image_slice = image[0, 253:256, 253:256, -1] assert image.shape == (1, 512, 512, 3) - expected_slice = np.array([0.1811, 0.0430, 0.0433, 0.1082, 0.0144, 0.0306, 0.0683, 0.0248, 0.0876]) + expected_slice = np.array([0.0113, 0.2241, 0.4024, 0.0839, 0.0871, 0.2725, 0.2581, 0., 0.1096]) assert np.abs(image_slice.flatten() - expected_slice).max() < 1e-2 diff --git a/tests/pipelines/versatile_diffusion/test_versatile_diffusion_text_to_image.py b/tests/pipelines/versatile_diffusion/test_versatile_diffusion_text_to_image.py index 241df51c06a1..523eda94f33f 100644 --- a/tests/pipelines/versatile_diffusion/test_versatile_diffusion_text_to_image.py +++ b/tests/pipelines/versatile_diffusion/test_versatile_diffusion_text_to_image.py @@ -82,5 +82,5 @@ def test_inference_text2img(self): image_slice = image[0, 253:256, 253:256, -1] assert image.shape == (1, 512, 512, 3) - expected_slice = np.array([0.0657, 0.0529, 0.0455, 
0.0802, 0.0570, 0.0179, 0.0267, 0.0483, 0.0769]) + expected_slice = np.array([0.0408, 0.0181, 0., 0.0388, 0.0046, 0.0461, 0.0411, 0., 0.0222]) assert np.abs(image_slice.flatten() - expected_slice).max() < 1e-2 From 6cbee51c6a79a29693f8414de1c2c117690f1ba2 Mon Sep 17 00:00:00 2001 From: Patrick von Platen Date: Wed, 23 Nov 2022 16:04:34 +0000 Subject: [PATCH 41/49] some fixes --- .../pipeline_versatile_diffusion.py | 14 +++++++------- ...line_versatile_diffusion_image_variation.py | 1 - ...peline_versatile_diffusion_text_to_image.py | 2 +- .../test_versatile_diffusion_dual_guided.py | 4 ++-- .../test_versatile_diffusion_mega.py | 18 +++++++++++------- 5 files changed, 21 insertions(+), 18 deletions(-) diff --git a/src/diffusers/pipelines/versatile_diffusion/pipeline_versatile_diffusion.py b/src/diffusers/pipelines/versatile_diffusion/pipeline_versatile_diffusion.py index 23ab9ba8f4dd..6fcc299dde8b 100644 --- a/src/diffusers/pipelines/versatile_diffusion/pipeline_versatile_diffusion.py +++ b/src/diffusers/pipelines/versatile_diffusion/pipeline_versatile_diffusion.py @@ -1,5 +1,5 @@ import inspect -from typing import Any, Callable, Dict, List, Optional, Union +from typing import Callable, Dict, List, Optional, Union import torch @@ -189,9 +189,9 @@ def text_to_image( @torch.no_grad() def dual_guided( self, - first_prompt: Union[str, List[str], PIL.Image.Image, List[PIL.Image.Image]], - second_prompt: Union[str, List[str], PIL.Image.Image, List[PIL.Image.Image]], - prompt_mix_ratio: float = 0.5, + prompt: Union[PIL.Image.Image, List[PIL.Image.Image]], + image: Union[str, List[str]], + text_to_image_strength: float = 0.5, height: int = 512, width: int = 512, num_inference_steps: int = 50, @@ -209,9 +209,9 @@ def dual_guided( components = {name: component for name, component in self.components.items() if name in expected_components} temp_pipeline = VersatileDiffusionDualGuidedPipeline(**components) output = temp_pipeline( - first_prompt=first_prompt, - second_prompt=second_prompt, - prompt_mix_ratio=prompt_mix_ratio, + prompt=prompt, + image=image, + text_to_image_strength=text_to_image_strength, height=height, width=width, num_inference_steps=num_inference_steps, diff --git a/src/diffusers/pipelines/versatile_diffusion/pipeline_versatile_diffusion_image_variation.py b/src/diffusers/pipelines/versatile_diffusion/pipeline_versatile_diffusion_image_variation.py index 6f91400e610c..56bbbea636dd 100644 --- a/src/diffusers/pipelines/versatile_diffusion/pipeline_versatile_diffusion_image_variation.py +++ b/src/diffusers/pipelines/versatile_diffusion/pipeline_versatile_diffusion_image_variation.py @@ -375,7 +375,6 @@ def __call__( >>> image = Image.open(BytesIO(response.content)).convert("RGB") >>> pipe = VersatileDiffusionImageVariationPipeline.from_pretrained("diffusers/vd-official-test", torch_dtype=torch.float16) - >>> # pipe.remove_unused_weights() >>> pipe = pipe.to("cuda") >>> generator = torch.Generator(device="cuda").manual_seed(0) diff --git a/src/diffusers/pipelines/versatile_diffusion/pipeline_versatile_diffusion_text_to_image.py b/src/diffusers/pipelines/versatile_diffusion/pipeline_versatile_diffusion_text_to_image.py index 2652cdcaaf00..5f987ee1a850 100644 --- a/src/diffusers/pipelines/versatile_diffusion/pipeline_versatile_diffusion_text_to_image.py +++ b/src/diffusers/pipelines/versatile_diffusion/pipeline_versatile_diffusion_text_to_image.py @@ -426,7 +426,7 @@ def __call__( >>> import torch >>> pipe = VersatileDiffusionTextToImagePipeline.from_pretrained("diffusers/vd-official-test", 
torch_dtype=torch.float16) - >>> # pipe.remove_unused_weights() + >>> pipe.remove_unused_weights() >>> pipe = pipe.to("cuda") >>> generator = torch.Generator(device="cuda").manual_seed(0) diff --git a/tests/pipelines/versatile_diffusion/test_versatile_diffusion_dual_guided.py b/tests/pipelines/versatile_diffusion/test_versatile_diffusion_dual_guided.py index 91739f14bdb0..792c52feb15a 100644 --- a/tests/pipelines/versatile_diffusion/test_versatile_diffusion_dual_guided.py +++ b/tests/pipelines/versatile_diffusion/test_versatile_diffusion_dual_guided.py @@ -53,7 +53,7 @@ def test_remove_unused_weights_save_load(self): image = pipe( first_prompt="first prompt", second_prompt="second prompt", - prompt_mix_ratio=0.75, + text_to_image_strength=0.75, generator=generator, guidance_scale=7.5, num_inference_steps=2, @@ -70,7 +70,7 @@ def test_remove_unused_weights_save_load(self): new_image = pipe( first_prompt="first prompt", second_prompt="second prompt", - prompt_mix_ratio=0.75, + text_to_image_strength=0.75, generator=generator, guidance_scale=7.5, num_inference_steps=2, diff --git a/tests/pipelines/versatile_diffusion/test_versatile_diffusion_mega.py b/tests/pipelines/versatile_diffusion/test_versatile_diffusion_mega.py index df45266f4b33..bad9bb0bf00d 100644 --- a/tests/pipelines/versatile_diffusion/test_versatile_diffusion_mega.py +++ b/tests/pipelines/versatile_diffusion/test_versatile_diffusion_mega.py @@ -47,11 +47,15 @@ def test_from_pretrained_save_pretrained(self): pipe.to(torch_device) pipe.set_progress_bar_config(disable=None) + second_prompt = load_image( + "https://raw.githubusercontent.com/SHI-Labs/Versatile-Diffusion/master/assets/benz.jpg" + ) + generator = torch.Generator(device=torch_device).manual_seed(0) image = pipe.dual_guided( first_prompt="first prompt", - second_prompt="second prompt", - prompt_mix_ratio=0.75, + second_prompt=second_prompt, + text_to_image_strength=0.75, generator=generator, guidance_scale=7.5, num_inference_steps=2, @@ -67,8 +71,8 @@ def test_from_pretrained_save_pretrained(self): generator = generator.manual_seed(0) new_image = pipe.dual_guided( first_prompt="first prompt", - second_prompt="second prompt", - prompt_mix_ratio=0.75, + second_prompt=second_prompt, + text_to_image_strength=0.75, generator=generator, guidance_scale=7.5, num_inference_steps=2, @@ -88,9 +92,9 @@ def test_inference_dual_guided_then_text_to_image(self): ) generator = torch.Generator(device=torch_device).manual_seed(0) image = pipe.dual_guided( - first_prompt=first_prompt, - second_prompt=second_prompt, - prompt_mix_ratio=0.75, + text=first_prompt, + image=second_prompt, + text_to_image_strength=0.75, generator=generator, guidance_scale=7.5, num_inference_steps=50, From e9843faa8798f2f3d2b725733088fd4c01e85267 Mon Sep 17 00:00:00 2001 From: Patrick von Platen Date: Wed, 23 Nov 2022 16:29:01 +0000 Subject: [PATCH 42/49] finish --- ...ipeline_versatile_diffusion_dual_guided.py | 41 +++++++------------ ...ine_versatile_diffusion_image_variation.py | 4 +- ...eline_versatile_diffusion_text_to_image.py | 4 +- .../test_versatile_diffusion_dual_guided.py | 16 +++++--- ...est_versatile_diffusion_image_variation.py | 2 +- .../test_versatile_diffusion_mega.py | 18 ++++---- .../test_versatile_diffusion_text_to_image.py | 2 +- 7 files changed, 43 insertions(+), 44 deletions(-) diff --git a/src/diffusers/pipelines/versatile_diffusion/pipeline_versatile_diffusion_dual_guided.py b/src/diffusers/pipelines/versatile_diffusion/pipeline_versatile_diffusion_dual_guided.py index 
e7402a357d4b..a63eac838eb3 100644 --- a/src/diffusers/pipelines/versatile_diffusion/pipeline_versatile_diffusion_dual_guided.py +++ b/src/diffusers/pipelines/versatile_diffusion/pipeline_versatile_diffusion_dual_guided.py @@ -130,6 +130,7 @@ def _convert_to_dual_attention(self): dual_transformer.transformers[1] = text_transformer self.image_unet.get_submodule(parent_name)[index] = dual_transformer + self.image_unet.register_to_config(dual_cross_attention=True) def _revert_dual_attention(self): """ @@ -401,22 +402,10 @@ def prepare_extra_step_kwargs(self, generator, eta): return extra_step_kwargs def check_inputs(self, prompt, image, height, width, callback_steps): - if ( - not isinstance(prompt, str) - and not isinstance(prompt, PIL.Image.Image) - and not isinstance(prompt, list) - ): - raise ValueError( - f"`prompt` has to be of type `str` `PIL.Image` or `list` but is {type(prompt)}" - ) - if ( - not isinstance(image, str) - and not isinstance(image, PIL.Image.Image) - and not isinstance(image, list) - ): - raise ValueError( - f"`image` has to be of type `str` `PIL.Image` or `list` but is {type(image)}" - ) + if not isinstance(prompt, str) and not isinstance(prompt, PIL.Image.Image) and not isinstance(prompt, list): + raise ValueError(f"`prompt` has to be of type `str` `PIL.Image` or `list` but is {type(prompt)}") + if not isinstance(image, str) and not isinstance(image, PIL.Image.Image) and not isinstance(image, list): + raise ValueError(f"`image` has to be of type `str` `PIL.Image` or `list` but is {type(image)}") if height % 8 != 0 or width % 8 != 0: raise ValueError(f"`height` and `width` have to be divisible by 8 but are {height} and {width}.") @@ -541,21 +530,25 @@ def __call__( >>> image = Image.open(BytesIO(response.content)).convert("RGB") >>> text = "a painting, mosaic style" - >>> pipe = VersatileDiffusionImageVariationPipeline.from_pretrained("diffusers/vd-official-test", torch_dtype=torch.float16) + >>> pipe = VersatileDiffusionImageVariationPipeline.from_pretrained( + ... "diffusers/vd-official-test", torch_dtype=torch.float16 + ... ) >>> pipe.remove_unused_weights() >>> pipe = pipe.to("cuda") >>> generator = torch.Generator(device="cuda").manual_seed(0) >>> text_to_image_strength = 0.75 - >>> image = pipe(prompt=text, image=image, text_to_image_strength=text_to_image_strength, generator=generator).images[0] + >>> image = pipe( + ... prompt=text, image=image, text_to_image_strength=text_to_image_strength, generator=generator + ... ).images[0] >>> image.save("./car_variation.png") ``` Returns: [`~pipelines.stable_diffusion.ImagePipelineOutput`] or `tuple`: - [`~pipelines.stable_diffusion.ImagePipelineOutput`] if `return_dict` is True, otherwise a `tuple. - When returning a tuple, the first element is a list with the generated images. + [`~pipelines.stable_diffusion.ImagePipelineOutput`] if `return_dict` is True, otherwise a `tuple. When + returning a tuple, the first element is a list with the generated images. """ # 1. Check inputs. Raise error if not correct @@ -572,12 +565,8 @@ def __call__( do_classifier_free_guidance = guidance_scale > 1.0 # 3. 
Encode input prompts - text_embeddings = self._encode_text_prompt( - prompt, device, num_images_per_prompt, do_classifier_free_guidance - ) - image_embeddings = self._encode_image_prompt( - image, device, num_images_per_prompt, do_classifier_free_guidance - ) + text_embeddings = self._encode_text_prompt(prompt, device, num_images_per_prompt, do_classifier_free_guidance) + image_embeddings = self._encode_image_prompt(image, device, num_images_per_prompt, do_classifier_free_guidance) dual_prompt_embeddings = torch.cat([text_embeddings, image_embeddings], dim=1) prompt_types = ("text", "image") diff --git a/src/diffusers/pipelines/versatile_diffusion/pipeline_versatile_diffusion_image_variation.py b/src/diffusers/pipelines/versatile_diffusion/pipeline_versatile_diffusion_image_variation.py index 56bbbea636dd..8d547bce2fce 100644 --- a/src/diffusers/pipelines/versatile_diffusion/pipeline_versatile_diffusion_image_variation.py +++ b/src/diffusers/pipelines/versatile_diffusion/pipeline_versatile_diffusion_image_variation.py @@ -374,7 +374,9 @@ def __call__( >>> response = requests.get(url) >>> image = Image.open(BytesIO(response.content)).convert("RGB") - >>> pipe = VersatileDiffusionImageVariationPipeline.from_pretrained("diffusers/vd-official-test", torch_dtype=torch.float16) + >>> pipe = VersatileDiffusionImageVariationPipeline.from_pretrained( + ... "diffusers/vd-official-test", torch_dtype=torch.float16 + ... ) >>> pipe = pipe.to("cuda") >>> generator = torch.Generator(device="cuda").manual_seed(0) diff --git a/src/diffusers/pipelines/versatile_diffusion/pipeline_versatile_diffusion_text_to_image.py b/src/diffusers/pipelines/versatile_diffusion/pipeline_versatile_diffusion_text_to_image.py index 5f987ee1a850..ad9d4ce83dca 100644 --- a/src/diffusers/pipelines/versatile_diffusion/pipeline_versatile_diffusion_text_to_image.py +++ b/src/diffusers/pipelines/versatile_diffusion/pipeline_versatile_diffusion_text_to_image.py @@ -425,7 +425,9 @@ def __call__( >>> from diffusers import VersatileDiffusionTextToImagePipeline >>> import torch - >>> pipe = VersatileDiffusionTextToImagePipeline.from_pretrained("diffusers/vd-official-test", torch_dtype=torch.float16) + >>> pipe = VersatileDiffusionTextToImagePipeline.from_pretrained( + ... "diffusers/vd-official-test", torch_dtype=torch.float16 + ... 
) >>> pipe.remove_unused_weights() >>> pipe = pipe.to("cuda") diff --git a/tests/pipelines/versatile_diffusion/test_versatile_diffusion_dual_guided.py b/tests/pipelines/versatile_diffusion/test_versatile_diffusion_dual_guided.py index 792c52feb15a..0e3ab432a43f 100644 --- a/tests/pipelines/versatile_diffusion/test_versatile_diffusion_dual_guided.py +++ b/tests/pipelines/versatile_diffusion/test_versatile_diffusion_dual_guided.py @@ -49,10 +49,14 @@ def test_remove_unused_weights_save_load(self): pipe.to(torch_device) pipe.set_progress_bar_config(disable=None) + second_prompt = load_image( + "https://raw.githubusercontent.com/SHI-Labs/Versatile-Diffusion/master/assets/benz.jpg" + ) + generator = torch.Generator(device=torch_device).manual_seed(0) image = pipe( - first_prompt="first prompt", - second_prompt="second prompt", + prompt="first prompt", + image=second_prompt, text_to_image_strength=0.75, generator=generator, guidance_scale=7.5, @@ -63,13 +67,14 @@ def test_remove_unused_weights_save_load(self): with tempfile.TemporaryDirectory() as tmpdirname: pipe.save_pretrained(tmpdirname) pipe = VersatileDiffusionDualGuidedPipeline.from_pretrained(tmpdirname) + pipe.to(torch_device) pipe.set_progress_bar_config(disable=None) generator = generator.manual_seed(0) new_image = pipe( - first_prompt="first prompt", - second_prompt="second prompt", + prompt="first prompt", + image=second_prompt, text_to_image_strength=0.75, generator=generator, guidance_scale=7.5, @@ -81,6 +86,7 @@ def test_remove_unused_weights_save_load(self): def test_inference_dual_guided(self): pipe = VersatileDiffusionDualGuidedPipeline.from_pretrained("diffusers/vd-official-test") + pipe.remove_unused_weights() pipe.to(torch_device) pipe.set_progress_bar_config(disable=None) @@ -102,5 +108,5 @@ def test_inference_dual_guided(self): image_slice = image[0, 253:256, 253:256, -1] assert image.shape == (1, 512, 512, 3) - expected_slice = np.array([0.0607, 0.0695, 0.0750, 0.0650, 0.0703, 0.0796, 0.0726, 0.0768, 0.0858]) + expected_slice = np.array([0.014 , 0.0112, 0.0136, 0.0145, 0.0107, 0.0113, 0.0272, 0.0215, 0.0216]) assert np.abs(image_slice.flatten() - expected_slice).max() < 1e-2 diff --git a/tests/pipelines/versatile_diffusion/test_versatile_diffusion_image_variation.py b/tests/pipelines/versatile_diffusion/test_versatile_diffusion_image_variation.py index 2bb2a6bfadac..364e6c3b8404 100644 --- a/tests/pipelines/versatile_diffusion/test_versatile_diffusion_image_variation.py +++ b/tests/pipelines/versatile_diffusion/test_versatile_diffusion_image_variation.py @@ -54,5 +54,5 @@ def test_inference_image_variations(self): image_slice = image[0, 253:256, 253:256, -1] assert image.shape == (1, 512, 512, 3) - expected_slice = np.array([0.0113, 0.2241, 0.4024, 0.0839, 0.0871, 0.2725, 0.2581, 0., 0.1096]) + expected_slice = np.array([0.0113, 0.2241, 0.4024, 0.0839, 0.0871, 0.2725, 0.2581, 0.0, 0.1096]) assert np.abs(image_slice.flatten() - expected_slice).max() < 1e-2 diff --git a/tests/pipelines/versatile_diffusion/test_versatile_diffusion_mega.py b/tests/pipelines/versatile_diffusion/test_versatile_diffusion_mega.py index bad9bb0bf00d..da578d2b9c18 100644 --- a/tests/pipelines/versatile_diffusion/test_versatile_diffusion_mega.py +++ b/tests/pipelines/versatile_diffusion/test_versatile_diffusion_mega.py @@ -47,14 +47,14 @@ def test_from_pretrained_save_pretrained(self): pipe.to(torch_device) pipe.set_progress_bar_config(disable=None) - second_prompt = load_image( + image = load_image( 
"https://raw.githubusercontent.com/SHI-Labs/Versatile-Diffusion/master/assets/benz.jpg" ) generator = torch.Generator(device=torch_device).manual_seed(0) image = pipe.dual_guided( - first_prompt="first prompt", - second_prompt=second_prompt, + prompt="first prompt", + image=image, text_to_image_strength=0.75, generator=generator, guidance_scale=7.5, @@ -70,8 +70,8 @@ def test_from_pretrained_save_pretrained(self): generator = generator.manual_seed(0) new_image = pipe.dual_guided( - first_prompt="first prompt", - second_prompt=second_prompt, + prompt="first prompt", + image=image, text_to_image_strength=0.75, generator=generator, guidance_scale=7.5, @@ -86,14 +86,14 @@ def test_inference_dual_guided_then_text_to_image(self): pipe.to(torch_device) pipe.set_progress_bar_config(disable=None) - first_prompt = "cyberpunk 2077" - second_prompt = load_image( + prompt = "cyberpunk 2077" + image = load_image( "https://raw.githubusercontent.com/SHI-Labs/Versatile-Diffusion/master/assets/benz.jpg" ) generator = torch.Generator(device=torch_device).manual_seed(0) image = pipe.dual_guided( - text=first_prompt, - image=second_prompt, + text=prompt, + image=image, text_to_image_strength=0.75, generator=generator, guidance_scale=7.5, diff --git a/tests/pipelines/versatile_diffusion/test_versatile_diffusion_text_to_image.py b/tests/pipelines/versatile_diffusion/test_versatile_diffusion_text_to_image.py index 523eda94f33f..92f8d8c1262a 100644 --- a/tests/pipelines/versatile_diffusion/test_versatile_diffusion_text_to_image.py +++ b/tests/pipelines/versatile_diffusion/test_versatile_diffusion_text_to_image.py @@ -82,5 +82,5 @@ def test_inference_text2img(self): image_slice = image[0, 253:256, 253:256, -1] assert image.shape == (1, 512, 512, 3) - expected_slice = np.array([0.0408, 0.0181, 0., 0.0388, 0.0046, 0.0461, 0.0411, 0., 0.0222]) + expected_slice = np.array([0.0408, 0.0181, 0.0, 0.0388, 0.0046, 0.0461, 0.0411, 0.0, 0.0222]) assert np.abs(image_slice.flatten() - expected_slice).max() < 1e-2 From cea10a0e2d2466d554631b5afbd76ff4f08aba04 Mon Sep 17 00:00:00 2001 From: anton-l Date: Wed, 23 Nov 2022 18:16:56 +0100 Subject: [PATCH 43/49] style --- src/diffusers/models/attention.py | 2 +- .../pipeline_versatile_diffusion.py | 2 +- .../pipeline_versatile_diffusion_dual_guided.py | 1 - .../test_versatile_diffusion_dual_guided.py | 2 +- .../test_versatile_diffusion_mega.py | 16 +++++++--------- 5 files changed, 10 insertions(+), 13 deletions(-) diff --git a/src/diffusers/models/attention.py b/src/diffusers/models/attention.py index f47327fdbeb8..95e9ef8d1a2d 100644 --- a/src/diffusers/models/attention.py +++ b/src/diffusers/models/attention.py @@ -22,7 +22,7 @@ from ..configuration_utils import ConfigMixin, register_to_config from ..modeling_utils import ModelMixin from ..models.embeddings import ImagePositionalEmbeddings -from ..utils import CONFIG_NAME, BaseOutput +from ..utils import BaseOutput from ..utils.import_utils import is_xformers_available diff --git a/src/diffusers/pipelines/versatile_diffusion/pipeline_versatile_diffusion.py b/src/diffusers/pipelines/versatile_diffusion/pipeline_versatile_diffusion.py index 6fcc299dde8b..6736ff02f0af 100644 --- a/src/diffusers/pipelines/versatile_diffusion/pipeline_versatile_diffusion.py +++ b/src/diffusers/pipelines/versatile_diffusion/pipeline_versatile_diffusion.py @@ -1,5 +1,5 @@ import inspect -from typing import Callable, Dict, List, Optional, Union +from typing import Callable, List, Optional, Union import torch diff --git 
a/src/diffusers/pipelines/versatile_diffusion/pipeline_versatile_diffusion_dual_guided.py b/src/diffusers/pipelines/versatile_diffusion/pipeline_versatile_diffusion_dual_guided.py index a63eac838eb3..24e25f926544 100644 --- a/src/diffusers/pipelines/versatile_diffusion/pipeline_versatile_diffusion_dual_guided.py +++ b/src/diffusers/pipelines/versatile_diffusion/pipeline_versatile_diffusion_dual_guided.py @@ -29,7 +29,6 @@ from ...models import AutoencoderKL, UNet2DConditionModel from ...models.attention import DualTransformer2DModel, Transformer2DModel -from ...models.unet_2d_blocks import CrossAttnDownBlock2D, CrossAttnUpBlock2D, UNetMidBlock2DCrossAttn from ...pipeline_utils import DiffusionPipeline, ImagePipelineOutput from ...schedulers import DDIMScheduler, LMSDiscreteScheduler, PNDMScheduler from ...utils import is_accelerate_available, logging diff --git a/tests/pipelines/versatile_diffusion/test_versatile_diffusion_dual_guided.py b/tests/pipelines/versatile_diffusion/test_versatile_diffusion_dual_guided.py index 0e3ab432a43f..0641309ce801 100644 --- a/tests/pipelines/versatile_diffusion/test_versatile_diffusion_dual_guided.py +++ b/tests/pipelines/versatile_diffusion/test_versatile_diffusion_dual_guided.py @@ -108,5 +108,5 @@ def test_inference_dual_guided(self): image_slice = image[0, 253:256, 253:256, -1] assert image.shape == (1, 512, 512, 3) - expected_slice = np.array([0.014 , 0.0112, 0.0136, 0.0145, 0.0107, 0.0113, 0.0272, 0.0215, 0.0216]) + expected_slice = np.array([0.014, 0.0112, 0.0136, 0.0145, 0.0107, 0.0113, 0.0272, 0.0215, 0.0216]) assert np.abs(image_slice.flatten() - expected_slice).max() < 1e-2 diff --git a/tests/pipelines/versatile_diffusion/test_versatile_diffusion_mega.py b/tests/pipelines/versatile_diffusion/test_versatile_diffusion_mega.py index da578d2b9c18..7a8f1e6bb277 100644 --- a/tests/pipelines/versatile_diffusion/test_versatile_diffusion_mega.py +++ b/tests/pipelines/versatile_diffusion/test_versatile_diffusion_mega.py @@ -47,14 +47,14 @@ def test_from_pretrained_save_pretrained(self): pipe.to(torch_device) pipe.set_progress_bar_config(disable=None) - image = load_image( + prompt_image = load_image( "https://raw.githubusercontent.com/SHI-Labs/Versatile-Diffusion/master/assets/benz.jpg" ) generator = torch.Generator(device=torch_device).manual_seed(0) image = pipe.dual_guided( prompt="first prompt", - image=image, + image=prompt_image, text_to_image_strength=0.75, generator=generator, guidance_scale=7.5, @@ -71,7 +71,7 @@ def test_from_pretrained_save_pretrained(self): generator = generator.manual_seed(0) new_image = pipe.dual_guided( prompt="first prompt", - image=image, + image=prompt_image, text_to_image_strength=0.75, generator=generator, guidance_scale=7.5, @@ -87,12 +87,10 @@ def test_inference_dual_guided_then_text_to_image(self): pipe.set_progress_bar_config(disable=None) prompt = "cyberpunk 2077" - image = load_image( - "https://raw.githubusercontent.com/SHI-Labs/Versatile-Diffusion/master/assets/benz.jpg" - ) + image = load_image("https://raw.githubusercontent.com/SHI-Labs/Versatile-Diffusion/master/assets/benz.jpg") generator = torch.Generator(device=torch_device).manual_seed(0) image = pipe.dual_guided( - text=prompt, + prompt=prompt, image=image, text_to_image_strength=0.75, generator=generator, @@ -104,7 +102,7 @@ def test_inference_dual_guided_then_text_to_image(self): image_slice = image[0, 253:256, 253:256, -1] assert image.shape == (1, 512, 512, 3) - expected_slice = np.array([0.5727, 0.5625, 0.5617, 0.5703, 0.5530, 0.5620, 0.5864, 
0.5742, 0.5665]) + expected_slice = np.array([0.014, 0.0112, 0.0136, 0.0145, 0.0107, 0.0113, 0.0272, 0.0215, 0.0216]) assert np.abs(image_slice.flatten() - expected_slice).max() < 1e-2 prompt = "A painting of a squirrel eating a burger " @@ -116,5 +114,5 @@ def test_inference_dual_guided_then_text_to_image(self): image_slice = image[0, 253:256, 253:256, -1] assert image.shape == (1, 512, 512, 3) - expected_slice = np.array([0.0657, 0.0529, 0.0455, 0.0802, 0.0570, 0.0179, 0.0267, 0.0483, 0.0769]) + expected_slice = np.array([0.0408, 0.0181, 0.0, 0.0388, 0.0046, 0.0461, 0.0411, 0.0, 0.0222]) assert np.abs(image_slice.flatten() - expected_slice).max() < 1e-2 From 59c2fef0e20cfa827f445a9300498095cc177e7d Mon Sep 17 00:00:00 2001 From: Patrick von Platen Date: Wed, 23 Nov 2022 17:23:25 +0000 Subject: [PATCH 44/49] finish renaming --- ...ipeline_versatile_diffusion_dual_guided.py | 2 +- ...ine_versatile_diffusion_image_variation.py | 2 +- ...eline_versatile_diffusion_text_to_image.py | 2 +- .../test_versatile_diffusion_dual_guided.py | 4 ++-- ...est_versatile_diffusion_image_variation.py | 2 +- .../test_versatile_diffusion_mega.py | 22 ++++++++++++++----- .../test_versatile_diffusion_text_to_image.py | 4 ++-- 7 files changed, 24 insertions(+), 14 deletions(-) diff --git a/src/diffusers/pipelines/versatile_diffusion/pipeline_versatile_diffusion_dual_guided.py b/src/diffusers/pipelines/versatile_diffusion/pipeline_versatile_diffusion_dual_guided.py index a63eac838eb3..78dfbfc34c38 100644 --- a/src/diffusers/pipelines/versatile_diffusion/pipeline_versatile_diffusion_dual_guided.py +++ b/src/diffusers/pipelines/versatile_diffusion/pipeline_versatile_diffusion_dual_guided.py @@ -531,7 +531,7 @@ def __call__( >>> text = "a painting, mosaic style" >>> pipe = VersatileDiffusionImageVariationPipeline.from_pretrained( - ... "diffusers/vd-official-test", torch_dtype=torch.float16 + ... "shi-labs/versatile-diffusion", torch_dtype=torch.float16 ... ) >>> pipe.remove_unused_weights() >>> pipe = pipe.to("cuda") diff --git a/src/diffusers/pipelines/versatile_diffusion/pipeline_versatile_diffusion_image_variation.py b/src/diffusers/pipelines/versatile_diffusion/pipeline_versatile_diffusion_image_variation.py index 8d547bce2fce..652b7b735af4 100644 --- a/src/diffusers/pipelines/versatile_diffusion/pipeline_versatile_diffusion_image_variation.py +++ b/src/diffusers/pipelines/versatile_diffusion/pipeline_versatile_diffusion_image_variation.py @@ -375,7 +375,7 @@ def __call__( >>> image = Image.open(BytesIO(response.content)).convert("RGB") >>> pipe = VersatileDiffusionImageVariationPipeline.from_pretrained( - ... "diffusers/vd-official-test", torch_dtype=torch.float16 + ... "shi-labs/versatile-diffusion", torch_dtype=torch.float16 ... ) >>> pipe = pipe.to("cuda") diff --git a/src/diffusers/pipelines/versatile_diffusion/pipeline_versatile_diffusion_text_to_image.py b/src/diffusers/pipelines/versatile_diffusion/pipeline_versatile_diffusion_text_to_image.py index ad9d4ce83dca..d07d734a642f 100644 --- a/src/diffusers/pipelines/versatile_diffusion/pipeline_versatile_diffusion_text_to_image.py +++ b/src/diffusers/pipelines/versatile_diffusion/pipeline_versatile_diffusion_text_to_image.py @@ -426,7 +426,7 @@ def __call__( >>> import torch >>> pipe = VersatileDiffusionTextToImagePipeline.from_pretrained( - ... "diffusers/vd-official-test", torch_dtype=torch.float16 + ... "shi-labs/versatile-diffusion", torch_dtype=torch.float16 ... 
) >>> pipe.remove_unused_weights() >>> pipe = pipe.to("cuda") diff --git a/tests/pipelines/versatile_diffusion/test_versatile_diffusion_dual_guided.py b/tests/pipelines/versatile_diffusion/test_versatile_diffusion_dual_guided.py index 0e3ab432a43f..27541ce54c5e 100644 --- a/tests/pipelines/versatile_diffusion/test_versatile_diffusion_dual_guided.py +++ b/tests/pipelines/versatile_diffusion/test_versatile_diffusion_dual_guided.py @@ -43,7 +43,7 @@ def tearDown(self): torch.cuda.empty_cache() def test_remove_unused_weights_save_load(self): - pipe = VersatileDiffusionDualGuidedPipeline.from_pretrained("diffusers/vd-official-test") + pipe = VersatileDiffusionDualGuidedPipeline.from_pretrained("shi-labs/versatile-diffusion") # remove text_unet pipe.remove_unused_weights() pipe.to(torch_device) @@ -85,7 +85,7 @@ def test_remove_unused_weights_save_load(self): assert np.abs(image - new_image).sum() < 1e-5, "Models don't have the same forward pass" def test_inference_dual_guided(self): - pipe = VersatileDiffusionDualGuidedPipeline.from_pretrained("diffusers/vd-official-test") + pipe = VersatileDiffusionDualGuidedPipeline.from_pretrained("shi-labs/versatile-diffusion") pipe.remove_unused_weights() pipe.to(torch_device) pipe.set_progress_bar_config(disable=None) diff --git a/tests/pipelines/versatile_diffusion/test_versatile_diffusion_image_variation.py b/tests/pipelines/versatile_diffusion/test_versatile_diffusion_image_variation.py index 364e6c3b8404..4eddc271db52 100644 --- a/tests/pipelines/versatile_diffusion/test_versatile_diffusion_image_variation.py +++ b/tests/pipelines/versatile_diffusion/test_versatile_diffusion_image_variation.py @@ -35,7 +35,7 @@ class VersatileDiffusionImageVariationPipelineFastTests(PipelineTesterMixin, uni @require_torch_gpu class VersatileDiffusionImageVariationPipelineIntegrationTests(unittest.TestCase): def test_inference_image_variations(self): - pipe = VersatileDiffusionImageVariationPipeline.from_pretrained("diffusers/vd-official-test") + pipe = VersatileDiffusionImageVariationPipeline.from_pretrained("shi-labs/versatile-diffusion") pipe.to(torch_device) pipe.set_progress_bar_config(disable=None) diff --git a/tests/pipelines/versatile_diffusion/test_versatile_diffusion_mega.py b/tests/pipelines/versatile_diffusion/test_versatile_diffusion_mega.py index da578d2b9c18..2245b04ca7f2 100644 --- a/tests/pipelines/versatile_diffusion/test_versatile_diffusion_mega.py +++ b/tests/pipelines/versatile_diffusion/test_versatile_diffusion_mega.py @@ -43,7 +43,7 @@ def tearDown(self): torch.cuda.empty_cache() def test_from_pretrained_save_pretrained(self): - pipe = VersatileDiffusionPipeline.from_pretrained("diffusers/vd-official-test") + pipe = VersatileDiffusionPipeline.from_pretrained("shi-labs/versatile-diffusion", torch_dtype=torch.float16) pipe.to(torch_device) pipe.set_progress_bar_config(disable=None) @@ -64,7 +64,7 @@ def test_from_pretrained_save_pretrained(self): with tempfile.TemporaryDirectory() as tmpdirname: pipe.save_pretrained(tmpdirname) - pipe = VersatileDiffusionPipeline.from_pretrained(tmpdirname) + pipe = VersatileDiffusionPipeline.from_pretrained(tmpdirname, torch_dtype=torch.float16) pipe.to(torch_device) pipe.set_progress_bar_config(disable=None) @@ -82,18 +82,18 @@ def test_from_pretrained_save_pretrained(self): assert np.abs(image - new_image).sum() < 1e-5, "Models don't have the same forward pass" def test_inference_dual_guided_then_text_to_image(self): - pipe = VersatileDiffusionPipeline.from_pretrained("diffusers/vd-official-test") + pipe = 
VersatileDiffusionPipeline.from_pretrained("shi-labs/versatile-diffusion", torch_dtype=torch.float16) pipe.to(torch_device) pipe.set_progress_bar_config(disable=None) prompt = "cyberpunk 2077" - image = load_image( + init_image = load_image( "https://raw.githubusercontent.com/SHI-Labs/Versatile-Diffusion/master/assets/benz.jpg" ) generator = torch.Generator(device=torch_device).manual_seed(0) image = pipe.dual_guided( - text=prompt, - image=image, + prompt=prompt, + image=init_image, text_to_image_strength=0.75, generator=generator, guidance_scale=7.5, @@ -118,3 +118,13 @@ def test_inference_dual_guided_then_text_to_image(self): assert image.shape == (1, 512, 512, 3) expected_slice = np.array([0.0657, 0.0529, 0.0455, 0.0802, 0.0570, 0.0179, 0.0267, 0.0483, 0.0769]) assert np.abs(image_slice.flatten() - expected_slice).max() < 1e-2 + + pipe = VersatileDiffusionPipeline.from_pretrained("shi-labs/versatile-diffusion", torch_dtype=torch.float16) + image = pipe.image_variation(init_image, generator=generator, output_type="numpy").images[0] + + image_slice = image[0, 253:256, 253:256, -1] + + assert image.shape == (1, 512, 512, 3) + expected_slice = np.array([0.0657, 0.0529, 0.0455, 0.0802, 0.0570, 0.0179, 0.0267, 0.0483, 0.0769]) + assert np.abs(image_slice.flatten() - expected_slice).max() < 1e-2 + diff --git a/tests/pipelines/versatile_diffusion/test_versatile_diffusion_text_to_image.py b/tests/pipelines/versatile_diffusion/test_versatile_diffusion_text_to_image.py index 92f8d8c1262a..027819efee9f 100644 --- a/tests/pipelines/versatile_diffusion/test_versatile_diffusion_text_to_image.py +++ b/tests/pipelines/versatile_diffusion/test_versatile_diffusion_text_to_image.py @@ -43,7 +43,7 @@ def tearDown(self): torch.cuda.empty_cache() def test_remove_unused_weights_save_load(self): - pipe = VersatileDiffusionTextToImagePipeline.from_pretrained("diffusers/vd-official-test") + pipe = VersatileDiffusionTextToImagePipeline.from_pretrained("shi-labs/versatile-diffusion") # remove text_unet pipe.remove_unused_weights() pipe.to(torch_device) @@ -69,7 +69,7 @@ def test_remove_unused_weights_save_load(self): assert np.abs(image - new_image).sum() < 1e-5, "Models don't have the same forward pass" def test_inference_text2img(self): - pipe = VersatileDiffusionTextToImagePipeline.from_pretrained("diffusers/vd-official-test") + pipe = VersatileDiffusionTextToImagePipeline.from_pretrained("shi-labs/versatile-diffusion") pipe.to(torch_device) pipe.set_progress_bar_config(disable=None) From 2e5128d8ac25f9fa4b27bd004cddb50ac3662792 Mon Sep 17 00:00:00 2001 From: Patrick von Platen Date: Wed, 23 Nov 2022 18:46:16 +0100 Subject: [PATCH 45/49] up --- docs/source/api/pipelines/overview.mdx | 3 + .../api/pipelines/versatile_diffusion.mdx | 66 +++++++------------ docs/source/index.mdx | 3 + 3 files changed, 30 insertions(+), 42 deletions(-) diff --git a/docs/source/api/pipelines/overview.mdx b/docs/source/api/pipelines/overview.mdx index ff83bad05525..c43f09d66dde 100644 --- a/docs/source/api/pipelines/overview.mdx +++ b/docs/source/api/pipelines/overview.mdx @@ -60,6 +60,9 @@ available a colab notebook to directly try them out. 
| [stable_diffusion](./api/pipelines/stable_diffusion) | [**Stable Diffusion**](https://stability.ai/blog/stable-diffusion-public-release) | Text-Guided Image Inpainting | [![Open In Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/huggingface/notebooks/blob/main/diffusers/in_painting_with_stable_diffusion_using_diffusers.ipynb) | [stable_diffusion_safe](./api/pipelines/stable_diffusion_safe) | [**Safe Stable Diffusion**](https://arxiv.org/abs/2211.05105) | Text-Guided Generation | [![Open In Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/ml-research/safe-latent-diffusion/blob/main/examples/Safe%20Latent%20Diffusion.ipynb) | [stochastic_karras_ve](./api/pipelines/stochastic_karras_ve) | [**Elucidating the Design Space of Diffusion-Based Generative Models**](https://arxiv.org/abs/2206.00364) | Unconditional Image Generation | +| [versatile_diffusion](./api/pipelines/versatile_diffusion) | [Versatile Diffusion: Text, Images and Variations All in One Diffusion Model](https://arxiv.org/abs/2211.08332) | Text-to-Image Generation | +| [versatile_diffusion](./api/pipelines/versatile_diffusion) | [Versatile Diffusion: Text, Images and Variations All in One Diffusion Model](https://arxiv.org/abs/2211.08332) | Image Variations Generation | +| [versatile_diffusion](./api/pipelines/versatile_diffusion) | [Versatile Diffusion: Text, Images and Variations All in One Diffusion Model](https://arxiv.org/abs/2211.08332) | Dual Image and Text Guided Generation | | [vq_diffusion](./api/pipelines/vq_diffusion) | [Vector Quantized Diffusion Model for Text-to-Image Synthesis](https://arxiv.org/abs/2111.14822) | Text-to-Image Generation | diff --git a/docs/source/api/pipelines/versatile_diffusion.mdx b/docs/source/api/pipelines/versatile_diffusion.mdx index e589d9e1aed0..d9b513329c60 100644 --- a/docs/source/api/pipelines/versatile_diffusion.mdx +++ b/docs/source/api/pipelines/versatile_diffusion.mdx @@ -18,65 +18,47 @@ The abstract of the paper is the following: *The recent advances in diffusion models have set an impressive milestone in many generation tasks. Trending works such as DALL-E2, Imagen, and Stable Diffusion have attracted great interest in academia and industry. Despite the rapid landscape changes, recent new approaches focus on extensions and performance rather than capacity, thus requiring separate models for separate tasks. In this work, we expand the existing single-flow diffusion pipeline into a multi-flow network, dubbed Versatile Diffusion (VD), that handles text-to-image, image-to-text, image-variation, and text-variation in one unified model. Moreover, we generalize VD to a unified multi-flow multimodal diffusion framework with grouped layers, swappable streams, and other propositions that can process modalities beyond images and text. 
Through our experiments, we demonstrate that VD and its underlying framework have the following merits: a) VD handles all subtasks with competitive quality; b) VD initiates novel extensions and applications such as disentanglement of style and semantic, image-text dual-guided generation, etc.; c) Through these experiments and applications, VD provides more semantic insights of the generated outputs.* -*Overview*: - -| Pipeline | Tasks | Colab | Demo -|---|---|:---:|:---:| -| [pipeline_alt_diffusion.py](https://github.com/huggingface/diffusers/blob/main/src/diffusers/pipelines/alt_diffusion/pipeline_alt_diffusion.py) | *Text-to-Image Generation* | - | - -| [pipeline_alt_diffusion_img2img.py](https://github.com/huggingface/diffusers/blob/main/src/diffusers/pipelines/alt_diffusion/pipeline_alt_diffusion_img2img.py) | *Image-to-Image Text-Guided Generation* | - |- - ## Tips - VersatileDiffusion is conceptually very similar to [Stable Diffusion](./api/pipelines/stable_diffusion), but instead of providing just an image data stream conditioned on text, VersatileDiffusion provides both an image and a text data stream and can be conditioned on both text and image. -- *Run VersatileDiffusion* +### *Run VersatileDiffusion* + +You can either load the memory-intensive "all-in-one" [`VersatileDiffusionPipeline`] that runs all tasks +from the same class, as shown in [`VersatileDiffusionPipeline.text_to_image`], [`VersatileDiffusionPipeline.image_variation`], and [`VersatileDiffusionPipeline.dual_guided`], + +**or** -All task VersatileDiffusion can be tested very easily with the [`VersatileDiffusionPipeline`], [`VersatileDiffusionImg2ImgPipeline`] and the `"BAAI/VersatileDiffusion-m9"` checkpoint exactly in the same way it is shown in the [Conditional Image Generation Guide](./using-diffusers/conditional_image_generation) and the [Image-to-Image Generation Guide](./using-diffusers/img2img). +you can run the individual pipelines, which are much more memory efficient: -- *How to load and use different schedulers.* +- *Text-to-Image*: [`VersatileDiffusionTextToImagePipeline.__call__`] +- *Image Variation*: [`VersatileDiffusionImageVariationPipeline.__call__`] +- *Dual Text and Image Guided Generation*: [`VersatileDiffusionDualGuidedPipeline.__call__`] -The alt diffusion pipeline uses [`DDIMScheduler`] scheduler by default. But `diffusers` provides many other schedulers that can be used with the alt diffusion pipeline such as [`PNDMScheduler`], [`LMSDiscreteScheduler`], [`EulerDiscreteScheduler`], [`EulerAncestralDiscreteScheduler`] etc. +### *How to load and use different schedulers* + +The Versatile Diffusion pipelines use the [`DDIMScheduler`] by default, but `diffusers` provides many other schedulers that can be used with them, such as [`PNDMScheduler`], [`LMSDiscreteScheduler`], [`EulerDiscreteScheduler`], [`EulerAncestralDiscreteScheduler`], etc. To use a different scheduler, you can either change it via the [`ConfigMixin.from_config`] method or pass the `scheduler` argument to the `from_pretrained` method of the pipeline.
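As a concrete illustration of the dual-guided flow listed above, here is a minimal sketch assembled from the docstring and test examples elsewhere in this patch series; the checkpoint name, `remove_unused_weights()`, and the `prompt`/`image`/`text_to_image_strength` arguments are taken from those examples rather than from this file, so treat it as a sketch, not the canonical documentation snippet:

```python
>>> import torch
>>> import requests
>>> from io import BytesIO
>>> from PIL import Image

>>> from diffusers import VersatileDiffusionDualGuidedPipeline

>>> # reference image that guides generation together with the text prompt
>>> url = "https://raw.githubusercontent.com/SHI-Labs/Versatile-Diffusion/master/assets/benz.jpg"
>>> image = Image.open(BytesIO(requests.get(url).content)).convert("RGB")

>>> pipe = VersatileDiffusionDualGuidedPipeline.from_pretrained(
...     "shi-labs/versatile-diffusion", torch_dtype=torch.float16
... )
>>> pipe.remove_unused_weights()  # drop weights that dual-guided generation does not need, as in the tests above
>>> pipe = pipe.to("cuda")

>>> generator = torch.Generator(device="cuda").manual_seed(0)
>>> image = pipe(
...     prompt="a painting, mosaic style",
...     image=image,
...     text_to_image_strength=0.75,  # relative weight of the text prompt versus the reference image
...     generator=generator,
... ).images[0]
>>> image.save("./car_variation.png")
```

Scheduler selection, shown next, works the same way for any of these pipelines.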
For example, to use the [`EulerDiscreteScheduler`], you can do the following: ```python >>> from diffusers import VersatileDiffusionPipeline, EulerDiscreteScheduler ->>> pipeline = VersatileDiffusionPipeline.from_pretrained("BAAI/VersatileDiffusion-m9") +>>> pipeline = VersatileDiffusionPipeline.from_pretrained("shi-labs/versatile-diffusion") >>> pipeline.scheduler = EulerDiscreteScheduler.from_config(pipeline.scheduler.config) >>> # or ->>> euler_scheduler = EulerDiscreteScheduler.from_pretrained("BAAI/VersatileDiffusion-m9", subfolder="scheduler") ->>> pipeline = VersatileDiffusionPipeline.from_pretrained("BAAI/VersatileDiffusion-m9", scheduler=euler_scheduler) +>>> euler_scheduler = EulerDiscreteScheduler.from_pretrained("shi-labs/versatile-diffusion", subfolder="scheduler") +>>> pipeline = VersatileDiffusionPipeline.from_pretrained("shi-labs/versatile-diffusion", scheduler=euler_scheduler) ``` +## VersatileDiffusionPipeline +[[autodoc]] VersatileDiffusionPipeline -- *How to conver all use cases with multiple or single pipeline* - -If you want to use all possible use cases in a single `DiffusionPipeline` we recommend using the `components` functionality to instantiate all components in the most memory-efficient way: - -```python ->>> from diffusers import ( -... VersatileDiffusionPipeline, -... VersatileDiffusionImg2ImgPipeline, -... ) - ->>> text2img = VersatileDiffusionPipeline.from_pretrained("BAAI/VersatileDiffusion-m9") ->>> img2img = VersatileDiffusionImg2ImgPipeline(**text2img.components) +## VersatileDiffusionTextToImagePipeline +[[autodoc]] VersatileDiffusionTextToImagePipeline ->>> # now you can use text2img(...) and img2img(...) just like the call methods of each respective pipeline -``` +## VersatileDiffusionImageVariationPipeline +[[autodoc]] VersatileDiffusionImageVariationPipeline -## VersatileDiffusionPipelineOutput -[[autodoc]] pipelines.alt_diffusion.VersatileDiffusionPipelineOutput - -## VersatileDiffusionPipeline -[[autodoc]] VersatileDiffusionPipeline - - __call__ - - enable_attention_slicing - - disable_attention_slicing - -## VersatileDiffusionImg2ImgPipeline -[[autodoc]] VersatileDiffusionImg2ImgPipeline - - __call__ - - enable_attention_slicing - - disable_attention_slicing +## VersatileDiffusionDualGuidedPipeline +[[autodoc]] VersatileDiffusionDualGuidedPipeline diff --git a/docs/source/index.mdx b/docs/source/index.mdx index 1c5ecc5fe3cb..09cc59fda99c 100644 --- a/docs/source/index.mdx +++ b/docs/source/index.mdx @@ -50,6 +50,9 @@ available a colab notebook to directly try them out. 
| [stable_diffusion](./api/pipelines/stable_diffusion) | [**Stable Diffusion**](https://stability.ai/blog/stable-diffusion-public-release) | Text-Guided Image Inpainting | [![Open In Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/huggingface/notebooks/blob/main/diffusers/in_painting_with_stable_diffusion_using_diffusers.ipynb) | [stable_diffusion_safe](./api/pipelines/stable_diffusion_safe) | [**Safe Stable Diffusion**](https://arxiv.org/abs/2211.05105) | Text-Guided Generation | [![Open In Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/ml-research/safe-latent-diffusion/blob/main/examples/Safe%20Latent%20Diffusion.ipynb) | [stochastic_karras_ve](./api/pipelines/stochastic_karras_ve) | [**Elucidating the Design Space of Diffusion-Based Generative Models**](https://arxiv.org/abs/2206.00364) | Unconditional Image Generation | +| [versatile_diffusion](./api/pipelines/versatile_diffusion) | [Versatile Diffusion: Text, Images and Variations All in One Diffusion Model](https://arxiv.org/abs/2211.08332) | Text-to-Image Generation | +| [versatile_diffusion](./api/pipelines/versatile_diffusion) | [Versatile Diffusion: Text, Images and Variations All in One Diffusion Model](https://arxiv.org/abs/2211.08332) | Image Variations Generation | +| [versatile_diffusion](./api/pipelines/versatile_diffusion) | [Versatile Diffusion: Text, Images and Variations All in One Diffusion Model](https://arxiv.org/abs/2211.08332) | Dual Image and Text Guided Generation | | [vq_diffusion](./api/pipelines/vq_diffusion) | [Vector Quantized Diffusion Model for Text-to-Image Synthesis](https://arxiv.org/abs/2111.14822) | Text-to-Image Generation | **Note**: Pipelines are simple examples of how to play around with the diffusion systems as described in the corresponding papers. From 9f31d8af602c057ccda236a7a9dcf8f37e455b55 Mon Sep 17 00:00:00 2001 From: Patrick von Platen Date: Wed, 23 Nov 2022 18:47:45 +0100 Subject: [PATCH 46/49] fix --- .../versatile_diffusion/pipeline_versatile_diffusion.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/src/diffusers/pipelines/versatile_diffusion/pipeline_versatile_diffusion.py b/src/diffusers/pipelines/versatile_diffusion/pipeline_versatile_diffusion.py index bdf556006125..1280419c343d 100644 --- a/src/diffusers/pipelines/versatile_diffusion/pipeline_versatile_diffusion.py +++ b/src/diffusers/pipelines/versatile_diffusion/pipeline_versatile_diffusion.py @@ -297,7 +297,6 @@ def text_to_image( >>> pipe = VersatileDiffusionPipeline.from_pretrained( ... "shi-labs/versatile-diffusion", torch_dtype=torch.float16 ... ) - >>> pipe.remove_unused_weights() >>> pipe = pipe.to("cuda") >>> generator = torch.Generator(device="cuda").manual_seed(0) @@ -421,7 +420,6 @@ def dual_guided( >>> pipe = VersatileDiffusionPipeline.from_pretrained( ... "shi-labs/versatile-diffusion", torch_dtype=torch.float16 ... 
) - >>> pipe.remove_unused_weights() >>> pipe = pipe.to("cuda") >>> generator = torch.Generator(device="cuda").manual_seed(0) From e742f166ae9059f078f348fe8c6b4b2e6d7ac33c Mon Sep 17 00:00:00 2001 From: Patrick von Platen Date: Wed, 23 Nov 2022 18:48:58 +0100 Subject: [PATCH 47/49] fix --- docs/source/api/pipelines/versatile_diffusion.mdx | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/docs/source/api/pipelines/versatile_diffusion.mdx b/docs/source/api/pipelines/versatile_diffusion.mdx index d9b513329c60..f557c5b0aac8 100644 --- a/docs/source/api/pipelines/versatile_diffusion.mdx +++ b/docs/source/api/pipelines/versatile_diffusion.mdx @@ -56,9 +56,18 @@ To use a different scheduler, you can either change it via the [`ConfigMixin.fro ## VersatileDiffusionTextToImagePipeline [[autodoc]] VersatileDiffusionTextToImagePipeline + - __call__ + - enable_attention_slicing + - disable_attention_slicing ## VersatileDiffusionImageVariationPipeline [[autodoc]] VersatileDiffusionImageVariationPipeline + - __call__ + - enable_attention_slicing + - disable_attention_slicing ## VersatileDiffusionDualGuidedPipeline [[autodoc]] VersatileDiffusionDualGuidedPipeline + - __call__ + - enable_attention_slicing + - disable_attention_slicing From ace71234cfce782cd8f2d21e731a8fdc3e540406 Mon Sep 17 00:00:00 2001 From: Patrick von Platen Date: Wed, 23 Nov 2022 17:56:07 +0000 Subject: [PATCH 48/49] fix --- car_variation.png | Bin 0 -> 494266 bytes .../versatile_diffusion/modeling_text_unet.py | 2 ++ 2 files changed, 2 insertions(+) create mode 100644 car_variation.png diff --git a/car_variation.png b/car_variation.png new file mode 100644 index 0000000000000000000000000000000000000000..6d791e7bf617d8f3d4f26891492723a891dd771c GIT binary patch literal 494266 zcmV)ZK&!urP)G*<~?K~VG4X#|>Y~>jNp^ADd zD?3-Oo)3oSCxiKHT24nv8n;_XyPY2HA1D%QXF7_!1tf^Hij%b6T|qE!zVn0GboTN4 zpZvpr`n#QWKE4<{d-D1I^GEa9Tp3dp^F))a>MYAhYo)cf4pFsRt#+rCCdutr@3+#G z{^kwTdT@Bq>E^9Y-tP4WCxfKBR+Y2)c$RhAX=a?SfAXh))Za|%V&Lm)dVI9Dy}5Pm z)vIs)FzIb0__?2(b7i zEWSec&wupIf8-ngD4s30x$Fqs|XW#kZt(99Qgr{5&m7f)7M@L^CwYzln z?Q0iLFIH~fz&zRe+0*`N`{1k7&FibZxB4&MKdA@h&YkvrR?f!$_A9sk?Z5tfJ)@ml zTkF?SqUb;U(Idv>*|9%7E8l!=<=%C3=dFI{om*ncc@ynkb)ddjP?|*+|=l+hA_0gC6S>Aba z3ZH&C>-XgD{rJ5fT{}A(9Xvc++uS}onmm6#m=}=u)>!1%e{}a>{_DSg@Z~to^R4X` z2Yj?QY~^O9pPn6$*H+RO2jgM_t@RiLuI;SHk$LoRe>iZxmDu@UYfjs(*}PN4wzk%QD2bA1PamzWu1u!$q?Mna&pnHvN;`RPwV$=Rt^UgS-tP6SHIuXj zqyG9zr@#K$mmlB1apmItXlJYU%l9AZ7muHR@HammPK)_@7`}0~-oLW3+W+8h#vG;ouyJLLopY5}MhjQs?eyXTYEHF@JN-IR+MqEC#dO;}Th9So`-ca{WHFhz*Agd( zFOIJ6?DYF|^Wx!P5Dvlu3Y*sm0r{3C>bh96BQ?lg9MM(3FNfC(mS(K7j{17G?G|HNgUfK7_GSW&T|ySkW{y zOg_}s<=r+2uniKNSRYs-Wwy3fZ5^t)D{biZsMlSc56f6beHk(pG^Dx@!9hiJe|&mXQxl#bZ@9Nh3B6=OjdI6GQW7bp6Bg6y>@eZ z_vuA3o6RN@mFiZj7e$da(p~MI9v>d=74>>gQ5=}_td(S~>1gaEl+`?rBT;Fh+IN!F zy1+gN29xTIR$RHsryu^u!11@f`(A{6F*xQ>9*wH`e4a!}n&q{tgR?@|S;Gsx8UpHW`-+r~b(*EM( zpUr0Dyp`sycAlBh-Y3VOe-NVV&0qU3Rn)5{L+^u8Sj`IWYHQDTUi(846hH)CgsrAJ z{zf6Zh>(j9;W8R8s`JZI{bd-%CYUe7=5j+#-jHPuc^TSq5m=X*V_6%bAON!nHW5Mq zfSXoe5m}e-68=W?Z-RbPk}tL?a2d~=f|0o?#b54|xhXZjnQoSC$>L<1h`%^Iz{@kb z%sQ8Q`_IaMSzeVUZz2l|HrEqnk^h7c7%#(72u(y3=Kt&-0EjRF2muHSb5rCZDk2~d z4uO!BiION&sDelc-j^6*mDgG;Jpu(MUp6vSipMEUR({fsnU>5*MN8m~?0?v6A$G)!0(TrCu0g+Jvh(P!<6^b-njtDk) zg&6=zfSYfHnFRolkcgUxfq@|~5CX8UNZ=-Ygs>FkiM+fQW)h zkpOVuripAS9~2f5LSSLIP%e>RC)YV&qK?ezKR{KmWM z&%c_oirU@Y;BajEX?Dfffa8g%R&umgT)!T@^(!|SSfaL<$y-0TvVE<7b{sx=JRgka zt9O!>>pL5-toCm9$Io|HcRJ&fa=160lD_pyckM=hd@+k7h@*6JI2#_8{cBwbYGq?( z@97v*_2&1tk6&CIUyQfYDAM_vpUuQoq46($&y()?RGmt;pvMPM(cLF)or&#xSi#twN+UJCq-39 
[~494,266 bytes of base85-encoded binary patch data for car_variation.png omitted]
z{-4KxJlyK-?oU7a<^JUtH%j*NbKZS&vaCZ!@bZi6to-ib{7dg#-P+WjeST6b>!ys0 zCPWSl-0Ngj6&T9GnXi5IYg?B`fBI*Cu_$XFoDgZ%|G|%bQP=U})y;>uACfVHL2pyi z2;>D%&!^j4m#3^7fUx~%KEQVP3yCx)PP(v&3#M35|1zC=VKg)u_#KA=xc+C*2?jZ%nS zNJwehxPV#_A}}RMrc?roDp$?sGes)V2!xrn#*&0MpU*_3o97@HBS+>e)kN6%&=j>c znvm+MF-9fUs&0(h?HhY^wA`GIh|!`s;x?la?KJLhQk3!SS$-r zY;BSxCeKY%hZvop^!kJPymHQ!%LSq!1wzu=RLezo(60~18gabfb zHjFVuuRsUjEYG|Tb>+&+nM7Ly^I4U3Gr;)bE7!_d+3j{1)Y;+QXmi_|)Rm1k8bxA^ z!Vv%zf}*zEO6?WDrRz&eL1>S2fWVA^?U5g;4cg1|v?w7l0ZO}CkhW=t?aZcqoQMDl z3deR_3=%;m>No%7*JYrKDIf%9jYI^`tu_D*jHrxhxpd&8YnJ8WJTd4SPl)~D8VeWm^Zlp0 zyN@1BC)1{?nz}JIsh8DkQIvIf@ZjXm-N*OtKmGbwzjpK0*So!*C_zxDL_}p+cLas1-MLh_T!0)kSsp_NNEW_KmWLsw^v$rgf0IY|7fFNk^gP=p&;| zj82ol9+W~L1|g#X(6MOL##ra$<0pILvy;tBmt*5&tTmDCcKoD7o2on)7n2SwW(xup z10!M#y!sR1=N~^20aH8XCypUX8wy%EmpO_+%l~1DjKYFQt!=c;>$ai>4ndfc)M~HB z=O^G3QX_FsPM;*&?k&p$bL9zT09{@Pcs z{`NP&G1}7d{rgWIFJ5}h6eqKJ17WAyes%ELyH}R;`RV?dHoB~qM~{!6J*l^@?EL7x z)1p%5-lMPo?(0cfEoc63fAFwl^f$iqW|GG&vkJtZ>TUD~YlEGuJ0HGx|Hh4-VtV$S zfAqD(XOj(yWU;x6h;JMhAP_`m$c+388+ntqb~ z%YXU5d*_W`5tSVNS|fmh0;_u4mcW{unWs_X%oAgFSJ%v1n<4) z7(02|>Ew-bm2**;5Z6cjB+C}_Idfbt7fs^`G0T(0BvsjXADK~UjY4G^L8z+hpukw$ z?)w^)ey(JscHcPobt@3 z78sN=9DSB|n!2G}`=$m4(X2^(=gMUjVlb)hV zsh*7??^v4|Lz+}{-aBqjb1m>0o*UH#1XxgGOVw;^QUEO&v)%8lIQb&9ig>PqjV#hq zfw09!BGKQS=ed1zFrxsWVgP{%KuQ53aLe&;my3iFBIAnv*|wS6RRj@oly+Xku}z4g zLPCXzU`PQ#2;Mg?I7gms*1^yyoshCfDzV9`04yAWCsY6mVxw&;0>&6rw*A3^TQ=o9 z(G)`CLL81Z0p;xM^x>V)o;HR^>^-j0+rj-i- zF$7NtQ6jN~7-IBgy+qg<-FV&Ou@9VOiH-^aBO!RU-7L?O>62wWkB3hu*RKu_j_QMH z^W;#jUp~9^TDF{rw|;fKn0eoX@qz#Ce{pMh7A)Chn|v8}E^U4A$?lKe`~0_l_szl` zB1+crpWi*r6i&;JZe8n7pDoT$>)_&KA=kHdrYA=yPsdr;Tzz@RmDRJyd)?I9L4Rkv zI@sNv9+k(>=HL4lzjF1)=*8D&I?DRSVry&g@{3ph9Yfqm&`@*f4zWtrApP$#apWZ$hpJX-xfPS|l+)SiN39r9;>*t?-a`*P5t*!Mu??vacVRpKx2+e#v zJ=lGmq{(D514iG(L6*P#?mMSP=ib#-g|RAq^l4%#QK>S=XHx=5I%&<7w2lo4!m*!q z0)a-Abh;8_WX!wWc~K1d10AC%jV{C(2?T?L>0+jhL4i)*4Iz+gymNrkR1QEw^fBs! 
zIZ13_&bv7Pq*=nk&O0lZ*#xy}s@gM?#3p)eNRb*22hMv^DsWVYwf6~H=Yo%6KA9zX z0tDVyA)o@7oSz9{sGKsy46gB6CkKSqtX-9jLP!#WB1zUswQXE*uF*CL-Z4Vilp>x` z7n6pRVUFH8t8`*b)6{|4RZG`kmKlx8m0l}NqKwr(I9Jt))lJi6X;x_>gy?-)G=qM> z*Y6n`df(JlG3a%H`26g=scMs?2oaTXO>Hc(k5Pgqm1m|BDT;yxJ9!da9Io{RxU8DR zy!PyGy?nK7!u)J9J)08h)5D|D`ud>Pud61@EfR5?sRUM9AtDI2%er=iv%+|;aB!_Z zrA=BsC&xXX@!(3xj?eM@tBF6f72P%`v=RlPAh#k}B5bz<^1Rdw*sc~^CLIz`lz`7I zQy^{ctSy;EVL?;~afK8`Mn%!p9H5P%Dy0ZfYsJ33PXGYN;El3~LP`Mx5sE-tYz07O zApv8pkcuKoVRX*bOOVJSp^p90CTTi5Jh*%N_U@xc^|B zlf*|DpUwX2uYO3{zWpoTO0yI-Xq8I{TALtHESGMvG+8Qwq%>;Nnw*m~YXTo09xTRl zg~n(*olXE$j9gZYAlXzKrR&DE86BM@DD4YesO2QJ`mWY*3^C7A=c}{RGv8EcuaAtV zhyapY15pfsWDrC|D`Q+;Rm(yT4YlNLB95&Rk_ZWe2?Qbo3PctnlGQh)wVrW|s8p-4 z6qZ%ZgGhu4Fh(DP(>4*rWlD|NfbXK0us+sEy`&oa` z&j(MRl)HQ7D_`h!`^Dm{di|XXgHdOCc6RyJ<@x#K;U{NxfX0yv^6c~DYQF4r@~Y8` zs(AP7FV#NVx^m@5Km1H*_+R|jukPJBxc6{&{kyj^OaI6J%Rl*7|JVPR_w!j@Ke@B_ z!GAt{>ucHmy@R@_yS;377K+D{>69Knetb4=@_zF3AAh0|j!(|AZtr+p>>e%_PQLY> zFaGF+ftSvQuzJ0A7&MKwP@ zK1=hAP$rWZlA2GKf+|bxd{H1{qV1qJ5Q&q?m_qHm%d-@sxTZ)G>&hA;1JcH@&nQJz zYlPI=s~0aUL!3+(o`W?i2CtFoVxik}Td&_wlp(~k^D!Y>WBO??>t+xox)6N;gv_Mc zsLIs<Y;CN0Ymf=$xegEcD2vbw6u zWwCHYub&UP-Po{$CPXnvX`V*sXS0Q|TD*8@VhC&fj9JE%5(b0J8*u z*sd2>@qR?%$T6VOC=3uecm!g0F-BA*t$dIL0E{t&g2?P*2uxa~X`<3DAZl%eS7UvS zPMDmJtJ#$@%2*&mr3JMM(I%;|YyDgVQu~ng2gcg7dLzykSJnQA9uL+#*WSD_I~+$BioI&M zxjqaTUTNGVsG!+ zS+8fSrmlUw@xrC-h(7=5;iTdJ?!SEFaJJ~LXOr`CHYqmNlKF!6cBiN?&+S6ukAC#H zC>v&&F6)2uKYYI_>t%z7AD<@xCWV9J;%xTd$?oj>6kfAg;&?j4*yJG**g6Po%fUw`%Vr0k6@EO!sAHRor?$7g3% z>7ox0$A|OzVz@D?Y69jjzy0?1#@3>VjdOM3yMy&>moE4FYm50JP1D)w8Fv!KMgT}F zt+h6Kxh%6z{^`db-MH~Gd(RM~#Kf4*jZvOvRpAXe*PhY5KRZ9qyVfzA= z2{<3u*GA`OXG)>fieuDimKbHTZl37me0+we5^FnIZnb5W#?|x1($$U8x|^qq`O>B- z2x0`ArCLX$jj<-iC=6+mXpE^zy$_|<+FAhW54(WUI8QMst&+qjO?j48rQ;Yw@NMJ2 zE*e0)v#}$HuB?qVojfN+F-8=X*d#heg+{47%~&%k0wuBWD8xWYL7JFU8I@sO6j_pS zkmHkctBf&Lq9~;^o9ZN=&6lwjW3+c(X^a6vYy!yNIbOj-aDI-9&8^XFw!px3s6~u% zdbU_9lr-<8Hc@1>GR9Vl6bNQ{zrk5ql#e4x#OqSa$aaFB|+MOYA zYqJG{)#x2opv_gF2Up?F=g)c;!S)`|YP;n5MS-PFjJ7`ZcATS?Ffd2v0Buc_h&c#E zBxs4y0!-~IAv8=N%m~OFguTED9w!`yR|j?kQVO-T*!BhpKqAM00y;5CZjg*96+s~p z3u~JYVu(T-QGp^wL`tUsx@CL@77=gLTsQ{jTvNAN#Kd&0&1aMGgFE*gKX~L_1u?qd z71E+;>N++o>>*Dq0f8`~ip*MJY7~2^mhQdxezbGx%I4)OZJr6lC~a(-_4-i&kqqiI z2$p4;*hD0r96lSLjoWEk*;HN_LCV@mj7pg{4jULnpqr(D0300C zfP_es>9i_}5dx!vmCz9pB*qrqueCPX+BVG_g4eoTgrHIgsGhq-Q2@0z+8~H9fb%Ww z1DFAYL?S%j#v>xPJP#m_j4Sl23c&-A0+r?c$GhjfVc*SWol)+`p&2*Y4pq9dHd-4$ zy|ee^@azySzcRe_rAudL)6YLFG;!_c`}dw)d3j_;Su>p(0}&yp^8ArV9k(tzOnbd` zb9Q!gaD4L1&n8!I?%ck6e|yt@`D?Fz@cxIFchZ|TuDEjf*+)y$4<^&_JOA|?7hb$LJ{)gdzIOO@d_G>z zCv}>eyqkac(P3T7)`fKE(%@w8d^vB18=VW+^0h9#`o$OizyI_5(?t|S6zKP|Af}gQ zPxc+ zQdEu!Qk%G@8K0jSn*s|!K*nx2ySg^mKbs+#K`(<`*RCcqW!Y$em#<${!Tja>zo?eA z0@&JF_f44?>gU;Pv9wz27{nyj7_!EoqRq|q#3seEaM4TT;MrG!S?0^4shb$1Ha1B! zL#7EKu^E!Y7?sf+Ls6C-Ss9aLc0_{^gHbxqhV%KnDr+K>rfHHS0uco|XBdC2qy_4iBhDD4jeS;_4Rd*JRMJ6?Ys|}HCiY)xoF)qZa$s0#@U9Q61lLm zArhw3B|8pHC})MyN_DdF`RU~g7jIsR9N(i1~>AET8QkHr5YEy{7cT!G$Z=HiGx{qGAS8x~>}3gfXJh*hVm0 z>^}k^;p#o`yyJy!Eo)U9#+3`4pL2j2S7VlTDS*^&1vw&#CiUFr0&QUkK@eM(KvV?6 zQII8i5kX-E!s4O}9J~MuOYmNVi4aK;+mk2&Xr+u&y1k>I0+tYhLqKeo0U%7Ej0Mpv z)qz3*L_rotAtJ3*qKrjlW2DFkq)Azph!~@**f;IABkAO|s~_LLbN}|;f@ zX%drEv)TUc!D2Q|jA}f)=pzxbxZnXmAwpzO5~Em?*u;=hQN$V$1citOT1FlSFsk+! 
z&dd4vWU*KX#8%I_+S&sMFewCP(h5;x^xk_UBCSJ+F|g9QJ-DL~5Gpk7&9I%#X{|&6 zWn~l+77i=~M62JLNFe~A;#La~St&y#ovgFHab;OF$EVGOQ8wCsp_-k1_Q^-1tx z{Nk5ibIrZMR`1nsTp?>ro@QHV@be_Asxw~SPR@1@N>_Kg_LX-pU7Qy4Y4z&6S08=w zZ12u)HXNRx;0NzN*}1$uFOQ!+IT)_Pum9#No15Ls+izca_0`kIyJAv>_|N{^*Z%U) zo*eH_#Y>3I`u5=NFQ0z-TQ^*+6AdPlzx@8^m#%RZD_tw_!@BZ^|{`vp%!%siH zgDCX^dK+e5Hl0qoe>&OdcO%R0v%3%P?Ak<^jn}A*fs5Pyz5PiHtgIP!Q;BtIu%G6y zzWPe(l44OINQ`c}ENbtzwlKdGBxDTP&B}%lUYbcYD3w+O(RVo*s1v zJq~5wv5LUeO|;;gFYBV)Pb3ECYh#o94{k>ZS&|?}2Kn&*BW6f!Ht2PGsaY=17gbO( zeBtF6zy8K6|L%YNvq9e!%cc%Z$DnueTi^cTtSFxx96q>xq8sl;D1yvFDa=AKO$}h@ z{A|3jxz3J}RozrdnW`=kA`?-bQCc)vB=IFqyK*#ugwBgDEWpY1>E_xk<3@OJ~wV8xx`|mrcj1YuBz5g=f#6G#&tE_uI3mcqC#S65Amy+d zAN@paHyD{iY?}fMx}^k5KoLY1j>1ubkI{2mqGgf5BFr%;(u4|?l6JhK6|_W)7WqIb zaujAD!XP53ltx8_N#Vc<=svHZ@7IT+C6M=)ul5^t zcK&p+ee0cG9}hqJWq(8G+run&sB2y2_CMae_}bMpGrd8+er0fa2j2h7qg!v}>%&gv zK}Dm{Zf;zAAmei$@BO!L{qiRdZ@qDII5LqJlySpn`;Q-=?fUa) z%V&Gzx4v}Y-`|HCg{=@(E%+m$=J4RoiPlOfMN!y?powbdW{agZMp=`l zX)R5E)U~xaJvr_5^5wG7S~X1*Lgc_I)q$J3s0F35WR$Y0(u|GsBA92nLPC_hn*pLy zhyqm;op%AyNJPX=mM2Ntvg;LN?VGZ!fkZf#vB~n(X6)RJAzo06;td`^qh6 zh@_gjQ3x6_I^YBq@nTtek$HPIdDZH^A=HPu$2Doz8rXUi$g9wY|LIMa1il9WRHEBeF$N?oDoz6)h?__1H zg7?+3+1$ARTF%c;DlYO)HW>DfcAph>;K)QE(Ro+9+8fm-^F(Pa49+{#2D4GM>F*T= zdbQPE;n*0ER#$}9>f0vxK}1+Yfsk4yhk!vKDinzU2nZuYCPGpq-m?gYfFdzA9Q{fq z6D7tV$iPfQN&t;fqy-X*qLg8dqL7G)D8?wj0vv_eb9-GO078vguZ&@U%qV~q7}ZM5 zh)4o$F6nuB3=uS<0wM%O3`&RykiB;Te zT&(7{VczU>3G(LXk zw_oq|Db?Bk;lJO$eDl)FufOoZ^(*iFCg-aW*uIIC+t~dhhA3nKq_3FjTTTgeN3b^f^(c{OH zZl4>0*~z#l3Q=az@ArpWr<21AiCtgcnM}`#CQHj)rI++t zJbJLJvu*}$B0xlhbK4Pws;aEj&O4=v0JAjdbaUsb_02UTHCq&Uo)M|Mn`hAR-Y@1$ z(n%13+KWcbLI^&_s3O-P%M!H$yATqtjn)a(d@=PQ5TMbTg{`*07@Y{nvS<>EZ8gYR zRWFL@UEa+)iCwZsVc`%fRzgWH2O%^ny2gj-Jwj-d(wQ|OGDbEUgCGhwRh134IdD0)^v+SFV(j+25yE6P&-2{& z(pZGF*M-207^=oQhltD~%p4dstzw+*O0bRi2t04(vLLk^-F9Atw0c#jHt-+0wRHlC zaFFP|uLVUTD5J!OPz4E*0oZd4o;e^yrBoXbv>F8zD5bO(AR^u7h&Vv>fdv$4MCD_U z$btxhN@+qQrEtX@CRAElP8qbc2#sVT(<+au+y7_#B10gBND>7ISRy-*gp5o`!W^AP zr8tT*sdw&R@9FO2r@&~_^n5-+W1JIL`49vIt+ATWArZ1R1{FdZ!XXLm6#%)DX6K9Y z$)iVK`s!Ev>+7*`L71bEF@TD+iNY*w(RTfy4I&B=6>2SUcR~M9GX`;*T4GeFaF)nf?9m_yDttuJL@Fs z@YyuR@ZkQT^WlqMeaYtCpZ@I$WYEpv-EUvL{)Mg2e{t9}>MLK~zWUMygYuKVzTfHF zZ-4LQ8@H~!_v4>yODeIbOlHjoAGjub{PnMH_lJ@xx3g_rm@lSJ&rVLJbJfkvr1r~M zh!MW>&ee}TJ?7v?{qEHZg9rEb*L&Ip?k6fSdVJbkyt;uHUVm|?@jPGD-G0B9VLn*@ z{N9r&P?gIkM@O#l!MmO9tq^Qca|jqb)Q#j_6Fk5A!prxbKDc-P&h~iv@|AZltZ)77 zgCAOBs-~$MuEWwtzBJkzlQMeC$_GhPof^q9_0_koeSY^*s&P7Q zzWcQ+Pj^rH9sAC2ym7R5cziq~bGEfU0Kwn;-LL<@|Mg$=dr8NNu4^ZelWuBL>lGNDn>@6=tI)&_&R81IL11J#uc4T zF2K&aP;!zb%%V_h(%uChp{SbReQNFY=DIO@F)fqSR+Vv0;~N3?*3fh^1IBV*%qCN# z6~y2|$WkTV+cdYRYgYvyv?vnpWnI_Q4c8##9TS39#4)(0Vjm(YR4g1_9jr1&!)&^= zS|MWeF}fy%h(yGSofKTP%R2UKjS*q3wFo-z4QXWQBq}S!3=vgOLR!yt62e7in!*k4!tVCTa^yUh(#=d2A3Zg{e7*K+M#0b*PkgJ+0dRG&I zF$!dLVnsv+N^3+iM5gt}qSl&JE0JkONz%S$gklV&lNJfm!k>{$E87J`(h3NLpYNHK zWe_0+QHT^N%dFZBMw;imdL^{E^p+#-0i>=yY6~dZ8XXysgRf)sN?8$~r0HyYzW4a? 
zayADtb>$I6MAc1bJbMo|O|(g*1uFp=W7@Q2#8~}h^hy-wQdVYFxHo9<;MKMR$ z{$)pfZHwZj3nyWDo7*LFEbAs}%K2$&>^A|(L9Ig_TVk{y7;7WN9vF+N8E zVGD^97KC=C5q(hYZA>JJAfPY{>NYnZo=F*nV1Sjev&EdOBAX1>w+2JWhjDLre|h%o zwRdkSN_?QEu8$r)8ElL!1T{2Sm$EJ$KCV!KNg7dj_0_G|ctqdrLa&qm@V(hb9~?b; zdh(m!e&Ln3HlwS4`Tq3nFR9Vh3m^UM-Ah+``yU>%!37)_4Pzw)&UX_9^T z<0ol0h_!nB(bJo^l48auCw_e7d)fR;-?;VU!T6_t^C(^)^+)+DZ(SnRKl|YJ-tH{V zO;tBl?T?;~*L$P4zZ5CKx|Xz(n&GEEdHAJozOr>;;|p(=`%jx_Ro=@!`RsUE#-IJ- zNtR_*RhtwKj$LnMd8e zh1Cj3Ydx8|&9&{fFTJq)>|W=>_BX%v?sQxn9-Ypr^DA4I&W?@(7Y~j<(E!Dw8V%Pw zO?fh1*3NfQGcD(q@KEcJXw~rW)>m85V}b$kI#kS_3PG`$CuXh z&6i&J&aZuYzS#SNfB&bWIehb1ukY+^a5H`7M)%&6@t0n`MBaI6C#@{jn4+qyy3X^I zNOKTltTonoCjv=giIjkl(LOjLU;#p0AB-TvVpg46Y#<1TfP{1n7vu?NB@AZ|^o~5p8SG(vuM6gNHhI0S{#86ii0w6$9 zEXzgFRHekA2wYR?cD^f76Z@+4uFMncrZ!6rurzf53qkb0-Bcw;uA8c?3SdEI0<_k~ z7*k`|1#4|5&tnt>1%#k5>m*3f6t#09G#-HxWj5AFYomT{?F%{Qz`+{b?ssVGS;RYVA3Wr zIyE*^Hq%M2Yzm}BNKgq9C?iP9SZysC1Ed5|rCKG16rxgu#I4K#p;g-|iR^=fAU+@< z5u!q2ZbP@hIc3uzGMSv89v?S#trXRD(=I}cPe?NyyofBVw#ov**R z)-%iTY}mKI_G`B;?DQw6^P+&o!sV2|@Z!y3zq7Wn)gN>RgKmtWSQeA<+?OG^@ZbOQ z@4R?@RF;1C!M)%A-aq*6*ZC3y1A3VHwe`|aD z)~hc!^>TOj_Gp+6d-;X+&GGEKHyT#8i(aJmnWE@o-PA|3C4h>8A;yiZfkgk`{MK&{ zI@vp~UVQCFccTvp!rCzTz3+YTn_pjl>$UaCk?Z#H%BAGl)6YKq;BN+f|K+#WuV3k1 z+R_g{yMOP_cyrCpT&b(lDaG2R!MQ2~K`M%+H6}??tq6odj0otQZ<;ExsSlxXt}N?3 zO}m{QL-ZkPV--h(IPCSyx?ao{y>4$b%!|5qO%r1fAjJ{T0AlTH@v+yI zzCePt(U27N2ffMpm<5C9V1yJK0-ahPTw+w|IR+t3(FGtP!cLN9R)^>za^2Kfl30yG zl=f0ZsDLA)GS<~j69Fhh6d(I(x25SYyXbvtQ0+k;`J;|&dwHIn4_dw-1OF_g^bvu|7~J`1fmE_+uH_MJ!L_dpC4G61%L#| z2N4lrik=y9S$JcN?N~&WbSwz4#MPdeP*{{95s;P&4Tzvv5Zm8KsC5Ms0~2v%BIM9= zi77-@N&|o*3Q-8b8bewm2y@^VgZBua$W+V4@xlIlJ{JJjG$5Xt8Dj7uh$yACR)`RU zNht)?CLKuuAwio|)j+5M2SwD>)#U8Rmqj|<3`GQph%K%TlmeR=CEewCf-Kf*=Y153 z9Bc2ABnvUZb7M`6!px`?u68y+0H{dX&rGBp(6zuG02FHP0a!P6Q#F7>T5%LX5Fk_r z#S0^n2GRl)qX@U@9a%BmVg#0mQCjP#LXE1e(6z)*VPc7ds`X9~5i>K303j$r22?BK zLL00ysTY@;Oni+%3M_j(I-D)1kAM5SZ;DQtmwtL4XN${U_?Fg2cyjvSr=*(byXmOk zzuw>d*<)*Hb_mmxS(-ROT-zAUCiT@@n_0K~-j~1h^v;u?eE;K6SJ$rhe)hw?&C7## ze&bhv@wdBw^~WD)9ed^Sg-<^`+_}-V-yZ%bYB=?V==|soip6nggRk^Xgy`0R3!}VfXZeJX2471Pg9*9Ct zKs#}<{t?Qry^4IRYin@2hWq?;C&iAOEAk;2k&BZBmgKxgb1SBka)X5U|~Sg zKoVkG7X?NQ$^dErK(g&>0JK-u=rofc5tu|IBA_5iE1LSdSy9WPLsad8ml#-Jb**7h zS`(5;AOz9`f`Y&tghhk^HJJq4RcGYjgO33Ol-5eY?n%F0ab=C}1AXbLfi z00^NX_5qPtn6!#4z6mVR7+n;#Ynrh14t(ClfzA$p4UVi&lvG5=N{PykJ)33a}M&vGR(fMqec42e7J1^Y-_8)%wPk;AY zv-6*woh`;kO?Qwcy4RHRawZ@Abbjr6r<->)83wiY^lUPpU%r-i`u0zL@JCwl>3KDs zveHN$WFOXs_6x6GSnH@C{Q13M9tPk3;`hFFvsupH{o2*(?#T-m@=L32I>%aQXUjEbn`=`2?S56TrG*&@`tdqvbA;##V zbAkv}-NX<9pv7@H=TNJn@;u@D2Th^^H3<`BwF=S3fG_+N#fT^`j2nYZZOR!~#TO(tv^n$b_Kk1FA?Yh({vg&@|(V^Z8`Vr0T^x2M8gA zx?Kv>)*umx02_k_)JLuba||4VQe+}6(-K;@ZZsOTO{;*&WGHKV03c4RZ(CGf&gV^4 z<=t*_F{w*OK*6cCsl-=^)jG=#hUGsJKyVq=!)53&>K*{GeIYXPf@-T_JRUuL^7Qu0 zZ+0@05RjFYvN&2|%PFX7Tc=r^?=L|Qpi0Cs#Sy#;2{Q)oH3$-`Dl9ugIf6(5AVC&T zKxAW8KtvfWB&;HaEf9N!Svd{fjb}6G95IPH9~vs`$@!$)Ewaoh<6!sdqkA7^<=Nos z+F*ChJ5Om>WO4DySWA5T#UU8nxxK#i+{$cGpN*!+dyl3U<=SQ^#b#rx^YW|R?VDS> zFRnd!|LMia^qb$=`RLcrwl>ZE$EVk?ZT{-L(QwUw=XY*c=9k~O)8E=S-#teC6qr`LEx9eEZIXlccQV=5vGf^{xWE`s(gq|Jl=bzP2^9?dPA4 z*0%D$_?r(_H#(gTpN|>@^Dq9z&$c$QL8r4igq_XK>e}%8e|7iX-dF<7CShf*0Wqhi zvozyg(HmXN`@Nz+Ok6(y7|qHt?O&s z&+T87|Nh_o)u7YeTFJis%~!|!_rLw!cdqSZ&9q#B$&D=wcH_tI-5d7%og%NQ(eux3 z?H%riM;HH_|NIaB{SW?(&d;;;&f($QbjiEWp277yovxi+yS{Nk{-xK~GprWVvwL5B zIG#h~3ITBfwL{0TQTq@5Ou2K*pH39I7faDFtyV5HEkkMvyTv z5)_oU(<9SiTL2Zv36N1TXe=xd6bPcEA?dcEo_lqQA^J})sROudrUAv+M*s*&k?Rmq z^b#4>42l$j5P~9b^u92O3NbMoVM5FljaOj_j3lw_P(ml~%|;jFi}R+b71W2|B_Mzz 
zD-eQO!yG-5h(hcEgv3CE6rn1KDk(~kXh2mE%ciR8$z}dN`{nEW3ABD!EVY)=~2}m14-TM8*lLrqz z{Z9COSbDx<2MI+ntlM>We(Uv> zo~b5#!H-R^OSAdjuRivaY;N};r^)*#vj#S=lYj%6IRy z(`r5`^FF??J2WKfL*DN^`e<_Ps_kZ`Y3Sg=DJ632g^lsa!JW5m%zyOR?yJxL;%`1~ zn#yJvw5>|tSzVideE#4;sqSp|_STE9zA!nz=nvDX@^@}-ojtj@a&2dKi_67>`}^bG zEFZ4*zwyqUpMSV_ayUAE=2P~&8@(6qgsd>k;p{?pv*!1H_m$9~+LR3b=trNOO)5|R z`t`MWRcq_F*ZWs)?QnInzMlNi-+x=l|LH&Z_x+B!^1@A_CWN!+Z-7tgY;}6oCjar@ ze)W~t-zr--K6>;=|Jir*jQ)TB^uJc|6i(ane6ZEIv73#{rkvLZ?r48+eWkzm;Njo= z&7Z&f*6nitk!8R2a{9G*U-;=S9&)I)UJQrYA2gr;{P<*;t!?*47v;bGS0BCf%3yW4 zRW;%4P*iz7g`&5X_0smR<^(S0vb9z0KEE?sl<85e4h#UgMw|px^z5qIxz-zFk|Yxe zaY$(LlmM%y_8L-?_#~vpvLPhS)67^Kg7mu9W?8eSTkpL0(28$ensJ&Yu60dYGMaf= z8iv$bkzhDUqX!!R7=w6XF58k7h?$v_EYB0$%qNx!5rS6|_Tn)F@7gpqAP_{2Gos)_ zU_xt|$tH&Bl6?ydRaF~nlEie1L?zUXvx#Z^LdlBP#9DFay$gjThB|2;+P0Gw>#M^M z6hzv}8B5lhwsGZbVT^5E3)J`CdmqS!B+rBM64WMERGWEiieh7Ztv?ug=R`t~21%z) ziEZ7<3h{0+DLdWn_zQx z+9y*W8AXFg$_#oLEk#6%K0?5{l&VS$N)!iimU|5Kh|0zSuxK!hh@=2ySwY1s1aWOJmYFH|prWb) z6va!xAfQ2lu~rm%{cgFKBWPl6H(RNv&G!0jZK{)_a`p8c%-5Rw!(`Yuypn9LQ`4M1 zJh=JQ=h&EMpFU|y@D45xM@^`ofAtMiw>UZ9zkA4Oc6{2j&h6Y;>n5ESUV3F~_v-$K z&-Ndj&F0O@dUt&QPacoI`mH;E^t-n{eegv+yI5P(#kAbN_jGN27;!& zt&7Y~PhFa1MLyrSx>oMpTiNRNcCt6$NldZ1GF;Tvq%dJ~v-|50?runZ7A?n_4L`^te=FAu^ZcKrzeZ{yy-|d7y66ue{ptp zwz8V949)J9LE3G$w&blhUg-6^H=ZBX?PzV7-urytB-vyVPA*_;tXbb|Y+bqg@uRkd zrb0kCJRf`Pj_YRjtp3>tM>lqdiwX`7r>i-bG|L#G<`Vp%*ML9CNg#9l9Y|LOniKNxW2JwI5n(> zFikQh41{T7(>%?BHPPHx6hH|vX;5U-%;sn)%ew&BDRR^JnrbB0kP?$LBqmRb#D#V| z8d*xJx*cxzw{~`utb=5{b2iJ9q5zU8AbRIPNf6P{G8c=$hygGJj{q@u#Rh|47JqA; z>k@15@z)5*3<4q|5?rK6JHaIH=EECa5SRfIGz^GftcoZYAcJUBBOx+wH55U^$atAC zi(Ym{PJ*-uEU>g4f+`zU2q18op~YwmjYAMX0>qd=4&njHm_)%`CYc}x^AO2{s#i`e zaY|&>qKK$s+qPs(RoBz$tgcE3D5%aghHc#nkqyo(0$XHaVph>F=Q4y*ilE3uieU*6 zK_&=diM+Z+wGis94A;^; z)1VSuzsS%)-IQ6jxcXvm<5tm}KV6Kz7!I!ID{F2%o}Nt*le0$`z17a3*L~~TLod)- zO;7fYL+i6n#}xC`Ya7>Izq$7KxI63^V?O@Tr~9Afs~hXRb=%%;fBW~ID?^%gxk)gbDLMY*v#i!+d~L`HkzM43pZXctLwdbu^3%c(kA1@^zCoGd@%ZKF>mS? 
z25bFd&$3TP=grx(vtGaR&;Qxa&d${2Xifg?_~iYM=ScAMXmWNs{V)H^@3PfyFIjJT z`~f@HJpX+E+uywXfB(Pz^2T<0<@$}H(`lN6rVOv&y87wf@uI5h+Mk}epM3vs{`T*^ zwDbJ6+41MiJiGqpO|A9({ttbi&wuvxrElE&_HVt)S$=%>N z`r_H=?%Cr*`>p2o`tan@`PX0C+`6)}w!QNHukOyz&Qa#Oy9G;IO)hqJi;XqAv6cMd zM|YWFdxM7?UF7iW@xjXW_KiC$%y{p?*{?s|+uxtIu3B5^&u2|t!)RPNN=nt6&ux@X z7N34}gd#WAdDuzqx*450gQCcg*whEFs)KHi6DDHkLtF$-?`D|X*_2}f-5HYbayxJcGk#3QfW!1KgWiGNzH1urO zhtRfd(a94w(Ksj0XK6-=5K2XK&bJ^4kXi#8z{|4eXem@ODJCXCBx`N#L1ln6P0FTm z5?0q&P{3F_nT#z_x5($SGEEc1Hi$6s+WLBMO;cA1r)Q@pE9*YD34 zb3-g3g6PDHgQC+}EGkBfy}PklNgCJbq}HU~N$}p1A#1auOT?_EuIAlduWf5qxOV;a z#rdS@_7PJyxrR8e%0xs$%^;EXrDOn6BtY;n)d`3;F%T#OC(a2d#Q?3U_nrWe7=Y0v zqH)s8NCu5HNJa_R2JhQ)fyKJ!8-zv)K^X~y3X(yyqy+(T9Ad<75mmS(#xFByQAZ#$ zNUQ>&q9I`LL`cRUiYft$B5D9mtONn#xZVMxs>ZS!g-G0pP8JOsDQfB@pcq91h+vGh z8FEUBp=ni71enmZ?R++y&8BSGd&g)*5S27Uhz_QSU4oHRf{OD33L&Vq5Wl{lilPve z+YouRh^n*EMO9VC=DzhIh@i;_y)0!Zz`3dhut_?f&eGI2PC%rsy$7@ggMdMVh~5AI zA|PBEZ4_~N0|2}%^b&;-9I+9_;6vqn!jR`#h`Kw*3>hpdcZivcDhg0oiZ~c|2m~aK zkqc4h5!qN1bAlqUL<1s<3ZRM)Ook#rj$-wj03r~ns)QIjL!SiHM z62qcixQo+QULLaTYBRb1?%(#!3HS=ysWq;*n)QcS)6~;(xqaozY(6=7c$(zt#gn7W z7jM{ZKA#>w{P?j=igF^NzSrN(2i?2x-FDlR- z{_c0KBuQEa*X!riBE0)fe|V+WA0+j7{I%bGz1Q#0PNr4q+SUo1AN|$CcfWDx>%a4r z_x}3*qy5=p_W0|+^~&Ls@!`Sf<+rYX`0?RJ!r%DHig#ggv5+9Y_~}P)y>Vr0<7(Fe zk^k+#^~!gD|6Bj$|M9PX>s#C3{K`8&{pDw$KRBB%XgUvcdU1PqMPsB`{$z(imU%%BGtPIvx&!*Ft?(~M8>wEj<&whRO;!FMBpu0AN!GLzIbhfYMlatCd z?)?0uK5$=rc6jZ^b78)?I6FT&Dz9FJx4v@ocmL=s|MdUzCq*_`-Rvdl{NQAEa?!m1 z%QK%KwMYnC*RQ<$mFth6d{U3SnM`MSkqdHWm*+18D$800h$%~RlO)Di=OiArS^TDnSAn>g64g zPSBtthLBkMo1MjCK}4ibH;r?%Rs*T#CN*ROdPHSbRFhbXN^FdPU~FJ|*@Zy>FpWkaSY3K3V;r8-H| z!Zl%abB9wao>Uo>H?F_ZRx^=Cod5)cNX&L|Wh5dp2%zG{w=DqzFhb0PfdVO7RTW~k zsgeaHLIxtTIRFD#A|pX4Daf~=N04;YWC8}3tMFxF2pEaukRM``BqGJ0ucc~AOH)s* zizHGNbq=LP4bi!-F`Ph4Lm@Ew5SR@CD7P*r?dFYFZ_F1y@TnI&&*0^Feqy`EB(D+KN*iIN|Mo$PfbsV7xQM&?`>_bt*owO zy=-k2u51@iAL!P4cevHL_h5f(cks(!oc!unXRqGcnx4%PW3o=(U&-=bVu-A<-3>b) zPmlL!8@mJH#J9d`+dulFcR%~&(dr8K^(^b?%0}<$gRyI6>*nod-c06AKS^e#?0+_r zG&#O^GTpB`skw3W+KX?zv3ci}#l_Ft(!KGOtDf?@ZhrCor%gFo8TRM%^Q*h-oDGWP z@y5FS;?uLmWcrn_K6higSd34as``V!`&$p+|Ki~A;H5Y2oF0tp5>8Ie=NDz&)B?PF zt>^3cqt6fegYNY^yY+c_cRamydvjwu{Pd^$!D)Xb*;q}F51#(&z0;KezWT~dQ*;Vc zQc>q4U_AH`ybsnI1xeFXMcT$SO*3E2y4~)^+FF(*L4t^aYPqO`I6y!)Oq`@CDTmf! zPKXkmGloGV?-W2tXxN%T(q-nhuIi?-#v)T_Lo1F@)tKO&XX{8p)eu5&&@b3FP3fD) zSffFZF!;dCHZjK9wsL?0RMR|R!ksJIOp@otbav4+ZPV5({ToHmY3i!m?Np1Bm^_JV znngJR#D0Gef*TCC%6h$AEP6#hh^XLVvFN1fd{%3KqSLB~cOlJE1PBVDan6M##Wb-^ z-4cMYMkGYiVq1G;?RUD-)$Zz=(?q?1U}92R(8=+cv@RLgqPwzK)NLEGyst@)9uXQ; zan)2sn30K80)T*U#G?^e{j$s}7D|y(Vo)-OzzLZg$ZEU_AOINvhG_3GfHnb2IA2lD z6|D%O(DIzW#Fzc1Gdw0%V?kJ4@ior%;qpWa6x(4$1%u;Q<*NaaRFn+G6tN=2NFP;z+V?7&dxDg4iWy9)Pl@O;ngj6q8 z6GWm2Q}7{(50~WZ2*i%uBv1vz(Y7kBKx%2xG)G4##1us9Hdr>GB#Iyq0x}S>O_HS7 z%||B}q)^q)c}a5$3W{Mld7ZtJzIKztAPK6h(q zY<_rf(31Q4`}5m3V5M73>(%=oKGL@B^mioF(#w-yo^5Oox)08KYs1c<>%o5UI6zXp zd2^+grSJXK=f`I*>DZ_5KibT6Tj&8lU<12XYCm)v8{Mv5%>Ra2JS2lm}m!Ca8JU;s$&ZVip`qeGd zr)w|0n62O5e(9v0olo|U=QF=`>&DUJaXr?Le)@2-Xr+NHgEzi%XZ`Bd4}b83JLgXa zt6}r@wudgLYJh_iNP3H;u0Hsi55E50=Qgje2CTN8+x&eAr%9HilX~u2m!`H;WLcI;5LK+3MuGrABrUqohTtpT zAVS+TMA$7lX_^$BoZHX>6h(ocRnxL05Tx;eK@ij!Ho!%a2Lw)VQ7*D1%aSzD((Bi+ zTFYcC>@~&K=IOb~i+nMifxwmRoyBz8hLEK`0F5VOGTc_BOY^cS(=1C9Q)DS42hnm? 
zg}NCGx*$-OHIg<>gM>-HU-V45sE7;(9;;84M?E(bbNvQ4Jo{1s8m) z$o+1AD8srec{=HBZDTD8MokKkP}Ub$0aY#l22Z?fc~l8r1AyVQi<~2}0vIyD7RdlI zB1442v5AW~E-^9~b3}lQtR#wPfdM&1w15U<%sA4tLEuuZgr!*>F8>>UMKL3QFrxae*B}Z~5%*ssoMo1Iq}N^wfM*sh&d|t1aT1t z2ohPL2p}jCeHUUkFQ6f=ePx*AHyB;Ew_%(d3Fs2krZKQbXqNM#rRyqcD;&HRiHo`z zG74f$#zfHrCL$3Hj)p)rfW(qfP$dE{9+~4iTNF8>nO~d(LhYKeS_BocCQEY{>W@Er za&>rP?fPn%Pn^`{-X19w!){0*I4RGe*(fnBnn`{3tT{ZcLJNbF+V-Zyjm`%@xjR@J zzWd!9P1E&(KL7dsqx1RWhoiQF@BJORdM$r=e}cTzl=Zdi@a{XeN2AHV{pUaET7K)T z+n@dH@pE6<{l@Qp3{vd``thN(_cP(bolE}PEXI^ zJ73Fw@rzG=Tdu6G9PW=MqiHwGwg#Qeox$VNi}}UEjs4fZcjebVdfE;C&I`9(iQ&n9 zXJrHGa(r5zpOl-ouiklk4c)W-z0vw+XSg-kT<=~SUsSWsJ70O{);n)|cO2@ZT%0Y= z&&$f)d9`@uwU>+4?q~1c`{7?asH+DrzFMs8Ztw1{Kll6lfU!(-%65RmW9{ez1m;x1)n9kyY>8) zwUsq5?fK;R!_W4hb@ik*39D4CEv@=jx_C9$nb+I88DF z>~uSbNW?DqG);)8TXdYO=kwY4;-c}s^^OSv*@rNnmu1-~fJi&&bt5&Wn-_KC+P2M$ ze7Mmw)-nJR6`g{)?PRI*zSAlC-Tq=attMk_+fI_# zW_h|;%mK8n>fpj6G)Pt6=`h>zbYj@5kToVHYMfJ1U=$}xD4@o2OxXnq&edg8HLXME zPR|4xt%tUD1k}0!Rh<+NtkG!jJmE?&$RK34v4y(J)<}W zmq&FX3L>fikz-7Z#B#Zh#l1=)A|1eA3Z#*E69Ljom}n3}6b%YQ7J&$mVz5>qM2Mw# zZM`Vlrb1)t>4I(ITW1)Ys0$*X3C84=E=3DbP#{4zNz^+oJF+5*4!j5-ut-6F)<@1Hq%&)vxG}TS1tlT@FA|PUk-HQKxT=dAVMIOpa_<+ zli1MOSnUEx0wF5sDyk|0AnNBK;`Z415GcfbPF&-b0M+=8g&@n_4H1$UxzOcR1mgt{ z2t(w1M?|ZN#3D&W*Vf)S0WDiUn@_y>x~UuAs)+Y)Iv&+-GR5(DzTPg9$==E83oEua zsLCoZb5oLPmP3DSH5+sf9$lP{n*Bq#@$!mg``*8O@Ye5Kf9osHfAsSw-~a#oS!aE? z^W3$juC{Ni)n)VGY4zUE_cyz+zO|iJ`pJj)|G^)=MZ+ZgYE+Xn5<0#7bUIHS9^C!( z>2y|a>~^yxx&P_j?HAYb0=HM~dy8=Y(fR)9X#)9=fBq={j_m9XD<6LAZ@=@!FYo^7 zM+YyzzWLp64f|cnQnuC(uVml;t(X4#Pk-L;z~M2twprioB6W9m*At{KDt~b3F2-ls zN|${5wU@h_-`QB(?yjVRZ~xBoi?f4Fhc>6W9&4WVwudhc`@NMsNH{w`omKG7Z{ECm zYg4s-_TcW$&6{gG{ZLN#9zH(UKRMce9NG%)fRlb*>uTP6{?6_X{&er%Z}->NR>`^J z{qy1WT9#lr_rpH)(v^+j_HeM{=97D$Kb_ukJKJmj=HGl=SMbf>+UVvyo3@jAb$T-Y z;mslMf6V|?Pqz`>vxhYH72R6dbm2^;LGW}b`B6krEyLw zO{^gd#$d1Cu~tFa>FlDMU9?V!X*OLPAMfA1an&-RhN`SIhsOJPxllzi1`s)g@qC_%~Qg~bUg2@bV5@g@ag@TRMgG+^hCC=V$tawd~*KP zKYHW0x3>P`U;SupJzd>NPCmb%tqi~V&F6NWpZu$T_VmVg*WUihtp}g%U*FxhzxVWe zZ@qJIn1Sg%_<1g{FBGW|JDEYTbw~NJ4dZw`T9E>H#QfC)`jW#_@G?O z23M}g2>_H!z3?;n``xA^7!uKpa0D-&lZibXmT9%O}|ro z^x>n`-5u_lKl(?{-T&lp#P!3cpRt;k-hAck_^{jWjQfpqu=n8EWV$HF3r@S8_0{9k z#p-%@u^3$}&_B&t80S^L#R!ogY1UcGh}% zLwXL99rf#p} znr1O;ovRn~*}SUD*?chQA*lG^4YuXNBqmL>MZ3@t7K=Jb4FI+hy1ibOWrnFNOH{2I zN61A{Ov}r36bOM3Sy1CRKs73U@$@e7aQEf< z9OvE31AR;?AuT;AaYPc0>IgA3v^<~21N0@fKrSK1@tz?P5JV*)v$y~%5zv7^3aElA zNQg`}x-FNUdQn<>QCL-pNkx$`7SO0_Z+d2+P0j}W@jgd z_dodf?BcvFy-m_kHbEp7p%EeiF)=RdGf6-d!K;M$+Qe}f0^lR4A-ZyyFn};M z4{SI|*(8QJ0@|X)k01a7qH2j9NDwCx0;t}3Meo#59O1CxxU7p3AQ)o+NC8oV6cq%8 zm`PNvu_6K4Dn#S7M^$D*GNRJ9tzk;6otG687rhP{uBzH=LrhLQCnN}H?7XUG3%`E* z_IUr7a(H4pDVszWGm~eeeD?IXo^;c+=nwtojqa~LF8}P`9Q;Rr|K*MK?&+iP`O(R( z*RJI&#c*qI^~IfwllJi7=+&36J$&WN0Nua8_r-&=x4wDnSjOhKKkSg zeYkpM!*Cdl%cH~c?85)>r^h>2*GKavHTlDbqwSqFdpKLk^S}PV&%g2R^?MJWE~c~K z-4~x69Xx&dfA~NBqo4fiADlkBc>ax@enHKg+8Xx{54NwYagw+7xi+}#-Y2Miw zuKeuh&xF}4PHNc>@@W6!Yu~!DKR-Bs`uXOqEYwN5mN$o!^S#62){wnpYaK{i)x+)8 z>o4616%j#ImS@k7*SB}ZXHQu2qi4s{aZ?QXL1AmPqb5H+D-Vw6AAETK^G~0?{PwP0 z>D+&OJ}=$1D=TMbhqH1z7wUj@pbGt4rTETbn+Cj=wI_Um7J@zKHPqfd^8Tju7~4b$xvS=#ZU z-uuP80)#xbty7yYGa-O$+u+08&ouxsTVwJpcg_dVvaAv&X0U90km+>R)OEF3w9e0F z^E}TO*#fh%&UrE(7(*MXs;+8C6J$*lsCBMsLfur-N|K~a)udV4?RM+BvTR#d54xRp zUe#sQ)^(bg%n|@}vowIF#&oh`*y~)qy~`+%zW7`avQ8&U)26L^{p^JoZftC?Ly*Bn ze{?op?U}{}A)4BGb9keCd|i#7np;?__#tsqRNa~FJnSgfzFku{X0J}9So2!w2v z62PD;0m&(PCul6OWkU+cN}S|maz%@j5ODN}Goh$1y_JBtJTp=}g+{~2C4B}&qjQJ= z8331nGK`JEq`#?y|8nvS(Ww&0N^p52PykgSMARjfLcAvskbo3TLrZ(R0s}IlGV@Yk 
zi1?u(K6nIxNW_d6k(e)AiU*N>Kp-J_Pnr7QLfZnN5}P#Zc30Zg4>z}`)k0=-*ZQE! zM2+`Eq@u_emNpv#5&ZH_qkw`CrSGLF9~feP+7gY-h789o5`%yjFCma3#^FAaNz-^^ zAkvsiCxRdWkd;V69P>x>4oPsxhsPP z6$J=hRD;*xnkMb{thF|=k<@|;%qAcy1h$Nb5>;MejJY&o1?A|lQd;ij(drwGxCp_E zibf-Co)?1HhA^*6=Y8-l$+I9jnamqi$MEU>qi?p3Ue8WO)A@s`yQ%-+zxdXl z{F5JkcK38q!FT@I=Q$=4#t4>Zv2ZfBFCYeE*4=*7g7V z4_|$HJPwWjkN)1fciwpE|M-vp+mqwb&Xv`=uB-WcYsKu`=sx@Sqm3)ws+@fB$-~*a zdFkyJ?%sX!i=W?fjo;WAynN^S;raaDgHf;JEfk$%PzRr2=ydtvv*yqL)}WHYo=qk!#N?V^HtL6J$`bEICd?Zp48aQ zi^1Tt&kyzvr#8!i51hck;iz%)(J!6>P;bx|YJdIVSd+WgudmD-x0c&-q}#WyOOn0! zlSgxDni6)e&^up!;pxM(^U>mi51!_EFG;G=ytRF}_44k=AMPtGlwf4)GV@+b%OR#Ki+`O6)GXtY!3H7?!u-CU)nr3;~ zlxf1-ue{J73`fW3z6rhV0E9Yd=jF7!y=6#=H%8;>K5mNOZ|-)scByaWVcVo)Ktl#XL?4B`?7 zN-=U8XYBxrap)nc+InLc5EKF^2$G^2wm|}Xsocgyv;uKLY&o}zNSGpoHmTAionK52 z_V=24&d4CK?OfSJ!kg;S)(SwhB%obRxD=KJ_i=v|U1lC)M=mPFLp>lcQwRzmh%8|_ z{Q^QD##po)>2<6cmJUim@IDwK1?A{uUA{YIkgWm$0RR9=L_t&_%TK99Mpc2(g}@BV z*g9_$OGs=v%Tf&x#T31?;{u_o1o1#XY?9fWdiK>i_6NAQg%~bv^{trfCyvJfg8lChEgH;>VLD>8`AV#T>Z1x_T$wdQojk z`M$fDP3v=+)#d&;UF|wYS(Y9>onlgd_?e$8oD9g$TORCnq1A9KQFHPk;T9V}wDEfB%oae(=fJ|MkEA#j7u+?L1tZPMCq5zWdJA zm!Avo-+TD#JDcmfE9=+S_1m|Jyoma$G3%ZAs1`Be(Rtl|^|`P7Ugy!>M`d|tjLACP z&pv!+$qv_gPaYqYRZwe>4ypzwY;bQMW^MhC{_gA3>HP5QaO>Jmf?;oOs*HI*&5F`= zv-4T?@ZQPYduIvL{^{&=|DpzX``z9BC#TQ9u(h+*zkm1S;9z!oUTtpn=cOoE&itpJ z?URMNMzVZ9af5+r6vvhO*{=>=z(K$G^n>~F)7H!;lf2tY($u#;O%m$SY(6Jyjo~1^ ztZP-ZoHCPw2&nqF9(Yyo-UDh~2{U4r8e_Uer)nE>2~t0PzC2apmIWj5-@bTdFvWuOmM!cDi#_Pw&<+7axREPr%1>eqHZV8 zvV?+fOWjyo=?}Ugge*72NpRkv^|fP@RMWWvWJv}DDf4tP>2brmULk^!%XMy=^iZH1z_4CICW&bAbV1V^RqrG8;ifpb3D0 z2FU;sYBUHjpdgTbDa^u`N4n*TyYzi8=`#qiyz-&&GM^NP*rgbIWfi( zCumad1r4=z*|grZL4vX|HcQ;1Hc1wO08&_L6^anO|6xhYQ@OmCsH$kt=+caEi+FRA z7z6_i!RvBf6A$aeL}b8;M-T-uiH#vFLiHLW&9 zNaJ%5QEM#$6G@(=iOG-|nNTD`{KR`zK|`+b-m54n0P&Lc42?%Y1yEEC8oWnDt0so7 zQPoF>?PW+qB9xApDBk=?BUD^`>zpEzG0yv@twW1st@jQ<4VzipAW2!4b<+|eI9-%F zo%`*btHG_NYp-P6P7h8FKO1v7x2b8!zxIulwAXvMcl6+~D|GVwmCnk#ZPcF~RWCvg z6(yb3D|eiC)VtNYX^Oia?VTUa*86#CB%s-UFe)dY(0%rBw7Rz1)>VIPFxVOxbQcFt zE{;bxU)XWvUir%U%1-a6KY6^ilU=*Le*L*SKlp$CzZVm2nmMN_)S9RI{3|!+q1-IJnz-?3x^X?LTl(;$r$-k9B>L*t zuMInGW-09sckVntdicS!d-wB|L2!6*{}|JY)4fHu(Opy$n&t<8a{uMmH@^BiUw?A% z;Ms$!w`$ooXXkCNpKtE02A#H^*4FwrZ}(2lW~1?RYqS5sC-vHf&O`X8 zQk8WT1Pv1)`hbMSuwk1T(}V_kIQFa~c|)^F#X)!x?isti~M1%qOOpyAX(1!w~_L4-}b|C3iR z$ciY6v@B3uS~J7)%pNBk%a1OpoAF#8-Hv+cL5Z9NAW&LLjHn10qByzKegRZ5ZWX^7 z`^R-T=6FRBh%p4mOe#pm5+gvQ8<0wz>5Hl^0ToC@4B~}I#Dgj_Eu&>jpb*g&Dk30= ztf~s6ObQeL7{G_%gDODlJaP)uR*e^hB+1)_L&PBBd|*TfA{1!pz0rv27nPs^34lRC ziI>?bS*qG`qNPmeoUiKIwca@wJcK1x4SBfzfO)mGNoL!+ef;orJzFt8Ev{Vi^OM=Z{f!qkt%k*DUX?z}@^(BQX1ddd;b!*M zS2n)+wHr_GKi*vJ&PEqcKY4g^czWZ-tssq?w@w<@$cMjdpL@A|^{cn5lbRsgdwfK? 
zzj|l*$*)ep>YZ0Nxvi!L2P|dL&qinS>D+f#%y7kg`W=3{_N9JC#|)-ab@_!KmT-nv$t5ZhexN0?Iby-v*{oI z!*}kywDsTopZ~`}VUJHIpMP?3{WB92`?7V?`L(TlWCiG2@D!*3|dnzS}!3> zleYF*!In5L@@latiabqI%eM8tZe5lnst{dq)|wYY9pWP3wv@P4g@<2?#)dBu&yZRSm5R%$RpOZQ~Y|mkTxPD*W1}e50QLI@_w+o~31q(p%!;v$wfxgY_UmZHM)q5_xIiW*{vf~u&J z#2sRZX^Uu101@?K6bOKjhy;LsmAkfjE{^3_c(eF#$pBlfhW;6GbEfV_AZz5*vt#V%N4!(^|tt(FI?(jW^6n zz-W!_PeU*SNt!8u&24XGaQN)(!)E%%%Z1J>O6qDMML+9r{_bS|u{HGmUw*K4^UC(M!5{xa=oBk}cv4lX zMfTFGH=jJ2eD6Pd;ZHvJ@#)blPjI{9t!te$>lwhuAMLLV;QC8zML*rX(q$+*Tf4rV zfAx2EuHD`$C*@#uc=OKs#rgDbG(9<)gQS1_{pxpKeeX{n zo*j;|V*csR55D*N*RO9CAN=HVT(rmg`kmjq{_Nw6Z@jVnjqkp`vD>{keysg&*5Asq z&SF&VKfKs|{z_uHw_e;SlJ$$dllvd;pPs_K&yKRJvl#nccX)DE7g;u2_^Z2HC#SQf z2|%=SI#Eb~AOlzLr*HeRSkc&Y(YNd;R&K$W2yE#d&2Wo}(FTa?BcM0Jhh z)Qb1S7{uE&8IR9>kSt59rZt8LkU2pzic-~1B^cyIy>LxH!@X|Dg7#MiA{d+>_E!k8 zzmhLz)%n?II+~jROd%oDNpe@W<*f3po1C4VjnA)MS?~9|ZL?Ti9cmj)kv6jyM2$_C zhH#gQCweJJvlnPI3HJSv$M6exxHI07S<#tNgEeT zn&w#nf=P#w>YzSJlD2J&ykKNdh?y-m%M$Vq7(+^`M#U*2C<9PX1O)>e%|pxS3^4!+P{bEdbo;?lQM^3+FYS~|88>o= zm&yPlFhYpa5d>6Nf;s=gL;mH4vHXw%DlCWm@jxGZh(~qrgY!WEh^?X$Vf0|eazSJQ zqUe*vOPGY}vRFbdL%E`e0&JJ11)wMzVoMFPZCi&XPP!??4jEu1^0EMc5VdFkkN~P2 ztw7x*2mwJUu}#wqx7MF~`T3Rg zwQ^o5D6=W@P6#OiB$Pn_YZ3{9s>BHyUM3JpC8)%>Lwp;9gg6`&6$x>c=Uv>4K}AIa zA%)X-KW}O+d_w{>g(+yIX_d zbFYQ@6J@{s!mgkCYFbr|zy0#+_G@dm41f58C&%aICwEWBBbm$^5Dx%pH}`Jz{F}QM z$8NDWzy89NXAe#n(?-+}*X;Oolx6VvlhgCl)+XlZlSS6aU;N(I?CHhf!v+-zB*5SG5uYdN% zqg&4>`;RX6kLF2kd+X~Te|mIyIJ*BBZtu2-XEOnyM6az66&Fg$-d<@e3TeRNm`YSHMj_K5oIO-vX%kVwXKto*`(X;qG(#A3W5Nw zcWgN{GMklYo^|qEgZSXEg*YO}Q%v&I%@%~l7zx3HVhF@e%BuAtfQpEwg$1Ivb>*UB zBo&*GNqW89b^t2uKxt17`GzSEeCcR!~zG!aUem4kZSy8z~nyfc%Fq9n~@3*a6lvP#E zf|I6d`h#AQWo;|-c9CUSQX~j!6GqUYlNnCD4^>&4gtI)U>l*x$h3|a;Y5}lmolP^F zB%qL_Srw|b@n78gc(`*bjpeYhlMsv&1;8LmO3NX=qAGzBAOq4;6TGY#Ty~GdrTI&@ z=5pn}oCU@C#?r_io%_o&nB{z%NF_vtc6mWS1eK+XfU0pzS?&)iqHI_tFdGdX1q@s9 z4wm`>F1mk$NHvHWVxrgp z%mBo!L5*P%!I+YZka0qF-ZjBHSJzFRBz17s7*I_)aqQZ<@!laok!D$vg{paWI2wM@ zq;>QBwT*mp1qM(ae4H6s%;w#l;qdm(#ZMl(>C|RP2-99SsaJ~W*?c;2Ya8jaPaZ&9 zUK|~CH_|INuGy@6_oF9@esi~f<$7mrXYJYEi01s`AKyPbE?;|NTO>UBtbFO67DBRp z`}z^?f$>Gh{`J3nGB2U|Zj;^%NoJ8@baIyE$z)W&{X1V>JRLnay!i0@_a<}53j6V= zXDdC>Mf0_9^_pt@#joza_c!py*Cp@ZTi>|-i=TWtbe-$Z?+&jNFMe%jc5(Ld8=KZ- z3UG0-*DOjl;K~bE&jE4w=FZ3OKY8)Z*MI%qJ?Erq`1w0GLaSAK`q9VFs#?dTjHb=v z-d?ZM|MjmP7kPq$=cn#3{^EXQ3qQ7;e9OSb&wbOtYpJFZ(E(i09 zOC%2<0z`P_lE-s-8@Oz&y==@vB~&63KoCJiVs3-ChUs#1K_rNQ!OJBT$i(K|FFz1O zK_&JNlZm6HI1f@J1R{|LIg5NY4Wb}=X^KI`*n2KWL~KPBC{OLlys>^#7tS5*d))`8fGRNBYFyi@q|uDh$KW1yM-AL zG)XhIM8IsA04bVH<&rcP;|_wLih`v8ZSo^3_|5vjggwbaepd7s0#PbbRlx z4xT-M?VH8Uuv4zose>-LJD)br`=@bUQF-+3iTGA}00)2lbu=jZ#w zE@v-l!pi9{V$rgztVg8&1BZ_>WZ0;%W_tuHCJz5v1Fdy z-J4IQlX1DRzLC+>YuB>Vk#suUja_{Ejh*hwN;wOdWdGoQ39r9>Q~lz}y$5f;z1v+` z`OAOz(OLb;OD}G0Y!$g>Xf77#i}}U=?h9AghITRbt#fS)!Cbv@<^7*OXc|ef{L0$u z#rgd1=ZC-jy|@43uRlIMb9DeZUGTwDMka+&24(QxTM zL5u;gI55&n;|}QMc!xwK_D@0ddtXMsE-pS75e) zsu~+bfRG4H{3Jn_eZn!36vtS^AS%I2{H_BbB~V(T5+nc`MIr+BK{Z|!;@1F6XX=+5 z8W{#silr-}uq2dT#;r8+L zckSq8wENNxKbxHF?Y;B0-Ni)CCT{22YHzJGJ)eH@+0(3NuRK58*j_^%Wu5-+&0#%j zkM|DWe{VKeg*&egn`&};a%NMpsTmzk?>#!`^lT>|j3=YhvwCwQ`^q=o^v<0=+k5-# zue|)ui=e)(v1WIzH*Ko>_a^7Fc6GBz z2+*)4nAdfulLRpHSwl>1Eih?oEuXu#c5*r^>$VAU_sQ98)I2+!7YS``_R}0vMWceP z4_>9$OA?z(2*HbUJ_JvOjA2zZmPA79+7QIe=EfRp3=uENQbgL;1@DMBOA`PMUfQMw zgrbuZl40%?IkWnJu5F1)1cDPZMn!`6t_h%_==6)c>wO?*OWC|~K1{q1gO!zXQCC$1 zphb~_sM96k4B2elxHdjJtH+C`DYGO6sIP2o_A`5Sao)+@mDM#*)GA;IOoWVCrx!<5 z^XWoSIz=xIse8RuLzbAGb6K8;;G3!uFG-S$R|)EULkNrcB&4~M76RBJZOew4lY}`n zRaG(@B``_CfOXqgwcfXe(loVAON;3uO>CZLlZ$Ea;rg|kDW@6&8{@r@qJ(&MCsHC} 
z3Q>bJD!QzNLRu!I;(AY)Q;fLk#x)}@;7b$dWgy_o26E)f#S<_6DwqEYU(#vx(lr|Q z30dAiu zy0ic-(WW6@N>mLI_)wJ#4Q&VkLL6(c#yCYRSdVs5Pz4D-M9&kdr~)%y&e_yCpIBx? zaVLzey>U;bxZ{JWXsoA+>pW7F8z7qQF{bvSfMHLP18U-9@#N}DSHY)2`OV+n2^H+!e-tW1L`jzRJ9ct0K0B&bHwHJauRqv3JUKtRe&GAudL@bdGzRXF?Wkbwl}+q(CuZ5%Aa1eg4M&rvtF;L0}N7g zc(#C`(}^EG*L9%J-(KCjdt%zg0gxpY0Y*{<@r`#LRKcJGVZ$M~s?s3ACTUy*n0Zl_ zqLL;yNvw}*8l!VAXsFwUEhAEq+NQ0YcV*SGNka(Wg$zOnZ3D>Qyk}#)LYm|f)aD7P z=2=d}jHdNX)6}BMmaC?0nszYgPbTxKTr`b)x_4M)i8a*8a|p8VO_nDQ4v*_{(Me3- zIVBioJtqJ`HPps36Ji^hs;R2FZK}5RHp^5I0eyhQWRhlCH@9T-g#&`B+aJuP3l$Y7 zjduWAI|nLBmX(vaC>qNK>)-01%6lIL}?W`Q?(8yqq1x^7JLSVQJ{0W#iSQ3I0D20pRjVaJi5y zft5@CJ6>wQF)D=u7z@BIyGy>52$5r7%d+oL-4aB<06ZMto(J zODj+ez-o{Pxkdq9(xO2X!59odktE4^J{?u%{1UJkn=KVo2}#r`f(sIrWI$RD(V~(J z1hACmA*#a)A$X2!u*z?qGwQNBn4^J6BGS(=;pG-iK*-ByPRuGm$bbfbh)6>)Y?f{D z#2`XM@j3$t!E5wuMO;WU=tfyZA!hVqSZ-P4881&vCIl6L09tf&2Lga?-K3o~$vbIk zY@T`XjjP4Erd8Lr0GcORr!$z${n`Pjex zotp+aqy2+a-apeW{KNm%8ymZWhoA2M_^&_z)_1mRnSb!_J{)f5TkpQA zZ+7!u$A)*XqpYLGe{k@x4caqEspv~>^$;2im0{D#O*<6_6EAQOC z|MX-wg*1nhq0`AH4Ja1cp z@vIs4(;}h8!e5*!ersx)X(Q_nkNq*p3P4xW2^xsV+_i&=fj22HonNy z$muHEI?L0x^~C6XNRosI(=?S3+NzZh05vfNBoZ(aLg{yt5Q6tDq$v_JGh)g%ZA8R} zrf%~rspfUw%ksRFSd-)h5{e51z+}=SWn|~PYkih>oV3n6V=Xh!=8MFn-a)H!esr+X zUrm_(Fc{=CPf4LJ>uNFQWRUo9I-Z(=Juerl-EOZa+O|%Jn8-CPDyh(XvFH-}AoJNg zO}lmJL^QLOEVZsl)7+-Hv5jk7@KV=}HCDYRBCDVx^`bFJVvQmwFwACi32GAC8TMV{ zKs8BH--bn5I`3A8{WMSOw(&}7mYtlQ^h+m%Du76_k{dt;1c+5h6(K+%0R;(h1Py>3 z59aaUk4p<9E>Gsm!}{gOq~aoj#7YS=+UmRKVZ1twBeK#Is^B+8OP#FL?TT~diC z>SdZsR9hd)(gB%eLONDwNj&X`cuhj0%L^7Dkpx*j9RwhP<=W~M6hR{W;WA^ld^hw` zT>*lK;Eb>J<+ThHBh+s>3xdmFg~Y}kRnXvr2C%915F(r-LWJUt1VEWhglt^OMF1ki z2mru@vDM)+W{k#?2oR}2nr3zI=o?V*;-ryYXT@0KeCTA^qN?UqRpfb#A+re?(>Ct( zWCnD&ny2%_i^QxChWX|z-)ct>Gcv$@u-6PWu(!guUtKQ-1Mzfz(SG#FeD3hsqhIda z>c9ETS2{QDTs-+?|K6TSPZpC|J}9;ao5ToN^PN9@v99=|Up;#Ml`C5-iLP(nzJ7aA z9;GPf`=^gTd-md6*VcCW&z@Wmi%t2(w_bbr(Zg!G7#$rPpSAtf)wSKMSj$$Q!ek0B zzPMGj_2*9ynkj7E+V1v~gVXVFz4teN^5a{tz0$bw(yMoV^p|(v{qCK!QT@}u_~P&U zz2|LG`QRRZe6PPf$kOEa!TrYJ>67{04=(b~;QZqF{A4od!fv%U3a|L#3vnw9SA zm6g4{qxnT$WZB1G?2E#9(W=q)elhd9+U=ZNj4#gHG@-S%{{Hb(rnM98Y}S zfnncYjAq7Cmhp2AZZ-<+gGGHAqktVD#R#O`V!NrF4CGbvmDD4&&*x=%nSm?K1?# zxeqvB@Jin`fv1xUC=gA`g#j`tqNsN+;WTjYUVIyr$@@UqlA#b(f!fBgNzg#k2Itke z;JqVc1!OivMg`d0vPzPq$g-l7xmE?H1p)xh#*6Msudd3; zY++3XWa9B1nBzH^Vyqt!nF6S)8d?4vFFirAGX)fZqaYelpo*6$gUjndT)&sAHz-0x z3d0hKvz+f=#(ghGM8BExENAS|qoiL7&i_I9fDnP%7@J5i2+FLWAh8EVm&b7el_ez< zR6~S5YY?L4CZ!UBMn|JDWa8o21y5{)^JwBEN@Y1V*X6z>5bFn215vD|5F{i1^6(vp zVk!j78i1f7Xb3?9(X3vegw~5XHB72tZH#NF4;8Q0#Dya=%^*wSddwOh%44=cRxh z-M0WLq9URM7$0X~Ln44gNt$}^LE?-jASy8t#26YXf~a@iA%ZBxcA6l800=mQh?F4j z+xc{wB%InD5uIzxx^#7wrp576&%Fda6oEE?+< zv~xSDs?fHftee%XIv$@xcfHp({`?r~8aIj_n)H=--?%tF zUE5k^<8HtE>Wvrb^|h7rgYSR#;ppVN>?dLW>2zZV*4nm_(M9t&e|@jl>kKz8c3l8{n~o7maN~|y7y;)HCX9EOUL6$l5){Y-~H}xwa}xZ`N`Sh z+rRZ<>Es7L_@x-`b@~_6^9N7PpB>M(c2+yx{^952BE#7NY~oNMEs6)vPJ)N^wcIu8 z8`sYYaAm)nZLIZ0<&*ozh?u36!FeFY zJhjFcCUQO`Y=VTctU5)OSOrucT<}7SRn?U9g(@(cG)<^&napsoK8A6gW{S|Zt%zn> z1`2gmHFax}Sl59`!i%O}jLRgki@I_m-U}neju1p_eG4Eq;o!U%-?oi_W=X-82K}Cb zbK=TYM5q;?WChrqvSPewR+6N0((ey2NviQ|(CLWCVqO}gCO99&rb#F7)vi*}vMx=M z)@4<8a#Bfb(gqJGP1Aw`a~7l_wpk}{%GR}QlBCvJ4S|SV-3WqBOe?_}LxvGiM5dDx zh&r93Zr$T&hrLdbbqg{qpvKxIsSyE`mU3)741!u@6^N*{LViY!@|1O!ZkfDwzvhL>^zAQQL{;L9LZqznb25MvLQS0fdbwsEcvX=;l+u^nOninTKe01`AXsRz+KF~$HQ8A3$$ z@hmPxq!40dR+P@;gN=y-B3+6KL6C`}w)R5iXrg5HBAQ6?D0m$IIO1 z-^6H69B4u&o%)|0-EMo-87bsgw~17Y&LeLnVF=3 zjaOf3pFTqE_J_I2bZa~G3(AdDv#ML*n_s=IgipVCc6xI9{Hr^w>)n&)XwdJEj_}d@ 
zPrIvm+s->{E5_sppPxQ_aQcV;_#1!r&wlmUhv(xNK7O!w^ZII@;PuXdzR_=betBZzmkt<)WFFuB!aQqw?x* z_TI<8>U5Gkvw&tj!@c8bvzH$njR%>j$~NoLmG$Ci4CQ>@>E_H*P3p|Tt8Z+NMs1cS z(|Te6v8lY0$x_=^EfO?Ui^O@BgrERyjA@-m1R``%Ya*NCbUeT zc$7mRe`K}^~%A!Y`C(vw$@3~D%1{WG@V(}&@@FS zF&eQ7Dj+_1n^*z}A(+%G%6Y%j$@0|A+Teu^OHgBN-87ahA**Ur)`m=$r>T`B$;#56 zoSY1cfuTxr4B7oqOzO`UoNhAS)B-?V93M* z%FDaI#D>M_&b*w}kX*We;s-9L5}e|ZzQ0!)3a&H8XkPQo> zhQQWbel@XzI1cg^5S19fs1j(rMG+EcxTN4CW3>E-I9>mU* zC`6ui00%3<$F_sr;(xGjgy4k$E7@xZk5@P}c6;<$|Z8d~WQ8;USfZ$!4rfusIwrvRg zZr6*OHH%Isf3kP{%A0-6)U3{_XGF&?2v}29?ZMIc*3|;d*2T%>_G>rWW@gBzi5Z=q zOpY&}e)tfx&TJ%)e*IWc-~EHv5IcFNTHkE%|MF;Sd!v)r?YusHqQhdAZ1y{O@BHY( zSITK`>&Ec>;JhiRnuPPSrZZ?4r7wnSt>Z_Z@BiqBk2W@otJk}K@jt$|wo$zJ^7Cis z)6nuKzc`pqLp?he)y?hU{A@9+W?%iiJ57sEPERk++VSziW?3_?Mkm#LI_maUzV>_1 zPfzEo*RIS?8*Ps?=Yal$&!457zxvLr<*)Cqu6IvI)o8p>g~iO1B^5CVY;1R{vOGSM zZci>o^B11G-ZjZ&+KePP4={3B%Im|PNs9B?xvRDEFzCZ#-uC-NnkHpAnpAD43r8pQ ze7c9gh;Ze~>iMivfyXC{;C&Fa8Cgr8Jv-T0?WGADf|YI;)y&7IqfwO?$?5Uj7;B_; zM8-Fv;f9e_metja7!ZPYRV6_KGqPm{bZzU=1l5F115hq%n{Wss9+Jf7NoI^ufU0y2 zHd$)DcS(|1V<63{rmnoGs0#rAm>CG$)?@HqL{yQu=yqJw6h#3b-nY)x{eC}qKRufX zi1$uG6O*+*7(@}Drb$)SoRWlqkdgx0q;1-j&LGMA%n4fSszu|RlnYWvs!U2YQv^1= zC@Um3NtzH7^Sqp+XpHXzqD?u3u&67Wn>0%Rq$*1!EP9>RIV9?J3g?1ASYIDnn*`P1 zrK)QU;F{JHMuINN()-XG^ybqAS)-!H80VyJ%9!!hWpoVV!4(vI2tm}NKsNJ z9PG=NXLY=^hAxx9x@3Pv%~LPkq07ZT&Iw|PSPU`!Mh1}RhKx(nm%}80IHbOuZeM;F zmwtN$N>XtYF3+(MCqu*vpejI`*d%6=#d|;sJ^(0El&mq9=Mpgyd+$V55hDO8zJRFc zQaFtPRzO8!>)JY!ClDE90RRzDd_*w=%K_9}PQ=6uGqtU^%+V1mDzX%5m54-S>D)mI z3Wk6b0F+{Pt|+01s+V9;C5RWVC_zJP8k6`Sk>ai#B*dpC%Ql`E+g?6K1tDOK&GgZ( z8O^Imgcw1*aw*M#Kpgo4VF(g}1OZ@Wlb=+;dU`tTY(!uC=9eG;>~21ttZfhToNv6E z4VL4pH@ENp^1)=5-(nnTv zlg=PV}i zat8;eok6^{Iovy#UJ_j@4U{Zu4$U0s=~jqNIFEeAR>FVjgO6H1_P8OW;UBurR{Y3wsA&9kM^GI zF=R$9ce{f~#c|Y6yTqQIWAO?VqVk?yZ7EW9-7GB+0VSVJS-7gIx@lOXlcr^D<0t}v zrl_3r{XtJyVx@vJPHBSxDvFxM7j+FHq$!KzwY8(3i z0N}7iF@jR0h!DYJ87i2g{YMo5odjsgyz> zP-w_F_Ps1|&dn#eRw_0~ghi3ZaSR|4rn9E#Y-L4NXNf8%xf@NVFN&hB&n6`)n9a&b zVYk1%I&|@`-k)x7r_g2bwXKatTpF%E`Q={L>sUwKRrC1MyWQ@Jci!(zd#nAQ{pIts zxs8(~?ojRO-b$1$#b=}W?%ijDjl93SdGP#rX=CN`+gJa?zxXL*{ru34XY>8N+uba> zba^;j>p$H;U0&<2Zmj>&AHDa^o3A~%J64PjpU=Mcy|;zSLhO~-c4qsNTkqZd&fj?b zt=Hl0ubex0vb*|dWp;Lmq*k_A+1cSNi{t&hX=G@))a_+@cQ@Cu+TQ9qnbg)t0Cia`t!&1z zL5*pmW<@ifRr8Yb0`k0uRoPe>jHE!oy5e)&E03Qa?LV4yyRh6-m(Hy&_p&$Nd=-&N z%4T!vaR0bI^3%LY;><*TG%m8pIHuSrm1;s1?>Oxw2&k1tf~=dEL?L04B)x98*Xtx{ z92w)BXr-KU-aG4Dlqzdo6zl$=YqZf?DMj8njp&^ZG7sBuRhF~Kd~`ONO{c=XEUQRs zXB}HtmZcA+d)`HnVs_%4RumgkR&(d9Q7Ve#BCl-YfF+63VRz6?x}Buc&$?NZC_!TE z#IZqD&2nGYB;stjwA3Asj~T>?%&T%VojC6X!@<(>((1}mzuPmQNht^!yh>?p0B|~+ z0U`@{&$V;4v(EczQ9AL{*{tzyURHHuo#(2m8|MLFR#s(GDIyca9;B(8x@nrCan?B& zCvINM%erG0+2OY@btl-5xz6BE)5LM_BU#R5?Ue5Gz^OsiBxEODXhy+%L%BaCjnksaK;K2Q&DZGaFSe!M>) zRZTTL-QQbU?iZ&M-?-oX-8a7ewH;;HI$2ukzVhn!?N9F?@0If@|NOnjMFoe4`E2fw zM)jo|8~2_*olhoz`p0B<@Ct4w~ARvr3rj+I2HC8c)3Q zORGi@w$_L1>*?|2?AiXa?s9VZ>P}ge$a4La{<-rTCnvLfR&ygPNI*s90F{ojL?Hs9 zNlem7CC@=on#O6IG9wX=Co@DLVDb$Rph5wutEzD|fdT*>Bpo8Qu_$1h1_V6=5yVCz zi1Q8^qs%yOo4Pi}gwWrnsz|Z+hMiThV&N=JMEq=CX+^`Oeq|fyIMOCfQq`r#x~i@b z3A`YYMpTHxP}i0;#yTRcy=^+(q$(y^mX60~-8fBktt>6 zD1p{dRTsdlO{A0-K&>=s?T5DkW12qFTr_nrwk1b!$W za7>yY2=YOI!3z^9FD^(Y2~jEK*rQ1JDRM4|9|?d_0TLl8h3Ld7rBD$7pdt}$%0fjE zpi&yedtmRqmvDqAErA_DWOfv=eRxTg2gSV#$crLsk1!%(07bwa1%W^jh9WOO1f{$e zKtc>5sI7}af^nIGV+!DlM+O3+QVazkBp(R^K|A7!ps`+@0D;m$6vZe=3_^q`*1PCs zULMQB^(_MJZy`aJkS0n*280ku959d%c%0G_lwU*$!rp;E97m*8Q`OpN5JAE?4oST% z4z$*F(}>dEIkkART48wq#^_F`;}v>AP0F$r0PzyXW_$he@&4zOcuTU`p!om97b7+Qm<@eo99=QhR=TT z@jKssSHz#aIN5trEN^R?$@;}XZ)sSs=3dP5&hYae-0N>FZ(mr?^IC&m-&{vj$NSC3 
zh3jiuS9c%m?mv3*;IoITo2$1!xGNgsi1KOkrLSE|V!C_ld8cE(^LKx)eN*KC5dlN@z5H@^t#{4gJ=;Z){#&A3dKg-&o$b*c~5^b<#QAJ=%S;o2CE+ z&O?@_X=TE^n#3lS*`zkAsVn&I?_3#l)5{mu zvQDzG-YIt{uCZs0UtLQ489dpYpTD@gV&&PBnRxZ=(Q&Hv>3F<1Z_-r%#@8?W$L~Md zT+eooi`mpyQ5|a?C;<^rR0#O(&HiXKK07_#I=6b~)`R1tBcrj(tH#;B=|8ylyr^q3 z%9n07Z@PjtvqrDuOcUxTi8Y~fKI>&Bi4>{W7yxqKWxYg1lvXA&RaJDd4tt*@T;?UB zYU+jw5P+Q{MCVxmoMmIQQWP4a2*f*I&MOg)qR2LNq_ijz#}R5B>aas!u1GnbXK6=k zWt-Yt=RDVSlf;S1Qf0`^YOM{Ut(~*Bc9s*P6BQ9qp3j9SGP*pQM_Fu(QYkaf>jow1 zb+gsM*2cQetE#HO+N0x>rRC+?d11F~_ z%yMDyO2jiNEQ=Z_YMd38IEhtoTC!M{GB28{(oq!02~j*Nrp|h9SP?4Hq)m(H1d=x- zd_rg)2NoeD3BDdc2ob)&F2-rc`Ty@ifG@Ef{Pm1bKyC@%7NA*V;4F+?054gCU)veJ z*dP`tk>G+BUff1Kv#>Xs5Ye*_0I5J$znVkUS{4K#^sXQW?{4g;=1e8W9(qt|;I=tA%i>mESCmU1m=NAtH!F_%llg>X25EMTmmJ z%*39N9gBo!w9wHdfJTup6R0r3ec0V-0W`HX?NGHDVwNa$F9Rq<@?x1GOA-L_-r2eq z0Zp1&Ty3=pA#knjnW(KyV+0*XjkRs;wfBH50Nyi_8uW)7>C%(kJ7rZwCi0%WZInh* za(w(K(%i}P$@u6SU;8cFymFo$St&Wb)eEmnY93Z*Swqx}Tps$OZ-i4#x?)Ywuhhdpj~ohpmhbCa7TT>b0jI z-#Y)=x#f+u)6+aITz7f(;^nx%vot-OJ-V}RlHt(B3wZrwjTJU)5r)$^NMLs#;n51)VZ;iKKtLUXjav0OX1eQxvgV5DN|^$mKUn6jOI z1;xqn(Zn9FY_72K9_we%cOTrIU%IUNLoH2>@eru4EshF5T<_nzH$9nFX{R%;e10&} zpx2iAv-ym)0*29~9%L|{978jH?X`{djm%Zm)@tYZL4g2EYtiNN!$&X1$0zygmse)v znYY#M-a#>&B(bgD>3%be6_M`gAnyWZ7^j8xEpJk19~A z%Im7OGMZMid8Ezu*2dxS*_CVCMdhA6J{pgCX<1w66~VdlYdY&DNhfm+6QI&cM>Y0L)BEA)+vg1hhe+5)mM+ zNK_PQM69bCJ;br`-hwbH(%NKcqO}3TMC_d}%SHhDy>1pov!bX=Fo_{TR~N39G)_sV z7sczV%T-mmrWs~gxG(fb%scRI!^uxL;dijK{>ovplSCZmyKOtP3g zTW1j!pst&`tjbO=(<*KpSJnZN6Cgp`TJ_mF>pdiqFv_>$ahJ;N`>DvJX`eoafX_zHwtk&;%cilJ3E z7?%WqnUz)x!DvWd0`fr;6Ex-u2>=z?UvJdmuqT zg^Cfpb3h1;gbJkf{$iloWf8dmfCvoS?pYGFJQQigLO>Gog+jRkhJ4fTCMeXeh029i z0UY@7Hq|Hi!Ns$9=RF{J_97k$lvXSf(lHdXF**c(D-OH}IQG*1U;zLlkgXdA22^25 zAyBTV0lBHG-f$?soSaMys=~QQYcaZ#lAWva@%CCbhKY`5B9p58%L~{0?r?JMX0*Q4 znNRd9SJl2x`s?ZA&vtjNyz=zZhxK@3()7-U&(B}q+&nip9nDWiO&Ukv{Jr1we%$T$ zCP(8Ux8B+D$>{WCzP7!MO1*e+d~$zuspdGPH0piyXP=j&{LA0_#^}Xq)L-6vxO@Ba zXVdxY=(JSn(dKgg;m<$Pw(fTH=Bu0MZfxI|Rt8t6wU!V& zNt#7TUDYUHNI9S=)*2Nm(pnQzCrNv&OP97c4v$XHMq`MRR7Jhj<)*B&I9g8P&Ee88 ziW0=CuDy6njE5x}?IPebFMxDaSJk}i_IpTF6s5Cu z6m_erBqA~jP{MvqgaT5Or3hqMR+W{AI2UjTtcsjH6jkX3sI(?AN`mYs2-pR@2tx=3 z=9bDO@0kFt^C9d7zK8%_Fu^YeI0C#B?*~hg$ihC>s?S+OBQdo1?iOQs`4T~Uv)D-% zC(O(917Cji_Ne)S4FCaTFD*yVrdJRV5Gn6mI}^iyiM)4!A$JqoIKmLkU_I%`M4gZ; zLX-fq_2RvYvLx1h0OOh2OPY$Ui%OJMI-F+0=-Z38XK2+_fhPzgs(>h@fXly>W&%Kn z<7pu-31A%{gNlr;Eh2@GJB^B4O(uviJLjBh9}eSUb6VtVqj<-P(1$}C5g4@504=T; zT7rQrhn>xLQx5X;tp8UA?qiY!=lsW%7rg?lp9zU)H#lB`JwLeqrLEJ0+0m2n*>PUxe7JX3&Zf@SZ+_$E z!M(l1=XXT84fvd9u5= zw48kFwYPSkK7Hl2jjw(6;*&e~Z@&3vl^6f=fBos&>hRvvah#d0ZX){yGoRa7p3d|A z=Vuqzm9=g@8=V~=udc%O`QFFBJo@sxD;IYzeR5~Fa7`ywd1dRWu4_?5!{zRBCoY=m zc*`vN#0~DprVCm$r&6Ae2>;Vj><;fV9?{ zNFk910Hd{61_8{#AZYK!TC22DTFu8r62%&#rl6?^Fz7-^1c4beMZi|Xw}GLA z>^$HXv@L{jvmk4w<#3@@lS7*#uu%1*ybP*uS%Y@$2G}@g85cwB*Ip;!5ACgg@qaHc zr;sEQOjrnlE&2?Ap}SHGdqiv%>=N=dL&6;>twSa>he%GP8937K>^@U!`LQIygWw<0fa>>B?#WzwvQeO!W>`V_AC&7ltOX* zg1iAD6sADxBLXQTMWjeToO2=!iV%^sQbL~DIVbE%VH6u@9ZQJPLmg=jrfcLxMGiz6 z6T+g&QB#$jPL|iRBqlaCPH^q~4%x}xi$`5E#V8Vv(ySTT{fuVo%hADuXU#n6q&z#_ z%a=Gmo<6$ugG+C{vAngWJ96;s=vO~EdHc)ffAr^{UcS0L9hExLzx_8}JK8_i3ikH* zl(KQ!J>5T9#%^zA71gksjPm)s)9=UUmYZ2sB)S?Tn_J0+w=P!mN~5DVI-67npP!nf zJ6LOG)qGSoWmV@*_2lV`$B#zmw&@$c_l>plJEYZ*|NT#oj{Vu0TUqaEW9E7D;>p?9 zzV*hXtI5@C7yi~i{2R}n9Y46eTR3|3>^N2iiH1u(FU9n5-2Kw3!d0$M(wJ_%e&Oh7 ziU_C@=bB<%6_euP&6}UR_vFrTg$@VsV>v!H7olej1EPs0E$#k6WAAk(& z&u=|mS&pt;+a3%CSFc}Ly|7b^PP5FcY%YCkXZifqmH+8~{1}9jI1f-9~?4$y2L{Lyqr^VS=&S6|tZagp2H1f=4x-St(5mJj+JMpBV)Z6 z5$h}|)kzWt07Z@S0xZacP*)`gB~k3bX#y5#8jFfZ3yO;fErYebau&pQ;v|>(u-jJ( 
z=i@w9xV*DE=tORqbvtPmo2)ln9SoE!V$DtI4i8V1mTtdSlyzNu$Dnl#qVl5l%!9!I zNY_mj>J0Ndw~dXIAyJu0DOUM>hCaw2qr9jiLs_bsanKt$Zxv$cY8Ez;nN8vdYf?NCj{TF1`EAw2-^o29!ofjMASlZ`?)!fDa)eu@|8BkaD5r9mRXk!q{$Bh#11R5fOs1OI~Wl zJQJa_Rv5quyVfS+tp#C1ETvLJah8xG?;VHCsz_-e05L|B7R%NyDOV;Cp}bI~!N7mxZkxIyfxHrT)oJAM}!;kiB?i=fQ)M%?mk@?q%8OQU2)e z?xh>&pFJHtc{bYLm(?}6{>pORsKcY#_}QcZjlnXjUuY{tNGN<3W&91>#M5ST)Ix9?3l&-x@@%8D9SN-$6iU=>CC64 zXP0KNts4Z0B4f)&k;*zrRa#O?Yn?>8tgviqMcD0T%-)Oh>?~81#LS)|9T`oCym-3)vyE4z*m(omgJ`8l#EU;2o!F+%kmmVhG$o^9 zb8AUEh~j9u+lfiaat?}}b)>v8QB_;-g_Lm(2n(XIP1885bV0@lyEu-WwRK*iAjX8A zBBebbh#HM&QDUMro6g6G21q&Pv&p#EO&KWG+G`&6v!-#4<)WxMoy-f6($2YFw^LWn zgIMcr)69#qw`Ocp3%9s4EN~JQ4xt6&tWrd+U-jiBwH2H%F7J!1$M!DS6158lKma*{ z)EtDNRg#DMF)x7rm$*n6n!%0`P@)9{)Mm$lEV3IGWx@yoV6E4>h1fxay-+*lnFs?q zC`1C_S(H*}h?!YHK@hkaK!uvTFew#sX)wUuKnR3Nfzl}GU4X}s0g$wbk`My{vTz(P zA}-pe+9L4M;y1i_)=1$`1m|t55^dRaF~)cZg(^U(6e$*A=SUH-W@kx|lwL@6LW2+T zqE<{HIa#=#F?hj5D;1QT7~p>dK@`S?y}Lb;C_wVwqd@SUDQpXT0@wp00oq~lf&eH{ z9C;VGj~2%X!c>J?8{ITD3%ty(P@2528*8K3)bl1zV#kdJVC}M$YB$kp9OF3h2bscn zX+C-UaDKRJhRdFGF>h8^)BSsUFCHH`Q7Yo))xqRh&*t^s!L!Y47cdq8DMr)P^UK*% zG#Cz-Hmle~XJ@lJ_x5UOF1>YCYdu_Ew@!_UYI%_EJ~&Ledn>Ei-ktp~{THuR$JLAb zyQ9{Leq!?+)Y1c)b5?4;25SfAa0| z(WuO=_v!cl#eYH_J2Lm~&t4oImJPTiJU-6%4j=#FfAzbc-+J_PZ+`B4eX=_lk7f@Z zzBo9Vrrr3`MYwit=kepctcwpHlWp!?dUb2CY=AmkxYLtnXJvy`d3-!x*qVXfw$m=&uw*eSu{pz(gcEvFr;pIWgV~xOR#lQpO09j+ni-4I#zaSm&6% zQaXyV#+L$GFcLIK?46$#1tR1{MT$s8DoV<-wCt<8(Pb^nS(=$dsfdYLk+4y|6lcLT z?&Y;wgj>lX5ra5p0wARbP)LQGoIr}Ub>)lUwt#~cclbrxKp25-o@F2hWC0q7v5Ub- z%r9^M3sztuad}A){2I3`|JgqQ2yaY6i+hVEF|)LjosfY^#QQeh2}l@-P?2uK?pp~X z0xDr~zzE8R%?AMh*?I9yB3{I?B~S50*eUnOYo|7vH=l#*MdKR zcgJAz3OXVR^a#|>IrNO+t#uZ|1Jb0uG|pRbPMXF#>m}5q)*3alcrP4S=mj>{K1~Ed zWbgn$S`oI+fDp9?Oajcpn!>kN34|l_WmpCwF8aCD*2wh{7Bm!n)D61+Y~ zy^?i%{iRNCqc=S`^ipnZUs}F!$(3h^kM~TP4tJKH{qpfI{{64k&TS};&8uT@7!jNbrDU)k>DV^&-Ouj!rZ(+uu5B*8_p^tf^#AT3f9JEi zho64@>C#HSuJWCo)p#|Y+q||-8ppj}_fNlnd*@v5wYM*y9n8y0wzlFV@;`X*C)QWn zmpYTPW_daK_P4*@>$!jXuYUq$b`OuHXS2r-rfj!SwBwxvz&K9Dd+ng8nk0#pV&}Xse3C}& z7*QCZscO=3Y*bz~Af8#8A{S;>W}3s%cpBHnyQ-SaGOd^rlyn%WEb2s*mPI`)X9c>t zVh=8gAU3_cIb$zz5_wkay@)qj8v#}baWo!}mDb*~V~*ljo5muh-K_9Vt4JqF))}6j z9B2^%R2mT}&nxez#>AwxfSB0W+D4I9TG`rK+r*JpMp^5UG-|az;)z03XHb#9EcF5+ z9jXegcaD8R6o}kbqWMzs)26vCrZc>}uLq#$<*pE>@nV%=ZsB8DczxS#U_mj!OBDJ) zBZk}Xo`r-IkrXH-#q5xjaOfU?>9WEgid9M>kwWEJPyk7hQbInUC|vqBo6(OEG_d1LO?{)!YU$0gL6(9 ztq@ccNmb8hlPHQQj){l}DN^h;hTdjE{33hgRE%ur4dVm-h9TWlaf{emQpslrWWFWXFkT^KVfWW@r+7TS7_;flyG)jBNs~gLm^&O)$8T<6^M_Jl? 
z^E=-t&k9@B*KzcYEz-_UMLk3s=`SHrAg%KI$*W z_Fi?kzdKwr^{m*qawX~a8h3pCjsE(T^Y4Cn_x9&|tLvRlKe}`9WK`tU!)K)ib#w@% z@btuqisLLfnU+x$XZ_@dKl$bU?y-)^9~F~n!3``8yG-iIv$Nf!+4jy*2u@COg_S{n z@6l*DjGjDg^6QYK>Uh@=mJliKu12=Orjp(LXJ=>CVAonA5;+e=&PVD#zf zh%l-vS5|H`E>I~YirIBK88fiJ*`zob&x#!S{eUe}K5I;0>m-R3g7u;`DWuvpo*k0* zjrAZ+>BuNR@{Kgo%yF&>%feabnAtWB2_T_!UhBwMoMCDZR0sf;??2ObT%z!rRCO-Q`)+m z`OW1%=@c*Zn(7E&nOY&lB0uD5w4syac0cSI$PBh zizfoD4FPIJbydbmObX&Ss;!L-c@$?GM^IK}T6ajPM!>L)^Fpb(F7wUh)qFm6Or5mv ztn(y9XtW}fEQyM|Na6?)ZPR$y0H9+=0o2sgm9U^D2`LxeBT}o7VQM4n!3&6vG!cfx zo-c%$ZPdUQ5}+`)e|?C5u@JyQvBSb5fiHdedvQ@CxY!op<*UiBz2dL&zimWWJNW>W zQlvD8ED^`do`~2nLlDUjp$etyfXG0?6e5E#7^6V3bEK4l)&nOZ5+V_TKaM?nWcJvi zAwg!UMMNn?K+m3V(Fzk{I|KeM=oe+0?G8lkl|SrJC_el;#7c+bM8y#)L&lgGO(zD+ zgy0&-&ZAnG@fK#|0J638Ig3Ii$gRY)_00i6Q2Yb4CsYu$o-Hpe41$85*|9k5LWhG7 z*>4Mk4geG>0zyUw1i=EJR4^NQ=h=G%0PlgoXo_^?#amw&v&quRT9O*?rcv696C|7q zm!>za&ORTkr%EQvtK41L@LU)5JY8NIEEC}L^zekRJ4nx|h(}MJEG_lZP7h;klGs+| z!6y$t|LN}8T$a|yo||NlcKY2(adu;Q>A~m6vjRT7^-voXEf2YwuWhW3jz@p?{m-&4 z^?v@VmCcpE_6OgrfPeAh_fJOCmHp$ai|ZG*%d=T`rIXLjyswi%eEG&88TNDxS*LHI zps3S7H}ucPc`?dXqgTJWqa{it{>wjlym=)ap4EGY{+&0MZ{M5djn6t+-MCmsN2goWUyTwt}?I-xoeC7P!^8;&} zy@&9BpyR#2cjpvphnTZres+yKct2l|3 zA)`xDCIqoq?`Bo44WTxT=WeGzt>-6`agila1aT(@0h6Tt;b5B2dRbIEJDQ!C*f?jC zI7(9EJw!^Qu=Bnto3xuYkyDY0)3mB>*)&d^^*o#B^Rmb~X;rzZ9LI*Vj%InTl=hBw zB&ZA$G-WMeS`!+rH7aGG0I6$-Dk2~-8il+dFNoK0=NEPW5ab@-2`e;;6d3|q5WR~O zhnE~}Q0u?^-_oiz7x#X7Ip{$+cw%YME&!sWRRmEu0_4|EjK%neg-mjBMuhVM!wr{3 zhyh4R+leazqDUc-C`2z_gKs^wiZHAT;#nk=YAF%`U_qq;Nmvvq1{AU0H-Lzs*)uPo zsh95!0ZEh62-v<}IN!p+!i705^iiHKC#NTfAf$l{qvXb2!dM6}KmA&UsN8HmC`^(!dsLlZP3Dzq%lS$6dz z@J~WYTwC|BSThI|Na(K6)`KCXBID~uYo$;?w>i|-H(WM?wl~Z?PnMSA{inNHA78%s z&g}Sc+Ltsf$}V>X_Vn?OdV`f@C`B<7^zqIc?(oH}AAUdSWy|Y>{OqW=I;=+a?k{d% zd-sw&d|b`hVU+LJUTN}n|LJ&Xt+TSRJgf85aq}<##V1dnl$`;RB9CG;pFTf3%AniN zwm0J!N7eg3fA(9y6YuRE57q}e+Z_$LKi#bw)7=?d*Ec@-$;X=)msP}RCw9_wmLjQ} zY->2)JB_nAPLp0Qr9{&84G~qI<)8oJ!SDW!^BWf~eEzE!AAETD+UskT!{_@&BR(-` zj9k0C+8cBZ4^J>or<0N>Z#!_Lk=NpPqsN@UXGIo<#l6AMKsr z-nw}H@_+oZAKd=*xSutLFW&pkH(&q$AAfxQ`r6LU_P2jK{ulrHxiB~&4Y`>vw+f@#%Q-dw>1%gS&gn8!MeYh(NEyzwwnT&AaEzsyKLd|HGf$-P^0q zT^}Ai&i>1Pw0_~{&b?2c9vn=aCDdsr!O_fKKfiwK?$cg}l3sk_a<`}~5xjEk+}^?D z__#zu$1tB4XXDZ~XstwXT-C1Ajgjc&WSk~RVsvHgo3HN_RUMUXn%A?LRVg-BblrIG zd{x#-qCt_-8kw8gS9K)-$N&oB$av>LBp6}J!VyugGtANq5bLUB&qYz9My<8hx|~cp zoviU*DP1>BSym>BQWGzvPvGSNn$tm|gb?>3Di zrLtk>i$cc+1dJwZ#49tO*I6ftBGWhvAY=?Evfj?ByqmzPAJrBdXDt@DDEWgTFNQD>cOV{P#G;X~(@ z$y7e8L~LVgV<PwxA&KwDeaJc2^I-1Qi_9e6ao_&;uAy!Wl<&uEhpRp zNAQK12wz_P0~~{D5g~`DRFHZG{5^D+D{a_039(`r`hgXaViDDLqDWBkupj~|quYar z1dy1WcrWZp>2RzNl5fKh1Q3u=De?%)Xh4xRvjJLWbio9-&n4&|h5f<|#Vm?Qgj*_{ zJpg$|trdGuK-4yKArZ27;cWwu(h7wMlnQh@2q|RXR%#>x$N}#?VYt~Z!j;-6Y4DyA zB!ri;EXKS-Xxkl>py36cB#ZCa;uqpQ2#^4=pmQ8EgVN46fMA{Tz;T=*fb&cs;%njY z+Qp&P)|ffj=ZRj<&wiRker;w%0Vz|4usiOpq)24~~U9q`qYsnG zWpC~6jqBQLFju!PJbrlebaHlbHeOrn-@S8UcH*wj={ zfA`o~`{eQ7J72&0=3ARbyU)ctQQpq$^A|QhdjIp+U*GxCw|?WN|MAbi`Wxx%Z(T{d zba4M!UX%xWv)(XGN7di|N8kOcKl>}7=+W)x)7+1zzA8PDvb8@RH6PwOgb4NzdBeCt zpQqEtd+hfypVvx5QL*=8t?~|{NP{@%eObA@sF9_q{MvG?fb+brnhH^JSNHoF00A*F z=*D?zKxyN=w^dUUwXzB(qlr;-EW65rAb~Jq6h$Cl ztvAY7W$is{hNg0)&^u`fLWKx=o)>jp*}CZs`kge(tJ=gSNiz}X4F~Kw=_Ic4%0zM6 z>-D+YCt|R+na}4+fWcdj8kLD-ByS=UqH_tMEUToGu|f-y*VW2!I6s>z;k1)zl_6@! 
z;G8qv4p)t>Y(iRTWt$3>CQTmL2jIXq-upC7EISfov}sJ^EjPC55BtS*YOPa*iqs(M zg9tEGW!ddyz?denF{+!TjdzuE-CjDIr*+eyGG~)B>U2DNX5VJkwDCUS!Uu?=v|@2U z$m|-SIMfNhOnLZq;lFSTmApim zgarvh-#Q`@YEppY*?VLTJ_fcn*bYb=Ld7hk$P0RJSwtyX*b;;plvap}*$XfdF?d8E zLNZYZd1GV-2E>4a5e4o*EHi{k0BBjbuS!4FPZB0tQGw3akTmRMe%!pV|LT&$=T|)n{jW2orPIb9PhF9QJigGy87boqx`g5y8hPPKmPv7 z-s!7feLGuOn;oCzGl(BfM-!P>xvjzd?<=3{>ca~M8 z&h{tSppQ#~>HVX8ROQoo5}Wbv9x|+KZAw!nK>V@9WeEoOcbN@O9NR-0Q?h+~&AX3&%PmaeLp>U0k zTP?W;0cD6Kl(UjBdz8p1-Rq))rf#hBO4tc9vl7ZWT_Wjrx>013BzBGMW|@vmT{cbK z2y)OMxW>hCq_xqcyPdAH&RZ{xp-e_86&kBH*4M`4X}l66qHCI2l}9#OUK&$RTr4#|z*4nZn$HbzTl?Zv(hMIZNy|@a5 zy9YU?Lw zCrOg3UKf~&FfyQxiM8_H<+B+&OWtb$-`FHc?7S+b)!Y}PpwsJ4r$ysE)P?trh@evF zBxJk@vxI~OL_ieJZPX`e_R=Z_DMSeXEKm<=r8;fJW4PACOB|@Z-G^!4YD74B?JPkESAK5w{tItFD-8|5 zQ0>WyKoG!sQAo}+5?brYv~)#4%7mE!Ul#aS>*}TnZ`U)UZoiH!&TwI7K@9+H&zBb; zHYP+B3b+KEsi*)T9_vF^oi*Hn-%Q~iheBnAA?;X#}`rP$i)ayR_{PC4n zw|kwHG~GQtb+_*y-@Mkne&fQ{TJQObGh)bddvr8Dzm%R^>5pf}ckg^YFWlwx>w|vx z#qR9W+q+qRe(U((e)H{X&-y3(&ri$G`de-~Qo$`t0_t z-S>WV+8dbVzI{HOy!!fPCDrWo^rK&u-OcR6Rh^i4FkD*OSbP7+@9#dI#Ji`LuD-Fg zy>s;9Xyx4I^XL2fhi+xqee!&A`_{=`$K8mCHypE%o6p2tT>!eXOo6Wo3 zq{{Qz%o~sdwNa3u?j|~2?kR9a1VwCF^}0Q&Y9Y-4Rqe8VQaLWm3WT#H0tI#Lm?h~X zIyQM#I_t-?SsX=auSZJ7NtAS==~)hnyb>X_&g#e{nr4$30g|FD>#%p;+F~|otc#)u zkt**H1Vz7vJ;P!SP$4gh?} zKY(DCYpH5!Kd#`Sy&zy*00PV{wAC6a1ciee4FHkb-9}6}cM-jJAvTqQUmb#yiU5R2 z5fM_TfBC{iC$PxzL_`4ZeNbu&v}L}6AXWeusi_FUpxRy+MpU2_kaBHo_DkWvEWQ`5 zvNTAW*!i$VDTUs-L~EtVvjg;qFI&tg21Po9j0QO3!Ecp3?E~%9@N1 zkWDdvcJS=^&W+deNzqwZO`^D(m%!3AH6zsCKls^)N8{<$n{T}LmmhuAD+lJ@-RY;l zJa$T5x!##ov*q*ar~A`izJD+4XSx^HM`v{-%S-(%Q?#k~jz=#J&Nf%G(|LaU>;zSI z`R!M-jmzg=r{DU=tB-#8!F;MN-#niVCX=J13xDUkM}PW*{m-9nym6k=4LEp)H2>P) z`}*PMFUALlb?LHfc=+TX>zZ_JC7t_uF`G_iXM2Z_Z@*YxUmNS{(UU!rsAJJC&D=SZTN56S){q8Rwi|Esnsh+4mdirx<*MJCt{qEC){U^_#KKaPR^dElk3#?VLx@^nl(X;Wii)e)F$>mpehN~Mt`@v7+ z7@j?X-~NNEXNTjY-#2N;>*Cs5uYd5fUp;;B0)>y~)nqcQEj-zs-{=g-IseQ5<*%MS znV+4(yI+eAPHV?-d{UM*yF#vCUKPs`!-Gdh&U2haJLiUnXT@k%&k8s?oo#Izt)REk zcak3;kC&EuRb8Lo>Zjd=v^qH*CuxktwQJ&3m1Q%xH7Zq?)Vj_s+$IZb*x>@Ns~uN| zGMQJ6^&Tj%Y-CU+sZ~(h$_gV2gMiUSsM8fh1)^rNS(;{LSNGH6l;@+0EUu~mzVXID9^&Z+nxX~6CD=*GJ)1|97OsH0ssIoNRHs;jKP->h6MDz z-2kwir;9JWn6L-|_a|+LAc~P$nyN9eQc8!MnwM%L0YKEGm9st|Io|t_P%gsOdxeC! 
zSTEX(vigGUMhcTw0Eo!52SieY?Gr)Q9s(e3lNT>=m$oJ`yzPLyAOf=}Y(E_PmU>l0 z3WR+Vo3H@%G{F!PYtVlmB#!b@b@*K^9}T<1QTU z4bQFq@PGOsO4D>po2XA}a&+$?jh8U2`#hp+`WfsU%{}PD!?AM*OUp?@_;3E?v&KuB zP%n;Igi>AA#kGr@#q6|~Q8q{}URgWayZ_t2bNOe#eDU#z4@)tCTw6aJ#+SE~qw(ao zf9uUxUb}d-zuWI-SFWAk*($fMZ9o0=*^j^fkv7rO{c^B%e6W{4d@?<^z54Mdhi9Wn z6h-4%GjDX9EFa!|I+?=8MzXZBq*XkxYq5HLJ)4)jw>M_S2Tx}2zOhsI=E>9Js__6M z9%2GuIz5_iZ!LRot!?61Rb`oVA|R=4+0>woLPh;pStsL(9rUr6_GkIDlMy;!7orKZ zcOFq0AzRmoNT8L*C{CPjtZM`yNfWI#l5VQHuInVr6k=^FplC846H>3+Z5nHxjg9u~ zy!VI*kv=*)GLbeEi#Up@vbBgzr}H?DKq;+t7AM{T#Kg`!<^U5Hd0D3x5v}#YJm?L* zc(W3DVCVd7Hi}GKmsR7rn`NWPc);D2VK0f}Xk|48$&~KKQ8!aY0ie&)Zkk56w#9s2 zx+XR{juI2a80mSDg9_NRuAM{Axw8N$&MFqiY(4APfRM0{Bh&3B^Sr7X8>LAUC(~jo zfCPMYJY(-P3Xp1Q$0D6HQEO|i_BztmiFIt9iyg$Vao#n~)wPS07_^2j@*M*#L1H1~ z_IP$})Fgn25>zZGq@bPWEy~j>f`V-nQC>Q~zKHRJHWZ=7?!&zmeZZQ&SOwbKaG7H*ug z#p9mQy?v|_bi~$`<0F6mre5CYZmf6jet!4PgM+orrB~kB z(K@M$X8*WkrSCnv|IOe3a{cJw)(3a9toz>6Psa@_EkFCK$Ez#wo8Nh*J{Vh3>2h-L z|&eg97$#SxsH!eHRfrG5NlbZKiR z8T2+)<19b={Ag{pZ@HZYrI(k7AbdKVD+8n1 z?BNr8ey0QMJkZYO;Q8KYa#pVIERQBdRa8w+zj5Qj(OHh-y?29dWIJ3HzN#fjBV9L6 zld(<1%!DMuN!kSh{BTRg;ojy%ck+pDy=k7 zS=L#%2TrW>1}HBo@yy;MV3Ai*Y%--It+P(YSxd^4^P*`Qr6`VdU6zP!s~Ui-y7bIZ zY!Gyu#Z6WAd)|I@T`u(ie0q_FgMI5j&C`g#0sv8Glw6WG2BKDl;g=dK3C`nUm84(Q;GN9JA zbz-n6%7dm+8rN5sjWOay*n4NKb7fQavBwCdbxq^vd1)C`2Vc&YFmgjTm9&U&Km}S0 zRFE6uuTAt8=~z@kwz`_djb9d>FJFW?KuCvfR|ITP5CCYuYFGex@nv4>CYj}>E?>ki zwhKWK=RAsNWh9stv<`a%ifFCEt1MKh+@2Wi^!130psa7$Glr|I7j1NCPM0uqNrkEt z)|%T+ns99oJgzuv0TBsVT&Ne8FP_IsCVXLaZk3e`f&v~x7jG~sAc!ytihvU)QkL0! z4XYFnA9vz-``MtLFz6;DsLpTFN~vAOrwJA>Dyrc0CFb21^AXOXxDt z0@{1wHlKBY=pnTznIdKu_M}aaB54yLc<-H#4H1GO;7Vj%O-ISfX0mb3&fl+^)3g&^ z{pzbyd9G)+nap=zP)xmkmrOi4oXn0VAbj$)FoXEwjmu5RCojh9J9hWz#mP?}h|ty7 zubk~yfBD0w2+`Q;vaZ+H#VG9cqD!xA{`Nn(o@D=FX|?y-8ynA`9Eyzp%YXDGo%F|h zXMg@DpH2%IET?r*ot>6{`On`gnxZ>g`tI-j_H;Hf5k9y-{s(_}gV=6gSn|ze@8RPk#1SPr5_+n`>*r@o*i!_1ACO z!u@Wi{}+Gy$)g7@86psT?Ms(0U%HT6`~2`^I85iWs-H%f=uQV6)|#p>ed*k#i&y@| zpZr8o=k~*0@$&e=kphmEV0^s0w>Q3Yb!BU_^SA!?Z+!T{=jYB{d+_jNe0q58((=4G zSzT3UqsjdAZ2Rga7<1?Id*e}c|MB?glL=#b=k}!E>o9VZ_11O$`1uSmvh33=%bRhU z!=0zc)1$LNFN=FowlcI@H7`!jCe8j~o@AzI=F?fdG*DF~)42o3H!fc|p4QI}PZ0sV ztSk-K+I%ujJ0^*Tjdh2olh{z!i#usL%j+mJ&hg>#nO0_=7u`-a%}W*OUN>!QDFE7N z)l?3gC4)xmG))I08L(# zHaf2wL{vmY(PBhP>viXo zX}{C$tt{<7->a*-F^!?9%Im2nXB$`7N|6`cSm#9;aGKZis`eh!tSinF7-t=OrbW(< zZ%h9OQ4=Vo46{?%3R7A!;9{P)(*Ku(|JU(*fN(aqI{pwN89ZSC66Q$A;&;IiwXo7I zena@H)c^^!Z3KZ#=CDh!Fe8h1jdwN_mq19_3>{%ghk(%lw!(PM@mT=avxioRv)JeYQ}V@s5W*V^2cbg8jyN>o z00$XwdrArbh=j^E5)t}!l4ud;hl&UlqOwh6$b<-%C{2Z}FbmY3o@7~L%6$QZ=AP*< z;hvEwNtXKM-ifPbK$7&AR@XO_SI4_28OFN+**$KflxM0CV~H&L>%~ zfAjTo+|*S*K07+=rDpr$((ao%#j}HZ-~PSt)}zz$bl&T& zFKrHNHDXtt?!t65-Pl>(ePYvzTif0&ayve#AKe?H)@6S1o$r4AEHVYZ=ambZrr?7IgUHY^4eUb*{0;ro<4o$jq}eRo=i`t;2ceb*z zw7SwoSG@ao{-&*p_n!Uq=ui$1pXH?yNbcR;JDvHh)xl&sofr9)^Xt=jG3<38?Vbp@ z%*3<8-MF@1mVP>)^@rJ&%UkoZzIFHM$??3bWog(wI&yX8O;7D?4es2YoWGFf)5cmq zDMs_6QYf7y$p+oO_qTrMr|*4wdNSTQzdE1fr>B$o+-n2|+RuM7f)&qedGtGvKa3N9(1bm2}XUC$*gD6sgt`rdvQA&|k z!Ytk+ARz$&2?B+=+Gg~#NGSY5K<`C>LA3)hsEGm>8%PWZK6fAq5fMQVB-k?M41oa- z=O-aCsNh5iM?$O758-kQ2oz%LJhLbgY2$a;2mA;n^rlEqJ~>hd(q{jJKhJ=nmw2J$ zYX8n6Y;8!2=EehnI3Yp+C4`9Nyo@@zQp0@88wKN%`o}u@=!W{}4~^kh-#M?A-pc;hA9Z`v zz2`q&*@7rdCWn)2Z(e?O_feHg+Rgf_t6%y~G+0U}qY08ubV5Wi;;bsScRDK@=RbJ= z*}wYdf306K zn!NX`gMJpp+J5`(H-72Q-gx`c#mmbd{_x|i0a(y)zr9}7_VD@H^rUg)+3aW%t*5Vk z>&qwikLL%c%+TNJel@xN|M{a|&i{D&+8bA__xq!>%Be>W_jmWFyH93Er_f1Z&`VFo zg%@N?QIslOP3FZ?Uq?hIrG5Bd>`VLYzjg(%_vuH^cMm7FZWK{n`Z$W;yu9(V+0(3_ 
zy1hM_&4bvzgRx@(4=RS0)umo519s=rsjqlxWw_KIiiFvGu8`6!TU6!#>BXRv0sC=L zE-fJe)OFR0T9ZZK0NPtNkpVMK+E#)n5bD|^qKV@9BDc=63xjTl8HK~Fu%K{K6muw% zA&p~OmP^AyS=7v7eQV9v92^^k_I!j|<%_vOW{j4Wm#V6ws#a0tEiVmM=7ldqePeai zL@5xOIBB)IY7Kl{9bG)$J9tvtc`Hi+NSoMt$H5zu)wU7<1|UtrIRK1J+_01txDdR6 zC^U_!5eP%@k&cWpO@7#zBme-Rgv-R0_Z&nnJ@S`JV8Q=Cs>J2F*|NwALuV%t{m~s)K8`(996W zQ4~i2#yTs2TF16_i+SOlXk{pPA|XYhJwfonJ>OYe`eK_2>GJa-fQUl8bR3HiQ4>OL zKvb@-n<`^!9J(y*S(qEpQD7z=lPCz3nyl6CJUN;CzyHa{kIsv`FRq7CQe&~ba_dk2 z;76;QJ4;I|Wl==k_?_Q4c;#noZ-`DzIU4y;b9p?TRB4v<*4Anp_0~6A%dP37M`usR z>)WdzeD5RK=k@KLuW0R7r_c*~S}KiY?%C<{iQC$VVhwpUd;Hnd#1KaiT=n3S`8R+1 zX1CvY|J_Hujv9^UMPyf&R~PmC^eo(aO@IIG_jlGdXL+@|Ulb*@+Tc0VWq$Z%_tuRU zMtL|nJo_L1#iKZZJ6Bta^5Nqr(=Q%|IQIQccmK&nr<>lpwXu6}e*4z-gVVXE`PBMy z%$)>QSF+K(diMC@_MP>1E2@v?OD$asD)1ucL6~{>XcU}V%)<5S*rNESz8t@BbM@Ml z_~w-^;rz2d_^lf+-x#bcA%w+bIypOCOeSkrt~_}*sV01UR4iTX-?%<}@1tXFoOdTX zTkBDr{NTrr`rT}Gb-gOX$@#1*T~R=kiV(M2y|fkOHayxrzc`<_yGfRq$=vQ9O^3Z~ z^wHzR+%2jwoj|9h&dzc}G@05?hZjYzG&ID7z;JP43lGCVHn06WcV+ER5h=AOY;zwY zRHUGBu!|3IJ0qeXUMa91jZV%-_42R{%yBD=lJ>#rS*<{<>n- zzneNwdk0f(io3Tq-@5;BfA2U+&C1F^1madJQIQw%qDJGz!zb0|s<9TH9iG+f2mS8C z+DTpyy4_ixADm1BfdFi8tP9bzX9s6TGcuZmw5vTb1>wsD7@z=3;5d#@07#Tlfddkz zQQSDDS;PZV@Xk5`P)f&fOax_7qf*+as;nnp9n1G=p~>VnO}rEWL$g9? z^m2qt>(S+6KoIHD?b%$G5dyQ)L`I_`l&1Cfa(lpxHuxrPh6+UZGIT{kNbJRP5Fda% zHYHtJU!K)?S(8JA-UmhGy>AgAQdQT^`?|KVL0guy#lm?XLO?_Y3<$!ZneY&skn`o` z{`u*T4JHo>_!4(Vm&pgZ^oTaaYDky*5dd7WOArA>DFcKd1mU2KGCE6>R?%R9>#HEy1lzU{W_Usa&iVbB1sgP zc4ztG@$vGVd*=tc|Nh_Ye)|`%jVI;GO18ARw3s-lt4=#ReX^f)I}AjSy#L{mD{wx~ zzw+kIy(gnS0Kgd7Jvg~>eT(^cHp*S@{`mVJ)ecaZ zDmX4-GOz0gM;B+amDSKw*@MUDwt{OL<}dw~Kf9O}-}%mOF0JZLr+s+7X!YAWSBLER z^n8j*a(FyZN>=*G#aWefU}>eZyL;SOicFk#Tbv}dbKsoMQWH>OP^aU0zmq00F5l3v z-D{V#v;WoK`m0$Q^4ZDn{L|li)7@yBdNx1*;76Z;)Gxkz*JNhvg`v%Tcd2vm!O3)C zFCNYg&#Iq#GYQ)C`>pvb|K8jCcW-ShuMBp!*Z%MiKgk!1d=6QHM`yCz8ihb<7Jc~H z-sxd307;6452Ve-#09bQg)L^FG(<6Vy2*UuC-X7{hz+i7EPLjn#MVl4Uqu*m=e6R&2d|_ua=zsU*@D=XKvxSykPkB#PoF9#7_@(ZUK4 z=~?aO`2w^)o*c86Ry*50o@zrzW8vlQwT-3a&hfZ#%9xk}u+pfNQ4RA&L;?r^+R70E z0kL@YAnbK)3~4VeI8L%CK&b1INV8+DwI&mMaNO9p1(CGYWmSW4)A&kA5SkRe7a^sH zgBW8>6afhk`Z|~hb)*q6j-nup07|P6f&?b5g7cn*gI6X}ioEwx6a^oEfSK6`G8CNm zu5R)@$OuZ4qUm&IjOHN01;Joz8%2>Z3V?dlv2{Ijg?Cn~I0Vo}DMhxblQe=jF7m=e zy5H@(TH-iqx1y@7HRySf*VdXS5eVAoB+0sIivn(KY%DJiNvqvw2cz-1GN!V2eSK{_ z9?i;QAyBcZob|Ji7ixqMyeOk5`DwW*m6;vy?O$x4roF6M*u`{ocC?28op$Htd#|Of zxYLRldg>a=}uelb~E?@vb4mGxysD($pZhC@IwMxCFZA06yFX9YtXM^O^TQ9QaB z+uWidqgAKXD#}VniI5H<00AJfga%6S6Yo!RVQ-*)KLLVbW2B_Z!yQetgf|&-yU1B%0DzPtg0(?s*iV$A+Pbr8yBA^hQ(pp%Y_l-0!EYCZi&_oIa zSfC*s2k#Z>IF6A}07#LFD4|FIm|1Hr!0dhV7epkmkCkD>P&lI%5(du#(A0Ye?|lde zgq}bENqOtFiHtI+DL6|=9N0ur2u_(uD>a`_fjLXF@pNXbmmtAIZJi>D6XTsD5@%fi z&+N;E_0B~UnK(6Zq+?2yVM^;NByrr}eBwCDvb0*%QI@W6U0GURU0K=g4zjY6$4?*T zRlR-X>g^Ycci(yE#@2ej-5-s{id2@y^F_HRN@Ze`rbe0ffAD>~7-v~~I-1+y<0S3( zI#HBuU%Rf2T3uVwicgMCSD5s6b@dP0KIFJe6-dF7T@$rHr?~VbFfM&@Zpm3EP|v!e6y)mh|L0ke&T0puole+ zlFPS1^Eyy?i5+F{oC^TtoDa;#M5Mr16%hsrp4kQOLl6jcZ7l~;(I=lD9h@#AMZ(hW zZ4a|3AotfhpL{TT{oD2CtvmD4$_R(qG?@HRu4ll|` zL0-XP#)jB2cRO(J-pG(z3{@$!O8g6we`Dq zZsvJ@^Ts(E`tDozmzFy#{oXsfUp#nn8Zc2BQ%!*-L5!n#W7xg_?1B*g;lF+N>WaMa z;#xVbqh)9X^rBkd zSP3p1?8B99I5?W_>+t%`ImJRx50Y*NNkpUD zTxq9Sl*aMt=)#7oboKhyDrqX&&&#^)6VH5nmXGJWoz6j`eeKRHzBOa+CjQEa40Y#u#TqyOqYNsj8BU za!!E&TWR3QO#n#*2|+8x0i5-SNJ??j8zU?r!k2s6EK8c}Hb4-jIMTIPZv!ETgt~4z zxFLiPd|;8PsE75%t1^eL;DbjNfV)B_C7E;pePs&)>mniilB~kyPp>GqVZkY;9?E4 z;AZprv%|xS+3e@O{WINOFKu^=-T<8u@1CEx($>+*pF2?KYYyEz2I2d$$17F$Q zFP;z?#D>b#u$mE(m;p643r_^amrS8XAZ&8J0t|v61WIXVop%9=8u1VTMS{QV$w5>K 
z8|{dMKtO~65D5Sz1SXLnf)twdGBuZU5k#d3JO~I9DpY}4Sb)$v*CHeVLQn{VA`)Dv z02LrMr9*s)uj5O5zleY|t6h9vYYzxW&Bs8xyy!Q34rCAr!oogyLJ9&NKx-o+Wm#jR zZXqHOa|pt<=b{c8)O1l)N+p>A6?IJK_RgQWGW^De_y10NZSC;*;k&@4nxTS}1(B|ERZmBcpWf+LhDCdru$kee&_in?HN+@u$1Bb;GUI zS|Gj_4~I*;PY$sNQyKl|{Ls$O)mWYCG< z`tkec$Fr}#apURJ6H>Iiqz?|ypB=(0cVy6yt1{9AyZg@;3%jshYH0%vuMR)>{Qmj* zET4l;>?G$XiM639()~=&3)jh#n%!*b&IqE|obMfUv;JUh8H4|+Us-`-ada>~Jj_)j zn_HXJs2E&>vq!tDx2|5l7p616zSho4{`CC^yPqGOPU~yikhS6Voi%OJ?|%R3tboI_ zLW!C$?CR?B#bh2QsWwq%osOf?xG=Fvj1q0C+6Gp+3m3Bzg@^qti6RIgIG?nP09~BV z@v>*y$3+dW>Wzs> zsXD;JC(|^B(FB;?yqqb5yj&cd!SxN$T5qf^?d~0bmrf^pw7Zv=f}~WW%DInIm3xVk zI0G-91+kMw+9+#XmsFA^I!WSmW~~p55CAkd8?qK@t$c8GDNCslQo9!yt`1S8XEWzf zL_lAvp#s8!M1q(7f@GAdU6N=dQG`KUzNiopf(HR#`zVTq1(~BLB1L&#C{5l6MAh^W zx?r0QSYh^oMT9gVOIceH38DePxhjrR@0^LW1oqYgpi-nX1urI|s&viZ%_7WP=T?zw z;+t`nu!y20cs5FT?*j{J5(Z!lAs~o}bQEbKoX;1=s1QQC)k@;n7=tKElQxtu>L@aT zQr8utRFx&A(l|=9c1mKa+WSydwIWpmf{?AO0Fc7q9V+t9ezCt_74GExWU#llzPWny z+LgnzgRRZ0KYaT=sC}=KUA?mXAN~1Xt!=&kXxAi($8a%Uh@$1qm5NHsl2=8m*JIXE zk}fZ8osUjio%;Oz1Q0j3){`vR-8<;E;;PO`so+8qC#$Q=pM3hs@x}hd>Dkqto$lIt zVj}W99-Yso^W(#lqA1F$)UnAIl@jL+B9H7mh8iF|QgAf3*h-&(cm2MnL*sOMi5hXO2fXh)pND+|=czIkC zspr*gC?GyCQPaQ!g2KRv%DW0#ob}E(2nhlN$Cev{DPjW=B}G6DTCO2~UJf5k`%8mh zy_9|$9yJ0HG`Lgd=N}XaN!OlhYXQl7&nSurNSH++kVYT`LPn(6Fd!&3pDa*ojH62O z>b+EDA-|89F z0t`>yxnC`O)J|Xh>Sh$n#r}Nz?oNAUcqMHa)U)&1Vm#9c{n}q%9`-thM3G-v>Td1q z*rJ+T%szVS$>*P!gH`3~@Qt6jwYu7Z5PnMJy$>(O=cC0e|Js``B&}yl{p|WyI?26> z^~HIa_th`|)XSEui2Ukm@6O$w^^Gpsh5k}6ACHqrzxkCf?H$agi}H<^T32>f9y~bF ziC!6Y-}~^rQ8-nqQ0neFD9sv+{|7T(Wh^+rD}oS#)yVS`C1O`>)``s&wS z`~4sMaJCqJExu7#>Dr5TAOF$!KK#M)wHKFezqF-L=aU7pUB9tyi#my-&mZr9`q9Oo z`ZIS@jAo1K#>-n*ujikBa`^fElN-0T0>arT3|DwK7#tl>pX{E-S#ohUc9sFr2aXKc z;ARMt!o9UmAD@v{K)z*EWXw`8MHJ24>9&bf z6zQy!1RGww)h(+!1fQi^E1Aw5YC*wJ^R=D6iQ`_U#XhX9ES04{E{Y^hdIMk8K2j8d z1i)@4CW^Bp{_OtKVgWa=4W1pG#!=MoCzC}Ltjk(j2uth1L<*_hiL2UrZ>=|heUe3r z^zq3lilnh7qTJcKlsHCX6tv=^^r4o?qMBDuM1q${YeiJK5NXiN+=PaRLBwB*zXHOX zBqmOxw4Eu1Ay}<(KFtwHN1C)Miv^)lDhj@-3K#FZR)(l4*8uha0umbJ08+DL7OC?p z&Jq)8=X~%S6m#GrFXK2y5a*m0fMv zE5ZA#g^eP^4Ey`LE)>_cS9jLeHn+EKUc2T)I6NRm=yrN5>+93mVq+!b zj7a3!2WA!k5hC!`*3JrvRw_x%<ROhmc7JPg=QN+WH7OTQ_;2@NhsRG2H+Qy^)Wl1jy>}kn{rT4>4<8LS*Q0Lh=(B_UM;9Ag>hyTpT1vqH zpmdiz{nak(qPNs(b(5XVN+;Rj!C0qpUE5;3P?3p^p5_%1v|E{SVLA)XPN(fOHim|g z*;wsF3X@b@=i?+|!3yN$Xuhz}Z9%WyA}u!5y)4dFx=K+4e7rcls7gB+b`y<@y6$vS z;Vf&_lkq%ASYGXD6P-_si%H(;rY^8$VT5U3UCd#*Z%`=&NMgOZoYX~ec0Q8;!+v~x zF<)BlEy`L2QHqKpbh@$iNMKoTw8_r^0RR9=L_t)1q>c4Np3iN&OB_N~+oCS( zTKbVL%UVZRBP_4>0d$ha&ed5v07yGart=~)N{Iv%qa}{CZmiuz$igI-Iw)xjUIc(5 z6>ON63-+!z>@TmcR(Vm@6+0iNX%fc(G#gKK5^G}`6*aifNTZ0r%)$^r2vAUhW2Hfx zNRjfjBTWcoD+>aoR9#oW24gfLkVfyV(OQ^EfEgGA5GYN|-e}V>aEvi=6p6@eS}3JV zq-|w`v&|J&EA+vOmk@$6nzRZounV9R2g?Y~`Y1Ay)y=;^S=E(rtJ98j#DO75;6MsA zY3F_5P)Lc`>^=|`D?sOJTh@VH6q$Z+80*N|K+N7%5~|JBr9rRr;~)GuP2-nexZ^zU z?mp}FhCcYLoAr8aLS-WDTu9T_WHt-lO-A$6(^0?E+1Or7G194d_Qe<7Za0a|Cm(;- z?RNW1gSlNyE=H%PC)&iuL`j@XrlTLeb21r^t+SikJMC_FQRVG!ii)4@?{99d{mCD_ z^Tqyyc9N!P=j6%L>0+vobQJjtw26H10+;#;DMdsgtQ0o6yv7K`L3BfTB1B*W0A>Qk zAp}=@4hW`+8Z^HyGsGO20}%pC@T?RHP!m^cd_;vS`VP;UoEQExLF6#&o1w;ZPku>cDgqMQ? 
zivS}kU~aBKu3Xd+E#`A}MWiD^@ZLG=BBK|Rsa8g5MOt(110Wflv@#E*l?j3%RL(8% z?DG#N|J}jfzx?N_6K1cxxFYVs*ItYIt9?@2I*m^s{yuYc^NrWz61Rd`h7OMCarH`jMoj&}FH^!4lc$bS6RK`70y|6kw!#?M~& zP#+(UmzUxFxAL#P7N;fx=1)G{J$W{H{k5wro6F^7(O&BQ!na@k@sFPw@S7{x=`RtA zl5nGAyY zk0$MYR-Be`tK07+g2X--fP-V$+TvNxrY;02F;rAm2$p(DWOU{H>vz}R`SiG~+`Onz zIMNs!?6xu@Ddr^y34ot}c9dk)?VBcf;)5&mN(pv4Nh{G&noOqo#l>6*^MmuMkhBXs zJFCxj54;O;q(@`ByrQ2SkIT{pz`c`20B|*+0?(ij7Nw6O3PF-sO{S%-tyX&2PZqPP ztUN4iq>V!Xg-HzFg;ttMz`(w;B?xZ}B4ZS=UmkRcU_PILh%!()j|!Z~%2FCaNR2uj zk05wbM!e69f{4;IK>+Optkr}X5J@2rLAWfFZEE6`CJ~5r6laMC8|y4E+q$Z&8Uf=p zQA!s@#Vk6Cg7Z3#<&vf%&U@g?$c-=)!DUII2nexoU0bac2k*VtN&|p*A#jMaLV)1d z1&;*5T4gkXM3H9pHn5Mc4pfmu~bsboYp(Iuz2r{#X_dYaDXGAQl5EUt7$bqnc zwboe|MX~sw1cX3Dk~H?gI_uh#A7HEjb@TH7jCcp6%`-&}^DJRwgn!ODZ3d$fzibGwlTiCPIQN>n5#^ z23lENeYSTr8_z)Da5=gdjVjA+O^ZcEMyKt5x~M}4N|~an%i3D!d}X67F&(@`HyEJ0A^@5F#^niFSh`E znK2fIrj$5vK#ygW1ESH2gH{b~L2GHwasbh$QE{}nwAt__UP!bCW@bWQ&BP%{;2;dn zd-9G@1r80|lmpiu%hF9JHp(h~UP5g-T`a8gV1uY;_)sdirfUF^=n{s9A|iBIX9g@t z1Pr3-5|Q1|DHs?4sc||fw6?zg$%jkJgAjtP3$2uQ?8TvoQhGiaJ7)o&-*S8qP|i!> zfJh-g+U}^99!=-{Vdr>Wm9_ip|Lec8_xSxEz5U_pVChc3Z`x~lwR`g5;jM4HtOs=Z z$=fm$TZOd0w0dph(c7O=RJOb6^n4Vxvi`>E%GITh|73r#M%V6bEG>28I8i%G{To|* zpS_g___<$r;dg)Y@zMTb<;vhs|I)1=e(&R(x5M$#bg4j_0 zPOE9D&dv{7y|mZv{lWKtaDIBWwb5H&8iFY6?A;r;4$h7jb9?93)!Noc5_LKujbiVE ziPDp^3ulXK+k-{v4^L+R`cFRFzrDR2n>0;rcS*hWyr@P1P;p)!qYoCAgtTPx~yu0}AKN!FH<^aa!#&YlCaP+l5`_jo= zm9xFVg)|Xiu#R7TW#`e;lUvuf7Nwm|ibY=Ros81dxZr(*VQ23c7o{!hMPRhnMM-jH zXLY`)=ZkW_s1A-N%n&Icl3B4hjTC593erwCpA~!Ou-=B7SGTMU-iJ6gfS4o@8SR5Z zWtIp1)3dQA)oZIb)stzty|FxBCM-YG8ycHFfEncfkXKm{d0@PVWOLOtZK_=4hhEN!S#TWLXlt5TR1kZ2T0F07j9X z&2sU`ipxdm8-hHeBEvScvJ_arfhJJKkWx(zz0xE=h72iXod;1tyq}Ln7%E~Z8PLS0 zvi9WEX|0To%0*Q`WxZ38ic~Sni($V9pmh`tmzG+c)(g~f?530P`Nhmb=w-dxA}7*= zW_P*6PzyX@dFqY6%hl#KR;ot)Y4k5VE$oP(#nF9xL&atrfo&yVuWAAN+3cT}| zd}dTuB%hWcV0U@lhoDfjGVOMIJRU6y7sZ(^OZHOQngzWNq3N9_g)9;R5CslOYa}Fr z;Dw-(j4oek(tzcfavr1rTnD@N^r4&sj}64I$lht!37X>K7b+&pin7o zd}RX&X;syA@KqdVul>}2GCzK=c=pMaotIg`!w-J&!Mko{b9MWt-l%tXcV7OgZudA) zt3RmfNjZ1RJL@oP+3(%&57TR}+#c;7+v%)KqwZRF^A3FJ^}9(N@7~{ERPNrZFGH?D zXW#nymsYlxzu5n9I9W_x{gt1+^V%z$i`nSse&v6Nj z*Souq&Jxp#(+ls^{_e@LGGG7lt)sJxr{QGh%DT0S$DjR4k|j~9+snOgU%U3>w{~B8 z_13Tdqwhp<96UaFxVv>_`GvbX_dkDHCp6BhTeq(*4_d3seQ$jnx5o3_0vw&shP@6E z8(^iV6DL^~tu43D&gZk-2Z4j5v(~C~e`1o`VO4l}Mqq5W(6*I%xcs3rF08kXrX_tc)+hqN;GI4_Q zS!F%6TiORYI?fRR4Fm_v9lEw22Wx%sWo|W6S%rSP75CHC!Afb*&nLzC#gu_kqoO3a zwY~Pk@fSyD7v8fAzSoIjV@9J~0z{cCEu&-1^U&=k^Q!vf@zKUo9|2sDi*sAInEdKEQ!dk#k;aGDchP%0<ph^T=aAfl^X<8BF;#UlU`G$^eVfjG}f zk@G%aAS7>HmPCpY28vB8?NNE$t5eV8) zD@7nsRaJvNXmWyOUMYmqItF0G;6hPVyU(8G^ZBQrf8IIh2KK_fuDlD*)|GWWcwbd@ z2%)mp2XAd18rn8DNFtFCK+x3=0RsoE6ryp#5r-h+y=P=*aCN1PsmqEm-ne>;ly0|^ zIF5Vm-h4i-Y+y#~0|LbGk{V`no@-47adaQ9XAg=0bWx7u?aa-voq+m zdoRBHavH^6ynr|x(kyLIeEDKJpNyV;@x@|6RpqqSid6H4V`e}BP(~B@ptMq9{OSu| z{ov@s|MTDZ-~H$Rv%h$2Yisw@r{6UnU4P?8?qu5Tu0RNYY3s_JY$fQ=_v>jnetfuf z{rculeI?)zz? 
zRySLJ<8OYs9ntCT-tn_XuYT*>X%x>Vlj}QM7iY7lho4Rt)=+Gs==h|#cSmimb-sAu zPfo{K);TyhxqfBy=YQ_a-~OFH{$lrVHkqE!!qx58#l>PYwu%-T8*A;fL!e@#SC-nV zn2cv*WYie`m;c4D4Tg4pzPEj4`L%D|x&7sxa(?ln-~M;&FSW;qyYVpNst6t|xR1a0 z`S`TD^5WHxe)RC;kB`O+*!$5L2XFzt^4i9y4?erNm{Vk0NxWDrUVdq7zOauT?s;*q zytG}G?(}?$2#W%u#Md4?hgL>`!)(5&Z15gdSCVd59Ea1>iFlqBwI=P6SioACfwI1Rqo|V;*b&(3pESEKpF z`g)!_7ofGoO3TV{Q0Ap|uD8^&btw{B323cM94Vq8V946QhPrimazmjeI@5E2n` z5JuquN`VAWisUk{52O@`Xi~A!Rn5j29RUghNSY*dWu5h;H6hoP*O8BOOb8G>8R8(` z1y`|Z7Q8HtWd#*BRku|wMgs{Jiy}=T@j?LNgRN>74xSMS6h}nDLJZD{LMBF$fQBRl z3G7_Z4gbJ9O^P)J$4z@dU3(-l8nsf^iGXlJzR?(w0twQn)d4+l2%&bKQBZ_!sT!zJ z%iAL9v~;SewYr|RvoAgg2T%9Q2%{(l1R`Jn1!#Tn!KHCJA5EM0pVGRhf(XWG%psr> z4qW?CR~AW22!tBED5W?sF)4!xq1*19Ehf{^#cVPI3Z34dDl0EOi4*Swd#@sch*}%x zgNY1>U`))4K@kZEN!Ch7+KId+)cXeNnScvPfq6dUb?c>>wf?H?`~h~E5L`JmA$S2*L!2224sMT$s?4P{z&gYZKvL>}Xyy{Pn*u__yEr zs49!N-R(-JuoKkra=TsHk#}Gst#y2SF}|4CSkt%|-@VZ_j_s^gYCPz6msf_X8!z+* z-SekM@gREP>o2rBt#*P*1_$@|XOn#A_VrKSKis;x!6lE67wti7&FK25d!pqPOvorxie;=Hbnibk`% zDBYrPRe&@l@O*Jmv^SO^SZ85pYhXhN-pv-3^AZ3$Sv(q*&cUFc1P@=FP9g!RaBzH~fVGK| z#MG4m5rxVNXwLx!BNHJt7bO5>=K=vV!?`w1Rw#g=LK40#Z_%U#5m>BuD2~MwP-GGx zE~kEYPTjpMeG&k~AdNLcDJ=*>1}}#IxU8=tr5GeHYSko&y$6EWXj|H5KS)SX5}PQ* zvGG2XWuXX3FiVrF3Q0^==`b(=H`Ob_bF+PjHF*c1rJH6;{ei1%Z>>V@g98@tf{BfF zfe@8agy?*TB1MXV=Om7VId~t(ktQVsjrkoAL?CNt970*vAXt`Fuir{k;JGbw>(C-cV%fs$qn4}#8s?{3A5&!=0{NcYk z|5u20r;`DLAUAqe<6w{CG+z|Kvo;Yh&dZ|L>Ua(+jsu4RDj!0U#=!+5MbhNx!~ST4}Ic7I~6o*1O=Ck>V^V%UWp-)C7N#1(>0> z&YoUqrGhXiZEH&kLD(400h3rMgjP!t0V#}==y_!{B54pXIzmj+q`jCg+U=ADYFEZl zl(t)d1{FXEsWDL!7rDCk<*&c-&96vND5VrBC5Yh8+Uefj=jRs(B-qN@0m@=lqHfv_ zkRe^HazwoPHH8I5r5rK*0(Iihh zt>Id{Kfc&KIZxAe+SVU`{P5%``pWA!&(4ZxC-YejfA7El9n|{dsCxCa6`MsTClg=W zqgin=aj)FlJR21!7pIfLc?U{^a}Zy5o!C2pAAk3|;VWC6L3sArBNmg!^yV*reR*vS zu?!-k-NR>}A4X}ivO{-Yz7m}J^zDNvA`>f>`{_m9&eYHRsoNiaa`d$~u79z2kw$Rm z_VQrR+rBnD*&k;eyS2S0mM4pRIZH`_6KN+}qq?@fG_!8+_$)6Zj(`Kq>bfeNF^Z^p zn88F80$*}()fR2DotP%4e1I8MgXg$=+ANyNQQdp5~ML#q|l)@5mw z*Am61C<~)ize7zMOoKg zE2T}OFf=h*6c9uS!bI5Aw3tMD>uO;Z3C=5R*fRu1BqUHoj1U=Q4kB!fZqmUG3!teF ze4hVolvV-yNu?k@&wl~{G_jh-pKV=GNT?x-j8cRM4exV4%3JLikpw_%0tM{Y7|k4l z1nX;5)Re-5h&D!;MMQ*x_lN|j5E~{90f+UKz6yZ^G+`1a)>&u$EH9EIb)fDah>}n%WH6*vy18}bl`p@sGF(ksEkP|z5`sAHC7tEr`qJ`Ju^6?}ma~<$9)yiX zW0cm~R@JbR*0meWN^O+$ep=?h!t8?x0x|*ta1h~3sm#`O9P6wTqf&KIEf%>#j-nKR zs-ozno!CT0Uiiv&x*Zg$YU^yU*5^e{W?_s;v$U$L_@IndT6>UotF^Jc)9dv^@KIz! 
z2#5+;v^K&B989E`5I~exQDg)_qeemm6Gbsf)and|%foz{r>)F83j`)oiZm$FT1Bx@ zpmd^(lH;h|jy3BZ2%t!?0Z}So(#Cs9A`{pH(WUs<*gY8-84(m>2tg^ld`em9C-neL zD3}|k6Km2zYYw3(3a!kds>-VJ4wOc%qVfDfgzaK3L?%v55La8Hb&QPYB?JbB0EK|} z9uQ{p$!GT;){9v~F#Y*o{_3r*D9P%xvv9Cj;6`$7dE?Q8XZJem@BGQ`v-`WsVN*?;t4|H}2{Ucdjz!=q^dckZmVqSm`V`ut~q z{xyT&`_q5!%f|=Dhlhu6{_0mwA0H^qio=h7_q#v-&goabc{lFHZr?sVI4dm(hjs>w z-21l9=fykkKYX~GKY9WyD=<1KvsMZWle5|8)zv#U*PiTOBuN4uKKtU#S=rp^$=TvG zuQLPpZe}mu8oYRC`Py>!_`w%k7ws%w-)@Z#7t8&nBm&)v&!6qZ?HHI|_}WYRkB-le zCd(U37iaVLKA8OSuYKv{j}AV6I^S6WiS_+2_KzT3_j&E{fn?T}u>0ny9)6%vhByAW0f0N$`wWVy5$IGMhDy zzVnOe#jHqEU3;F*ybsXt(8;b`g&pyOa+{E}%0WdfGEUhpgiboI?7$k(IRDg)&k}(9AiJRtyfS2!r{}95F z%ep246b2zyq==A^yz^0HjL}F~kf>T#(Ly#Iq$KJ+C+)8ma74UVZ)6qb) zqH4!vlrY!96%5XM=jxh+>+2xFcUyzniUMEdZh6p#5Xx$y3{FND%nWF=*i6RrUawVG zVOIG%I8{r#o5l(kqj^keRfnXNX&pzT00lipqz2tCg*=G}gD4}4Koca1A_e5w7_c>K z6k#ER;5`K2^lX8!#-=WwRYU^B%(2!SLfXlc){MBax)w)T;HcN_OORHpv$VWSv?t6< z%PYfP@AP~Gf~|J@^yqwPeJ~gdQ-vq9`QGE*cDLnOa{n%{`J9NSi3q+cB|!r5MuCNKu|R}Isj(U#9q+oGzAe; z&T|Ic1bW4Jc6Du;luBC}I0uM|^m9)E<8zP^3j-=Z;Sdxlk+Ud1?6Jr(ayXvhm87u5WB?{O!N@PqsGw z&PM0uS8o022cOsg2I*k$;>NX|X&%U!8=dZ__m34y;b1bYQNhoBd;4U6zfE?dXVO@# zU$nB$=JlQKAkC)_5&7ceba`tjO@=z@C=7MEzzBVC)|L=?!%!Fb_GbLChHwAk_Dio_ z{jdM49|eXdPbVwG*4EBy+ScFu!_U&pOlP5T<-6}c8c(Xy%IU;L3Q*DNQjd{FqjS#^ zM--drgMMyNsbH{AGy?%B1{CwK#Wuwue*Cws7F5v1$ zSCe8b&ILi#Mtc`rU<2R*HrAK>E%U{*@_gb3ZDYN3d#!wt=ebSN7z58M--MXPvjt*P z@<(2{tT;)Pb77FARpl`ys4W&+eo>j>O< zR7OSxU_}T>MoUpt+Gy`ZlO|2Ni2*g7+vn|gqP23)p;qjClBPRGBhw_qp7%`=(IqQ@SeSh9+`t)^)i$=p{*{Sr$bxJ3rUT43_%&qEHf8LY>!`Vs72Z>1b)V zbXtv9mwO^oS2+r3q6nA~Lf~nhUtL{#@Wt7c&6Y8WGvAJ*(fN2<%7P^#yS&l2Oe)R- ztMk!3tJoOZPPA4!cPfsfR=ZP{V$@75|5m^M8iGYQPn*rgnB>+Xd695K85e900bIC*r!lCgl8y(mVm-?MvHyE8| zsStSv)cSd)STjqjI88~gDoau%)~87ZaWyaV(X{^ifA63E$}fKF;L}l9OzzxVE-o&9 z^Ebww2(oVa*Z;z6fA^yw@MPKK$gpM~9y+UiixGKlSH- zuDhmaWdGr>f0s=7h5zgog}A>zu|kstogR%5I9h4dUM)^8K7aSg>4^_O2!2+SOD(u{ zWBJeh%9}|NfA;=|_dh#&`PHrCz4LZA0pS;9+FOkM55N2F<0p%} zP%m6rJ~%ILZ1=rl71P-wr`Y`CfAw!~zjX79$ItHE-P&B~tSq-YTL@g#?$OizVQ+Bl z*1e;X<7Y>ci?bq{P6=Uq9lrkN#?`HR-7Nn7fAKqI6;3YT+V$$>$x(l!-RreOY0F7| zfs$lhc3>g{EI00Kk4}y!C+D4&;n#j{xOa5;Gr#i2l^cV)@f((Ox4kj8+OXouz zspUad7fwfXcs6#{wPT31n$BibsPnuI0iqLmPjE81h}}Elto?Su(r`wq-tA(_0Do_gAi7QWdIQhE+7E}W~I_N_TaVB zx>=BEt(sH23HwW6QpN?>V7-tu09KV1fmWwQq)<43R7IhTMiHY8AXZham2MOr=FqsW zfe4-_JmEi-@jWkLZ497ziRxuxg~ZILKr79ly!Xya6aMnnDWl55QV3eBIEoaK0QlfJ z1R_#e0YGgl(j*p~^GXqmG&WE{a4aE+XO9qEUHRIG_-+~vhJ78STB!iqS(nDKMpR1o zdflv*O)nOet6FJP**ZxL*X$*X7mIec3yfu10ZX2_eh;U45hX^v7=@L`5O`(Sr^sjo z6-Sx?h)iO1mStp}!onQ3BX(W0^UQqN1+j$OPdyY4v(TS5@fh zYBm$$NK>2`Mbc_#RaGN^HY!OClB8*BG*x9$S7lMw-nr$KrMxPRk4D{IZ*6TOP20}- zUfM3oDpH068)K}i7$tD#oWMnMkJ#KRSVpi!i}4WzGE6B$=l&x0NC+|zdXJ`hEheCy`~P(XnoAxN{I08of< zsh%~IS=3}S3o9Z8N`eg{C`5uuk|c4|pn;+&;^3)q00E*_X{+P%Ie>tI+ShrJTU+@c z0x(>@di~YczWC(Pt?gU)UR(c<|ME|N@#xcT7X7<_`N99=AG|j!lXo9Y|6=7=R<6$u z$J-ScwMg7)~AARM^FXuL~(p$gQd+_A+{^#e<_N-MX$J0q+4^F24i~s3g9v8p& zZ~x$f?|k$-FWks>($?D2K*S%NpAEYaV*kOzNg>efs~1+&AAIoP$?3VPigp55u4GXt zv$l*+o^5xRuHJj$Ti?Euw9IsJu6@|Qe=uCSbMwZvV02zj@4xj#89i8O#Rg17%iA4c zl|-$zt+nOW(hvXS!S2UL{XvQ=9goysw%2c}D_h$SAD=yXG}4BadJq}??QgyD(PvM_ z<7)SKk;J+z{K_CLtjy*`@Zc@9S_Tk~PUoZ{OUcBst855Z*WL&4%vuq98FZtp9nGc- z=ivC@3=ou2afBzQQxk&%$m?=6nkuc*MCY|N8XQYEhF&`}-3S0%S?Ytu01_n2;_l&RQ!$NRwUE49y=zK#b!^D*GHlO^6s=@FYq# zpaY0v&FsDRS(bWd>#FcJSTD8rp!C`Ksi2w_qfR@mLlE|9)~aj`iq5Aqtu-S>Chc~+ zjsca)vZUSa60^o=KChHgr3-moo=g`Y$^oNuPSIesUArI%wRNY{S{oAu=xd519Yt}g z-A>Y0o|if@lgXl$MvGaUrOC?LveJgxH{mHl>||XN=_HCGWr)-z`G8Rb*awZMv}VVI z$X-Zeqj5J?!Gy@(MNKveM7*8PCS_45WDj9594svjv!s>8MomVB)Y4MF*Xa)i!@$BK 
zRZ$j2T~~FQw1e|#%&?WzR+g4GS5`L>j5KPKiivda9w8zvT9dOD#UWCpO_MPOWX(W| zLI@0?VtR3Jd8q9|$%wGMn7Mb6jW zJI6s7S&;w)7GXc1O(A&iU5s4Sg>%-6FtJhOQT^WUynEyB%P>Cv{&ydL% zN6)@`R9ru*{=vU_*iWK=^ZiGt(>u4X1;C9f=g%H}_T&M)_`=H1{L;7k*M}d!`_b0* zjR(iYKmQ*5Xa8k;Whwsn2truV4{`j582Z!~fkjbd}^tXPn zzP|GGS+TocrYqUijn$)z`PtdU%<_NpPk(EDc~vD)TG%@*#-n$C?prUPpC5nq>#u2| zTd~=F^8UdYcx`|8`=3UwWOP;}8eV<5{h$7YuOx19a(*^HI|vmW?d_}BEpIQiI{fgx z$1AJL{iW6SfAnBwr)M3_bGZ5P%G&0z&L_gW|Lpi;fBM2#Zsn(o&)?tQ*y<0rR)KK$ z;rR1+P98s-4La%5-NR0|UplJmyx(uVa_`DwF@1Rd$?mc1_KnfHbiUtB!?ZfDU@$N{ zoy-TBEb4ZKaU3OCGM?lDV1rb(M3IT9Iq7^7n=30z`-rwGBcoNMTwR~crxB^{FsrLt z2`q}rxe&)DO`;@@d{ryqyjYkhIXM`4%j;|1w3AFGv&F3LwbGS-@7d|Zm9;dKTR=u3 zyyVsjb5Yd8UNY?WY>@*%?RjO`IzC%;G83oDR^D2vZEdu^n9Qg1qPE`FGGEjPP*u4x zs(I#>CLg$ynnh7{I&C5-#cCoBtN=Aja3u!;6Dh5=p~lmX!3Q6L(WWkIU?EZsF_byP zv94^O)bjUshzQ6UPl7DW&^5E23~Fkxe`yBq<2vJhxcqax2Kq{5_{wQw*-0ivsG zt%wnrIXJeYn#QoIs6wM`090koAt*&e$~&vHa^5SYIf&L8BXPkqaPSfs6hUN^v!T^V zM3M6$j%Yex4BOrLqDYd+7&Yj3to7El6BTJ=s#0uGrCB^4Pc?$>wm^a~tgH^tr;}Ee z)pgrMdf4gus=jsO)|1_*)`vN}>3Ev8T8Sc5`pT89qr)Rt6_};od5Vxg5wJlfqIQS!pE7D_2@)jP9f&0!j1!G)jmO zC?FU@@0(T@5u`@rs7eb#rsL6geBo>FoizqXsj9Lr%<~GItF7nheC~r3Rh6VM3l^0X z60H>xvQj~yu8K5^uiUscSYEY3DvwH09Hp0)y9|xr7s)7v-ucjsf|p4J<{${F(cp=I zNO535g^bP%qWq+!Mw4y;{yK^T1c{8%LLrDWz6BE*MGA?$NAH7gh;9wE<9U-AT>8=) zMo{ylT^1c*K8Oe`O)&hj2aVZ_1OUGLP7sk40ccVpXgO5QQKX{S%!`Ud3t*FL7Q*0M z)0u`y1Y}GE$~k5NKU^F3SJvKp@~|cD=;-K=-umQMe);xq{)@NnUca`nZZ_XLEENCI zpPW|thkx_0eD!yJ>qn91gt4RHTfgv4C$+Dp%iFznulMMD@_+k3Tq`Sl@X_qM-#gsh zElko{8V>J2yBHPvU;oC(-}u&7&sBPSG|7hJ>$i74{_OaZ&-U(pY3u7>yZg8QkN=K3 z&>uox!V52MU%k=gm_I+57Gh2&Ja?_6*M9KD=$%h4Dp4Ji?48c4vJwNirIX#$_dnem zE_I&m&d<&uT7%hi{Qd`LqCbK%)*a9*ozl=7Q;`!v3&PVdvx^ZNB&lcGzOvcw^&dXitLji!)!MK%9JZZxRZ&kDRi1NGpLcwEu_(fz z9VclvE1Wjb(3qoX-pr9GKha8rKuoN<-Int%FLD-em7fQ=er4_1;iN8^fubm$Oc!2i zRn$r==fMW>00sarZHhohwf9WP}Agdn9nqN(iJB0tvI1K=hopA}IbpI4|JxgI(5)U^4+UuL7l*x#`+N#AeW@ zrc@09K?q1hl%fz&X*7m}xh`tNrgRe&P-u!onx9vsV7&t*b8@K_W)9QPFqo46-g@&g$GUK$$-@5 ze9%s77pBvRt=xkrkDRyOxkRBRG!Qa_7#jkN2?B>|Q49vXC~Yqm)pX&UuyziKT4@_e znK(8IfkX@HNTnG9UV<%x2%sWE9<^g0Ei4ocIMg=OF4UzJyd>T<0{{_h=;F*0 zE)QZ6L{dT`ptaTnAk55-tRDg^jY<+|EqAO7frm+j{6 zgJ(r4rAxo}-Mz=hv+LVy_aBa|m4IPVO)bIBdcU=t&0IYz{CH7Z+g^X;mGtQsrz^|x zbUeoZ-+1Gjo0~8G)^Go__wVmVS$AHBL05NLYN?NPb^h~z>Xp3APoEqfp1>%V-~Zz$ zSFiT(y?T3cJjqMqsNIUq!;c>ihwVT5&hDMty`5X@@4xlPJUi&*~(Hculy``PJjWHyO9KR49rl~K6lezr=v9HdDXBu z<+5xW06_tW!0J*jNn->doh{<~*-E-EIg_)FCovx^Scd1Yv*(P$`7K ziK1Swvnc8^R9dM>D~YrX)`bu_luiPOw@wo&9|9oSvTiDzf&^wJ0aA3y)DPnEQW~VDsF-c`Z*3POL0K_u~MFP?p^iWZ7R)U``D#sQA_~2Ys zJ6rEPds@sVL~4972XPQsCs8A#a|q50q0y0MfS8m#zo-ITF54B70IEwuk4A-ns7WPR zq+?UowqZ0t03ZfI6U7t)AqHk5Edpt(kTfZkrl|nxL?lSkih~OdaCu{Wd3m+f?uNWj z!URf@YEWxLfTV~}8q09w^cC?Ts3vI(2#nbH00#hE}S?YBvQ>jqbW$>z@i!s3W4)MDQ%);u+-0D z)$VksCnxXj&1a{seLgN71?ntq_7Csx|Iu5Ezxh{RJ*vNW@#}YPzJB}o>F)b~^4_hN zURvK6{`iNVyBhY7N1gk7Co`y3t~+t5qQzoSR8v3qJIpR}T}91ASVx8^IOmm7N2g<8NPBTBPLzrfLhZy_E?t;D9`DCv323xI z57ivXl2LT7!`mn|VQA;{(o z55hnM0LaacS#$Hh#3Fz^-*+ga%dh1@ArSyj6lrTc05FSE6g&d~A_ zeEwzm`FkdgA`z%chd?5zwejA2>w!Qkr2t*+Ngzwe)-_RA5i2xae5ak|Md@9g7pS!9 z^*d=~P)QQ&I=G~jbd~ZPG-#vaARz>3wOg$$QmW;HRF$tw?~%3E{dOx(B85mO?REyF zjE0q^!Bo$?q&+(zxtND2Hnp{N?I-8Q-~YpZU$eN%i-2=AdjR3Os*u=O3kb1MDv6(` zr(_Suf5LI!6dOD}Z^&h`>k;Bxbe(DxfIr(|`HB=U;yDGoSs)4}Y>pJoxH&@2pR%>(A^y`1YOA1fG9x>z&)jS1xb9 z*By-pbr-_P;^69~wR;cen?tzu^rd#zKRBL6qE|olf;Fd)?i>vn9`AR1r?>AsINUpv z$-0t6&$clN+uI|wr`~@1(c$sz^|zj6hu5DOh2iAx?e49&e&nrt@!7$%&mFw5bM@+r zFTVS;+XwUbu}=;E`YRtyYP|Ka-KRft6D7?~=BI~^5cEyleRdlIJ$`V1XA?gAi?96Q zAN(j~+}#T5?A4F%o;79~$#DPC@#%>UD|qJm`jgXMK$|u*VcX}2kMp9NRaG0iJXCt& 
znQL!ivEi2)Vr@5S5|>_VBb8pw)aaAt-IU{>VYB1VL2NJz|_lZ1jBA|4D!DQ89!(_$P0 zGeBvmH>TsJ>8jv!IeJZJr)Q(# z5FPu#xlL`3eTqcvLr7Wo9`85fMW`zW$m~2TJLkNMbp;BO^~;s3cy%~3ppipT0N|8j z*TgP2F=Y`ou`Wkb$s&d#Sxf{>C1)#)R4V9GTY;O9zzoq4u{dUl*+fK*iLzdtXsUmM z(noV>VkkK^eHsiZ$IknJOn^c_8C5~ENX){OLnL-ALKaOyR3-IMQZlkENR%0wn2=aR z96RSc6A=J#DFBR!M#Hh;sst0^q_j*)v?M~3iDm;aLuNxzF>(xmMS^_cLtH7&3i1;H zkV<@<;YD^>A(0?y`4p9oMFjd@V@yrcswR~*Th7ntCo#seqoXHJ9&y{Aog9U_j(uPJ z1PI8)2H?C;QB4hy5gh^%F(Em&5Gqy6IcvGVGmFu%a(&jp+SX_~rlV7J&JDx7jn_ui zf9K!-^4Z?eMmuU~$2@A!>(9=-a}%m2>5{o7yt>JL8k`B%5rpZbsg*hu3wrpbb0&A&1Y|XuzaLV!0wVlSgol94a=X^MO&&mld0+>#Ry|IbS7R&Ql2h7Vh*8@%&Qj*aiv~3Jz%Pw}aI2Zy4UUuR{ zg7Y!!av9sU-(Fj5Q4>V9(X{|XQ3oI?=agfV@!9|Z z9e@%h)qunRquMjMihVtGec#6zGg#_VpJB0__kFB9q)yg$YD+k4TFvJAE(Pb=8-S&h zQ>Ga<@10@AvkuVCXyopaCp%p1MlwDhy z>KFhbArwqJf++wKs;c+AG6ox{Sylx!$pS{c4kBU-s;jN$s=b%Yo>NJbWK+{nd+*o` zMa6}Inwo0tvw;F)@SJ+<)y@_RMjH(RhI}?(B2hgY_Aw%{8q|Y9rwPD9RnOanQbhe~W~KU?mNvct>&; zF+)UVANzhZnoQQ#%m8n_R52Vse*Eok{Mp01cL)(gfqdbfi%3=hW-Eq9Dk?ZJQ)6!0 zILMlEX6J}Y+d*B-fX>&UZF)vtG@W#LdDQJpaKF8}f`eD+Vi{mpdu@&55_JQ%-!`?U7({EZt|cXodC(>G3M?cr(vXz}*Z zc^1RrU|<9!(53dzzIpFxo_>Dr-hMY~+g-b~L)af2bpOf!_s@Rsm%h+ss)p|VUi{i$ z?vE$X&nH>+w|@Kc*-oDC&cE@;`?Ib&x${KSvgS+IHvhr@XowNpc?UIlU){caK-2oA z=hse;7l#MU)u#sc-rf79UmRb1=Ee)p-TM7M_<;eu{lRgc=;XWyM_}B?^sX#D*>uDy zt9SV5(HSx|3mr@~cn`#>9Zvv_c{3}c-4{bcOSvn4w^pC6qp5qa5U1L_Sb zM`7UGwyPO$UB6@!mn!JV3`YU$VIBLF)EFQ|XxrYhIz$4@U|Z8sGR+vtaZ+$BSuJKk zRWR_(V5ph~b$w-Xa&|WBV_HTT1ZMW9r?Zltl4F*fYeyBj*!JTP#=bsUwv>^<20jF@ zsgs;+G!71-?>bW!1tM_4JIzL9=n>RH?Ny;~JMZh!aA<06(^lRO2Njj*EKA7}CWclfni&)|n84-Yq)r4?W`L8nB6f7&&=zU-!1ZBsjii9g}1~DR*>=gQ5eT^JIQF^FnuDVfMQhb0IsD-ww0GWRc;kY&xF zl8Fh?UHAYHxFpu$-~3|0qQNjMhBAQ*Z2~(lDd#LzRar6uWb(+qq~8!T{u|-)o|urp zNeBZ)kQ_x705mH(2h31_O5{0E$^w8@sGN6`wP6(Xj;&vQFh9F;W9!%d@!!5wZRU6q zU~&JA_dol&>pR!4UAjE@r~mmM@6YYQqwaLR=&8Q3yR(1Z9UL7u`|{g=`*XkYg^z>B zdp~>cdtdwR_3Pu`{@cIu!izWm@(=&2oy*!wlV==#<6C!czyEO7>)I4@h6j%h_Z}Z@ zYz)@7reFQj*FXQI7k~BFKH7HlM_;}3t=HdLw%rfjd(bQwi>3U^FFZ5PzOKV)vi+$~ z{l>%n{=xZsC*6{i1;`v+9<3j@O>aGgI_B=Zdq-Q__a5Ah<7s$d{gP|q8&5uHA1?m) z|Ll){@zsyb4j(7g(?v5_AAau#Z-4&tSILIEm$X&?<=?57i}s)XKfbd)fnWYRuQ;@| zY4!MngT04GuYCTMThCqj;rAcB_1fdF{>8(mpQ}FirJMVYAD^C`iY~5Q_D3hj4x91D zCVT*)BG*A(*NY~NNA=-pHy-=G%L>4#k}Y+4b0aLH^cuS?Y~a0X+CH}Jcz`3Oqth+~ zI6iF^G<70~fju&yK_`L?5H&DN>##9)8`JgUQN4d~QVpu%fRgB0)7N#Fj0dOlcCqXx zgKE+CqUH$*lq3@Xs8#Hz<6+yyrtQpth)O`ma8Os?oiCeF6|!o~I$~-7%Rn^0`pyyd zl81HWlAzfjRJHRlc5T;pxsj}O@F`1dqXv$W$K&DpbaHTb=3HO_=SZTu*kuA15bZM* z9fb;3NSM{sdG?i)BrqhxQum`N4yr*2b&7pTiHruLQR-7sbTE;wbQ&w==SqrKvZZJ> zl`Y5#qB4ht5IALVKr5iG0THdlJp|69WM%J{ZIhyMQM_aE+;Gl{M2-ll@I7;IE~^@- z4*`Ljre{YCzL-nFQ`e=vgOk4NmrZLDC8j}D`^wkDYCNomgHZzO#k#h4A&5w8k=sCM zgPP(pf?CRHkn1F68ljK`m|0B>L5!KRX3Z+5J({V;Wlt`&F*ki~V>ZGvwhq3gO2Nwi z32LS#m5Q8#N)}Z?GVfMOmsQnTCO1-Sr=*}|t%+zTD^V5#B876SAs`V;mM#^|JfUMy zBQC3*RdYc^gj{?XWwk?y1SB9#3`WT00`xhLH>Q*AE$0Iw=OhM{#X?|4N6?#C-=|Ph z-}X6)DU@9gAdz#3D>k4h1dr@IA^1=gzZK$2JDGDPqAVGKnMz_=8P-5bx!?v@avoTL z7;?(1buQ7x0sxA;reu2;m8YtfS4Gq=j4F2VkpmOBpa`L=mZqSX3&=YMP{#X&)@w&# z%xtF2OacfXDriW~dG@J~07OX6`uS)27<@<|nl0x~0EsIH%*;(fUnaxygV1PY!?c_ntj?eE8t;0S5l~r*0gaJZN)#yw{l>8ivuN zj!k;w?K>ZP?piC$Pk-k5gU7cwr+)kL##^tywsR@`+OK}}VE^dx2M53L#i!5Cn*DkA zg;!sA|DF3g+tuIsjW7SnpL~B(O-~p5|L=eL&OiM7gWvkKSMKjGzxMsTdj~&r#Cq5xwZ8Pl=N`ZL#%O!1o5yax44x(fxboDx zNen|cJ3ax_?X5Kw1Q72@MF&H;_0;AUfART;9~`!Ab9M?`smtlppMKtW@aReN!Tsaw zSJ&nXZJK@<>g$(%)CdaKcGou7*6-ZkKRNFv6Bn{SI$u_vHmBk8^-FtuXY*)llYwMu zTk*j!yQEo70F*irBxJJyWJw&rrM2+lt(_O2**#mdi-khYr^j(vyRH`}z-C?7Pbvz| zA;8$P84wz24rt8goD0DlgD|+ECuKwHViJRj%p^I6^H!b5v2qsMn2wjy#J*2SDl*T0 
z-~xcAB%l#ZFl)@25UOESRqkNXk4M8c3sf#;jRMXwgE}T6Ldjxg%xH#8E+tXzM6)9X zfKd4udm(1;26g3$)u8XYx(+HPNd|R=hMlw`rIswXX#}+u3Un0-Q|32#0e?oxA_W*4 zm55?W7sn7Ftz;pt_M#@DF-BE0Q$h!3hp*9#GQ@8Ys?>{7}i+9w1@y{JfT+J3(5fzh%WlJkxU0T2W@ux0=-hiC;$ z2^>>qg35;#p@|I0psXno6cmuvM6wtlJ7Pp+QZq0l1QS^)DFMo!LyObT08|TBG)GQ|ffvAceybr;MQ@&?|x>{%&)x!ANlN4Pu}|A+S6C!EZ@2JTvv;(y?yrp0ohOl7NUe8#s(b{DX8OM%2%%4T(4>G@YE02 zzWw&y{lopO$>^zTmroA&PWJbI`FB3Exvjy+jp^FI{)?}l9^AXUJ38M#?VI+_TSqT` z;^wQLyRm&^V|!;bntKL`-I)=;{@VMG9-q0v_{P=X{qU)qn~!fl8E*{Nru7Ri?v6+G zc)0fP;hlSr9<+U1*KRQKgHiqLvo{_*cyfR5WP8JHU!Q#Px$E_4T6y@sy!Cv2W(bht+7* zo}D?<8@uaOs1A|IRK#Hzkf{m`KGG#04f~U<1?gxh15O zG5{b`2*K3KdXx~zvC1louyovAQcr0-7>+lG8|x!BXqq-9@$Amd&VaD#yYXZ_i%r(2 zD*4jx*5Yh7KV1%dZ6MjyjAG8jFqwv?S-KDaToxr}L5IkS<_9(TQ5cR+&u3}T#F!Ms zK+%z}JTM{xc~Vd$1XtFVAF^wdoHvyWrecW3#lBRKeC1!zBo=cQ;a4c zsySv8d2)7q-Za;l1uu*xNC+ybt_<>FRWXzc522(%t~#JH^fxtP0>BUbU}jeK;Q(L; zV#>fpC1i#Q1|>LNi{n_u#JM1r3%-z;i~LPZ%}_+uMAalGEV?6hC}mD(BEn4GdGAB0 z6%6Z8r8cu<_Jf_u?e1(K6=?2X;0=?*4>qzt=n(CfBD+wk3K(nc%r}l z%g^3^=l%z8Jb3Wv>^Hyo%KP`fzIQAx*rl~0R&L()$EWkIR}VmB9t{+(Y^}!}my3Rp z#6huw#ANc5citQi>n`i-Z@o7j4mQVYqWSLq`(JwX+O6wbKl#B=zWAkAmS^{NZ?65V zjZZ)O;_lkm9^bzIM}P3X3LG3Sjt@_+o}Fy14L2{1FYj#B<9z(+{H-4yU43@#l~pN>}qqD=qy~D$^+xMoUk!8ENyZ+t>5BgrVFRi_O?|fM6AcVj^ zI4`Q+xwY}=QQr=0Hn5nYBbp3H=qNavHQm{Hmk4r(45o@%4Up7enbU;)>Adaxv_2VR zU3uG2m&<`;$>IP@GRxG}#L#s;`^q4xXj#ZfFFsK4uI+mVNbFIiDBlW_AJA|(s)`a0 z0Q)YnM-^Qz8z#)E&N(V^ScIvIXh6h8`DvieaqjafP25UpsUO}3<#Pz2MX<3#LRCINPykYkRna`UFy@SC9?h~!XJP1_8Hqw}-#uIX}4i0B8whoE3X=IO@ze7OW- z$6ifNPv_&&pzr&fGuMOQgRes6T+_xj@m+Ftt=q?u7K*5jr{77ME?znG`Z(8vTz zVmX#Dof2#dhR6a+fQrbBC|F=NRF;?k07fPH8~|yhe^&(rBVsdL21i zx*xy4{P+KN|LQkCdFdbhx4zK#{2PD&H@^L~w+`NYg2Ul?yUZeQytD7?K{TAKO;yc# zR!N8Z%U566RFC87U~Ov)))v>E-)d(uCm6c?_x|4JUij22`;QO(bo~pz`Af@27H4OB z5AT2ZOFwrU(kARjnUE3zAU@To7I&y zzqkMJ(SrwmIRA}bdHI94-u~DZUdp-o;GM@^GoMW1xo3Af$p?>N5UT4>Uz=`BQ`hZW z-?@DC`gHBdQy+b1xmr zzyQ|PLJ~eXYa}Pn&@TGAc7qU7$`2nr(iDe*uI^kiwe_|2>9pS2xfG91E$g^KLRzZ_ z1OR5^DiBkaR8_ufQ%VvXk4EFJkJ`t2ydJYS-?swEEJi6nHClz#5+Su+KXR;D9e^kh z^Du-!WQtKU6Q*R2OaXy;9(!c$b53fbVOc>z%sFRpl2)78Y}r(qNwOe8@di6)H9_>E znTd1G+01$GoI|kK_2d|t`o13yhve9M&a2HF7EgT{Thq$q_BUeCi*l^^hrxv+m7^Sm z2n{V~4a7xto;ygrpr?}Rw~D3#Q!{pD!hisvX2g_YCL|SAQ3I*!kTt0pGniTiA%Hpz zOau`HOi=)>?^@vKE6L z6JSYvqbwN!LKRdqIv7^tdN62XzxU*UcXf){&~nZ>_mVQ0LSfOB{6&PeY5T54rr7qP zxhRdyl$e>FiV(0EI3{MlzOezoNFYhjOjKivsas@);reAUG5~Z=VqBp*RRa;RvkS6R zG5x9mAS$fXnJei(Apl-LY7rR9YqK)YnbL(T5z)MNOi0AUY(}8Pwzzt}fX2+oj79)Z zzLTYM5QCbXXcQA-My9M%0#}*1s%k_E&NZpiE-^bs%$ihEj47qa9*G<>AD*7q1HXQ0 z2Q0~Q@tq&Hv&Gl{z2ABMaQ|>;`|=;Z`~IuHGB|ATY?-fK+gP$4pDw{w?Q)r##UQ}O z_UQJLvrF$kxpw)|rMpMkEe6!CU%B?D|K(pz*0!E~>BSEo9>4kBcaj@j*}X~*a^E#= zv)Dgd+r9F=_aA=wAN{Rwe(igUg}rtA(Jy`e)!+M-kN@dwckeu&Ki)fv`S_zRzA%~& zj*br3E{&TfXOA8q{Pbtv_{V?m-<{vNG#;$&tZn_(n|HqT#@*B>$G|>h)4pF^yYlqF z#nYqn=}2*y0d#w(YTMb~<45nk`~L3rv7d$?egDnrMtx~_C=@q0hrjj0$KL(vLrr%2 z>~4|9^)QVtJJmyNQ6oy+6cCr#bn{d;G} z#|W^s?p}Q17PGx`_sQvOxqp5hT)nndH_JE&fBS-6#7e3k$B#8HNK)vClc9TJPPksurgWItRec(PT1QG%Wy{ql5E#+ow)37RI{) z7&>MKFq1$mDb397~q6mtDah zY*mO+Aw`!BH-Z5=$Apk`u^o}95CI_?V5xk?%mqkUfsj@zJ|>v@N|#d`yPVUqX&3W()Ao#H1OiZ? 
zd!|x`VN~jHFk(VOHFZP@#VHjlcBy5}ijPR~L#ga4V}S?-0aTN7TrzI4u=EY+;!Jde zS&hj76a@=8VgP0)23OE~j##a*39W3D0hI_HxGbsyieLug=>`Cuo}6Tqsvb$sjtP-+ zj;ivZ|F7#JBbix@9TTZ&)?{W`)J#;=d1tXxHA0Rt2j`eQF-aD72u0xp(D$wPtcW57 zF^i^Jepwa??2G!QoZE#7NJLr&gDd~x%2uwbX0$Q^+v;K{eXo`1U?`WN8G>0>6EhP4 zLjq<{BjhAPK;%ltCJ5@-r2-ozL`Bccj{CR?KGeBy5ugOA5}`<91Y#r~V(yx* zot>XoRhX_1PmYdyHAfUigP3|Wst0Rb)cq9C-_PIqKmKTY9e(r6!y~`(U;g#pqMG{M zu%L=Z&MA>lm<~bGxb{uASk6w~IqHY+9(?P^@BjVZ{nDdH2NTnCy7Il(_742fSHAL> zr}Oyyi?>dGymxdszjbSC_sZzI-@i@q-A{kw`t;_S_{|?YIJmrf`?CjY)8jto<-#s)gvBgwPXan>nn(tt(EtICXY)E#fFyub6&6|Rdgup3Z}#Bi zBxXpW2EfjtT2?WDfp@vkR1^o!wJAD)K4v0Plaz!AY99bKX4zO9XT;oQVyFzu&%)|V zC^^_jfG$){Ge{Ot%d8+){54fw0f6L$grbHbjI362w2hI;aco;h&Y{a7P1mXDpc;DT zGAGHxbb)%di^NqkKvXMUU_gQ*K$I+|$V^#8ldP;sR4P%Ba^w*Ohhk6#j-urZb<9Qf zQ&*LhdO7FPE-{ga<|H0XR`Xu<6=O6aWW=muW+Hhuo4fIFSO*o#F*$Z7{qf1Uulh-f z1Ihu-|j3Wd+jy|Kl5gVYv|y$o|bShe>gIL9tsG-x#u( ze7Jix00b&CCnU-uW}u=M-!NtGil-NmWQF6wq`A1g2jeLa_c`VyDiC92;(Az1&RpaT z#FDdQnJ;GsAUV6hwi1S|RQ}|A5g#!#00tjQXOmK_LqMc1^`KTF8&l4GjJ}G6jGD6` z6iNiDXkJ1!`_J?2{dX+5Y14$oL-(Uc%e9SX z4j-NT>9-I5;`KkM#v5nd=4gE7^z{6PKY858I2cZ*lljp+_PNhX-rGMvI-UJTum2Z= zit`e-wg>;i|JmPu<>usH{o!~2`rGgRqks7K2VMJL{L7yLvzhj>pEb=(Pw(n4JooJ% zzrD3J+T0pGxP9kWzx>5ZJ0DS%oZAp+ydE~Mj;@?tzV_Tx?fK&EH||}#x%>U^-AUOs zOMA4}JoDW38!ugn^V_qT3`hF#{;@gkwQ7&z*?IGck3QYYF$yAwQC%Gzx4oEwhJgpf z@pv+*Nc%V#rR|Niu5H(*BhBZ#yPHhm@!rwKbl5HB!J~6RxVAOEb>s4^>wfa+5s1#t z?DCaC@D-qIyB=~DLaun$^kl@|ncCqj)d3J-nR1F&jd3~(%_1Y(vW?DpQ)#*$iKpXf zt7)-pMXUz{a$Vr=U|!Xph?+hRD@Ov^)Uhjqjzl$fIigD1+FShc{r-oG}LvBeN4Idko!KW7G9I6S_8C^nB9t@fw-dEikO)(AYm*yqd-NIMF4=9vxb1WVXG1ZQkd$T}C3Vg65o)It+wEBT(YS z5_7C78fEKKOrYi~1@GEEYF1FIM;idxWC&qI!^sjmQGMsh5~o)O6AYWeb=7oj9R|z< zDmy!&T`nBs{^?S9JxuuMbTQ#tL%6fQkfhnKx834o-dSDwE?v5OsadwmWrq#U7b*A2 z8(iP=NB8c3?VW@D2e)rb+&91e)xY<9U-*k}zWx4LqLw{#8`J;dKl$hXn}6?1KY8c= zyAKb)`_uT_zxwj~KiSvkFTecrFCn(_MtA%DdnwK@U%f((PtO&TNsYhkMQX`gFRvef!D$>h5~i!QPX(nA@CA_s*7e z6@;wzu4xv*`Fh}AdG3ZJ`OY^!n2v|f-neAw?(dyc-c82CqobzjAS-MczWeB?>$-b) zm(IcZ8dbH+DfeB+Okog?juttVg&SGc%6sRi>Gj6tjg<0u!bf+`h8#zeu$U(mFbBZS zz@p+t74}csUceB}7rCxYV-I4>KAVC7c*jQAww;j_UU}|QM>-nRW+);TcSu8t4J#RBC7t}igRRiz6<20;YpEPHPBIEJ6X3EmK+WRi*Fx11z@Z|J7iG=`6 zj+hNBX9Ivbgr?~+YX||%98){**fA0HZJ!YU%m zz{;gloL>sS7Y0yOBQ7ptQeDlzoo8Kj^`IgKT6Tb|>p=`uiG|QHA%T=K4Uv!`gsK_@ z5dkz4Wp=Uc2Gzh+Riy7?vur@CqpV1zrcjoyvU151mRX1jV0oCH(GZZC zM2M)!LDj6_S`ARlm|gi}N7FTSK8pwdhyb9d5OGXNlOU5RAURPHGjxmv&J4QxhC|V$ z3Y6@*Vm~t}8lI}EEfxZjNy=q_$V{lN@IEz55!`?&0f-11cxQlEd|~CiLZnp+X=!+{ zJTNQ`D1sF-XxT?yEUG|Ep^#3H49eA2S_d@*ASjC1Vw<2cMI$C+$6mw$Srw2O%!2p6 z;%?ce6p<;o>O*IC)|^GMT25Vg2suV%xU{pIB^@0-))>#u_Xp##2&ig|UEf9oc2#I4 z5fK>0E>A|I_N-;_Ja94R()hVhrJR^ZfxyT)r$FcPwyFmj`<$}ls;UO+dso#U+GnvW zKDgBOSpn4)eP#>6tzX$iKsFE{S62pUvc2ircDlZP^*Y7AGhkEbmd)1Mq@T|R9$tQ7 z{rQ`d*S>#SjeqIOpL%eo`ImqC=l_%6{rvyw|MjiCMSpoRJv;3{;-7x!-7oz7bN}SO zI{M(<=Je&WXFmPRgZH1TUv@WN-hAe{r}iEWFJIf(y0oRKLCf1)(01KmQ13lCzqT{J z`Rvx^r!T46az6Xj|K?}@v;V^n)_1G#{-AmKDaXzqA2stf8`xwt*xVSM&N~J-U>9Rr zwDAlju#KID@#xC-=*@SZR70pL@9OaO?fu}r5}zzOs(5?5I-B>W^L}|aBSc^EawbG} zaByx4F7Wl8wUb4Q1}=~)61iTaOBqbPcdgQL-laMW$6lDHXY zs0KuoKs9NJGiL;c00xV$XQTjF_#{QhJRMYZ7$}`EJ;;WT^D^dBtkPxIS0q1 zQPp?3d~TfcP{#9+%Pgj((Uuf2fU?_TQ7s}lB7g#YFV+6acW-L=p>vXO#dt6?P|y-$ zQY0;EE8`G=W&r|b@SfB#b>bMjb3oLkNJ8v@5UV<*UR>oJW6H^l%5+4MAXw#svL_=^ z4Zc=`B#J7R&zZHDQSM_* zDVo`Axi~pH&!Ao3r6i?*^}#D-?@R4T2xtmnP+3-XP9-C)uy#xYKx#rvB8G^oS@nu) zvr_cHMUt8oRqg6#r84dyq!rgdt$3vj@j`~h2nH)R0U8)Hs(d&rSzRYtq-f#IOjL@^ zDrO~Q=PPC(V>E-l=@2P{lp{99$VkY%3RD4*BmftHXeoGIy_ma{jNbD1MMPpx&T-Md z7L%B0)~tjCKxXQE5Gnf0GFk)#NGa9M5plWy%52|OV2q0g11o%p5w3Jr9}d&duqdDi 
zM2(1w_(v78s+uAwuJTQ-j4lDp1jrRgA~O(*dykg$McXu?t~dl!n@omal)F?9Cs|XMl9{CbG+-6` zEZJqd9Ir^KYV@PEn#E)HGp{+Ivw=ia!tc>B)5w|+QV8@hW3^S}GIfBx#=Cr9_1yARq=zT#&msUEq9 z?;hNI<%J{|#&&x2crYAadG^{De*^aRXG~BboSiiv8Q*A6&JQ1+4~B!QH#T1WiR$I1lTybTp zrrtTrT30@sp3N6s7Zt6E63vh(n-V}TG8qlKlq;exDSEd&Yea!OT29XU7<)z-2K3Gy zogb%ialuM*lYQm!Vx;1b5kSQx#jJv=!s{t$6&(9Dqw2fmE>4sD7+eQ z6BfY@g6#9}CGCcxNp>bT8gi#Bh zuNx;Q6{G=g=CSmt79U^T})^wCV)ijB?}@UGa`tJnTkrvId(k&0T_Bm z%;nBTD=Vz>brd6-DHt0R1v!AB0W*?gB2v*1f|?Q-B?f1!l?7OiF{gA2E0=J*s5MB(kW6fM@~#XY*tmw6w1@jsE9-gNLB<+UJ1}Z zMI=kg;GLBg#1z!jq*+DZh0HA^X)7<)=7JMW7$s`mBv&Ti8z4xT)g znA^pinEi0rb6H}y5=oG-u7V?O zWABl&XxH`Nu#dg-E=y_`y+cAqCMTACTgG}=BsyZdMlUbwYY4Tt~Z|L`|FguOebhv(;)Hquj9pZbN5Z>b-@_LE2d z?4SP2XSSygWA0~o`-3AE&e7)u~T|GYU*EibxduKVuUSxARJwBh$z!q&A zDnPKUF^q?lm+|qV(_2qp);ot!_RbLrjE+ycK7p@rx>lbo+FYjUg6q>jh^lgM*bWEi z5Zbo!NRJL>XA=iibv9qtH3d&yABhPS4))K$vS%6wUbYf~#B5@g!J00{s0s-PRFQZE zBxhz(Eg^pAU0NSh#Z|sG9aWWYx~^Mx#+I`!=gTDK9E9LeEQ*TRC^#TYDS1bs;%b1S z(O|adJ%J)rOv}EnYt9MHL~|VQV7`cw)qxrzWKl;<$cVt~`q(d*OL7iY69d$odler- z5tN$&A%x&HYa2TQBS2(U%_${EzI>uq{z$Nb*Id%z<5D^(cG-rWw)gTf96QZu%3WRKs06C^s0oc1X zcD{3tc|00qk-ByV$A>Da2JFM;XpPwQU57+Wh*XuYgB>e~p_y6J$F`41Kunes0gHA3 z%Y0LzKN7#x5R zkwbRmJ%`fADjFi0nWPj`F4Q8ka;XCt^cE{chinLBB^1DN5@28=K&*Y$rd(Aa6U1jZDxkOGLA z7??;_M@$50sz#1;SDJ5CP%%rQ&S8TF0Xp-`j~n}16pMqWXpYu2pM^FVorix9sG?ellOi)jNd@YK^gS2woz zUpwg&Fam+Cjq97c+n;{wdlNS|2Td!B^LXXTq*=zUPmdm+5u+mx zbv3E0WIC+f-tpNeKvladTVpglI&Z_GU39vAsUA16sy!JR0+S=4WH}}CCV^?PR&8vI z56+t|M*B@LlIr}{bU?0sdF?P*Nb*p4}9BZ1x%}PFZNl= zIb2m0p`|2E%HClXAfnS*6Do8>O-wn9V;B$oY2UekU1Lq(6PYNuoKngPu%h#;S=MZ- z0_usptI8-D5g4^?`Y;(5v_(aEQoMISy5&Ie`QUPvt&Y00Ye0}lDmQmB8ufmViL^-(hR1e7pXcc0hf|v zBFIG+RaU*kt7O}92@|OhBVCN#3(?GoIhTRbO7K#IL*;Q}S+1xlITvGJcnM}EYFLaH zh{R4cyAWcEZH#8<2LXX;HNq+STyUNdNI|+7O#pyA`>YvONLir32IkBIpm$zW%C1;d zgY!i^kh;_&iWw1l@*=|MM1@uzhlm=HWl=C=DmLJYK@Y8-joHP}tLO~M0t(RzHE$Ia zUsw{W0tLXtl&n~%x%>zrerTk~q69z&j-8okS=%EKvs#&>71wZ)uCa>reGh~HD5?e^ zt6KtB8x}Ijpp^Tr>$zeH|p&xgM$_y?al5zdi=smo7=ZO_W1qBmv_cTd$%X!-L31J=xww$xKY-+upW){iDGChWR={qm*rMYHHT5zB0C?Aa4ADq3q=xmt;l zk(fO}FJeX_fQ2vuC|ZQc3IL!hdJBlwl{d_wX|`O%w$BD^D28a@1J}W+h;y8>b}<@b z%vwS`RJ5)cln|8}Cqplqx2A*Bd3$-QZesSB(QG=dXO^59>Pjj{L`7m@qC&-AtXOEa zB?)g(p(zZis&Bgtno<(U#LlRgZjhbJlEjn?ND@pH9FeXtXNnh7*_Gz*!#$mqR-@=P zR^)o1GP(so1pvC({jIhK21r&_0nE%Y09M|ss46(53>nS9P?EY3zeN=*I=`p6SBk<995Oy-r6K!Q)F^nIqyTWT&n1JJe8~}0z`v4v^n~~BARmq zz|~N~u;|KHbYujCGT;_NWX~z35PXS7E}Eq*0z`%d9EqS%{zF#OGBkv8D3|)Wz*Aur zb^=gfG{mcOuawEKYLd#MCL#hf17=1*hd}JgScwU9N}`HZb~`{sA~}mNdnz~_^1zHH z?6?4=)y%R+HCuTH&^fPQ5>u#ZRP1v?K%>>DT~~iiWB>zWVknGOLe-4KgpLriW>bhM z8!I3pfo6@VPm+)rFzae@1co4JMrZ^oq8GVDs}2Y*jL@rN7ZiYDWd|NxD7(-=MV)7YqT)p@F$@4^kYZ8-1@P<` zv7U}i6;%5w#N3~pp0&%lW13DUV%#@PiXEC|HD6WTvgOLx)c}F2YKTa&=?npf14mv( zx~@%{Q4ZLmYjpHj15h*;dvW#SbIEcX_oT3`@!vI`nrcM& z!^hVq>$Em_*tEa-8z1}jSAT4ll4T@|&EoL~_cpgit{OIH=V%eg4@W!IbnC`5l+a%L z>z}>%&e6`*?Hf0q;z9W7zj)&tU+X^g+0onYJvuyY806#xvZU|5cDtK*%t}}h+o-CX zCx#J%e*dj|tU4U>Gq)~xv+l`h)Au5(m#1~p_5@%GjxfeF8VzdC`=|5uHGg?$>+YQ= z8=Irge*9zi9v>Z^_HBfE;EH!#v*jqF8XUze2$qu+9Y41h13K4mtm0LB2$v8d*p`y8iK4~K(g9~p_vh@DCnjV6{+2zYZch&ijo(U?I* z6za-3=K)gXc+sU%2=hLfVK!~LJegD`dVao)EjTNd$x=L#LjXiB%IO>hNeCgE5MbZY zXjJDuHEjo~Rb35y-F7{wdG_Vgf(Gmu0i_p$!r)!)74ag3WF`MxWpu!YX-ZnEv@$EQ z6{TI}BC>DgIKzv{kQuI8T?LC>V$V(6FcA_L@gh5?Vg|qnl5(l~%Nir5jM144MVRLgm?`nWiWJyqI^Awce%?O9DeE`BpsS(&&IDR>5im@*)H#|oBBT@`#C2-tb>+l=Ky2#O}^$Y~Zr z78K2@0x3xnDUwnn0)t+11wjx5$}PFd)`p9Rx+*Z#fXmXU9QIJ+q)n{^0GF3;^-``H zQ!^1z6|oG)WtiXebyNl^-tRYkL^Lx)wxKp+BU zr9a4uYwPPTf9&JO2Zx7Go*X?nbrk_fDI>hY*!H1vX%QtR&px#sl5@c;Sa0e8iOCE> 
zgbB`Pv%0FPDpVm511OrMELL0>BB>Y)4GoP_PAR9g$r^xZE{G1yqC^xpML`bO_Hrzwx=BfA+@p-Pyx?vyMGzH|rKR zuCK3+>w9~%>39H$sPyE~;UG9RS)bNxqd^ZzG_9|}U{H4*jK{n-86BO@*4OaT_ONMY zX5g4o(|>UP@%aM80F(8i)TguAvTu_MfsDw2J!MtT%%C7}e74Lvkw72gwVn0Dlk5;S zNA7&yAsZ0}M@iI()XY1snAtNsK+!Cc1YDmgAIbu0ZEYl)O<;X}Xn=j&Gdg6PFXEtr zjZyG~#Jud1O5Zf8b5PeoK|nywQj*>6$>Zb2w63a3H-_Go>^qgvD9K&wb%{-%m-%xnuP;r5zXp+?Hxu-L}X@6C?)}z$k3TZ zl^l!aWVW2ufki?cCXQ>(GM~+3j39tuDiDI-+FUnwW${SPspxFBY-4W*kg4ta*e6nU zSSO37CNYy^0t6)_Rxq$azX2v<0swT5Re^wjO%()COf_c&09j4!Rk8vgk{U9(Lb9Wk z#{c5MiJA!_X&HRzN|OUvrZeTzMT6C|Dw@#?Dk!2D5f$q&5xIh~1IQ&7Sj*x8v&djH zMs`x13Xk`b7ih!#|JAA8lZ18^y2=UnXj;?p1|c8-9+F!(yrvRLs@VYR`| z*@qya#XGWW8usXbH#fJDIMh|_=Q;L-82bo_j-6y7#MDM*F+pHrWx%K=ipBXG^I5$y<8>(FMM?JG}n%<>@-V_MHc(M|op2K-T@k{`4%V zSxvfVbWroHo15?6I|hkOV~6K$9RL+_>IZ`=37nqKnpu4AxovVbUo5K{mdp5ufBy9@ z8ySdEn{vu3ZJ$LAz#*Dr?IL7k=gf>lXxko|SbHZqImRx>oCE+*n$|mrSq&^W4vwQJ znO0R@F@fg5AeuwP(o552HA5^fsH7a%hha2uHTg5iIZLzXBnu&E%EPsB%xMrDk*k=N zU2OZfHmZBkW})lroHXxljJsC6t6%-pbFPeo5fO=*m>7Ks1}!3JVakDH1kD)$BqtDX zfskO?G?VF+h`L29#0V%^U0?!WqAcQ8d^lUFR1{WaSLu!ffPkqm(SS-R3T6m=F&zR} ztp}`RtrCJlHggUEfr-j_rzIa;gvc?YBd5Ijji5I*DGGWf&L)hY3QU-DVpdg@zOAV0 z+fGs(_@S^(*C%QvtISNzv19fC1c<64YAI(?P(|bwWK_j+QUXVs%6`JYoWqrZbv2C# zq9Q71A}ZtNl!Td7LBT{+q#Q`zGu=QH4Y;rkbhT(LR3Iw% zD6XiNs~s$3xr`x;5|il)R;i$dmSvSf2Sm2otD34Zc{RvcHTQr3=$xwKz)-xnAIfF4 z?@~BCZ}?;S?3oQYro!t)$c$Fcl6~C1wIEL!KsKA}UuF z05SHG`aboNV%y>Qg{t!GToqzU^W_|X zm_0zqv1MjvpF{|OiL;1_#RTlwAxTcy<)tPhMr2=k^iGnxs;-E+_#v_&kP?{6a=ApD z@alD-zVCt$jE+5kDJk|MpeB~H^r)>X-dw9auJLALCI2_>R z?b-~F&-;SwWW=yhwO#x9k6r!2vOhmr^nKq)FvLON_2J<0;ld0Q1(G?&;LH@lxGFe6 zCQd32Ah1(05qsv@OJ^su@hF_lW9pJ40LCaLkX4Zh27}spvYgwl%K<#&cvRQH&gY$q zI;PsYzUu5aa@NOlO%Hv`$ew?n?sOjZH&Z% z#8vP=ed~RP7wR;)Xpf1B5v>?-zz~Yx>f$s8NGW^I#I!NpS}xB;Y-4@Hdz#JWxlaZr zS=o^q5`vg9p&FI>fJ3-IK(88Yx`+WR5XTGqE3SktW`#^+2x0;X%m|7_vZQK==pE&p zQv#$cCO*`}lwtw^bSNo<0W;>D1AAiUn370J*&(JFhgIbqAy|yNC3FtD=P`A|mcmZo3SCo+xW>`@WBewX_TN&K|k+O($u8@yn(hQ~+nN}{?qW>)F9x?`?Re5Vz(y6Qa@#w@Wc zq`l87W=sIY)OU%TLjyx11Ln$8ARs~n!#*a-5R+&UFhfxQ%Y9c5$Kc63!rX#oSFz6w z!y#DVou?GLtX4Qp>=8_2mjFRB#8_wo&c{$R1hp9|NRr8Tj9{^EljKqobls8+&~b1K zVL(8Pm;htn8)y-2pcx<)R$rspmqY{rLo3@S2J9u1V?s2eBDexk0Rgkp;i0h@9y#D6f|NQsAISg>+>e_7C z_bOMnhh2&TPvb#2J!=M&`tt5{|H=7szWmP5?rv|64o|>Q)poKrogSUdLDKfrcM-Bm zSo2x5s;G~Wa?Ba~ECiqiYeSb}1Ve|=cg^QsefDQReGvOdjJuaN9`2o0jM)qsM#Dh} z0g2kS6@x_!!^(z$(_!TtiyElTX5D1uuI{X#ozG5=&z*y-JKHBGi`d7FHE#rU@TVtD zUBgqCx4OPx&bsZjfq}+l??*TY{`f47>iTRExyIliMRThKq5+e0QmS6?G3uc9jOc#THq7Qd< z7sG&+Y7t8rc%kAmHE>kuyhg-;z|PeKS=2FC!LxG+xo9L*6v)8L2N$Z^Au=-s2%brF zeKNXoX?;BM6_X^9ti=PhY#J9hYwBZPd26~JfSluM5E#6vE4~kdK1uGnWi~Jn(MU)L z0syLl1df7PNk|igvi3J9acHWZoS8aut0it0y{I1vd6kL-2xubZ07EPdz{ThtphQlA zrLqJt0kkXt2rR50(wLys-o<$cP`G6vA|+}}#ZpcpR=Ac$LnPUdPz6=YRSZZ--mJj! 
zQOVdKi%M1^EL-KQLa4>`m?Y+uC1(=>1L*|`37sM$qGN|lRQA?ox}u<>hN>#a#>DIj zLjiM6ZZJ@=zHegJ0Z>jUIA?v*tdcYS8_B+^89!Vc!G*;5LSRswXMh*4HI9-Kg3=zy8y`U;Cv` zeEr*Rnf5zZHy++OOSAmzfA2TfH`g8hrPp+J`l0`}$mFX(82@O=05I_x-5TV4EAh9EnY#>NbSA%RY zTg)eubwL@LR-OkHF3l;hAEIZN+POhWt286iRrpp`B3ayV@#T2?qkN3 zGu2*l9)uv-iug)J5zWMis9;ks3<0Ii0Jyj$R@Z@9!S~YzcNOSEI)#Cu8Dvp4@#sa& zQJF0QGcUT?d$-@)KinU92OOf9Y95cKz8bD=Zy^TA`Rw$pd?zpdOap+jWpzyXqAF6v z!DwWl^>DbhvDP$AGh0f^F?EaOLQ+Er>tTG}XoMI}p~W7j3e$wl@a!BW=RSInG}#=gxt)i8EQOoWUoIctuw@B5C)nd2e|uRSXO z5(RSTjF~)9QUxl73#p0=KIgIuA|`g=vq_R%dB0q?bzOzvN2B4%a(Q$P?>svD=<_$v zkGe1Y@<-?M)8qa5_2(y6SE*mR@$lz<>oXrcTmHd6`FiSd2==*Oym|2O@#+4f%OC$p zAepzb7e2c7_KzN(T^U{8tiF4c(B1#+=brx0|DPZ1UVbz4tvyX7z06i}TobwX4Qejl$<$b7^yo1{F&^sjh5I-+1rIb2rz>Tu7ZaDD3wCMXq*AgNDj0c1D$tZYg+Gg2x0O-Bj7=;x3VTH>@J~`^g z16bc2&lgROsd7*=O)J0fEJP{AzKbdLo70Jb2C!w0N2kZnzHsG#@Vj67<8S`>sjKk4 z9~}X?lTID10$@zCGCKkeuKEx$v!V*G)S6|BYe1{L9~cH73?RnnT>t<8YTIsm_foTH zQ`hFKVoHDnD1S5F#fy_1E@rcah!;!X3t>u8My=egXi#pX5ISY~t1%&{rG8zuWWHvjWpIPvTfR<4u3!3+Y48!#?n5CF=l)@xx zmVH^3WFhjN3<#(m)G>>Qdgn#ek=DLWs?6-!^(i@~3}{)M_o#rVE>NKXtQ3d>U`9lU zSVAJ15KtM>F&Y>#n*pGq88M=PnnEdh9pr?};4T1iYKCAg)TP)}!4Iop?-*!B={7?| zHZew0Gl#^1vl$r~fvOR(iV-q0GCM+tCCr(Tpy(f|cEy62%@olzUzpVh(G1BkGa`UO zs6j-SowBBw2oQ-4L`)pIs8Q5BJ419xhL%iF6s^GBzyKAHhyhtu9Mc7wjSN)@42#zQ ziP2P$3g7g@qaH4#J_VAETJn)f5q`ncDB(QivM`0H5R-`-nWAx4Ra$w=98*mBaQ|pf zl`SgAoRjF~ohzoIko(v&7W6ax|aC$31i>oZp9@hTR|L4zUi>3GO<1Y*cqv4&$r|aX<#AP*Y z=4m<4O&h&KP#ZA3aBb(^`}@r@y>N4KSf$hoJAU|Zc6!`Qr-K;ft-D8R&_F}rTygxr*KKIPk>2xA}Gpx}g`hnY+jQ7u% z!6>WdoGQnjO>?{KvSuMLP}`o?DfQE7Fv@F_>hY|}6%gpHYnSfdKiHf=9q{=4aCfWj zGM_g~MNApy^X9WJ->8Beoi2X=&+ol@ByGFcJ6g<^c`yunb1?%~X3kRV?!_Tr4kknZ zWamIZEnQp!3MDd7)zF+{CRe65-jj+LSRlT6?V1X{^VXY$SyZaZNw$?3o&XgU%oAxD zw<2T}x;W#DSi1<|VRb+jEl1J!=}IhOW@<#3lCdZ-S5*+PETSlN@Pz2=s*j1#h!AvT zKJNQCtV3P7&9%{VG8|Pd^=&g>genYtK){r>k1^&rss~v#=a@49dO&Ep?3@EaWyq*$ z*@>w+@Eq!)rt^l_14)YF$Ri*in+OvzAh}ACkg{eW?--d%o|z~*LNiiAMiW2;c5I?P zc*f+YY-6Yp$&iaS!w`s9s#*=+mBnw_d^#o(<+1|IQWX14tOg*UfW!pMD$49~79wPJ zYJh~SP%_KV3@kbCP&lfx3+PYkjCb_v+6r z=VwnI-05SK9&+rvBmkf}$Jiq~Fk0WdDyD!L2}oBS127wn24E0#HZkv=C(lG?kW*$x zkvw0X9v(jEn>m=oD9Dy`Cc`pQb+p=ko2-J>06`>|txt)+K`2sTGgYJ>06;CPvT9O6 z#7^4Sc3krAnL(}Z``9&EGZGA>@1ezv~0kwv>cvUA>f=UiWG=>~xv zC@`U^l*JXCO{-rb5Lkv1Wbm|wyjm+$ys;O>eb87)}zgT@yCDish@xP^3|<_ zhX?D^we_pJ{cN^8>$L5<0>|n=Ih(ncDJ{$y#3QV!zw)ekym1tWyBx+@cs)Q zz4q^Z_KV8;)|&@UUB$sT7^atAy!!B9dH7^`p7QS1sjJiMcee9UfQg^!^I-DZPzaP$(8Z;*5uy(!{DHqgG>zpb~eY(ahAMj z(!psz7!3N<_l@-lwx-pC!^5XH>fzd;x9(?e?Z+;OgU@~9${X*_0Gg+6Z976#Tsc2W zBV07>wn~dF>Psq?qp}`eN!F2zJfnQD>S|b3VcB%STT0PaBQfy7tqmuacdtEubbrw_ zs}sG*Rsv@-YvMA#1yC`^#ICpjK*5mFp%*iDj!KU4 zs>fGCRs%D0j>H7eRaK~iuPO(*56(-pPWq}Y;)zhJQ?=96rFWE6&2lH^otq46RUnc+ zMdnJtJa9qLhbmagYGy*F>MJ&bq69}U&zub$pjOTy5Ev6ciTf$`%p!ks#lWQKkW3v} za8yu~2ufJQOKhryL`cOokVP1o2{ntVAp;0x5q6Yg2EgEhbOP)pXCoqW3?4KWfDRd( zDyVtq+SGaP%*<3!jR94&XZFlYXvKDm#Tsfzs#0!fEL(+@F0dens0dNLSa!|>k%%G@ zDGNxU$sz#q%0?_^sI~$*qcKv+Xmo@QSpdkC08qpZNKv&l6)EoAl}_Q`SS8^~ zN${aA6w5}O3mF>_$PpEZm+0zJE$eaBGG=LGqS`~oEV&pn+T63NcCK9i{1<-ZyWf<% zZ@*oSM&46iEDITlhyjs+b51EI@2e8qQ1&^JQX4w}5EDW+6L4nW#6*&|aryA!y@wBO z=h%6V4#_d)UULSfl6!}F_11u?8QN+_DvAOMW?DpIt1%rAT85GsM+mtS03lC^)GxYX zogq$!);9C13d9V|l3G(j1a^oD^`LGRt+cw-`I0(7P}O=^4eGi@Kx8BgK2*V_m;en- zbBw+US&}y*G6CyjPwe_;*>}AQT#p91OM8p_m*2ks+^uWt(}yDbwXgo@)z4i?XZ@TO zVKniBpgFB?t&QqHD2{o4-p&ut(CVk2dttP(cK*Smz5A!1`P9vqKK*hS*025ePH_J6 z=J45XllmlP zI9oIu>yxXy>)(IvN5kL;bvW;}?QA@$pPaRK4v!uCcORT4!^KjnVX&k5tc|e~&wkP5 zdrz99<-N7ZXg2HLzkB-J&H3r+Y-eYKszIOf;mQ1T9?xg6xi#n`#F&~ctxbcOjlA!h zewhV9rvs;1BEk#L?!Ni%BS$bA)>+jB&puZ**2Bs~7fssP40T0Bc6zp0ws90_?9qX( 
zkI6fGXYX|HOhw@7r?z%>Mn8Rf|L&vt!Ev)W_UZpe)qe-;mYwH);G5Rk`%5R^96IMl z10X;I0t5&G1VxbwR)4&@4eRhzVpZ0-v!iv-0r@0?+M@e_6pDYJilkT6ed>G)jW)Z zxu4*pKMd$1Avj?<{Fnivl$Z~5jG-LIV~q7^fJdjMs+I%la9Gc#Q&rvATz{}zsh-v% zmo7ho1iO0=n%M%#34s_Ri6$+sVnFBqNiFTTCFs6-a)El4)_R9BA(rzn_A_G_LeEYW zxP)JI^_%oKx3103sJtPwqg~l3Ck!T*azQ?38mVwTXqHz>J!-iWMTpL2$D| ztuBX8Vsf{FPbiTJW^!{VTUX)pxHB>UFmq7`i5dEon-`jFVQvGnuoTW;GR$azo;yW= zipZS$>m(u)vT&&nE3(VOt-#&Msc8R%RG7Amy+KrjG?(He>Mbqcj@~R5gJBI`LUs;@ z+G&doJ5I)v$#@G9v#Mryl5AEmu8i!iOi;{Z?q1xui^Z%>ZHNqN1eCj^mRrpn2X(Bf zacH_QpPd};kEXfJ$&tNOHJJBq=~7Y9-)G7a&)kqfe4ByW?mii*YChe9@J)WelbHhSKOkLJ&=y^O|pB>E=^JG}#p!$t(-g@6tXRK?UzjWsA1LHQG z+&>ylb}l`!Hk`owKlq>HE9KXD^N4`1Z{|dUgK;AG!3# zcWz&JVrTdI;jJ4-^`!d7*WZ2OiF5b%7Tr<@LB>MuvRkX@%Ff1gmfA&o^XC5JkFD=L zIL*ysbbjl-x9&{V)<5^z$G-CAR~9pvOzM}v{o3-hyY$Ev9!|dc$}Qk3?Y3Qprqisp zOeH}DJnIgdV9@yS9j?F`PJUwiM}yCKqi*;RuP%;t3+P~_&F*=a+Er_K7tXf%=i zqlE}#=ib8y5YR{+*?P$DFk0y)h^6bu;!-E4$rUGvL>5O<4al#cQqW1&!)H5V3z{6gTMh3T( zy;SKsxWoXimQBqC!?2>1vaAqugvi2{^6BZUB@dB2=fD(0m<$JT5J!V54&r!Hzx&Q@ zaLEQp7`!?Yqz*BsT=dc5s3If`T*nFk6KM;I1dyS#ps4{NB;d@ks_Nl5h7q|Fi6b#{ zJq(sQ%T~qUiW~-m@wgt0VntPqK`M?yj>|;{#C1L7SP4}esw!4NLa3q$g&3lFT-Fd$)>G?Ynj=@%v@Bm{ERoHSd?nzLHc)U<8a=A65xZM&4w)xq7> z9lI`di?&&&u5Fgf#cZ)?X7lBIxtukN#oT61De3Ck7Kq|t;NX^%BV=?Gu4-{NK>~I_ zCN3z85?dxD1EQYX&@;r#YK4qa3Lcq)IVut%CYSHmy*ssz0 zyVEUaMK?-0TcO3OJv^cnFsiFtHt?RgUX;2OLEO#q!{HJbdKspvTd{016Ou$SGtUMF zxl3)EKnnwkz3+C&uAbFnpu!eJbTF8oEYP9THWLCu1J0!ecHTMyAvN=sksuH_u!snS zShIW1sjln!Y|d3YoGwR4gM;I=F`4}KSKj#O2X_9$zxKIf>i#e|7h_f9*SKXD2T_vwq>~ z&YN!^?(H5weraPwaB|W-|Hw{8x^Vvdb5Ec9g-l{?4e{eCE=pKl_RK((Ycr{_0zM(`9-v1A}g9-y=}0Tm{)()q?WQh3$)% zE`Ix!S4M+ThcQGNuT9RLId}iw{dTGIG*zTYHEe(eqj6$Bf8p$~UfVy|CjksR91V8w z?nbGC#L4=oS|$KLcW!$+og+t6V~i=`?fb{)&W=cA?`YnpG^_{9t~s18V$BG+F&f5- z@0?B_ed6NFuibw3%Go#Xo`{5MP#>P=<9TOJA=at0yZ2A(I!XxNZD%PE1@IyWxp5!W zW#Zgr2V+J;F3Fgx=A61P+31gNL;HGu$kqi03&7GjsKK1QQB*v!?f%ee}{ z9Z=W}&6SCPkwpN}vJ#T2Qs+pl=53pVd2?9Lmnqcs>2VvRa->M?SrGyvj3=XRxpYSk zVu0#|nFqs3;D{JE)*%E|LnS8WQE~Q$;dpH_UfUWDN0o40*C>$!OO43RfWy&{jFBaV z$P!`l`_O10?SQ-D2iqYUQDu^1m799O24-XV`G zR}5mGnsP@5KmtH??QN1~j^6XB`y{KGW^X%PPE)hU$H&tLcMoTa*FeadAz-5sU zLMAuQP^K3oLX|;cB0)Cf?78d$YGs{u2OwGH%$$;SEVVj9(moLhUM#Z??xnlfoRGkP zKn!vLs|tmjwTe-V;&8IIxdrZv<=mJn2^_76Gj&ymS^)E4Gz4%&9}EUNJLj0dQwwT> z$Ra2OjO40G6U^HNkOIKPbLY?P>@1c|GhgQ1xp}jkpG>DIb#2#zn_6P5+)Y){v>fBg z9f1keu=i4wpI&lA1nZj;3cR{#r@~b5v;dGhHP2>d2E=5^5KAZ!3J!sYdW<6yq6HBv zpUJGCH8Ps1a&SP*4w}u?l>6fc0x}{pfuorP3Bo*`>buwPZ;k=C;o`ZigFDM#{h61? 
zV}10+XJ#2+`O>YoUpe^UKlh=Re(*!vH};0Z+a1g9&70TWyY=kz7rm10&EeH&AAR=w zKYZ_R{`5S-4}Ic^uYc>-hd=oE^;-u`vpBoC{gtoY9<51duzhx-A%6U$&%g2dwY80n zH($H9cej1)Q9C#}+1)*yXZ*<1PksF>Z(e=!>X*Ly+QrM4EpG3g?#&uhhpzandnplD z#x6%>cTj*E_xG;t-rrmsww6yH>aWd1nA|nA&-hjXVz~gKpLe z0GR`rtEstC-~w<)0>Yx~VNRVnWK}LkPyh-crLJw;LLTpOmsJ79+`urqMz^XSRMlX< zoV8sOLnVYMgS!gH?VZh0T|YQFYE$P51!#jSU=|rT`3iS{EbBONMSS?jfd-J&RrZ%)`sKF$z**zTo)OHFo+RRLZ}C!u13RZFdD{s7^{k_ z$Rf;01&~xGk)>v^PpOuF(ZdDVdIWR%-usg>7?xDELk~sn)$LVg!h4Ckyl@XYPmqW* zlxtJjK6_QfizUmkr|&_DMfHDxm;g%s2z^&dU|@gIG)W9X0yYS)7jC zU|=TC=HSE>h*7AFMEYua!0CK38IEup+h%!qxbNE4fdj`Nlv2|d(G#j_mo;_SikFYk z0n9X|%)))QnnMuQY#~V7c1DD5Zh&s7P2fhQKDxNg6o?5Osw#kaRxi+LWGsFV7EU<> zpsKq9qA^hjVrt;%!~lTE$Y{CC2Gn1&&;gR0b-Co(Gvt(=kegNy4(FGL;~)6+hu(er z?VE2+3GD}d`11b#&B@m0(c$cOf92KXyjx!%pE6vPI~wubckey<_?d6~{vR&7?#}(G zAiwYNC$>Iuardy9XP9@~CjRz&^KN|Rk8V#7kMHT%-lJ;t;K8iTVQp=F|76A`DhOp( z3B@l_FjnriY};Wyn2aZR*}d@Mi_biMdG~O6cz^f);jHPjQCp_Dxvr1uqvd>?40tg+4fX@XQmJcTc94_3+^M%=vR?FI=i-&0BB1dwjeYjfcy{qBh(Xi`C$CI-5@0 zY`A~2jG+o*+2GQ-ozo^|jpxrk|MdHRko_zN5-reil8;iYz2ih#x>xnwb;JBO5 zyQY~fmyl7YEV>A8&df|bw-&uWOysO26k{aox-MJJEQF*4AWYyOQW|WzidBqJuyowA z#Ck9qp3bMs`GS};8DtL(Xx-ZSIM!ipG+G-BPmd0dPEI*cpwbX!kjq+z-dB0`Ynp)* zk{bYG!7&zaf*P6iU$mN$VPT?ipn^3;Bw}h7ZB%TQZm6!y5C|PGNbb5Ahi%&tbM8C} zk(0O)*F3B&tArS2_~;{#oV##wW3+wY@k{HQ=Qhrs9gaqEG-75BkuVT3fpL&f2V^2H zRV0O4SJq}IH?97t4l6>$!}(-+=*zjnpmh1VIzl1TJFIfVt`4w*VtenG>i3ZTERTKq zspx3th-KmofC?+95@JawL*L#~Dr0(+f04$iq>KA(cEQS()6dGD3&qP{7F&M!jTT-& zs326UvFw@t8@Gq1cy}sTA2UzM+LV?}@jNVMi#(fk(?%EVa=w_g&GK~7$#BRq&YO&Y z*_BX;8M6ltOu|TpR2)xC+_zB!uD;mfJuU--va|NIJcJVWb)w+l%mM)n-HjBe=~^H{ zGBrag%a{>jX3T2Y@?=yE>oti1nPU|MQIOEl)RxPpT`a4?Fi1$|z!-%*8KQS>Tdvbt zGaw>#|j) zS|>zz3rubvB)A%rh-FWCsWw09^YJM*n)LxkEt%NZDE)4Be3lQ~^r;r~-ni zgF6z7P)=zus3b;&Mb1kHED_6xd_FZmFbh9!x%-;RPPkiLfcW!nk58iy`+J`>=Z0)!|n{IB`U;fHlckdo&tak6sZ(M)l`nBc8 zI?Rt-W*rRi#+|*LF>PNw_s4$xL#HQ;>+jv(xwzKB)q~yRz2o`Oyp8jA@9y61d#960 zwV?5EWAgIrd$Afcxm`9mUv^S0kp|VIN{!;lw13p_q0_*q@18RN6qe2Ho$r6&hu(PW z%_oyt?M^$-aBd9Vy16$Mu3CUV~Klluq9aS*4=4&C3qeQ#q>5#q2O z-@3ckc3h7Jr_+X|nz!01E>0JRrwh$MM2p6<0aI)$0(eTDx=W0qV#>$4f<{O-Cc^L0ef0<-i!L*WkSp4~ILnrSZs^zd!$*Yy>&ub8fY zdQ3RD12BL9GY2MiV&s4*#In-S0Rkg35iql$lzNQv7AqSLW66>`0byV5NPxi^fB|dp z_3t@e^*ft`xw~4^=EIZu>B;hV-k#3pN5`|pVm?2ewbp639K^U_&YhcEO4=mv8ZAF= zMCVXgf+mEBSo%jTgMk~No3W4tF4|N97MW?MZD$$SlXb!YdY&{S%jBf0b6d9B@^Hi= z2%g(yZ9|B{kdh){j1`F!m}v(9px9<5GeGZh7b!Rcd0%8x^kt?p;ul054=N^pXYc+W zedn9#0K}yPEf6E2=FYPLdXzXEk2G0MrC~vH?!1RCB4U~6Cjw#=B5_NjI(mQOgf1%} za9~8R()uCg^s!O7Jaorsvl!7L?_2!N&zTsB@*1wv#%ih)T2N0AKJphNMw z5`a@r^%arHWb^do1c3x8R->IOkIhcy?Ac4N-CcbAL!Y^}cjt8X__<4??_56~uAkp| zWcZCY4=-JLB!v9>+pm7)!_R$b`Oa6retUKR<8^xO`OEKnAFuC>&R;!q!R*0a`uaED zz8mYElaogu4R`mNJBR*-uiUAI_3LjP&8Ky(^&frfE^@G>%hr;?#(J_aIGnXCQ7T@q zCXTjjHD}js!lBcIP^dMl$98q=cF>e5R`SM;YX{Sl{nz(B)6KgN^x=8T`&%{w%)9*V zt$S>~NPt2cTkEGMC#Ofv{IrXN>yu&I`Lex7%*O{O4%n)qL#K*@E6-w=-|@j^=sGn_a3}^<(cOOYa5Te_#J~G$#Z^5ih3+hvIuH z=>#leVav*hW+j{qn%vx(!3km&IJ<+TMn%93nURQ~%ZL=PDX~{B&@kJ z%kIs1T(`?sIEav>q^=AA>@fskcJ;w<(00kPN#MYNi5cBI4=M>&6{`webDMJNOw|E6 zggVxB7>EP_%%JO&;bc5`aPOD}LaZEd(ROYg>xd5Omer!05SwY+HmaIk&0I+Oe5He` zC)J853Y2DOW(M$-G7;vitEsY^nRX^ZSu+xfgh4gPIp^FllL3G^A=jY>Ak)GN=n;Fx za=^rZP6*?{Ftw?7=oMO(7XUH116PAs2bob|f>^QCI5{(&FJ`H7;^V|O57Mvy!F$^q zHHYpeKKZ@@r)OUL$hEy{FyIe<|Hr=k^>2OX#izWx{{A0-l-JjP;pe~fk(ZttY>$5F zH@^15bI-o=&QS{I4j20NH;>-9nT}^ms?UD=_0vX==JUiMVC%Cv0|Oa3FcR;dCS;a+ z2*8Svy2QfBkWvnjW5BFEM6Y~52>leY2x#c=;KBWq1>5nSvSyp zwq#@^Y_%j4L#quNpinkW4%5R^VJhc~`{`>nUHG3bN+!;Q@gYa5#f_Xa1sO;y?E+L^N#E{U5lbI#hYL}t$JE2{bG 
zM#ITu5-X4(l6GCw%$n(LgY5IJb4QoZDneb=#z8_h1w7|u*5%fA=G*h+&2H0nd1H6x zZD%2~*u3T1tjXP^>#Wnc&!_DiyD9B--S)iO+t}XSN@+Vl$EYU2F)SYkojG{;iN_yY zUp;(ib%lBNVw&2Ao?7|%x3&Yn`1O}R|AntNHh=m(&j?YoH`8i394?fwwVmDRP~-Mgj1&Z>&C6y3u-`)_j(!!A}-={_;gpVUQ3 z=`aG@0=kj=902WqC4mg8Ui|$jy~!}{%^=dETk8PO~z~puT7Uim}gQ;%(PU=+)Y!SRAIno zn-_0?{?5JUSFe8N6U1q}wqovqWH=ceT3KE{dGhJU9!$+dsk%PB_vzpI%Bc^1lKsFG*c;&{eyZ0V@=orPi5~(V2cV@P%+T_*Ah%kNm<*T3i?Js}J$KP}6=<DHBBxqfy+dVFB3)x*`{a7mU%!m+9( z4+ddaS9KkR^?OxU zSV)RZsyRqlLI}j9*_zJPEg4KT0iCK_nPd$vf%?2&4pk8AI+(k)+0 zJ9poA@}06W7(M=EeD@Rcyz|B@cYp6QpZ75M&^LecpbDF_T^ZF)Hk<9*ycWdSQ2J+&nRva zR!DBk)|px^WTl`mT$-~Nu`Ly!35bz49SC7}XPR4W=iPkXplpMnH<^(T*Ow1mym(1H zk4N>rjSY3K!?3D{sk33kjmsBSpIBdCU7zmWbr4BxQ)+Wp3s)TGmVKcbCU$3FIb zfBLN-e1;}<$u}jow(TG^+iOog_1Ig#`6IKf?bkm4)j#}|U)j2LHM_2x1@{VodF!sk z9iyGM6UHD`qsnTt(D2Z0zI~G;HLdP3=9H(aOG{@Tj>9^IFgSkV!S#_hzFbo`pGgc^ z)6#e}8q}}8`nu2OZ-4wTy}fz(=s}AFGjnWiY{zMM0D#SW~ zX)vCw9X)j7*wNMHrGsme)yZUaX?ZdnPR670s2;{x2Ld4g;$|>5)PdtlJz5?eIdSNn zPrl9Dc6ZwDO`Get?!5HI*&7!xU48Y^D_5`GzVza-X~%&FZbz#6(e(qzhHFR1E5{EW z91Yed%i~c99HX^UZLQte3;; zWOX!L8x2RJ$#^(kT|e>ou{anHKK3WRLuSc0H?Mx~+2=p=+ba4^-~O@DTOQfk-M(@0 z(yOmrSvs`(UElLvqj9`(>5jDZUb7d&phlXk4${ThzyIWKKK#gIPkh5SbZy?7?^>Qi zoQQ*jz$8quFGjkN1su0Xgi=iR8n-L2~VS?ri zGh)lCfrA7=Z&;F5H7AGXzO^lkxRfoGva?f+qY1@&v-mShZz&}rjLKD26=np>T?YpT zH&u?5yN*NEI|Q)iwk3jrb24MZYACGX&fQJT*qxW?!!i*$IT^6NcHq{X+o_$6hD&$1 z?u>^^AAIJc4?cS4?#9Mv|HE%}t{lU;*Is!06HnZ^b!|4^;aCw%+jc_#7mL&FmXXWcZiO%ALKh67N}ZH|G=vA4H-=hnvR(WO837yiWi zzxjQ?_p879xnKYEz3sgbr8N<#w}x3(Vo7EJv{XwJpWeF@l3B%dg;`Q{M&Nnsu5WDa zoWFYN*xJc6r#ouj@%RU>2W{uAcOYS?Qo#D^@;vdg&!1mgUOjzceP?HPd3nUl4B6Re zCTp>6+Q9kf+RBSBpMCP_$L?+BXWsYrV@vs|({K6CZ~ege%a_&d?Ag~YUV44w*5&zZ zyJO3&haWgRJTN)))cZCsoz~&(b{-r zZSCmMV@ufe*0pQr?_Ilg^|`9ujcpDMAE2d!!?p2vzyXbR=I(=PK-A1< zyJ>HV?(TGUrM|?#YrfDa>3A z3|Cb(AuyD{AkPkR%bMXJ^_1bR*0)E@A(Bf75p6df#(J;3&a0ErMCi%;9F@QY_TKE@WJ*08Bv$RK+O7u^LY%OGiKc?cerizwb}J_0dzu z9y;>O_kT1u^TBAfgI#|8>aDGAd~oueAOA0Pw*BQleD=o8dtMJGgF&dm-J9oLeD=#P zeEmG<^t-?FJMa01kBrt1498*zq(sEUsn^Za ziL|(oqVH4tylqKiECiBD0F$aNUsBY@vCBotio!)tmRw=#Qb#kSrghaYoi}^C zd(-Jud%;^@jRrg*z%12E_2$NA;K9j<9(&@Q?|skv-(w`Fj-2|HPyWK@#`TbCYwPa8 zLx(S2-e!(jt#gwYVih=dxrbBwbzJiOt{ngQnr=B{i<4eysA(YecsLx@!>WprB}icA zAk0*3u30S;*g(vbbIw^Eu4YMd)@)gmnwhmJWz8kKITv$`#g^H^$+^q{Zw8utzc#!X zsw(9~@7D}>^Z}`iV&6xF_Q=43Np^?l#ZRq^Xo2DoH$|R7-t&#$_R&B87iLDASFRs_ z=upnht=DesO}A(5j=7V_a6DdFU0q*0(9HKP-JGu+UH{&{{+HkW!T0~-KlrED&!2aI zybG<3g-j7adx`_Z)70ji8`Y*2YqNTAHt(`HSb{+>U)$Q5zrOm`2W{oj`1H!_FT8x{ z$k8FK8C=L&TqJGWyz$a=FR!jVO*~$njNu58Up)Kz!Gi}jH+L>wc>RyQ_|mZx>s7@+ z`>+4QsnaL!?xr97!EZlveEI#)JaXsu?e{$S(1i;(cJ5riv$uQx@-=a_`EI*`*T49? zX!EA==k)BgGUanJ#ggk!PTWv41+ojLa4Z7P$1&KE9>LcBja~JdSW{7_U6rO+O*I+ zcQ$6bTW7DI|H^;-!sSpNR zLJqe-o-S;rSo@30_rHPO)R@}+mIXpVdffvkrR>=gJF-o=P4j8HDZ-W$GrJmsEZ6mD zWwNqVA2_&v^Ujvdr)h82q@B@tVqMpENrXaGr*4mc+^GmTv%rYqT?t1v24}A%u%&En zRQjI`mSFA-W#TGUjdtk6boyyqw3ko=#tK4WIQ7$f8Nqo5+rlgV;`z_N^88o+;$QvC zPd)t5%`4XqO~RSCtzI~H`SO*^cQ-fR_T<~)2ue|W( zFTD7LSGQ*2PC#jV znW&_(n!ESQQTq>05>TR+s9&kG{vXLmh*^XHbu}jhat2g^tj!Vwc_5sxS;t>UJeUC94u_z%3MIQI8p^Pkm}&_YF!UU!#YF}pf9`TRB}Pw zi42}&tg@;|U{EE@EMU^&fm(vkQ0(H#%vH5XDW%l3O`Ezdr<5~XwSO`F3oJL@K7~Mw z8+Gy5{|uX23=OXE0M?R?yU=cyFVg;kChz5*aG*k*Bh7%v-t(UK{jrb6iCw>XLk9S* z-}s|9UU}8KW^4OiqO5A8@vy4L%d5-B9(eeHQ>Ts|dvNpKJ%CR?e&)aaJO9^z^JD+` z+zT%UU_`teqINmCIqc@izx%2JrwzumKJaY0o zzw={1@y~wt+#45H*2B)$&U@eYT556@Eza%Ehi2g|CK8jAAj_*`NsD1&pkgL;62~+CvIH$(xvkkjw}y1wzj01 z?@ogaMjJc3yL)

rpj$ZZu2-99&u&P9{emICSLnsfSNL@W6>94;(tQG8hOPP)^2- z$%yKzTAfsp>9JD>g47=mAG!0re@41yT`&qnYyz~<8++*sUq1iIfAue#dHyp$`UB6r z^Qoo5;Nq2A9akyv=f3*v|Mqu(?7~Z5Y1+LAs*qWv2u09#C2Z5p1Q06Ls#H@1QO+5l z^8OQ3x#)Z`f0otA;wfM5dxKXoGKZX#nsL@bu@Xv@io=M6SryDdf~o}(FU!pi2ysRL z)G(|@-}td_J9+ZaXTSElm#31vX9$#1E}*Q!x$NbEpqW@;%m7FfR_yuBx|g4O_GkX_zkT?B`0Ld8uYCGb zAOH5WcKrDE-pX+#q;N0KKtrxFKpbr^~5tz{m=jK$ECUX z1K;{xzxxZn_UGREG$KqDg?8B=?+I`tW?~>P0a6F=&njEAfQ1||%{k}Xb*-jOt<7EP zG}&IVVy+NEnH2~eBnUADadi>_p{fU^xR9vob3%NXnKq6UwqNN0P{W#)cW0hZ4;!6_1C*U1l>SfKZKx zqdPkruC9)EJpJ?sKltH)^RNHe>SXfx(@!2abaH3oz}w#auI11C+SZ+0^LD;@?=BHZ z2<%YO&N3~czHDK{wm3caA@%-5r5;f+1}}wrZDnmTTpkTWuhk)#s+7%*Qck@v)#Y9E zD8U^8?Cze+!hn*a>|i)t0*4S(l_;#xu!yJJD5>R~=k2`d+NPbSt}`{WT;g1OVZgZG znh8);`TIWw79t>RY~Xu)g}WuYBq1 zjjMNeH_o0tzqhy7blvVOsi#3zk89B`tLDMrE9-~X&OG$+N5A=7XY+3J?%hWoe)N0( zn;-et|LA|e`qHbKRoiw6EIlD%ZRZS9&Kd zeC^(gXJ0=5rLX_t)!#jGc)guXzxd^6txMnW@ozbLWaY|v6%QEj+n;#v`SZ`NbkkFh zp1gWzdv`0ZEY*$YlgEyqefi~$TQ|d9xpT$8_W3#tM@cEvQq{xBa1`p{p#w`Nj~@N= zf9j7NSz4A#RBdY~Klkd{&wuS}uf1~Nzx$y-yY|-8O3D_LG2QLH`nj)s@|S<}(6I;q z_@DW{M;|)UHuJB&`g%55Uc(=J`uF~)|L*V2r&j};uoyC!)rlk&3vP+QvNAd+gVgJ* z>(_7Y{J9_fi^m^&;5R<`bKUlCN=oDk6AG-9U75H`nTXVKsK(Cbgs$smO>1td8SY?a zVlUp(%sFL;XUjQh2s~O@bwby+&SIv-9zt1`E7l=mMpYf%p{m3|VjO{{d%N?kYiD2h z!(aL2gMadSzx&j~$A9bhKXc;22M-)LLZEF;!vhB%KXvlfwd=3E_`=0YZ_KuL4?p<8 zpZhC+;oCm+p?bt$_|z-AI~)J$w?Ff}f8{UN#|~J@pzdQ33P_6{$I0IJdTzz*Y+;w% zMN8WJ&q znESlw{+aAe6C+XI#5%Y*4wpdY9>|EkIEI{ypGN7X9adHeh>F8tc2ncPya2&DLbnf)& zhhDgQ*DSYfqpkqOy7JV_yS!3YyvOjq5?C*UFMf#kEw~DEE5H!d;e!WP$IHXPfCxh5 zre!CSouI^vHl5rn4lHb@0M)}m&X%)s2*puVJ-E7N>sNV6JQ+Jw6NsH$f*>+8Ms+M=1BZ4ql8Lwg2GF%z-5! zf?Mch>EP)n-hS}((dWPR?B~AlIgG~7zx?XXeBLFUckN^{W)E2@#36?~l4`a)-MF>g z?w#MdyZsFx{+4Rged+Um@aS9L@dH2n7k}m_{`vK@ueZqsHUT97#B+25a0lB1lDdHy zK37V>h=ZC2)zIXcXm;=3^!y7)o_;JeZ8zN=Pu5JeVhjdhJRV%Xe(mDxuRrqG1BcgE z<{=DZ493Rx#`}+ch?Xvl4o`1vZ2ZQjKmE*mHmizdFxcGL9u9}YgNGh?_|$ZsZr|H> z!nnrD(g4D(r7CcYbaXH|u{?R_nUnW+wtwq)K7aA%^|M#sc*naQ|Hco0V}@X0kNl zIJ$k~b`@hqv7L3(1Pe2pPVZj4clG`6Uww3~ncC;S^z19Iz4&Xt{2S+=d+y0c-*NK4 z{}IipF^8t>mpA|2zx$=z8<*epjx+Ci_j^yDoaDWo*Is*lV=q4Z=;^O~`HO%1um3O2 z^m+&+6et8suIBDqu1Pf|m+@q>y}6ap#(HUKJb@=o*4)Y! z8@szZsclye9tuHrx3@LtYB;n$yw$*v5GLbw3bcJ^gGAiaRLPm)iel-YRGgK=Fm;*3 zU=%8ITb`^QKXu|;Klbfk{=?tBadY$gzyJHc`@6n-FbP3o5r;Ihot^D%&VK5lQ|n7< zc4zNLe)tC;J9FyC|Iv@{ZQXWq@h}=qtnDZe9910lmpn$w6}NWt|Hd2=(cQRA3e^$dJK`=d%|O zrcZ44ML{MCgGy>Ff`bTU6e~tL+&s&Dv!(w6j@Ct$SuE7EfHV#sX749FeoT zi7<#vEz}jV7q=}G1Dqmf>cNX%+EmP}dVsCR+TrG*VlRA!8VRUy9l2|@>tc*iDleNX zKvhL^(=H>#Z~o}FA6#92u1jss4;(*v?(*xu{K=pGfBcRAe)VcS-Q6MfteR3zo>T6s z!2nFjEjhDr$onv5kM=BdLA1dAeSmOqZSCOF`qJ_^3X>tV9We>Y-VGPjvb>o+ZD6ED z7ZFiX7s|=5T$mLncp!$m!HKwd+lfn|?aAR;J)81kykeISm}%gVMch}+Op}{h*LH1} za&FtMZQCZL&U$|kD`qj~<+%)y!XNM=&T_%1^z~d8{z5Hsz~JrgdvbDk?F+yEd#_)- zUQOzkU%hbY?!AhudQcA`l~fg>Y15sJxx0;%f9uDGod2MgjUVY=rjcZp=9zAksZH-;WjCN;xcXQ)A|JcX= z^}qS)+3l@abN9JXwKdC~>2UqP^2*v)7muEPXtUY=?0@{?#_jFnM~{BsU5~9VkA^jm zM-{T?F1b089X_=3mwxDv&u8h%?R%g5{Xcy1i@$s6%8iInk?q{Q_?E{$a$@bkXK!5G zym70&yQPoTFTe6rH*UNUVj!Wl^|gDKE6mcI+8A^=7*CcD)T4?+ij`kEcd?^%`r$K& zj~#mB>ZPxJ{kh-xEflcZ)m&sz^TK7aox1jt2b|a$;z$ESDJQimYI=N&77%%n_)g29(?F+PrY&Wx%u8^xjbSVJ7m1PJQ$4T zO|!GP(RK5E;}h++-~G`KZCtwg#)V75;szF~s^R4Dfo^XvbzSNjfW|A6#~yhX%k2F5 zb9;O9&#J>Prx_U?sq7r*IaA3b^CUw+{^Aaa?AU=-J*C>Tn? 
z9vPTafWrMw*K!^O68+VicwK`>d5C&qLbmuhtq?b7}mZ+?Ged3}1e z_OVaB=g7>VT3i;v#Q8yzEx&#Fjc1-Z(Fl{>k0z;GgJ9hMa2a$jfky1#Yhd#xEO;Lve%Y%Ym zym#Jv@A6E*x!h)H=4=KmNJvU4t%`C$K@{(mRtR9iNGYR=GG|mp3>%BO%_6v_2w4Qs ziAO|KIBZ&F7GUkXWf1QncNPGMh*%T_S=zPU&K^8==H%)1)va2ya^b-TUw`>!m${9F zjq8{1Dox5j!72p+6X@L9augzI4`_^*AQa~^;9_g4hHv4b&UJVnG)krM`dFh@Q;N{D z^P&Wlkh2~UsA%}um*0_)iY>VpQVIwhJJO2Ch~&lQgqe#ng#z8;r06OyJhk_N#s~<; zXkm8D0*;gcX77OZK^25R1}RW+ToMLBQGniyIG<$MU@#b@X=(>)ZW)~`mJNlsAE+R^ z?L&>k4CSDr^`$6)q_LUFkpok6OG|IxzLO&^ukPGhU9ZGt42&>XrrpF9Yia@)D$SV( z=qdqVPWt^po_JlCG)p5iJByo(-+k`Hr#`j5y-O#g5|^@!N>MyBb>L+#C9b!yusk(Befrdy z(3sI?4Iy7&xb?Af_l?w>%e$KaQLDS{QifbKsfnqBQFE-7d8*p1;y> zCt<)3JbZfQ!01S$UJ5A)2-#f_`OV+_oma11Sz6z%O&vLO`t+@Z zOFOG8ESMp*cRCQxsIC$j-NjHi68vr zfZUgU?YFwUy~0bFdp0VXJ}}wsbUU4HU@9XMqc<+S;(P}n4B~QVv?3a9)@zN%UaPge zwduWwdo~0_t?=#VFMay+KUc0i_x9yC45>Jd#txk5_jl7|kY~AdIRLQ3wd!iWzqYo% zTd7p5jpnOwTxxf_@BQ!xfBeUPtWt|@rsn5wUcPfkmn+@4?f zq0ju(Q-A!-%dcJj#lQAf{=wh>$N%sD>u-0rZZVP;3QOh2=+xHMdZpS7%can0pSv_k z`g`5O7f&s`bt|rnCcWJ}_2uS>OS3_@OQ5`XpSb4OL}NVa?riUEZv(If-570TDL1F4 z6o6w#h~v%ixG{U>)$erE4kMMSmD6W0#Bl?3`ttKHIX5UbCJm`18Q3%`MU`Wx&uq8a zw{G1~EJ0BEU;gY*PmVRwTM+RM9n0oc`|8WjUHQtFCbQ0XC0^+EuD2Y|ufD{Zma4cAW_KRKz z0KuZ0PedUADl}xY0YHgC5xj%QE_M}cbI0C0n*oT+9e`M$JMXQ_#TWmfNLqscDosG7 zQ~|Fbq@r?%v_=G_6@mZ(BIzv4g-C}%5Qal<5=d{*;oJ+TBgc-F!sv|~*B$wHKmP7( z*RHnqwiNBnFDxEBJS8AZsE5lRa$+rWtTeM^0C5mxgWjL?KPVu#zx$0;FwqztsW;*% z)F9ro00buVS*{2S>fyy9?8PGtJBfe`-Xszcd1doKac2`E4_WYqj{x`a4oVRe^#!B| zG+6IRlXH#>pf(n<5EvFyWL-1-+b)hM^;ysj?NxCao_p3_B#D`ODPcN0#E{2 zXiyHs17rXhLBNviFJKg42EHVmXX(O=uh-5V>n3_-FbfzP$JCy|5IYKXu^v(di@8v(-4ty!T)WW%AGenLquB&AqQa z_0%7I^P5-SdSxsw4_X5g3LrYqhsW%M-gBtNm_XFZZ=gMw=tM8yi_S z;Sw6)_r2$R_doGi>eK)7@BYP$-+Oywd81T`w8bpxGVsUV_ui(Zhm9-WQf8Hns}hp$;p1VGf1<$kBTwY|5sy`9?}0fHd37D-SCx*SIcs?*QE^|k*I)yj$j_CEKreD^8>2y+ky zq;#I;nak#GUq>ZVGY3jx<<9&(dwJrC4}RpMpKMNyri0|mU;g?FFMJovr6=F@k>|b- zEAzJk(#SqSOj_HozxezQefsmi{%`;NzxtJ5{R@Bjum93N`X_(q|M-7(HWoZ$p4;WQ zc>rQI{Z0pfftkgF&-Zq^R2#`X9l7uE&86F!#VD*!j!m=8#7h_jx#QG2RcjV}VgcA~ z_lb0WZr+4}5R2gU){gk=>7Wnb0Co;!NG(#~i2$+R>fFA1`K7OYd+XX8&Gyzr7>L+b6z#5UF5S3v`iYMU z5IClSFN4GX72v-Dy38W`okJ{m4=sTE8-ilj4md0ty7$k97R>@BGV~Pg|0yVNk2e4S zMTguy#02j*H$l;kS^Rem_l$^$qJ$tcA%KWf5v6DhF5E;2DBd%Ow;qP(V0O-ei1*$* zYrSLPA}lO`NN7w*L<*HgrMw^!72yaFAS8t(z(Ek*ci+WM(!Fu}>Ia|vP`MHxKXG(p zb)nO4t#7VO%r;AvvUfRx3}qMLJkR2CwV=0|I0`#DV>^`AfGh|D8&FjfPmE7A>-ErR zwjNl}i3viDWTJS8q`CteKnBgg#srfG4(f9G#^U@&r<1YVUEcD5svPxgHgL(G zFdE^0M-2c#VqI?gxpyUP!oVniR=W?h@!a#@J9_BQwcFQ5$43sHIC}T$wQiD>Ky|?h zXc#hCAqU9;EVBj}0OSBENL(Z>KxN=~YrXOPm&^A*e(S{-N2jMtl}0HrjatY_FSq?Y z@^8HP=7k6DpP8N*9Vt)GHlBXw>T}ON^}Y{%;+s!B+iI^(H_N^59;k5Q(D>@w(&59= zU~g+-VPR!$A@6O)CVlY4iSy@=*dSZmY+t!^qr04nDl@tGg_kq^G-KmO-myLJ7t6ALU!KU-T`+}+J@-C7!Jj#Nta&YM@EvzOH- zlAX?8KjVIOWOD4}{l_19?_sZ>Wr(D}Y|o<$?r zY>)z=_uNlY&mtbOJbmf8=PKoxgln}5K%E^YNh&&g`^p=wUMmVig?hWSm!&z7lq0jS zww!i5{chXlw#Uv8SZ0MPDwPU2m?vw}1N!U;G1?cT1(QCx7uDwcFdTeeaJ7 znhk?BI=X)O?LYg!{gqGr@F)M^tKS0kYk%!;{*{08FaG`i@pu1LXLr%DH;yJI4~sFq zb{7?ppc7vy2buTncH8%}jkzVGW62##==&y3y?C4Vy|do4Mx_a~3dk7hbvFx-D;Hge zh=B>1Kv!zHSR)*2K=6TyFf`ML&VKH5pTGY0TZ=bt#m$Pao||91_2!Mw|LGq-dh{T( z&vWYq1f|y=+`WAH?u*ZF-MCuHI)^za_4?fH^^`9OY^K?_{__`4-uIZUjS1Qz;E-jX zl(~P57tEgm?eHh)VM8|ZzS1rt`vt2*g&pzUuK~dOz=5HnROE{pkr0RF(n!MwK!TzZ ze3&?cp?&n;46+ZTbRxxxw69(jEg@3$dn=6yS{p#naBmJ74mdDeRtbCWofi~l?{c5I zL?dZrVU{?KgD`MuUIZ9(=cZ;4k5p^DL3eKc&e7S~GpCN9Iep>f7oJOc+0NGHAoX!n z0zyMZEA557Nba2=5@IgSn6Or<4LZ935P-@72si{aRvVcb8?V-CY`q~(p<(C9P?l%j zx!mPmyvq_K)J8>-QJ@G(D?`Ru=h^3iUZW9V6aiuOLwl@J1v3f7I|d-7V3_9?A_PHW z0D!#r0BRV=Mr7~+;Jm|P(N4ocPC+TE)Q0jJfruhKtPw;2$HjKuM~XZ$mLozn(r6B{ 
zEa|6do(_`!AnWI8p8*K42e|+rpFP?bAKOW~%Ue59xwN*tky}ncpr1SNu#bos4wATE z-Uxt%?75w!iEg>z@mH>=er zAARJN=Pq4(_08Fn_doE!g|}{|NxE05)!N;3eQWRRh4Y{K;JZ$ZHS>P|-S?fnbMyA~ zOSk(wgA0#j=T4oNm~C$lHl01x?d-ky$Imj_r91OcSQ?+5JbUW!1Lu#AjW&E)HO8A4 z8jWB2JOAK~%h$g8`+xAlrRUOiJ6YS#OmDhW;(mMK?VV(|cjEYcv$IEzOpc8-t0xZ~ zoH{n^mHqzrp8MuEp6d4#Cjg`l967jgeR2Nwtxl)^fe$_M*r}5@|L_|hI(^{&#z+N>CQwjs6!eo%QJ8gDBb9#DXdE>(Q_qGPD3S!pU&hpM6 zPZ&VF6ZXgoz`7)BCFwo_v1oR;o2v;x4iM;4E2&JTLMekY2J zV?mEDmqD+O#MNrERIY|W!vqyk5h=@(W`j0pWKm&Y>gAwZ3F@_y3WG|iT&q-w7}*Wd z)I(Mqoha99>$@=mr=Le*l zb+@v$9c=f>1e@L7X1_P!@*98noo}D}PapYF0C4YK5GkaC;W;xjJQm+=``*XmUcvp;6Alk(zPEZV(!=+rgW^Yr#~Kk9&R9ed zCWSBrlp=tzDA1u6DgZ#N-6J|y$67) zshQBw%Enf|(;8&CW8bJX0~O|OXKQ^WO)?#XK@|HzrnH~{3Ki6kNJjt}Xc}pZv^(t_ z90JDxv5HDTJTo!TsMoa#DF*Kal(bs?UboxncY8_N&k{z~A}m4xq#+n=ABM2MmG;#n*C(Z36J#D?l8oIwXY5qKsn4Lq8OI5mZVQc3cvW z0%d{*<5)cLmLgg|)WPl7gbVUhs@o7YsAr>`d$2K)qManQpyFGBc zF*DNSyEoo-@zL(uR(-13Uff-#; z%5H0RYHEFPxtI1vK3ci&;+fHrhW5VG@2@Vez4q0&?yM|qZgv>Mpi-*G@%HB0{Po-Y zPS@u46YqcM!|%SYytHum!r?=WrmXKc2dJTme6r+kz4Da*!1*jo6thBN=fXf$Yt>q_ z_Sql(ktZK{LR7HZ?erX!5c|9Rz3yObdv$$hX^^xeNp^O32fdCX81(u8n5S7Kj8U0E zGH6sP&9RBm@sZ4>d7iX)c3qkQz|L;VlO%(LLbX*_Pft%i_TWP! z_41t?cfS0k-|O$~)+#k;eU|3Coz1b)27nd@1cl&dC#M{myx+O{_R>H6`~UFge*V*6 z{p#0GpFIB2kA3_HZomJP%a?xPXa2&WMvp8o(z_D?jNjcV*AW4M?IqCIU?cKqkwcG6{*#Pd& zCC)8gyS_Si=l+Kt2@rIs%C&HPdFN~Y`9H^2Kl;%p%ZJ8SZ{2wIzkOk6Y3XoTb7!x$ zx9yW8ZRc4h?Q(G!3Ehc6sE^Lzj2KfCf2Pfb4e$YysE4Hsi3A!-&0^vwF zlyUf-XBfY8a2Mjw&@(y&$=;KEV2C`scR%cB2!^NskRCAnMd4nacsO?KXN;tHzwR4o zhPTTQRkVLg2n_RO1<cYbCdNsLjm5lWQBhtE(F#GCgsyRvT$&8-+KYL|pEy%_Im&87mM} zqp6WbfEboaaabxxaS+AIptF8=dsUFO+PmF6?`2sx85I5Rgg}CYk)H^`gLmA?2b=9d zq=R}Xo@|U(N_9pyVNi`DK+t4{p$X50&7X#dU(tv~4J--?a-r-Ak&cuAiU>0S7?A=V zA>sj06wJIY-4n6X?J%o>uHhFXaPL(h`_2LoM9EyXx0hFzjF zvU~l;#@(euXV3JLB<&1z?mU7+XeFIjUV8cB>2n8<&;GeT{h43>7yqrlv682Q`;H%o zYUbht4{$FzJUM;pm*`ojyBzU}kh;;?tiyz&W>eyL+u( zZ>wW{7B?!6^X0{*=bn4^t+%hPuI{QhsMM+^j?2|%duzA5*Y_3xxLGehd}Q`>51pIn zcJkcECYD~$5V8`()I@bIjQ{ZZtAkc2BD2#?bYMudW8aJ-rSLcY8~XHER2KB zz)#f1Mr+d}BS*^h=8>Zpnqz9R*3_s$bE#a((t!voFeV7tdY5PEAVI|0nMoa}9IZ|B zez%)>wu!au`u&6)H_CNW(k4_ExZ7>*w%Wa3$2u$CB4L(sq+(`FJKc1ULGF|`^=hp? zT6*Zgi>6Wa&do2Z|L*Vp*X_+!09CKmPoF+BF)``}>2uFMd+o|q1hlzRO0mzDm+#IV zIH=-go@SLOtTk&V&tJHG`}WT6+JE?!U;aP;(LXqP=DxY>mww|n|I;H6JouwO^6@)! 
zbI&~U?Xl}`&&{p3yDiCC&?*qpj1npV41gieT@Ij2rE0F8ZUT5X4a69@x1+J{0fud$u9kKa}qY^{@|^9Kt~N;bK_S zRNoVFMfSa!JUr5A*w3}^9~zoRi_t-fGkX4?!8ndJ0aXAj7*l5iH&e=3$ z&hypn&CQ+N9dGk{0~G*?go@Y;BPbDrBt#mJK?dF@oz8BzXUL3JtK+qX)?ri%^E}mo zDr_%B=Moi-d{kt?n2Nguhu*GtHujo|v?AgW5fEWOM=B)-Ct&gDA+uhE3zrWt@CIAGAfQy#){g(so zfAQZ@AcxNnK>$V}1j$7@Ng}9gr7+E{wGu|gw0CaJ-K0R5OXc<;(|{>>1a!k%F(^6` z{r+O0(9XPw17e^IxC~bHcXwCrUi!|{6Ne6+z5o0hue_Xy&$}Jx^RQIAcJ=aio_YGm zr+@6jAADD5H~H_s`{jjeuipRI7prF!h>!K^=c<<0A36Sa}i@|3PSfog2t zb@tGEo;crYw;J_v4s}!+>2?Rp*H?VbSuP?pH8%C=z#JD z-|mTbr9dAZAOG;9XC8>uXnV`#xyhUkbe?%cm5UEd1V|Uhr{3sYyZ!c?4?gjDYiG+_ z>q*BZIygNW39q%cuf4K?ghgI=Cq$o%AeblyAR=9ks-<$Z(VRYX=-9;6WEch0(=)lX z3yXJlwziTaDObwPYQ0)5m&>KNRH|32_4-J2e99QT+3Mx}v>Zh`g3zEt9382iI(woV zhJxtXgLk0JXth?Z#tKyCn3#Kg+wJZ-&tr|o#O!#l)m>ZPY;SH@mj@;~a`gDj#I(`0 zu)OrlbI-agi%h5#8KqL;t5>d`IemI`YIb{N&V(fk{QS!=wzhYbg5A}fZ~Wfx{kcE; z*Z3_sII9jH zJ4FfwV&KK8j&PdqZ%SdgVf^1@^)Q_ZRS9~eJ-GH#a1 z`DA|k=2yPWsW;XvJ@@=8|NXZg`GvoVf=(FO7Xi}3)nDibLm{D9m?G_Cp@;7=W?!sn z_g~oe0`*9~AZ}(nA~3JAVveLxowjc)!aq%vQ`TATUHr z5GmNdBnll0WuHY{NEx1K|0fPN){GjMnLQF|J%mpnB8$|k^~i+1UTn}d^(7VS+niB^O&fmVayS+6wHZ?Lbxd68Wy;8~-@iS{(=#^IN0g%g;xH(qCLT+OR zEP;jqb@R~SQ%88Ps|z^-7S(fJgcJdw6(&Ik0K!D105oV2PJOo0%6HnW zN*qr#M{AWTky1n=!d`;F0EqXjh}a9L;g@3wqoyGQwBSpKMif9rs0|2<7aody;+cg# z6t&7!NPzqPU%;Xe(s|CUB_v@+Ktb@pLO_bb0F9`Cv_+fQEOqY8frAqhQ|&=Nti&5j zOFe)92^6vOAiOUe<8Ti(yb$*P}Uz% zyO-GvK_&aL61~$?HY6G`(m+H58r(rnb~$EL@EGTmO9JBL`R)pC@CH3Z?I>B;t9t5$7{*AA~NuB&;byI=*1&~LS@05 zFjOkc1}Tv^c19~Um}a>O5MpgJs|pc@7?_k+m3kc@n3|YYCQ5p}tk+HFHs1Zv!3x3p z%HH-XuQ!gKuN*k>$Y*~z-Rp%!VO)wvVyA?G-JrX7_15y!e|+=5eZAssInt}G_T}II z%Bhchrgmh;<(Wc8L@uJHg?KI~MH{yWI8;MPy{HZ&6hINZiHq61aDwu^T|}V#CR{6oM0t#6+=c5!lQ z{K)B(b9Zmd-Mu|AelUzmh(uhpErTF3fMe&8OJN|uiVAWkAOnfy))lENP%cnrbfom| z=DC@P$(Jr)o$IWNI0G6H36Q-gz#@8XkkAW)Xpj&o1Iz_FVv~XErfH*EYF6s?N<9uj z0LijUA&PK8X9Fp|a0Dcpj2BnTO@zS0R3y_xh@kLD?YA|y8EXKYr$! zQl;|XL+77-;(Zh2$A0s-|LffCTN`Vey>8N5lQf6g=vap^PjF>rJ+4lSOpM*AR|E%Z zYjdspl7oj1uD5zUwAHDRoqjLlM7iFT#g`yWz0KR*Bn;!sXlFAWhGDrZ>}us^x7A6z zojmaBi08#+L8534FN}j5Vt7yLkNZqqEc6$DJK&brm9c z^r#34JPQaSXsy{f1Vr{nqtfMtw>Pf6Hg)p!&Tf0q>jjmPG2!&=;j*a>Y}*hr6A+*m zP@;I4CPkp38{D%^2b{U1r_W!%a(Q|FHi9j}B?a#V1%2*Cob_V!EpfQl+R3_!_im7o z34(;Juw&+Kk`MMe;+=`YnF9wOe)L`ABgZy2R$qPN#m&t%0B~9EvJ8}3U*&GM&o2LnTPyFOd-~0CF#{5^l{)M0V{GU769Df-=D-!WpmJ%R~PX;L<7^Mh-J!{g%^Fphz zUOu*!EW$n#u|R$agCc$6?0tF4cdx$^N5qvF9dI41X8@>H>?mh(p_qF_cmj&z(1^WgfUp(S@C^yLd^AJS1_^t1G zMTY-q|H(+j%z#iZF94Jvk@&*$#DXZys0dIrg7=;aG=x%GSR8;B)J7keo<;F*zxC#Y z2j742=-IvYmsVESjvqaG;q=+>KlAL`$|@q1g9<``$}=z_F|%j3&Ix#BR1h22OAFU9 zcoK#{DG&h#&ZX?qoCN}XV50VkCmy>zzi?&l_Dw7|_4_NPVrVT8bud)8ii%@V0DbTsaLu0j%oAt*%z?$VwXnVYr+(~nZ@l%! 
z_g;Nv{pt-0V@0OBKJ$kzOM9)_*kn|Set%-dJ5EZug@Vdh*eWSFg^mF0E#rRw)Q{Xfm5QZ$o9mD6H!^4$9Vh&pIwg zr8ow_-BwFOm}N;2mKycOpp%tqW$^lqS1u)4>TC~v5{G8|@Z_<{>e<5wFC3dXDD1bl z)ppA`N6d)I18`o*rNCYUP*8+G0Xy%iAotg6*Peam_{E1dlO1n`!H-NdN19WiDGhAb zJC@>AfrP_JTHU*{oj4-R?2QTM7jLyUSN3&aEFwT$(Bg`3awSEfb+^?E0u`#NGCEJX zA|4UT)#}NkC*o@T){QHhyL;V1^8D+sY^{AgilTOVTav7p-%J?S8za?fsX8*Uw!9KV zD$fih?!b`)xy_WuPOE+S`j!5muOSw1J)>hJrCZ&-uv{`G0=8ZZ5D)?~ASt5*(s^%<5FX1{T73# zdn6bn(#kn+RLp|i-E9EzVihVys$kX>4=*7<6j}w4l=m>`yXIK^=#k?L-0keeWXGn) zl-*4_Tpg)at!v%5u`u?0DTu;mCGB?h7H{>Imb&XJt+#KquHUIAX?3sHPg1W`%QPpqMc*QC#BQKr6*wiX5+Zo<*qO zED;za&jL2BmZJAuIDPuyp-Xpe-&~yUx^LW#G4x zq@5)7_V$s{iE6nK1`#3Tc_uz5MMP+{@;)aZ)Cv#?g%oK-g?j(yL{zG95EWi2RK(tU z&jQ{T$dWwwMbBH&o5Vs2Jp)nEVg^Pj(4r6l2$W`#EKNaRVsyOO99!DlSl!y|3^D*9 zRJMT74n_as3roXuzIY1UdnLgBTIt?(zpn)r?yrPhsa#E54vDP>Y!ST6!YBx`%&|it zzYnML`@c-F3NEyu954dt0ux{k(w4!mKmWb)xN_mx?DiYU@v-X9f9^*<_sP$_^4bfp zU%Ilov6{4Z-+bnqOIKct>!ah-llQ&r$%&ELSncfT6SKX&ZnD=|Yt3(N?dy6Foq@Xg#!&f^f`DHAYrXhiOWXvbYpY7-|2-~ zJMdVc)siJbwO>&p!Lz|Bww> z#G^7(Glzpp?dFZkTiaUzu(Q5tmzMw_1>Bbk0U)xO+um5)-cSS!3@L&R!;(f-Z8U9~ zRjZY?^^LW~HA2X34$O?;Jc?nYEY!w1)(S<~3o$AI0n|DKqoS>iQxX15Kp8C2*|Nx5Hd6} zT3f$+mq7^76}K>w7XWra6gcaw^Th|7JSYe?nVH#{v5C>#<;l)g)Z3npa#-(d?{wQ? zIjYtN-FAC^e*3`9(oVea)*I>EweI{zZ*k46ZkGox?WfA90oxR5R|LES-fvyLvhnK6 zjYl5N^Nb}E&*+J*Bc+TniE~I)JlmzX6;P2PrIlC^0ac`h5J{=xRqvSy_Fo12sWI?8 z%##TU0kEL|6B_(5JVrnWFhtT9-B|BT1tQ49-%9QkmJ#kdJd4kB0W07lq9()X0FmCY z4IvT`h?hdC7GZC_BBeptyF7@3dc7W(Vk9I{p$-qt9_qKdSKqu*ypH1_oSizd-EQ~0 zow14f@zbYXd+B@g3pYtcQ8~`D3~)dFAmTiGFJ3T+LPxsLhCsjzgF*lhFYKh)VrWIo z&Zmimv|bKB^uPnB4jsO7`|h3P`M%9GP>zU#hDZY`prI??iv zWw}DDv-SGa*hCbTNEPW|7V(4%ho`C-G8(DKeHHm>QdB$u-Z^iHP-_NZ_5#A5S!jouKm^Malo7g2HqL}bJ;2tcey=w*FvU~F^*0Qb7>wY@D1OiJb66Dr=fa1Z@o z$p6zp|0n+_F7W+zRgn@KvPl54+;);QF4R-!mUgy})|*N06hX#P3zYIYJFsE_V~GTGhHg~X6zx47; z8yhRw?Rp9;)k?+|P?Shu6e`w=y-S^!POqo5?sL{^I|NHAn4Fz%Hk;*gH3)RMT&-6t z&C$`3YPDVt8ljFP(|NzYwYvJutGnso#ObpK4$lP5v3`Gt(bYq!~Ko;df|+^v_E?ylOL%au}RudfKZ zofhSOXLUOpB;WxxB7hb`rhtRVi7AVAV!UoBU@HV9ph2`EGd4caY>q4~u6+0Dr#|?B zk5(#&dcDu*~5hbsl6%QrVE$0zyDT$wLRh$4)VZ5A0W$J2D*>B1yTZ@3@Z8Z z#@e$ljlJvLgfQq0ymh2W*b|}-0z?tV-m?w@6&PeC=slzNRP;zYD@tin1m5lYg^HzO z@ts=qhYd$XBr3jfvDh#VY0X%i^`Nx)VjYZBn1Tw<81L`<3hAi`i~$tga0rTvlk8sh zY~TL_#b1ZfHQt}hJTL+X4mZdJv{E#CCyb#9B4Z*GLGiC5lqyj->164^5Ui~&l^=iL z(4nI*fB*S~`NfHehtD26cInj@uDo^WzWd)_DOEQIyBZ0K8gxQIaO?|!DFqsYL_9Mq zBmrd4=(#{saIu4SOratXmkiM4hbGE1;}1Txed>)nHx^fyIxf`!Oz0R4kn^k%1EQRr z46!zFuji|qJlu22UIldl|wYD0HTx9j(w63N^w|kjsVj9 z+Ily)0BTrzEPtXXVgFpmdw0S9Lbx!84dVd!Rs!r)4gO^l6W z5Ra(f@R6y6-Q>IO?vBn#}CX%Tyoy0yF7dr2=_Z^zrK3P8iZsVcjK)wI z(xC?M;*2IyinBb+lK$q};_bD&i*wz(H|^$jWUb3{1$0w=@6mUiJ@@zr$`j|3QY+ir zk~CEwl{f?(7B&a~g4W2^X+Xyk29@0U35Zti-s~^lY@9v4v^dz?S}m1MJaF;ix1alF zduKI{%7b(OP%4GBYO_8%Hd1en6XDYQjV$Yp9yoOO?aKi78h{u8y;y)i>v|Z4{ay>5 zzjpz9kdOV?XTSZ2zrDS*06?USQ9A8)?|pS=XO5&U?RVP*3c(X7O~H@;rN93DZ-0I6 z?i~PX^|O`ruL0Z4dPK;r0|J-Wk!E9SYpvbc6U7Y53MhswWh7T?*3VqHFY)Q*p#x)w z4{a>0gVL39X=HNz^nIs~9yt-$>Wd3Y^LKBRtD|9D?WNNjTOCjuRV*&`zzhz6bl80F zCqMDfnUCFEdhX@#eq(!ijR93Wa_+th&wlOq>!Xu{UM9H%@gQ7l)JwH$qf%wh>+72j zz589utD9TPJLm3yJaGxyEa|1qN<-N0CEK~;-OX)8<$_?)&c`RG&YZg+lTr8*Bk<2`uP&@ydaKgr0E}mG&h|iw)&r3)P%D6zb0a}{x?Ei! 
z3>IF0^=x}Dipsq~+vOQDu(M$pW{LIe5W!ooa;<_;E6prgYh^T~Kt_XrwK)+HseJ-ueY)fd~+oxhRs^za)rRSUD&}dB9@1NNPCQ4^MMJk)p$sFNJ!H zI82HS_rAq*-Lvz$`8($?oF1*$%3-{^w*JWDST4u3LE3~a9~32T-m$g0cOel0Y0r#6 z3XqwHn4Kbs=2-|pA!#J`AP^{G&aMBdckj$?uB0wifJNdN5eS%| z$g+tVwycN%Y5)Z)1#tj7Ub@@6rS09Ba{1ucR2)}ArA47*f2*iswh|E@@2FIjT z;Wz>suI@wx36Mk)LWsiR90y_GoDaevx6ZOB0n5z7!UBLg2y^iQqLt3j_j@%O+s@ew(EiCk6y=Lq_B)86K@028CZJ92?~CAV-Jt3;+M@p2rvT&00t-(+s%Xk%AgP=?eBc!55NA0 z-}v_U)Wora2R`tLAH44a@B8qd`pJ*hW}+(OHeb7Y_13qae&!Fqv3~W=`tnBm%Hnkj z%H?vUWU7^tL#-?2wRUSD?wVxU=u%X!G-~B)tx~B6sv3nlP!nMgiFXdbf%93vm-YLd z?jXt2wCnP2((Pp3mhbn%B#T|v%w0)v0>OCS&^ixvBA8iu{rStc-dsEO;Dg7`Jwat< zdB^36Vn8n-PDBVnyhjFw;zd9_X!6}`q!bIxunk(A(dCfy$=QJgT%&L?+OKUKW;W zlQXAww%3_x+y?EvrG9%CY-RzTc=BU!y!_hY+yWUB>L3c@JR6jvsxcZUICuX1=)|G# zJ@sV)1Yiwe2KeY>PwuQNtlgOlO&FFUVG;JAKxwkp+Cip`*;(5f8y`11Qm71SVex&8 zE2GCRp8D8FKXl=d^W{n@%ksmA5C8Lj`v1FrYbEV>j~+R6@m5n!Et37yMcW*Bp^Z+H#vs$e}fJvGt ztqf^xjIlu`!Yn2TwbITDX`@IXD)B;Uc%&<(ffXqu#Y{k02xY@jpm=x_3M&sq440Hv zBFGLIL|6z(8v_8s1T=hwl2W+8YuR5Zfp|}%h>%FJb3-kO2tbr5@mR2on&IPTB_F8x3*iY z?Q$uKN|h{MH32dxAW}v_CgKG&A{F!rCZ)*~6YmhR2F%5a01C^z2zo#v5p?1iL~E^z z(O}S5Wp`}4erWW;jjfY67Vpk&EbrNY28hszrdWd1`hw8v3r9Opu{Xvs)1~Txk&#NR9t074n>%NW2_s_+3R^%BLR1sS>+S7qZD*cS_KA2E zX@d|N9T2&|w2}HoA|Y~le_$c^1C~@&0^q?XS7XxuT9qa%qan8iz2}6z%hst$vqG;<~(7s z-Pw8T4}Rz9r#`DImEQJlR2eya`s{13zr3}#8&<0QuD^Zb%HGE6*6O0|Z%|xcaZFb) zWqG1>SREgoojoPoTU(kd*Bj#_v%Nv9*B|7WRTu{GNJYna)<1Lpv8APJOLMOy7|}|E z{GdmQN|iW{>f__H)zPtQZ@rc#wwPK{aFx<5L-5Tnd@&46SSov$g9jiW3IZdV0s>H> zS>ImEG87KWGw9!l4Q3hI}09AP7dA^~QLeZ7ihZ zfL9d7^=Rhc^yuN)iSgN}KAMU9+Hd{F$x~;;O1Qncy*0nU>pS7VONLpd+Bw!IkIug9 zXV$KLod&%Lmugg1r4_S9NB|Phsm49;*m8vS+|1Op4>a3Ol0g*Y{qFn=ubjVlIvXS# z8!Lc}f?2+9^v!mtN(TBIYXYMR4-dw%A zmvtmp3XuT_fdEJfT~$=kA{6`lVbw5|OO@$~>7xgaOidlF*2~%`?^#3 z^mcFGy0x$}zrD5IZSNSZyz|a8c*%%!fmo9=+GyjgB>=a7=)-=jg%KHqWSC%=ebXAC zFb@lG5Q;#fTI1XRga8&GgkS*TJ+`;{x4!f0x4-jdIsB!m>G9dgk%vF=1xbDCj*vKtMhXa(_J1*KXat`RsSkeeBbFyIaPmXU-gd&wD=jN8kSH z?Q2&KA3byN%=y)kdZ)ZjlFMZGT?*M=R8DlJqvjec4r-P0eWY*3t-R*9#0$6~GfQ!qM4?X$Bv)}kS zpmKt?H&99e34sDKBwm{koL5RGz25rz+JW(jPJid&Cmwq5!yi0%@x;>N`j>w13*Y|M zS6W*e&SnVsJbdTQ^7^0p$)A1s`trZ~m0#YtbqB$RqznLe2RrTlzx*eE=U@JtU;pf9 zf8vXO@UoKZ6adSr3zv%Oodyt28pa{e41_%b6)p3EQV31ndPKZeFzuZX8pI*Nl#l?# z<(Y^B(7>db8Gr_97V4;0t2dkDt)1P~l?{1VrY5GMQXO`7`~7Tvb9LhEsUruc2WhXH z4<;k3#T75Gw7l4;k8f>+-n(JofZ4M+mfYIZ`dAwv0%8>?ajtl)7D=@Nfy&|mDEB#Q z0wm8&gv{bO50nOCBA+GQATm?UaI|{*+_8g~ZroT}UF>oSL!$+hNhjVb5E^PGVubdK z=0)!-D;Z$s@|}(K>gLY$L~~|*s*EwPWHxbm9-7cZp=S#~K@?D6bf5(QhRN@>e(Kbm%rCw@=X z5fNa(I4+;N@4}Ho$4Bb5L7ueQTPs_)U7lELdx?tzt#lBXsFx*ElVc;J)vIr%NjgA~ zPN!cE%>b~kTs00Hh*ru7h;t6e0u+MMkaM-~MJUS5iq8#!kciq}M;FT~1lB+bfPn#^ z2f5pyx+2gy&>&0J<`-X{zw^cKza0G2f7WbN&Ky7f(8CX3eD9M-F5EZv@PpG&J_5!= z-tVq%ZQWh$-MF2+H7^VE?bXFjyPNHG5OpeIauSz^ARe3>7OP1P03c2)#GwgS5Hx}l zArN9jU@b$Z69G8SRUkoCLNx&2U0HgiwQ=y|$%7|Om*WakA970~>_7vtFY1^8lyYEI zpkgP7Nzc9b+``1@)Y*$kJB=gr_#^kf{^s}Z-n!bDXuj+06Q?d54L|Y0dRz&Ea^?~f zf{CQv>a}+E2ECpVBjjgCn^jZkXT8n6_WI_=#@c$f-S*76w{N|9ePG9@4~!o?cx2|t z5ihX5y3pF*N(Tccn#rVlJu`Bs9K|9KYOa^#>uD7w0hX z1b%I0t^DqH>o9!a$kF@Hoc!KvSAPB9|I3AiI|{5qgCT)f6P9OZj>eUdr@!>Yjq6tp zh9(Y&m@plMNG#cxzW85%;DbMO>fGsezjNrwsbBchzYr*+qKHs}I2P>|R@XY)y`8O{ zYPHhpw=XxGYONyFBUjQ`>K4>7duz+nZmSw^`oa+wS&SX_oZ| z-R+&-EFJWEour>v0-X(dS>}$Pef+`4?n`&K?tcH3-u%)rjicqLzq6Hf2jJQC^H7@_ zm};OQk=#Q9U>qh)ec&!|6ozNdoIZ2*Fr<6WKKJ$@VF3yVW3775t_aI711SO_LS$e~$`@FCM5O?REPG<{?5QAKC?P}`^tw7At%JZQUE~~yC?FN%wcJYwy)-#~@_1u%q_wtNE?4@!L_~(3 zCj=Dop3>BoNNc52pJ^Hb{Xq#biudBZ7snt1EC#@fC=G&CSe#faJ7Gqp%b{_cXM-%w za%I9Fc>G;!TgPtB-Mq89()Aev5)lCbcmy^erU*O$IsmW 
z=13_L0GlN$#Ido7%-W4kt5DuyU$$epx5yt7KM=$N5C$iYoqXWj`LVIFjoppQSMTm@ zuLFw9ScJXLlhh;lz=WZRfH0|4j~_W^&z-$_{c_q*34xK4JkJ84|J z@AG7NXd(TdR|-%pqL~0_$R;8WAb0dJ@_R4Bxh-AySwS$Al+E)TwiN1 zuk>%+>2GdIt7Y>{a+?eL)Ouw0);SN5_rNP)1sDT_N*R*E)?c7cWj|s`AkKM(K?EEa zGe`%km*3o2x^?#Cqm}7V1yV%22?4}|ATx6@D1+BTQR*ITPF{NEMJwja0}r;g+sBU| z`Opviz};W}&#!&&yO&>k*}1&Z7`0$$rjFG{YGGVDbl@QB`cz}QQJd8i<#u;-Yrsu9 zID7ESg$rR2grRY%MUj5e8zd<>hoD8kk&kO-h0yM__gdYwZ+ipY?zNPVHl;u*&fB}Y z+r8W_ug>4SGne!Ro!#v$$)Y$2qacdn%;x=ei;>(Q0kDfpb6J+1I(k%?e)lWC_pg8T zUn97El6kXy(ZB z#_G=Y%H(K`OvQT4^1Rp1bznz=T63a4J2G9X)OUM3F{|EYcW!;H-RkVM*Ed(Uwp;5- zzdcCPPIoKIt+zIJ*14?k5Dg>gfAUiZU{Jdpt#wLQfAn*oootkEKKoLCeXY{o9%C2o zv^raD%93#k0;j}DOl*7IE%XU{0g6QLLe8MZt78}5`@n&N<>gD)FJHYoKS&xn3I*y0 z+2tbGm>8>1js&&ws8I!%XG!MzX_h2DAJ{B$HucW4XUobvuXPE~mn#ipG!)Gv-e;N3 zt<7yB?E2mA?q-r_nV^c|SZkFf$;il%fFb z&m(D{=cot(!&1cFDzA-Fc`i1$+L(frVKWjx+z=Eff6opCMLZIECs1%p5QT{p5fPxu z(u6btTM&)N-g}+)2PUp5G(ix6)=^XngLrIea(sO9#n)aF4$j{H*v97S;_B-B;_B&( zXP4JEZrxoNsa796e@2(9Z(VwQasKXmKKS8lZ(QmP`sBTUD@v)IV48b0TA4tk@4crX zb93le0z!hKnGAuYFry(605W@FCO{BDfz;)|5=Nmxvce`xP97dVd3a)PxA)51S8uH> zwtYeXB^`kGDB=JVfGwmTKs6LLP=E{iO;KXg2Y+{aXKj0Lx=}qmHQQ{AL{UjVGMj4F z1>Yjjn!N*5NPvu5DS-k6$5^042$eF{I-?XSR3Lfg(F95%kg_%p!?4UoHzxq42 z+hVZ2d=Qn&^H*L9-60_7{dQ~JvCPgKYBtBCh(;%>Q&TgIk%mjNGzWH;G_zo}S*wLuP>-58q~-<}UWdmP%MJm-WVm`3OlpS$e%?94Zw`NsEOeEM%KUL&pYRCK6% zt$v#K(j+6HP-zzJT}}Zxw!l6rmwcWkY3h7la94_`1QOWX70rW%Ly>?Oyi&${k03;- zbRYr@a_Zy*AA0{sc9zx#?T*^p92cG`>CGGM+~y^qPzNEPcOV)z2Zq$atWv2Wite`Cm9Yt9G-=X6_4=sR>Y>Np z{n~49y!OhQpZwGhA3lC~b9eXE7hfcUHXD5JThE&c{Ni8x%l&Tu`>$QN6&os;tDiKj+ia?YiRpj0ZC;|W|LM}MY z3WQOk%AG~dopa9mMy2%0C!g5r-hXp`@!H(2jm|a>T`|SaVgp!2@f3gs5g-eo_i1Pb z2tZQc#a4Tx)gG&s4$n-DO-@(JbyVWL_ue8#3bj$>ya0+;%Cn$Sj=d^a6RO}%C=nq+ zGFqva(j2uCg=wCIaTJ-j-RmbdDyy9)d#xQ=#zw6GcemkewX{0&v)33GaA|WGEjNf4`qdgLi(E?rp9Mwg?aDa>b?Lfe{q|<{}6# z1s;e?z{3!&LXr34onBv~^$q{}mH2Btkv)Dw6e{vWWB-0dTV7lU0ZMM?14=+payS%G#W3CKq^Qc z(1}=U3)M>zS!cIz-fFL{&mKHduZ&fyB?sAXE$)#S6e(f}73eITMY$N)pZ`z)^|Alu zXAVxC^m+E9pZVd%#pUZaUPaOMM!nzZyF5j7k)~#SYX0{1o@W*}boDU<(8}`S8<$?l zoi$-ts+1?DM%%5O&6Vw_6qTd0fbVxZndMr&7DpAWaC~&~BcJ$e;8g z&dq0@{#HI{&E4rZwi*bC#Df(Vnm|OX!XS*gY2SF0*{pcDB}w101wf;<^9+C*HG>rM zI3fb5{m*|<@dO}5K;DbxfN1oGe(1+arnG+d%3yg#ZEQ4wchaGA)@CQ#1ghpyc`=MEJYH{3ojgHPM|2Puv>HGX zFM?ixNGYX3nP_Od5oYIrnZ0+Jb1wIJ?rg^1@`SxQ8ti3rKKFp6XAfE`XM zW)A?K#XIYTozIj0U@++CxkW`g+uKnTX=9Rp5|v`DlnFyAyKcKjBt{2_sI?)by!Xzz zg2thPG|&MvC<10v=+cPfaz|QAu`MP-L7Rh88jF5JhBWOHfwwuL*ZqE1kqW{2uxo_MqPe;ew&oP1C#*N2O4N6_5aWX6x;+M!7gMNGUZe zX+u_o3cwEmH6lX@J|cj4cA*ZkUIv05iG7x&ZC`d{jj;B>@%zsly|cXf_LVEEtsM(q z3k5{TXu&H01l*@<1BxJs0%BkQ)_??*fM4nkHg4Ph@0J1p$btXlpBkB!d0Y3BZx& z#3K(r6qiabz5MFl=C)R3%8^E7t+sbssdGNhlgw(RG#XV{NJ*@7?1fp{+q*8c%()D=zQfYId#XC^%R~goA}C1v zKbM9)M1djY#2X+1A&?xz0c6NUP&5ZGA^{X2YSXrU*Ta0T_1c}i$}4vxgjzXl#Fb{P zIo3EhGc`4NaI85yZp!h1?o6LQe?*bwy?kS9>-z2Wn|B6V zn=Z{c&yB~jR?7JR(Ss|7KL*HIJYYZLUbcO!yEoFDJap_-sa7H+uwDTIX%InX%bpCz zxjP-zTjlCgzxm%j@e6-;cItSoY<})%e&Rpj5v-R=O(`QaC z&CR9OY5-vyN6laQ|Ne{fXAb+k+v)CJf92_q{NShG{Wt!`CqMb|KYr>vuf6d6&c-HZ znPci~1H-Xq9`b3E(Zhz zGJ!7Il(f=<$kq?TvIGJ^A_b)$l^(WnkpUA&Vf}$~4?q6MgPim_bMwiao6SMDX0z_r zrY1pUOb{lTyjH#xfKz*rmVr*q%#I$IZJeIm+g-l=^ySsfU7H6;20;SPwoXfm-BFxO5 zy|&&vYiv@EFngWosDFAo@XDtATcX6rWU7n@6^MfQEq=VE1rq6ws zTSO#Mg3Jt?o9ng4NEnr(I1Yk{5S$wd(@JXqkxYa^2O1E>lOpB3Go+ExS>D}T4@zOJ z)(B0cliY=2XaeKCjdfJ5SCHWPwYeYo&<7uX?|Z)X)h~SeTi=W-)v1ZvwcX8f7@s{%h_P6;Wc z10r8=?hsg5p<(Zlftis#Y1&6_N?}JV)N${*6qww5tyPv=jh+xIwK&bvK{rip7M4oq zj!hq&99>=Cyty#HvbL76BLISeCe8FG+IorrtpMmDBY{IuIe-AJ4f5^lx2ks*P92;* zaPVlg-ZY`|>{u9C7)5I(>`{n-L1{#^fLbX4Vc{SQa_52|U_wSgFXd{Lt!(bLhj0h) 
z5edP=J}?kOgv@wj{NnkGVJW_G^GecBgHj~yeD1e;Ef13BnL_2+W1vYX6NZsC8Wk!) zpaK%g9#Wh1x-CEqOC`zj+;K0>ve-^mtLz*qbcGcdq}Up&;sz=Nx&JACEC4W+#{mg| z3ey%qFL^Om8p~3?F%lpPA?|}p{MFEfu01;CVjjVuK0W%{NoO9|hw7Yh# zyQeoh*4++3)kv$7IW$_I9IwrcP1VQ8n)TZF?DWXgp^@py&83aLbg|$w34>AxjzWOMycGtCKs;awssz-@+{XH9((g~t9&U_{k>${64~_sJ z(8BD|`7Aek*(1%-B**7|?RVb)b3ZqG_}I+k_-8)*xnJMfTw7a0fZXK>$j;|>fL?%< zDPlJJ(BOW*2c8MASdtZxnF1#u0s@A`?}tfIDP>f+w7%SHZvzyG1;{d2WP18ZE-W6h zjg{p>Gto+kh{59eW)bl0ZNGQEZ^M?Uw-PkeNBW9!znn?ayT)#m^HxBuSx zz54v?-+wODy7-PoP|O7o4Z|Qz`YACna(}OnT8VQY!rnV)slcJ2vCbhXF2;PO zV(

vIMn77aQiOqnb=aAs<1`oyX8KmGa7pFJ|YcI|R+Zm!bWog}aun}M|^4xKiZ zbPl9TWpuc-xvoKvR-5M^IG-xM^U9kmYs-oC&Ick85R(-kjQ|>=+3^F%jvnE1Se_gY zPL3%}&M`9}Gkag;e+nx`f&2szLL^m)wK$~kBM=dwkWvU7KvXz6y!S*X%r18zEG#z9 z*t4*B?}`F5Qe2%t&WIJys%eVX`_q^gHp*^+qY>LXhF`>exCQ!<|sr?Yf~52A}B=K zm`1&!qtcmE%@2I|1KLy*Is#~v0vH|^-dYnyHur-pW0t_^NCo7@rM+qto<1^he0qFiyLEeE;m+!6 zCr>iqkW>IVXEp!~n1k0)Fo1@ssC)j%2to_IW8Euv=dLd-9h#j!e)QPb=y(`JUK|KJ zZxuVO0{{_4%w7RHW>RE~@zzJ7Vb2Ifa{yMV^?o|o?RSf;w>COS2L(Tb01y=mPfbo8 zo;ebRdU0{F*Xf`kpWA+?*Gm&;^FfkmO`e5Fv1bKZp+*8@v<^sVGg7NL5G9o5HVmU` zwUVbEub-*EasJFLI}D(Vvpg~Ed(WTl+Kge6fv?WL_G ztE_I8VQw_klTm$Ubh6f*nmjn$oSGdSt?3G|aA#vTXC9lKoSdCbc6PUx7q{Ph+iz~? zgFb>&AQ~VjG8d?{K&*8{V zAdVw&ocsKDfBUy1ihuBvANwnR_Rl}_%1g-dp$~qnS#RFCb@w-Z<2PP-`S}P0fp&LW z|L#}+)l<(t`)He-TkpG10Jy7^Ac)wvELR!=$tn9gpJKIhW_&IU?}P z%xrTD0Bl`u9Vud9?`(06f&g1*^E}V8G|OzBDP^3u%IHd?UMa^(lGkfB5UUg^RQ+zd z-|J@1wp!a^5SGi;IIc9uMx%0>SwtX72K`Rkvu)Mtjm8*$?#F-r)YkA9FU6$7BRmt*$(~~px(sW6e$uy#h&zv|5EZ(zGqzFSr2;g(8 zfS3T4QAXkLT`2{LTY(l9_8f-FrJe%FeKs;Ok_-}4+=z%us~|MqTY%z;tq7H4YIl>R zjg33=3!B|N3qn8!h(Q=+KOz8bpO8$5ETRAmA_!^#3!Z=jfSG3f^qDhb7o0BJ&I z2@s6{0mpy{QUzTGRaMbsxqfVF=HSe1V`QXRZ&c!v2X6x@jWx)i+f6$QOS{)^wdU?p zyUoxK00@v!3y~8wK!92;C{=6YBjrkkIVXc4EFM`rA`_w#@M$iNY#8(#CqMO>AN$--5M`MPCaR;$%bWktzhxpg%~R6CG2fI&*^H` z;wi8iGv`8sMxZQFDAb52W{;e&)*2JDGeOz*u+^sY%I@~*&;0DepZQd7J*Z+FFVJ7C zPy~dA!>XWyj81n>hyY&N^?cw|SP@JnQ%SgF#y%=)f2qgkc@}^2m{m_3bzB+}hsV9)QyTfil1V z!mj8GgCTlafHe?^Ab1TJAUUBNcz&a< zGa}N&%z^RAQ5~49pX}{zxA!_(o-wm{Pm02j(j?tXvvRo-MdUK8APyp9{UB%RCW))m zy-2wf_d8v75&>Cow13d^@UdU0|9H=rx3A$0{X&Ig#QUoP7`jM>6c>ncB+#QK zbm)z_(^cN@uBgGZ-=P$&u;f(?=Rphr&3}JA=4d3ah2^=~I=Xhey`7_O4$`*4A8WH}Ch9 z)<}vKHY)XzW2aOcAR&NHJ60Sck|x|Q>hPd|v;su{((I@2pE>dD#?nh)`rUrgf8??E zJ#g{9|Mjo^wJ(10e_XrvHghIgYk{yFmZFFeNogGh41&xAaVaq6a(Vpd0T?9nS8ioV zszt|@>fy6zthGzG=Gwbk#)Ojxj%_U8vDOwAas(i40uz81k3u4V+5{%!T?Q(=yetJq z#uv!ZG)Z%p8Vr>&?A$lL{q@~e`%nGs&kU08wHsT%{a^mg*5YDhVgcuJpJxLdMPMNa z1MA$^|KNYD-(5I;=7A?a@Z@j(-@khM=9Q2FE8crgI&d!66cS>dTLurgFWQilB9Q{J zSJX*Z0bnZ#T9Hz5tvWGY85t)oXGtyXBpbaJIv>QN&GE+A^yuV)=>rF*jvp95GCMjs z)<{~t@BhK?cQ0RUwv%cYg3U~&oTbgI)AKGhfzhQp=s4~5g0M6@HBoPr7G8b%>hc{8 zCIQea#)&4<4jM*}SI0)1Gc!j{nRv1hJD)GzxVG^9ggAOK!k9Z zh8dE#c-XO3e9mQ`utWQSuVH%>GXNsN5X?+!IQ|bwZ9+)JH;x#M`@K#`Fhuby18}cq zaCpbC_#wYpM0{bM#Ny(y-g|a=YIB<2cP`oKmJ22>fuKo z4KQkVH)oH|%&F$TV#g)4&OC37^fC$I{DS#YU0cZdQPyi0t z0OlY{SRn+02Ez>O-0J$u#`?jTnG5GH)T>Q2fS$!+pp*yl-lGCdij9HHp#TYahH_lK zxwPzn6&fG}J6yaXsj<fOc)2HJkN8R_46!qd8k#T9Q)il zDN4GBdQ7n(9Y(6}NkPL9KylCuzVj~^9BCLrXou_KV%{YNQK%G%0vvc-(wda=0;z}- ziGi3A1#AH`L{xxOG?@aIK-IxGa2@~wJt2cQWGx5?ffSG}bPSdx2M|Ofm;&@TTej)- zjlJr|wHn07#%E7WpPZeXYEB*SBLWqQP92;%GTmL6vW9y-J4oV6Y4pG>2FBYQ zz)6;wC}f`&YrlP9m+~wWDsKTmY?9Zc_0gG`*L$5C-}qK%ZSS%7fBeX?6QBKwpML7u z?_PTI<=$?G*=DU?2fF+3LG=#d7fGpT>GEBLf(lWis+d%=Yp=h`%w&SfXp=;Cw--%7 zwPpoedY{>&mFpwgi+aW&9kZDh1mYeXg#?FV_*8Mv*zT=JI2 zG%kgQW@a0cwV9cb6US%oyYJAU>6yu9O{cK6x_0 zQp8>miho=@h=kA&dmyN|su?^Nnqio9(MUS#XN+d;! 
znP5N$(Le*8yYKDWH^<5+?Y-A}f1Iid=mP=vce|=ieRb>XwbpO_f)oM8s(4$dwE+o8 zDm7&9-NZO4^45`3&RUyW=kq*HMZ~#0i!=6&D1eM8&RS+UaOh~354brc!n#?j>A-kU z1SF2D)ke9r<3R&(nkTPcfBn|2rTgCY(1Z6s_?>6IJ-dJUAN<{a_|j{yedY6?pV`wK z#NDlpH4|W{$%_xe60>Ve&aADi7E(;dUJ2Ffv(}6EuGj5{lq_$zD?!+7)FvCX$#N~X zcDvu#q(Y-2t&LJ5?3_aZQc8$4L9rhb0d|50SU`ZZR^D460A?@1?6pyX%&1U-wXR^d ztH4-ii9~5Rw148zzNvHF6LU)|a|;U_?N$a}11Y4C2w9v60w@^$VgRfZycZv$@@O)j z-B?`N*xEdI_ReF6k5{4+h3GkFCJK~*cOopJ1A~E)I7U)I5arf;07SIbib7xz@%36` z&&;fj)cR&?dt=S{+&YVh){B6ioSNOg|46M;?e>$o%X8h{R=Hd{as1T&{fBF&f-2NT z5M7oIvMjlI3ja>!j1Qxzwr9{%F5>Q ze4fUgc59H@%zDMl?23&Zx?Dg4(pd&Muj;cB83DL^4*yk{f|LQp|yZ5o2o z8lpfWDVy0ePJ6w+(II+fC!|%NR1g}ijdht5R$3XQZJudutOq2xy>RP4{Q5sXeB`J$ z@#`zu)uV|KZ>Nhd0;fe((4HJEBrX8%+!tlqL_JelY89ZEWtO zvG*R03QeSS7zJUaQk|;SkCq!_wNhE2;j%RG+r6ZdXQ@jPpJz1<^}RSfULGHB?BCni zcW`?D{>jEzy%dxI#CN-|U%fj2y%(1+TrgYPXQ!ve%{XSPwPwdaiV4I>FgZEVXe6zU z_d<+OrIro)t(7fKdkCrmIsqF2c`ypi{sa4~6H`HBR>GM;)s|M~U%mCp)3>i(8)Q6o9EOX8}n^|k~Jolc1z=-l5oDRZ>fKVw#+6$8kl!-!J0)Uy>iC15H zJ&3A5^syh?++2R{XZAXCtowc!u)NUCdr^vsYFC5jw|)D#()VdTUCVSFfxNU zsYf;ROb9Fvoij=c`^`?P)$43j%MDW@0HCy{N~z-#2#FA27%7)|C&ECe#9ODa$YmhQ z;=OmmDB?ZqK!JCF$X);>XA3L{Q25&rpjJvLvesGag|OBv-#d2d&eNwi*0-;l?|S6n2aoPM4nbZEL+4zYr3@L73NX-$R+n1edH&ny&p+4N*#>lh zHvOcd$dtm6Z5F58%kpZiUM|<%Aal;uqG*sgDn#NS83=$^fC4P!Fk_tJJ*+YD$D2hy*_r|iBrdppPkva z7m;?_ozVL7)PxF5f4c=p;&VY`RAk5l0yyWHL6HC>F(7$w*`pURii91ftOovYIjlGL zpC9ae@4x-G=HWxe|A8`_KO!hOiuDmP7@m+Vt|`8e)I;(BZv@Pft!x)oT@FGzdHE z(++I!bo<@b*5;Dy69}1rrW=*!?Bwi3ZF>LYp1o6()1_*Of&ex6EQ>okTXV0zws>Q1 z?bdv}w58hlK7_lcX6iEs8bLLWQ$b=tM#VtlMeC5+nINi!k?UuiIip~EWjRS&HBcd- z2CPC^AP8#{6SI5A_Ri>%@u2$2^7Y%x-@AF`T5qeb)y(YVnc3PF`b#@`E}Uk}R#=$5 z3=`2um?sEGv8x|p`7mtJhx*Nebu}E23q8Xytc!}j9QtCqLia>P2p1s&up%WQ&iUdZ zL`Z@T3-SoLg8GhtqpOo5E^>nb6`CQPg2g#&;|xHElnR0(ppb^)G@zPcQG~qr?7g?n zTUV?P(=_wmYCsS~LkbXuC@?0}Au|GiR%Uf|<^3OeGD|w2{p{z)tK*Zky0yJ0A3S&Y zsrUTpAN&ttY1v+ARJmGCods}VRPoLVfFk3Ct#z6qwcaHuBA8Ht=h!9HAS{*2sm)e8 z?X2dTrLuTxt!*JdtsITjtM#ztEr%vRfYh@mCWRspX>v$fE5{P*&@$}Ct%COefS>?H z5P^Y(5w*9Tk)8KS8}S}NY&>8>QfANovDumXcFvqwT3WfewAAWs_q`(kgNlI&5g9dj zUD&`CKnMF!ktJi<-d^79b$cgIoH}^?jwpx-0jwaR(u#)`mWn`!r3#8dK}1xBFP(p- z9B{iCcKZ4HMsK^nk+?KKGtsQyci$s_6>!Tn1z5ns)S6{p6#rFF>VGM!5MBe4S zES;`5mDVN*4N@4E>Y?tWagtgGR>0>l%EShiA#5?s94-D}00o$adBOky0iu_JBnm^u z9Iz4uPXx-c17JkW0$S^cF!f#wAS$CxS#ZEEWp6<|LT>X8N>*4PB2@?zW(PnX8O0C@ zvqsQ};Khv6vq1z!LM*K85CFV@FK{X{PNa{1tGhY>^5(%SmmfTF_nEVIj!#ZG?6*2Q zB{7x8n2j?5#nuUf3d(}$FhvFiUvQA600luIp#|@qR|Zjmq_*n2Z>u(D#?M_|UHIN- z{@|kys6*_OV)})9%-g)mMhYlTIy!qnd`~nupRJBs7HzQs85C87p z?`$jqXvDDDs7=o7ojq`7&&-h^n#huVY`g8;FI-)pzcA<0M4SgwL0GDWQB*SrCM);d zvG2(K0}~Td)kYKs8VwkM1UMfg@#bc)yR+Txv^JMF*EZVg8}U{rjFV{$PL56OJ=9RC zk2IyROXWJs|_QhInN6>*G(wJ?AP0+9k$JcEE`l=wJg!2^PTFCHsH z^~9T*;YF^P*2b6shJrN_7M7vO3nPgLk|IK*6rvIdVd%twfFdj!1eI1S0_-#I0RdH^ zkitQp*DGVEPTujguYF}{b@RjTeb_4X-S0m4z?dd6(9z zV;+3&Q)jIhtrg|Y8d5?WgyBxNZTp#LVdsrfwQ5ue14Y67(pqHn?D*t#bIe-Tjgus6 zcdL256b8x#ng~gNloRW6L_?sg2M|h<4A|%5K^amAfZhv|AcF{cFN%nnh1faKhO*QW ziZ*CWkfk{@$9c?5QQ4e0dGzSv!|Pi+x0ja})>isiUjRZ+8k8Ct<0z1VLdu>Esz3$k zUEE7AzV>?D@1HvNkcmoU$O zMr$F-+9hqq#vAjC*T?Ep=k9yxpZtIR#UK9OXTS9IzleJ~0hw~8+Ud5kEX%U2pA7bm zjiv2goTuf`Xs;%Msw-n=^MOs1Jj*!u;Qd=X^>`R0Sx|#e4dq?MSud`zjRO#%04Soc zqyrKm1m_q~ymw9{2bDXtz$5Y~WT~|%LZDHK1PmQeief=5Qo(c4KnM~5p#U*DKn+AJ z4h49a&RwL=DnKP*g*AXhF+f5EUt37&KvV4Pe1B{0m2bU#>BWcdyYJYEyBpPse9)D6 z0J#fHNC?(tK2O-?3V}!ikarmXi7zm4@P$iI7b&K>(#7(yFXXSZ~%CI88F)pj;i-L6{|}^FAzx2%$r^ zY$47qNr*ZMYU>+YI*b~V`^P5tD~h7pL~fIICvB}SA3kvM?7fdpPtBY-a^%3FgXKzH zSiHABCsdw{a>xM%CFKJR+9+*E>rkTv0!#}@CKPrX*+>Lu2w!ehC0|$X78q&9A^bj5<6hn~@K)UFk 
zsK_-H0`a00DMn2y49W;-C~UU}Pd@y(bJ^FQ{?>!{+&A;{CrgrNw{O#X+1}Y%UfXJwf@&000u!hpDn}6+qgBdY5#%=S#yw((%wAlU zXMr(NP;p0*Sx5|kp4odLBJV{|R3W(FoJ9zbii|XC9XlPE!w04h?3q5>K6!g-WqxIG ztG8{z8o&}MjaUeIir+Abgn(tG6Ys6#`J1;Z^@$^A?gmYqyF$)GS`m>p%7M$`TmY2P zBF-RAPfx{ZzPh%SWf?GfaMgNqcF+FaMvH{vUV6{n=RW@9A1jfo)oV+)E^V!Dk;bk5 zc6qvK8sp2G_M6{5f8+W^fD{AhyI4hG_vUi*zM0Sd$)8@kHvjWK`_qky@jv~OKT7&- zU&sZhGDzZnl0b7R45K9LYv8eZl|)$<>wLzQelm#Dw3nnQXN56>8nrvca62TNyy+Rr zB~Xo^QY%-R%@8QT$W~;cPVq`(kx3I`&2jwP9^uA8CZjO?=5&D z2uN4MfLKt#dp7`y06i)@Y_^aESc>V4hP;5{%g(F>0m+~!Bw!%|Lh=kb_!1h>CP7}z z)6VnH9+_LX_vE=#M^8-b+Z%6hHr`NADekca7xzyOR4C_oAT z6pM>f>Bk|`kuW^ksQ1-G5;W(fA5NWdwR;vfUff)G-DO#p=dIPgbumK+e5%Q4Aq(1O zmYpC{san<)AyBrHm8!M6CjV97zP(mQDf-G~) zf?5TUN&4yJ^il{`+C-+Lj1(g?1F-cHnUXG*qDtKh#WqIJMp3DxNohmM zIhQ6$9;azO@L47~I?pb5NpB}ha)zAQ`JA0t$OQA;+eET-FzEZ#0yEhRodv=G=tKzB zs)A;v98Lr*#^sVbWzprrX;y599C$Cl1+Tz+&@eiG6%YsrJK>Q^U7-R1aaQTTL|V#w zfB`+YAShKUDpW~#^}>ahZgtiN7#ul#=+LnvTb=E1+*){kePf}WwVbm|K}l-$nuxl1A6m8 z0&0XiK_R;=><~t_2{NiIz#!m8QvwU>(S8~{5Go)R!J#>e3}eJ_*Ioy_cOr;LN|P>7 zr6Pp|i5Vm)1x71D;>4dib?VmAg6|LRyZ@o_M)jp+gH#gvYY z_V#vf5aiQi&8S{#wFf&J*Cxhldzxe3R6E^vn&ojhDw#a8X(fsRr4b+qbm}?JZJ zdi|dD1#-d)=;-m|3Tb0~HR;DnD*>vO>a$ZbJ6l`zN;S>=J@?)HbASEkuU)$2t9pEL z!e*JNm&;)oZg*<6@s+KO?|k=#Ygf-J#Vi&CGi%#B+cryX+^RqE_~Xxf_qlJ(jz9L~ zQ@w8g8{hoepx;gsTQ2E1NqcGRfddm%OrQi6VQ6%uj4+CDvsO;itUpM)Njym7*k&HQ z9VP)3qd;LC#0c0h)tV0XjZaQBCaYmss+P*7a-|WKgHT~vdB>K>fw$gg-ZFb@nI+E$ zHp^1$GMo4E!61&4Jc+X;b7|`HF6S9r>joAOhV=&1LXATbkY*GDL_#H0xDTTALEvKmRz_vQH`bQ6w>DQUUpRN?xjp+1llE>fP@!Q$RH`rqg(1Ct z=$TsNISCiCT=7(Bq?rPR>{*1J6X$Ty3#=f_AC2_J^0mx-b-X%OF}3}7J~(}RKUT(7 z*+wNhHq+2SJw@H>#B7~o0Gn{`^V||RP9e+CD?$$$OYYKsnh~ab%*6Zco>#~~PEm}| zM@J3_ggpZTMnT}bEN|M(#u_AzQ}mMgjB=$z)XIYOv1H&~4y?TKsGK$3xT`d~oKb;K z6NiXd&YD3)oVCJ|dG;1krw!SD$7e~-o}G2#L)2Pf#aTpP@^zvpQjuWny~}K-%E|+` z^A3vEL0zhiG%uBq!ZMHn0Z>I`E;>&_#nA-}Ob(ff8USj@kYr#H6cC>aqZOCsjGz^w zCg)rbRqA0S2x)!!;??UHJH4H--gx-^@2CqrH+SpRmD|s3cXo1b6%nBVKQTRCuaq=K zM7oHI9$BFjV2>0TAV4T12HL1|ANm#zlYWb%ABSj=0PHRa1V@bjVa*txFnb2(ssM6$=6ENB+Y%6)=K4awd|QYaj#Y` zWzH(WL6)(1#hRHd5>gSKLr5$lRLEWhm_36=#Vn#g871tQMVO6=0K8}KE$7w~Q7Me} zADY;|cjB(ocP`A`ytT5}i`$mj07MG45Q?|#HGomZ0dza9*7|B=W|Bq1a!D)`pwT)A z^v>3HFHadn0hOwyD2i6r);is;0`gX(lG(riNV~Na2Bkoo2Oqrq$3OKWU;e_EZ7v`A z=)3d8sW6mmkR)l6XXVUZo4bAe@jO_Eas2IPzBw3lQR}c$?%8}N?o>lm+6a46XpANzh2$ItI;~-I1pvUR+*CbOaEsM?{4h#ghRM6%cI|3MwL>I{^-&C<9iCnQ|YyvUq1y zqI%ORKU)d!(KRC)tk)_i6qv;a4ul|t;tQe~ij1s*Js}~8^VS1{I3^HrIXYkjD^?2- zmBOeLgv;}n=I1YUljP{V_w1cHu(Z>8X=&-p3yZhYw1>*$kn$p^%GEkUSgtn2OL1)h z5g-u8qTw#ieJ)H*M%t}HKR@iMhpLy6K5SPHhQrf$v`!uhY-CqcBg2USv0%bc(P$x# z%(q4t6F?XyU%@ciS9X;OfWV#+-xM5(XW7;67fePhYzJ_-xhVYli%3WjtrZS)L59_X z2XT_R48Y%g?w;pfeyP0 z`Ac(GwpUlTx3+iMJ)?tCZER(=yRdZO!0f~wM-L0)`s#YC+p&W|o~5FdPtiI*G11JO zV5s75R*F0VibG)!T03?D1>Oq_qf*QsX}H-ZQr5bnQ0M5ybJ1etE@fr{6h=g0cByyH zd!tOXQM&WK({6k6{B@paNM+ittfD4>zqB9R9ch1vvz zG)Xyi;1%e}$?5(e-q_v*X3$z|JvlM8z1i{(Cnm~|KKA(A9=i9_|LJ$P+wC9yvT!B@DtwB^z{?Zr$EqT1yA1?ZqIZH6dt1AdQ4dYelHF z3Y7}90;5S$5E5aiRj623NCPu~2kY5!&erymBu~@Upf^b3R-SgVGzQxR+Xt6hpHB85=WI<9Tj@Se!myn%rQ(%jFp=;ih|InTCKTnW}*}Y ztxmSp>n>ipbp7R5cN%)4m6f#yW)FxAph%Fw2@qhhKm$e~v6ej|g2&-+0NLhw5)XRT zJ1{{RgBZN`;q2t}zS+Ht=U=&Y^JW&nxpzDghULpEi}T6grNzZdN$LQQ1W+^p5*n=q z6zEb=DRV*Nl%YTu3l(o6s|Eo9A{cS~i-Wn)b`<_tZ|;L})b9`R7#U`|j?VjG36R|d z?lAfr2;L$9lU=>dXr+L|Brh0>bWld9k^qpxUA##OX$=UBvYv`-%Fyto*e~O*re6Sb zaanCh*b^cFc#&GO2>=%Pjf+9|9%~pSq zB$-Dl@9dOF+cX)kHB;+}9Rh)K-g;$}&oWYqS%^q$dg;>2>S{L|eC_K` zfB1dxp4m6Mw76lS3X>9so{hI!I}C~yFnbneVL}37t&KN)<@(jPzvnSReD;OsDGdMi zFa4b#`H4@gt*tIC+!A2csvoC=Fe^u4=AGD_%4MaD0^vgLKqPtLE=z!7jER&20nc3Q 
zqY#lmg@lx6^-}r3{{7`z$)<^Ib>^>KS)IR?^}8Tc4Je92>)3j>9&G9v5EVH9LmH6` z5h@C_DXTCtQOT6FgbEap4zxl;WefmIQ{KXNh%be=z7LNt*QYxRWI!ugLgh|!ip#x@AOC$ zdk^461el9PuwcgGkn~r~>gbC#9fJ@K{Xh@}(GBNy4}!!J2mm5!Qw~EWZX3;cF6E}E zX*hfzA3T|lO&L=jt7gZ-U_WJb?ZPa}Al_cMQ8l6Rh_32Hh1IgISpoJeptn$Y0-t5M zP2wmj8^C(K(x^tHh!NS_Bu<^p$YFDAj1e{Rpgm}{l1i0ADutvYWlX6Agt1SAB~9~k zqf9`#a~iN#kH#BK6=83Cdue{o=Kko(lY95hR;r~a)G7?w3Mo}Bm4k?K=dw6w%uygv z3Qd^#RB07xT?Cf^hR!j3^xc2-nJ>Nmh0ivyERK~boYMpVbwQ0`DR!qQAjnvpZtRgj ziu7-`-sh;HObP^$ofT|OHufDjJonNM=C90A**x_2`<+Q&n}7YKwf3cUZ!7n{;0a2x zG9s-Np+ta@QDIam!n0%u^A}iRv1=6(K^mUh0tB#|MpxuMlE~27cu14jT814vmXN+B>2?2qiSXmTT$B}HO;F1=T zgw|Sti1%C&Q8iLz!YHbS(fG5^JooS;53ejQk2kAE@#p^d3uBdVY5sP(GUjaloB#E< z|Mh?TwY}5hw-#c?oV|PFC!G{N}!Q~5=Z@hlx{A+LIY0?D{Z&z1$78X`3QEC6-y(i9` zVk_&bE8Ri=#>(1csZuXRIWh_-{bYQs!6Jh+4bd==A|V2WXsuQ34F#_7$42l1kui=f z33=pV4dc8AE*Np7G)g9niU74XCeY4i`5;qKSe~lAZRU}?diUI(zkT!S#dg{OKv58j z3ppo3VW&}{R?a&Uny6gb>9lj}H0VaN+3)uIS&V=VI3km!);np8Pk!)+KYaJe!=3i} zrOVfoEFqBRp8M9Z1N+CvCmNHJo15Fs`gj!fyZuB55h`>b#F=Nu-lHKT41%be^)oAj zm`q^K-gR;ixBsty{jWdu^MCzaPd)V~fBf0F*Y@mtSt2|hMiHkOATonu0abJuC;-X? zh2}(n!HE_SKOAgXL=}7X+=pe_e_&6&R!-tx-0Lr0pS!&@$9YmM8-qH{viXgzLFRG+ zCuj>NxZxML$m$dUPy`A=69uFKQY90HT31X|2}_kQESJh*5Cvgmlqxq$6|FSR0%Ml5 z&5|_D^VGUQGKl+07PrztH}CI2(t|V+?|jYx4#)x`a7a|xh!|2VEFxiG+SVCi0N>i$ zYHux8qIysboO7C$_g)A@SWs9z0I+}v4nG$G5nvK2)&#o=3A+C*(ai=d&zT218Jx)})YB;1!p`07W^Uy2PbPMnu-;3eZ|NH8t)T(E>$*%ag$% z6`!{|+od2V*D8hYh1OK7R(zJKkUY>|{+T^+=+L1%jwUX~Q?S~$H z@aV|{1gbePChQ6?gpzJ7EzW2CY;0_zTCHS0_JZZG^q!A@@Rjd>cV%^ZzlM+nNT~2h zKqf||5P%U!t585@L=+MpdJl?_Rv|@@Wg4iTOQq2~efso`mtLN~bgMK~zxT0ITfTSc z_R4pcH!p1Wx==LYsEyKocgLy&kqV)*KF|U0a(_mwnGwK_R4i{9pg2Eq_}fL{AqXSG zK7Vw8kDBvg$N!eDU*6=9!l>PcU0zWE+sp3Q@Ro(caMv(=k66G&yB8eRj3HNh#6|+# zEk3(2l=$HuTL2J95ejOpw2U?$-Z|%NWb}Bw4kT0KV}}kN{Nm^T_;3G>zw`VLUNRy( z-S*eN_Vtf^;wN-ix_0G_7oU0I;fEi&adUBPb)!+O?VmpA*&408{g@z+N+n^B3Xu>A z2Z==x$4Cw~J8^HYy1czLckA}$t5?4N>~kkh9J~MFdmsPEBk%p>J2tK@fBVbdxq1Ei z;`&+=$If}>ZM)On+S+MMjGs7u1VnAGuJ3HDZ??J{Tiaue+OdOstaB)!Ok^1xTW7N{ zFyy=mbdqKfGawsrXeZ(kMKGpiw>ZZVjg#|J(d$D;?iyUyZRvWCXStr$U#RS3n_O{QwLKSFZw6f04 zOz;2LCqAk;-?_b_qM+OBqUTabBI=8O`NccVo*k=C?QC^6x7I7=T5F>lMgro&2?qcO zij+nqLgyTicaWw#sg3!<`8V!3e&p8T?ce?MXMXB${o>Nn?QebS+i8-eN;yRF-UK>F zABrPzn!;h*!J%d#}jdV_X9>2CH~J1$)tbaSu?aLTy> zB_P)OP$~9;?8*e=<&sKl$ry63;C4n)l-ryL7z7nqFNg&NNJg7I9v)3Y37tqW7ytsG zP;q9-5X44CM2}3mxGxoeXa=GbjE9r6u)hZAfE5sB?E5)K&9uLhy?Ei`{rB!UadkVNR;#bbUcJqxJ`}Wsv zE^WPbdH!wp-SfZ$4^2$ZIL~1~Q+J%)xc!|8&<34C03?-W<~1cGbnBiw%jQB7j3r-Q7t50Cp3{5fF8e zmQ^HY7u82&!KhXWK|&%x49jM+IX*kR_sqTbFWtU<^61%;(bunBO9%a+OwT;~^rP?k zz@Djzt))xf{`%J+d*aDccO1VtH#b(FxP0S=fVHB%aHCp61a@AK3lni|v^X|_a-O5m z4Du|twlhdK!(>NAUX(9*b_Ax6F>D6KXdKs`C7I9sbBfY z>lZJ-_e18wE3aR_c&*l$+}hsw>NmgoBOm_ZiCS|tP4dB@QY{-&L0}Ra6RQ#H42jIB zU62$lkniktUU}(d$Nc4m&+R#S=?Ci9+I@?>@F17V?RXc3;2Adgkv$kh;Iw$Qs z=S&DF77d7F2G7FYiWPB0$dU_-^6Z=y24_7x&e*4p(OG9R$r31p$H)eh(Nsbc8Pg0) z^{74(H5!$%MzsT^u6aSwmZG;cE7XM+8MMvy*y4J&wS1REEsx? 
zy>z-dcJjz+69j=${dPy85+_W+EJz?j(Qz>l6+I{kFd!6ZpfaLY6jY<4dxX)S0nh;p zh(iD(r6~2@iZe!)jvsY*owoHVI0Dpwn5T-6w>LVgN$-z8{k4~#+xNfy;~y8t)-f}I zS178VWPkW4f3~%`lIFa)q4iWA#RbKwHR(cw=e(-~9Uj*=@J2%ij6;$A0HC|B~5veg7NJ zKl>kl?b8`2zy9z3)xsNB{>8ujucr@9{`AlO)VtsF6fq(~?%aR-?LYYJAO0B`^H2Vl zU;e=R-u}^#zWZ~(|0lN>Zd|%?@!07jgq({1%>VNTPk-~J-}@iG_M?x#W9>ix{;&Si zfAhA-&i%{(<6lnIs$oeVIdte7Su9w^01E?fKu|;3H_H%rAwf_?A_5A90Te3IN*R}@ z-EKP@k3Y70z$x1ve~-t^|Gd6-Txlqd7B9PnGp35=A-1*2_u{V+tG1hrDc9)v|9I2P1m zw5pWLW6kCrM~{8}bANK~?lW=P-)?ON8f@xQm)yF3?aW=LUqAo7OK)7=TH8K$?CA49 zcxioOyEo_n0DD&{mD#h_0OCP}(E%zVz(9#W%ADkRpfw13;ck}4TidJsRo|dp`dDho5-#^%r0I&eP9azIvtCjVtAFdaO2GuMM(%ZgbTLA*$*9 zhx?s&ztvgX=m#p%h?QE&5C)0>gpkl^77_2sQ|?lYBm!D%GT_*wFexSC1q3w+f&xMn zQlQy;afNe-n9w-qGnAP9Ay$Mr@%-Ps|6-~G*B z|CPV@PtP1b+ge`K-Vc&kO;kDcS`pjKlOk0o0LPvb5z_EzV~<#5s4$XX;b`L75KYfa zH^v(XF70;ntsNKlO9AG2x{+iZ=hN}3Z$xiwthEc)EvO>ZM->1$jEt?Q;O0u8O`w%7 zhqb^|CD2UbJ?D<|)Th>4n_HXu+_TMrz4y*%60<>6YPBRfRJx%}L!0rSK3S?X8};e2 zMpP+PqHuC->{w|s&*Ll?+evz@es9p(+G!8cyyr5dq*f}=&KxY2YYHUEVl12l2?fD< zPfGFdjw{p;qd@>g0J+dzq7?ekp?Y)IILQMd3L^kAVR2k|?^$UUh{8JRdaYjD2`ZpQ zrw(TuYppMSZQ-li^PRatM(wzlRRin|vM4afT+*dDPQLNwS1!%Hq6IeQx60+l6OTSM zb7rbsYkch+-+blU-^;QTD$UcU?&`*a-}~ZMKlI@T_Z&R(8I6SIxAr46z{*(XhU;g^57dC!U zl+xNc7n)#YZTrh#`TEMn>R5H^)4%;E?|A&}wMzMpdrn+mzT_RTWm6NE*q5Gvt=Hci zYs^%u(IbyO^c(;F-=(qJ*xo`f2lvd3kJS^l2p(7iu`rWTL*XRK@H9h4(1pcJ)D}4XaVn_hr+yu*;fDjhDjL|thdAUleucsk6Poc0l9+wNaslm!Es~T_1RF5H))3%@?2j{tx}o zUqhw0b~Zwz092@gTDj8ic9c>M7y$v9J$vg4sM>hW^E?MGBCgm9lH-`WJN;6yK2ckn zUSFD9xc1U3&mTRo|A7adcOJ-NBaLRg-`ZYY zoO}7XSN7DWOVx6{Qfv1%r=}(}1>O$;lxPyp1Eq8CfKj7j0GGu=fgm;}$M#N*kMG^{ z)Z5?r`jt0~sm@I8X|3H(GtWl(K37cGO=5s7%?D*&LJ<)u%;ph@06iB3WAI+ICeq61 zwp^`G@0kfB(~rAJx0hwHpzLI6ra6hCHOx0{K6i7q$Kn7ygE&SM;$n?}H2hKzzqlfx zkc`npVl-(agIX&cB9(%mVZyp@XoGcjo)Q9d;=uWCl8A!D+ky8z$J;IryeRBMK&2%t z5srt!Y^AoR-WZ=48*5Z6l}deVtTr``09l&FI|C79bS#bm6mr}j5HNzKBEW@^3<(Pn zdM;KLEC>a{Go0yZm>VcOgoB5Jf?$!I#)8bkh+Y5?i^wS;HUN1qE@RlNHgc`v*a>35 zD7nLKva_;4ovrbpdgj=^ews!>V8t3C1fM$__u_uspWS!-;~)L#AN_~VB%I}$uT+~G z>&q{ne>uziqwjdvPyh5!o<4azblGqG=g<77|MXuz{po+(Uv1xBTG+eiUO}OTY5duikulerc)y{p+k zoS6pj>^y6^c5PvKX*pEx!|#3A`D(WxHztB32loZl`tg$|;v^fSajQ2-^E||W7%`Qx zN;wQ06Jz6<&u-4$*gG>-4l6>EIl%xCkO%~YfxX}mbVVW}Dvni!;sjBF7Yb<5>F03_ zD!A+32b5B;zVI@Ka`dhvb1S#sSXz62XM4`O6bO|70~X=vtQ4_$AOSS!y>l*OFKL$4 z$D1k$hG#w?zvWyWVmCN+mf2m$!U)zc{GI_AhrT;7EF#6f0`Qijc=td?0icnze$=`b z6=Fmwee(%JI0#22XLz`eN&^5Q6<%@z07Kk*^mN`ut_pbqj_@NI)-Vg5l8>_gMDvrD(GKKuReKJ@6@ z8XPcb+nvs!H_<4U!%|Mv ztT!^@z$gc#f(i&nt`t=2Svp8l$JPpXYjf~WDVH^nF@Y(SJb+m1toL36q0$OGduH-r zGo=8Tgs4aqX6GFN2??>*N~Jbeq%{~oLiTaDLyAZOtxT~#%(KK)8iOp`?spJ~-Eczl zxn+bShYv?(diuE+pM3nuebdvGsPWwS7yjbQfAPcYSNCDQ&be!p_>%IusyaB*hF&Y=ON(0R}=eMzgmFNQh%& zjoNqv5aLcZN%|tv8Vq_wzE*p6ZF4D3JDxKERyrz|D`BVzkw6nEQpm*2>>V>(w#+Qd zHuJf4-deEUqennRL?VbpAQ46(sHTdlkq(i{N=-%8YSgsiJbGkTID;p7&g|H8&*ce7 zCry^)f!SO&^EeR&hsP%lOzxYi?I}m0i2@?f-V18WHp?9=DO~pz7YZTB#TL6w>^CKFaOHV{m!R9ck9CZkN((S>)!4+s!@{mHHOaH zq#q|qXQEtw^1UCN*}t#d-*i|xe(3Pt+1crd=@);Mjxrxs&SHI2yw| zT)hrKlc50+54{~mflRO~`x>J6u&c}(cJ6N?IU{$t;?+jFuwjAN-O14j)TX!~ju4<-H{Pp#tSzm3hNx81f!2&YpQedFee%rGt!vk>Tz~loFFg3rLxmm1=~HKJUYje@ zR=o3Js0ks$NVZm?|o)^aO+j*t640*(?u%dU4Vk3_xpZy)jyx&Abhx()|y-?JHmT z+Qv@%#Hkbe_fB_{xn`rewXuHX+O_?&vwNp!4%PRax$nNaE?&NVbN-d{7xzvdJbe7% z-o1Obwl7sn6^+C!LPDSb$$J*>Evpkpj@0VaZns+tOGi$gc;d1920QVU*RLNsd~9p0 z1&WY41MVhiLyHAQK*tUk7@3F_YVb~hh+`lW6d@o)M%1JUAk@Lw#8g-cU6!W(L_Bx; zy#eRaXr5nRzS8P@?aNTCT-gc?M|BZz0q z&Rc7pbCy~89tti)PxX<2L#0Yajp0pn!!yDHE0;w9X0{@T{~5dk>OW+Z*Kb zgWJ``m8o*&j>)}u96C8(o%CecC4is<1q@094?j>vqzr@QBrJt_p4;Mn#zJl>#3Vx( 
z7yv*e;zxbq=w2)Ca}h)V@LoKk7w^Rr0t0y?#)Be17LZaVNhMMy$fZ3nd&|`yeb=$4 zP8&k(F%yEf5XS?pwfCN#_w1b50|&?Nx?_*CgQG`&X65#li+k&v8;1@b_^1D`zwJsm zheb80%M*1U-Gf(%CMuvSev10*a0 z&ZIE&5-6PW1wjF#Aw!x)lmZd(o_(5UX|i|Uz5^$Y-k!g{y|X#Fx7zEsuCA_JY`1UP zJOxBm)Di?G;DA~01)(V>Cp*B5iO%LnBD@;|5dDA z-Xv%ia)2RSn7I%Uz%Ebd%_YNIxSY6q$QLBz;g)s?I}T@nH#Z0cu^$Nmkx+yYM0N`i zjyR(-oKkkR1~i&FMwJRtBM~wRh|L`;REkKe`q)?)mRdX8)v1Z4m9=tI3&N;B7?5EB zB(0Y>R_?m{+&8}cjW}(6^J{;3`qZg2C(bVMVG}lNXn?f)aENI z%O+Il?a||RJ$CFge?t*|V>F{YJGKhFZtgGB5*^fb-5Xmdn#8 zP8@Bey7-@)I9jURwLj-}ubfOF@~9@%raS+Dv$ zNqfCe8|M`&6cqqy^qxowQ_jU55h42``B@NH{F})jP^itZkWmgTw?w>vAB98-p&vy% zd89%%EM7Q`LlP8cErx>L6Z)WBLk)dPQ`Jg!%qKx)`f;MQqG1I>ZB%Y;vtHK@JPWdF zR4cvSV0Cr*(9G;s?y_sEDaTXQ37^8s_O{Z#8kWF&qg0&5fgS^pIler9qhV@yo;j5zHcN9Msg!CnQ&VI0@jLD~Hq)p9 zXHjS_%+1YI#wN$6U%PhY>g8+W6Vv03>b(z~TV0>m+B@&dff=hb@=VILMkVTqcq9-* znL=O4vJhDud!!n9NrQ3&n*r9Fjn$>K#id14QMtIq^_`2I?v*^r zp}<2>JeyG4+@m)2daYTjwcD+Bt0#g=gCYe2;JsEsRIU+e$Gc{q_*URXhDbicE(dIM zT)$-r@K+$4Hy=`LG_a63%bPM47}^fWE>&k#1d0J;w0?l1h~RLUJX#@)7>mQVH=Heo z61KOHlty*p&9_}VVub&U-n1@?6B0yGP)M1*_u6PQu(oC!|gSzX+yMx`XnSy)l2QmMvq%+7);1l~wUF1JQ0 z=UBXF5v@!Z7;7_T1_19t0G)tj9+mDT?I%C@-jDvsN51pb?|=78U%ztY`n8L%ec|(; zd+==!zyE_Ddf=%C-v0i#FI->x{7 z(!22LO43{7IG&lBTe$gcPJj3V?`d`W-~Pro=H}+F+?cxa?0rX$9NF2~X^c$_ zHr4}eie@4VgE+B!_DnS!&1YYD-i!X!&-`Rb1;78Nf9gr7LcjUn|J#ilbAwK6tlY#T zam|Jm0ReJi1hQm65opi?4uDa3xGy49N~8CPphy9!v9a-Lt?KjK#tG(ryVL6^+2Q<+ zc5Bm%DF@|hEh<;rtv&=s>tgg2?-|4k3wQZ zNry(6B#ZMnc5E}7XMkQTh!CI#Fhqf)n$mTO27qOv2ne+vScf2GO(AJPWF1=0*OI~Z z#h3Q4-n{SRoqP8l4zz+i)j2uuwK7DSo%oDP<+2Z@-|lLo5m3Bha3DxZdtoRhL=mCU zCXi(`2NaDRN#Xn}AOa59dI4c1lV@q}@;tX%2pO%1o{<1mqQgAYFE6y7ySA~twR7*u z+TZxVef4_9xh(hIJ4c8{2c$|0L-3B-r&+SuX;s3y0DkA0Xa4Xr|Igk-_x{jNzW1X~ zy{@QQScw*qV)urM zQGgu?dgTln(k69jq#?Fhsa^?d;liz@#O9N;jrG>{Ypw0q;=up_m3RhZK}DXiTy4Dj zeIKt>_1f~%+wOnhJI{abh3`E>OiX}?#1JT5uQn0YoACbbc|L5Qi}fz<(r{q(|0AZS zy!mi~T{nfHD0tXwOEHm*m_@@S@$QZnX{a#9w?IaMBMBG0DIO4cQ#8CwIuZ~OE`|Uk zlpzUKMo-+0upd>I;ZLEM1~fZ|r~!dgvGikRr4&qzO?O+{)zf#~x^?T&k)zG#L~Cc$ zIW`(eWM$>HF~R=52e;N%a`v|u=4~3c+uPtdcewy7LIAXm1;Bf!jM7@=sZ+$Hl+wog ztf*i_0KhDsi#A83$i8HNNCi|`Ilb);-CHb^Dlm{)!p7~wVIW%pVzm-s1!yXkHVlFmQksU3tcoDPo$_g%Mj3z@+>II3M)mx z;H?J$55iU;a7rN{0O{>kXJvC+00fYQiNrbIn3xoH%&kz@F);v9Yn%PRo18 z?2L&7siO40J^Q}?t*39?xbedu{m4D%?*H-^{_@Hj*Y+LU|Kj-z^K%OTJU%-mN!FO2 zNqs!F@h}YIvZ0gXcw~!%Tg} z`Cz`i(|YyAdj_35@3=F-Qku2HAhNk9%}N{2F=coA_I$t7y5rDUjRBE>bA_lF2pNl= z8v&3Y0}+CV67WNg27@o!J_Zm*24uk8IwF;bcn_A9^ zANlxCj8BviUAYuK`N$JG6ze1`MG#e;^`%m!WTHq1#^~I-m_UM3n#P*YvvJo zP;iPDpU1He1A`nWN(NmM=-Gn@vq7@9y-}`}T1m3l>0cWp{ld#kfJQF7rc`;`6YqTJ zq5F5%H(YM_OwN4tsSmty{s(!Sc+biPnn;swj8BhP^&>=P#Ld~I{=bEQ^Ja;VQAXa7 z{4=C);+teo99l;ZDjE8U4y^`;dxWB0D7^WH)#5GeBL1uUG7<(-|K#OC`C9C`JXX97c^rp1-pd6F`UFbgvXSes!+!@-zA00N_UsGwu^ zLTa?b5E)iK0YOnj2v91Q$de9(L4V*TKtkMeZ1#hDKDKz@19LZST)cSx=Ec{a{^qw| z`2MrA`w!fG*V!kZ`q2H4KlJFko@{RnzWeOc-}=g*UAXYV%JRA(hB~a(8ha+jHn+QF z6O@9mWOTjOP*G^KnW)v`ICkEq=>Wugo9E6Gs=$PmQppJAE=LClpomb=i6~GWJs>&n zl~LANMFi{-#bwOoo##>zEw8SxciIA=6albjZ^0eA<77N&udZ(bz=2~Y4jwtWaD9Dp z&%vl1zVQ0>jsDV!g9i_t+Uqe`S-d?yF|q%^*!}n2y|J}9_xc-GU%xavJA3%>;km`d z#l>ZC;MlXb<*=Tl{^)_dmv7zJGqLZNe(q=2mNq{3XJ6`e+x;}2Us(|6U7mw=$}!fe z4nT%Jw-5$qd*HOQq_rU_Oc4PPfUuAPpeRj_ZLK+0Zd3z>{XFx`+x<=p>~cJq>-8;> zHi7rhX>T(Fh)4#&fC`X{*Fq?m2#7!;9zZD((F%7N5Fh}yxLFD=4&qS#sbcJt4vjJ7 zb5kids%6vZCfR`V3ZC~64zi+~gbp%CmK zFax1@U<6=cVaLo$7}0siZ6*k$a>={iCrL)mc?iYz5XzNqsfJjo>t4_Y*IQrP?&?sN zDwQma10qr?0LBP2pQS#wj@yH*GsvS-X{s>+zC5%4psA^dW1j*BjkD*@0#w3E$r#N5 zN+TczArfGornjyvtgo-_Jusan{RRd3AUSq+|F0bQu`G6-#Q)dd{pIWY zceZlpoPb9Nl?fea9aWpnAv!W7<=_x!88+tyrLTb3c{m9SdHEy8C=3tPVS$j{+EAc3 
za4%U)N50wV2RD1%S{$>n2(Yty*&6gOE^Vt-CeO#G_>VWzsK2t7+$l04JO5%`q5obq z$L;E~)8CtZqtdkdb-U1}36{!46Eicn))u7-gW8T4`Tt}w;2`Je96JJ`9TUl%u?5Kp7h!j#ibp3cP^L1hEG}wCmJoD z+1}(h6L5QGb74WWpBrm|*JK5^wo$RswBhO(d3NlouvM5wo z_B8vl*1lrZUu2yGr+~Zk<;KiMQ7Dw$yN48t_plOE92j>Rp!7I07?oU(- z7FS$*BlO{vBOW3RqwBOq{duE8&$QDNqVd3DzO;L_@DQzI@P$>~b0xGMe*QBAte-s0 zTUW2uWkx1cyYr292lH0|8rwXciHQ9Z2*h>`D?E*fT+#bC@%E(on)&_9mMwQXHk+R2 z9xk1Z9IpNy7_j)aGBCrl|7Y*#4@|_ttG5r{@GX_Qo_^T)HSsd`VxuC;S9NAD3UEw4 zVLAwKle_Tmw=mr6?d`uIt#9X=y$*-Rp5~rBh=_>LDRMqJJUk1wwM~}Y{k?U_LgX19 zCCU%@fE;rj2s;_v3d841(gZy^qJ zWPvNgVy&mI#>bDCO7LXZ?PIkOIg82wER_?2s;1AqKY0U+4;xXEax#5?sC~TFUfx0W^TUA$4I!t0EImh8h1yV8pMljCbdh}ZPS;#|^>D@3#$~#7e zo`xyL>vn<~1ojc7b>z-I!_9Ydho;6GrW4F~L{LQ|Kzp1l#OGsjI+17012jb~8U-b1 zn{v!ZEQEw7jXXg? z;+>!i`m96+9bzph5AmM1Y!^Hg_2GRCuV+OQE}Ib`T}gaKx&pbJ?j2W3n(c^}gEO8b ze+jrbF|T1Bu*BX=V`l}`O2cY`D*3i`d4lDP2|RDu zVMIRf3M}^YSJ>MEmyXtdv8ONpF)jj=k0YR@bs3bm{B3EfXYHkh=kFf(W~_c9+Rq&e zalXI$`bj`OMaQtR=HT>jBF~Q;v$q)w7%pcJh7gI-4vw3<4KCBm-|o1LC^27pO~W zBNI^zAU7B32&1^?|DkqOYI^#c{|z>B zD6F`X&|zaz$|Z6C4_5uKTaBYxc;etlj)-*^f{3!FJ~1q z`iAmFiYz&UB%Yzf_F$qM%FJgC%6;d_r5v&A=Wx^%tuSDK6`$bY1s|5Kvmcm9l%Ndw1?b*rDUvx1?_SFo4{7H2RH286`ll>-Thp#7k`>F0O2@ zw#n9$TQ?eHw^@bg;6=K;S#+o#4@hXgx*a0mH#@~(DmT}}m-&JPgdz33uW1;X%zw{> zzuer^mAU4i?nQJ=4(IN7+*o~0*ZkF13;IaO^$O&&v3Bo&@SC}87TTEC!m^u zFGP~zGtjNEwH4JP#la_)RoNWuq!iV?5oaQ9bUq>Hz4N?B(jtNDe{3xmNd}>QKYLf3 z041!SvuVxYW2fBYx_4iXezi;aw+jYg%ZDcv;7l(6LoID@?9R_mCN!2&WadXu#->T^ zuRw8VYu>MisloBi1_$w-9yM;@y~H=FWGPJ# zjKANKhb9J|J}&@C_6%~Vx0L#o#DOvsNk?S`N?O`-Va+p8NGqE}6zXOi#ENt{^Y-xe zQb-5(_6~M=EcWj(5a4d{_Hg9OBEiD-UsM!w13*6#%Lk04RL-1_(U`XwekY5jwi~&9 zk*A-2Q@9X&Y!Q1NWw3tzzlMejpF3i8bZTcd=OSKOD69<(y!f;O1l3J??;%FAY@&2J z6;rgt-d+`T`O-t#j`_CcN8a9VFQ51pb=4xkuVxy3q-k2YHeox${_Ri5gX3+{oo_!j z;vSagO<*I$`E2h;M$Im4`O676P8_!EUxDN~*3Jw!m^Io+X}ikHW*&7(nb9T~ndhdD z*V|8Oe^!To!+f4m%&Qd1J!#|qq1!E^rP`6U-E;NLlpU&eQ@Tw2eVDU9Jl@jZ{I*b5 zs45=OI7Evtd^|p`ZF~!xI;2sgka*&9Y{?RFUjNa@@vmw|LR;i(a@g1BTI!04CGo9! z)74evZJsmWgao7YAQ~q59O#}~OyC(|5GdiZs!|$n1>K<6+&u7d)Im@L)Vb4WH2;$% z3D|iVl4cU$soU{_Oa<-z8(XhHTE0Ph<}p>WPMEFDe$>n^XD|D63^smAW1fqYaYh(! 
z<-sKgW6u82-?VV;`4*KRCysw{T+cp1z2mkLjI>oP+d0T#mQgQ zXJ1_55QNjiTV3XC9dx;oq}39q3tx?LW-76Z zmmP>ygx#=8RI`vZd&ks8gIx+Tqt0U7-Q_PvaOiNAk*MJN%x;RW3xwdtDgvfeh70Xe zZIhMw<@NKDZJJ8g;{@oi6HU^qsbqGD&eYi?=f)A1hv8a6aJ3_)ts|%!tIu)*Sau+; z8Uq%88fPBd1qb~$G>hMJ@lOFcET2MumI==r~^6%cyR{+Im(`v7VaAWa51{!?6-(dB(=m z5Vrs71NF(0=7$)9Ly4ZvnaX$U+ltNfC9#-}+F|X$In{TPaf*}=G_*Xk8*A#EHws~{ zoWy)9jXrEM&jfM|a9D{@{`!!S0i1R?t7WR%p7 zjz)duy>=2B6X_5h5#}IYK0(?4_3QVyi9EH&g_uh8(e%sPZ?)#Pc-z;?m(^G$s-6t5 zMtxN9j?r-~kDT4gZ9hKNY2)c|nP%4zOXoe{bv|AV`LuO#+qEnk5B#9b0#e7-hvV;M z-ruz8(Z4mlW!lAci*6V%dj~>Y4r$A^bLnWi_H^dCXEFI~YG?4Ov1Zww8ftK`qUYA-6noE9&|tMxX_t}QI}jm%I!vyyt(85nV+~IYjO2KQ z##7fVnZ@rwzP(U(WBjQ7fJ+WW^oD}cD;il_DmN9Ss5!fnXP`?2u<^RsM(!fp8@Xb* zwvB4y4j8JQspC|;rt9{+&Y=i{wjY^KSP=S_iiGK-Y-h&jRPt|AB#5xDl)~yp=4Vwr zsu3?I1eVl7U2+iuU~{8dOH2Wy2qR6>$0 zjxxEAa*f<_O^r>FBQcbtn5$84<{Y0WljK;8w1im5m7DLr@4w*v!~6YuJ)Y0Ul@rbZ zzpyMySEBe4_3xTbHl^XIoeNXo$0%nttS>5#1`cLu&#w)}x4x`_{SgYRqK^>v!Zbf` zHvB7$gGjaBz1nE+1?0VKi6P{)B0eK?Y=rMbv3dL;Tu`|!8{Mlf8E(HKcd3VIZi45@ z3HK^X(21gWuF7%`<1^-w8q~s82@VM;q5xM8T6Hd zaPLS9XV`2H_3X?)ZpcHx4?6eoknUE{_hVKZShVkD-W$tLu#$hb{LCJ@yRR#DJBf&; z^gOx)yhu(7dUFD(_)6LGxzxip0e}PClOID^PUcS5*CMfb5~5;72W{GMF;gND2F=ZD z-&uRaj*qQLy&LpiYeVk~R3vRLmZ|Lbw(W@dM#JeIuUfBm*;5%xE)nUzj-X6Kzx!$d4BDLxIL}B6JEF zV^w2;Z#`4L2j<=L@PLMEI}bA>*g4aQK`fwEPz3>Dk?BEqd<-Ku^7q9)W^%*TWq=;x z$C)ml%O|8boE198$bG`-R0DgN6d(w+`k@bPm?RU-UawNk zC*dcE()1o)f`iJf?7~`ir+hEKElg6>g#rz?Z$tNIr-(+6;C-}-pu})8!VfN`q?k+& zOv}f_oz48tarOgTN>a+lL4;G*ye-b(o5s#9;bW(zeqXfk?rCLJpMMl~n854Sr=WX^ zT5n%?tiDM{n0s8<_{Es1=jUguRJ2%vOIc<(;zw@DFP4MBgwK5$zr`uK#yY=6QnYzc zteGGVpAGZ~2`sIsdWG~+U-;uHf17h25I;G7mlrZ*=chW(qjW2CME&(2`wF|g8d4wH z4JN`4H)!_FPx(DRA97tTt|BSVFCt>v`ALh@9k#x24R(4Mc zx1Xd=a{*uXD$d20(B-c#x!?UY5%w6o@c!S_d1jzO8!8Z`5ol8%%U*fmdTPl)4?+>B zwcjXFkCi9As=UzhZp*z&Tf%I%HwYCC=2hQ592@`o!3JjD7y@sI7WFJWXJca+M&-}M&*l*P5J3C!X6(pyLZuck zbr{)fZ?itba2BP6bXM`>#e|c!9B&iB*-ot!`cA_1?3$5Z@yY1yasA-@*3#e{Nj<21 zW@FG*r+~OTL47Z`*~_5Pzr={q{C+T6hX=!Sim01=GuTL?6*^w4Gr(W^GUGS@PAyGK zqc6hIWHx68g=g*Ci@MnB|6;hP>z|Ix$zNnkyH?kjgTF6TXJQ+6mwBfs%oftdk}IF- z4B5`NJzGCK3~ScSM&zA>6uB05#>YK8+8>{(j$s##D=6QaYTn$|$B8ZhG23n09a>_m zd}UU^2DzvCY1?F?I3_Q>rY!rYjukVy_wv!M-Cto$##V`+w%nb*ndvv5RqNwYR0|hm zFOC~`Mq8=E2)?T{HLKBZr{2b<4~9ab%W|gxEDXE@5yh|8FCifVH*h}M>f6>P0x zd|kk~W#URduzQRlfchk3w4#5Di6Egp2M7<*me9Y0lfC`3`IUe^0ME_>1**)vmO%B% z80H@xpi^D*^9*n}K8e0=r~rT;F3!g_BY3VyoH3F3!_TO%XL=XzM)rTS5un5>@)%9yeYa z7uY6u0}_(962eki`MIV0s2V%m75mn{Gxq3kBSh;@J1gSvA;wJ z&)*jo&iE-03)>mth2id3tn%LoSGTn_IsetiGJP$`u1TiqB(E~5!G_ZDFH7IOK~%Fv%AXa8*Hy4rY7Z!yPnL)&(N_RVhyMuH6mI} zGm;YRMryPLIW61)JY=$$5PhM?-G?qmv5dRVE(Uq!Xg4(DEMF4E_Y6F6QIyi)EB%_e zT4IHtZ|CB^tsLpYheH^W+VZnKH1QhSvVIV7qma<&NRR-GeSU%3(dc*nd@=y);e(z% zo6a&jSC}#o3VW?@`4*Lm-kaJj41IA~mOT4 zi)LNvlNrm{Ugyk-7}wg{9spK-m|o=U#8p_MDhz&BmT7UYB>uC~4%-?bNF}j+!OCuZ zkMV!OeTBt+DlOxb1hhX3mVJyxRF0V)Kuag8p1r4#0vYyPUW(knOk=Ulv5S-;ZfW_a zV*y>3pb;#48#vUA-QTn%7^*Kc)pU!^BJks-mwvo|_3_fEn`&3tm7bND%D_PI)TZs| ztI>4|l7NwLOVz}ie{Le~s7{-3$tB9MKl(LyoiYD8byJyiA@rlb-Oi2-P$Ell_Rj^# zRl{aq6>+fgV}PdNb2F=?YTRi^)HCrn6IFgSx6D5>g}H@j^?_Lpabr_?3Z>`VoH&;- z^e%uC+70Oj%V2M>GIu!U`QjTZeT3vk-~sX~M2IQl!=5|`FuwZvTZT9`FP1M(m{%sX z2GEizN@I6vV?Ms=v!brg`Zw3@%IA-d|L$X%rC7r!^R-2~M}rARwZQg%7m}7+A(^hb%*!fS{PsgM{-E32q^0nzc2@VxL+;>E6>c=5B{IAo$&HA2 z;A9QK(E;us5Wx0d_dW;7T{k}O6SmoI5}_A(=k~08AkQUGsne=5|5~5psyZa1ITk2Y z$9WL~H~%sFY-^!{I z=hZO(aHbAoZprqo|3^aKQdA4Jwc1beG_KO_L3p_JUF0%*nXMAuKH2Vbg}t4LM!THr4&oP(}zG-RynzsNy>}HFr1BF4;uH*9n&XA&Ngbo zu=MWOUl(7<8OTy9D|{XoJUm7qaUp=|CSo56ZE07=OP{pryZKo?hbNIM)8!j?gMq;u 
zl?dte!;|k`0uFvghAM-{=7w1gr`4@?bGuGzhxf&goCj3?op7Y3$(mEw<+BerSGT=sorfD|0NSs25-|($odM*n zRV;tP@t|wOL940%XhV%Z#G!sMH`KM()}MvGOzP!(R8Np718;`Uz<|uOnXwOYiT6G0 z-Tll#q`lQCs&^Ok=X~s{vjtJ(8lMe-hZ#f`sYwJCt3d%NY9gc=IY|_A@Ll5@eD~7a zKrBvu4YD2;56SV;hk9m`0cDNli{(ffL0eV{w()LOSH#+J_=4ni&4t znP}saG&CHoF#5^?K*y~hB!QF6fxL%s>t)S& z=vt2KeZ#0A{v#Ryz(!G(@+pM?;UDM#O<;-HrJsY_`2ngml6HU*yUqX9v~uCXLZe#5 zxJzE+dWN9Mt`C7T$m-PMyo1aCP4~7B&5mDUT!taIPQjj&Sc>z39%tAk#XqSSvQ?lY zjfU`;yy^Bvf+y4iQ*MfoyxoM)`Gc^Czu(Kt5^%qhy9^Z7`QxKinQ8nrmM@kTl^{32 z#wXFcHlb(rygIil0fteu57(?sOPn2}I#8CD>fp9YqlIa$_Fosq5;F|(T-5|9?ydi8 zfqI$-Wy1+7V~RJ)(E7lvHZF}7gp_6dql-eC$MQ(?W-4TDm19WNO>miDGnjtY>b;@> zkgXp4CXXvu>+KIKhs3*iU@H;Wuc-Eyt~Dg@2kDB{qhHNAx*3BXAymdg%~v@`$4i4k z;M_q@@b}}t4!mtBtS?s5wi=owvu~fUx0{)gx4TDU-a`r}XkabP$>V1!mxNAx%+Wr! zS?=`|Gvwg9^l$Aq$(QL2gWdIG&>nWav|kBsxuJE@e1(}fK-izN?mAES-oL*4 zM@W3^V3&Ef#qZMPIcM9~2$2Uz!gKW1zuS9eF-4!kT3yP0vtYBLMXO?>xOrOh;@~_? zj^xdGeFE9vR+&`w{+%Cl&BqYV9e2dzu2KWPeVZVT!^({ARHt23g{T?G130Ounzn*S zD&v#B(Xz6&(`@O$;8Kevw-QDf5=2uQR$236#0Ag>=P&-IPhi{fAIoowgFzK)lM|~# z>>rHmRHsYyWlzBx7+;7J=5mk{016``xWzeSi+bv z%BM`QG1K(6nMQ#%OC;Yb+rmA0=vfFgG}MR9&7$z1@k^|#grU%?LA`sa&ys?L(z!Fn zn8EO&kwhc#W73j+E0;K=J|E%E1CY_Q3$4}|WAQsPsDA1FW7-hmg8Jbr4&cA{L!$z# zo&ja{k*+!3VW(2v-#IIkO$6Qn{8?~h{vLGqg^qNjMa#Gp+iyUeh$KGts~xHV;ru5| z=jJFIr5I_b_%4Wc&H7vZa?1!OK-&_-YG%)w0d5_b&c6}h>HifcLzbkPfPTB@xZPtl zCOolpR0Ut23F=RXJnfznHj=ai&qQD@v3;EJ6h?>2DxYSJ_PTP3+6uu8w2A|4hlcb} zaTqXQ`7Y~E2Vr)Kky-K<23_V>9CPs>No%*|yp6X#WOXVI0ROee4#HM3OebA!ZekyS zMIPbio3f-sPs^duf_+Ic8|J40iyfO5f`&-~TC7*H`)fSXlIx$CwD~64;o!qkG(qAZKHU zJF&g#R=z*Cm$nXc519?5YqW;l-{CHge(bKa4h`-_>AZ5iy103lK{!61IRI$A8WxL> z+F7A(HWZl4yLRb3TRk8}qXy^Zntk)QM{yho<-;0PK-MRk(JeQoiHs)NT6rbPuYXP{$z!2u zRw1d6m-)utlk%YjE}!q!ltIHq7;5-0A`9L~!d`m~;7*dCFHM+<`U0!4Gj-d6&42to z#}&tOMyipnV?$|R zKi`f>4h7gkje%1&<)@FIsM|p_wckyzu^7%e{A0N(4TixNx?5)M?;szd;x-p*{Bk&( z3~eA%k*5RktYcZHZn3scA_HU~5-f&ZO6<1lJ*BJjBp55;>yG~dE;G=YDq{qMQpP(3 zAJ}GQ?XA2khz9`wMD(_}KSDhvO-#!h$O&cQPT==EHruUZ{w)m&f;qt;a?t!$L*vrH z`L9TK4^<-yRGrjXr4VWxDrTId^*YGO*z~EO7hAXDJ$gxWm13ng=gJUg5~(?F=9Yclw=tGt^VYlSz8Fx)71+bRItt3T(p)Y5FXBlO zul-FK+o(*4v173OL4We~qb&g=FYl^fhZ$ffXmSQuG2ui#xN$){Lz3JQ zEG(%#FytqT*sx1{6Y0Z}imWmz{cIXk#sR`94qowvtq&%IyR3+xO` z0nr>iR_|F7O@H*j)Ltf;Us**h{LrIFnx3QfL812PAfJy1W}3~cFiIZioB&_HoUdX; zq<#ju$}ifR0}fS$ibq0?Z`J{rG1egODp`n{(A56F-6a~T%QeEbKpHOEC? zSC9X?shP``kFKXD9PX7D#mgwG=8SIdAMW`(F8EG1J5U+h+q6jf@!w_pEC2O2*&odB z@9zErKve9)UgQyvCgQx?A*q9LMmcvKMSzm_|A?$y{zl$OXp zlG0YC*ttrxORsCxtcma5#+_cEY=_b>k0+D*>k2G8a+bn6VI|MLO)zVlFp@;e%>aX; zxxwas2d>kBPU?IG7^(vmGSu@AzIPdmQ*9OWF)8QWPl)3C-!dsT=Y zi04jiLEGxi4JqZ@ibV@?#X%j-l|om2*y%Jy;8@LoVT!E+&j6uW6E%Mxte?tbe_7y} zA`AR2|?~e+)Tb=@f6v2<*rQbUL<_(y){%x2|hDkQeQ|440$0lz8xh&wQ zj;64xV+ZlblZL0qRu}C8`3C4namNU{eThe``kF1E6RTDLHWs$TNdsMpnE^B9i)H^ zeVjjCu)OGE72s*VotGmfQLDJ2ju3lpsbc-_CcbA_JnsA3 zA5;*fY!+io^W`k@8(Ca{?u$PtrIoJYFUw^&`%KwD6WV5N%_~>*vy7l2JCMY@Z*Vw; zS@2zxkphApz|Y^69j(s}8i5NvQ)Cn9!DK6jJ`-bM<+L5lf2tlFmI{?YK%u~MGN8Ij zR-Q`vAvtXIVAJ-Z!o7g?)s-urQ9E=iCXYUzcn#lTx3_*o8r!71JSJxGzqmvL5zwG4tk2F(v za=1P^b5xSO5gXrjW7=#Ui;ZD*<_mQTo)_EEWmXwlwX}a*&-qrry*PWKr9hh7tD~(X zY}30AHp|obYu6SkXLK(<6pO?jgfmhPm+9ksUz3)$duJxLH~af%+&detC|cRhk*5yo zwBsU-7h>>=lpc+z3(e83!Z)4!OqTb`DPu( z=!xUq+OwcK&_dQDzGiSwiBD(dJ7G+J^PF$9Q_H(Y3%BND+ph9&hC2=CZlapS{Epgg z+KT^xSCD1*R7L~uY+03#ArgPrxzyL#BbR77w_4rX8YX-FWmhMV(Hbrb4ix7h&iV~D z`1Lk@w7{wBTuGS*EekvxEE%_^YU|!_ZLj=>8+yt-Ej@eRFYtFTKQM=kx_=&?XsI!) 
z2WPlUg{?}vJE#LprKSRpZ)UqLATTWQudUMX1FzHd20T;Mtl9GA9cRLN`5RQ+`ET#E9-Hoq}|KULZxgos<&d$#N zrBsBpCEvx5@<1{>sBlAQ1+0qTJOk9&w1Yqpr_MpX*vc&bi}UO=?BkSCWd(5$AD;!D zx(;}tXPWeb0{G!`W+b_Mcep1UAJG9h4%#OuZbI3%5v4(|z@~V%fa~X{lk->rUJC<) ze0d-E*w`8blS^y)pcF@$i+S&MZZ;SCFblBFzYJC62Z+N7U!amE{R&0LA%_p!3)6`o z=N5AjgGaqHAHuO<%x9+f(V~b4v4<6OKn+*IcxxXJ!(Y=0n;O;)d;9U0lXu z{YI}f=jKYB-P_$9?m9RUEBD?1OFx=9svaG+D9Axx;C|k)xL8Zw-q@R4)O-=`7{51| zOI;JKq3w^RcWD;hw7UN(x%A6?OWfZ6_BKNu7uH&X__eft!R>HkkV;JNI>=49r}=O# zXtwLs;*l_oI-H)%IKK*hM|_M~6I)eS-1H{J9&gRB9j_MUYOeev%?&;^9siyPQgZox zy9Xa(0cele**^^FcuO7Oa7(pHiHdCIz%jP#Tug|Un6*3gF3s*v{=BRdSmO`GH+zL# zqP+LzmAP^Sro<^ur-*;hi%%$C+qG=Kjxohi+^puz+&Y%cuH^a__W!Q=&?s0EJpHg7 z&du>Rx{%6P2JlB>8Et_b)td4<2A|}IXId@ zUC6&!6AUNM)V*bx!xua?{xok+#kqmGlT0d&uuVvnFUngV!uMRV%PDw|?*>BxBRq-0 zo6syI+DRZPC%O}LK}BWmOXhWJ$!F8S#>G&;BNMSExzVXJM*_;Wt6kes?9<-aPVUc*|#At4P-c4b$Jlm&?k-^hCg*dU%e{(}VBZu4B^QD>-#HLE|4lvNz+VwR z1K|#B^iqBPWk)FQ>{Ul4cUWV=FOBpg$Lj;ec|V1Me*MoulVpw z7F*hr%EAVQZdGtqlPV#h{`TQe<4@!2@#0c(BSUC$wYa;e{T*dwaLg?T zO!=`9RI$FilLLkgX2UOv!pOSEy|dLEkB4pJqP5QBeN%NXOMio9$eI+0IlZKZOl;Dj zV22L&=7x2d&~F&}`C8Y*>r=_Pbezu&Zd1s7r^YXjk09hMmRUzugtrLq@{m8s+Bx z{#-J`DK0vGYK@Pvtxq1d9xm#PJzfbq`M1I^cD#Eq+oh!g+1y;OyYa`oQgdrQD*mK5 zMs{OqbP;Kuhh%$Y-Oq%F1reTa*WwS?>EqR#+lL!r%e06Y2iv%on2uSowtvj%*645T za-;20U)JZ>qcojssJ*C@Sk12ZqbgGfjQ>j6&B54{W%?3hZo6&uU+V~wI>?t{!ZE{IEdK6-FXL!;qrGc(sU`_V36$rnG?W@cm zA*~Y*VX5!%Ig>-M?%rzLBALY*E6r0pkL)i zw)qDvPPcinCLaq^-f=L8)=-Jr7lgXio+ANnw1@s@fL}v{bzG5GTGN7e!dD^>W`w80 zac;7zSpk3%QVzn@razefax;?HnpX2t3uCAacdH>oP9I-RMO=X63#xqJWsR?wQB|UE zc96DUn5d8FUe`HKfQP>-u<3iQ_f!)E()c1ccoQ*6p}<0w{b#vOtHij8eeCZ{GXjJd zfM`wQ$i?V$Q_WTqZ|xugk{f^vtl$)M<<>~1jpRx}4+j1NGQ|Od^w^a*M)GrGkyqd? zQZdR;f}=F26qB-HAs2)n@{CX9SNb^cW@7rtCbLsxK&Y+;Boi#nT(MVN7Ux_i6evPc zS{Lijv!Y%$)86P*;aImCb1!SW83ww2swc}y1O)m)6dxA(8R&ti?ilg1?x6&(Jhjgu z$`ZVzhlvZTD9b(d^ws+Yp)fxKM-aGI?+F{a`UR53V4UokN=IdVkvBEF0f0V0k~ZfT zX-!t4o<1Vy6ZfuIJ&*D0XskBKmP*Y|KL|D{Msh=2@=Wjx=BLbMDVC}3Lh9xcClWsu zDVbp1W4hSHbEJ$!qi>cJb@LPp4Pn2%#Gr=HTrFRrZ&&vo&9oOmuXo+1N5nF<4==^K z;*qQWFx#$&1t-T01=nZgx%@#1-@+~y4Ue|pr4BX`b4T}%($~_5r9Ijcwu56e{{;fh zdqzi{Ebw<7oO5ojS<`9nc=`Fw&ddybeKU7%@D!(OSHi(z*vqM~o#o@f@_t|Sq0Lz5 z7~hq3#vIkUi$WqD#cLK`1u0IKtBDk)_Z`y9cTA1&&4Jv_wXTJt-#eBVNiDH^yAKi~ z@i`y0CWt0|y}eh|wL31BS)Kf%>c$>2n#=nYHc_8x#B4-M6dOxeBjCzcLUc`<~s{vlOU3EwJ#O5OTOW zPk}T^Zf-maL3e|M60e1LEqeb(HCJ5bW^M5Yyr~j6Gr*Yuz@L>B$|<){eYH^OT8lun zZXX>HiXUzA@T27>Sxdj(Y9D3Df#bE&pOKa-hGw86Lhoo^R_oiV$4 z1CEJ7->X$F_i8kw=LklrSSbZ`hQNvi9jU~LtOF;Yjk;~Mtsab)?FiI@najq7fux17 zV0ZPC&f_v39eqP72P-SgfsHs2tv1Y<|G6sZyR+40BdVUs_wx1qTE@tv!rcs5MrJrO zlXgjS5HPu}4IYr^wgF@b6z?X+!|(6S;Eo zBgioaQ1|3zTzDi$-**$T#Kxy57?cpe(kuQz`0**_7Ec%ZnO-{!Svwo-rIJ4R(v(&e z8$P~zm?z37|BHOb{NU3hcjW|pU3%_$d)sO={-cfUdpGvx!(J^qAeqU!Y{HTJk%iMxgny##&O-Z$Ci6!2jT2Z;4= zEpQc0FFM)UoTu4FbtOa%H3r7{BjS&l`t%*$!}NtWH<@YTS_v_bFQe-%%&+)(eP%EE zWnmG2?W*p{1`~!JFBcsT_3IvOZ-p>zA}2$0&DB{-`b-vH=V(`5H*SxSJ9~5_7QZ{r zze(oGONP zr)Nc)!cR+~vz`Aw?0}URm|mf^&5wQ4CdVSB#MM;2&c~&OSBF*;w;HLwpHh-1p-=(+ zpTTg$(P?I1V!dfa!84>2Q^Eevy_aEUQB9Ddtd?l{5%&g2J?cF}Ksg&WK$3UIUB?VA z(mDKTP8R6x?d0I=80j;e9JR3D{A z%`gndOtMc8Lg3iwWf_OE3Xh!_%pMDaQMn+R*`Ul~`RCzJ{|2t~2?P4toz-z**^EQ} z4_ZYNFucN&*0IVCQRS@Mz{dAOnKS8d;ai!1|wJ zGJ_l&kykvvvb=YtBA=R*Md1DbkHQ?&KS~kph=t}<9Nia@0NMUYf$*g0(D!dOQ9QIpWQBH=s`t8FJ>CA zL2MR3SUT2O0Qp$}iIw7C73E=*n%8)J5cP_0V4RWqecx}06CU9mf%Ff541?1teQ9gj zkHb#pg)b{iKtxwTzAjH1$d~wJ`gNNh-TCE9<}|um75_uC-m8)*^|wa4Yx8Yeb@M% z?H<>D_Q)NQ?y>I4(xPs{@p#bpTv)>4X8G*#=4^mq!qFc8!NHC2n8W?U{h6cwq6D3b z|6FxC;h>?>`G?25>zfS}CUT_D4U4&Ub=`Y1x+Hfx?s$*JEXqO|aF4ns>?~5(Tu-*t zb;lP=wt%W^G`LVYp=gp-rXotPM 
z9k)v#S4$=pJuCOEl8_;5XRxlW@)=01Ei^ia^{SG8JZuNTo+^H*a|L@=4$0%pXFtq` z1rmw=(jJn0=__^0PXnFLH#vlh*!`!3zO(T(YzgJ!r{|W6N2Z}mnkeBjYP94CqXhsZ zZF0NKXCy42@SSKER&2xi`h8Wfh^Hy+ID8b+dxOOzp4aIqI&j5ZaUVxBFemd|v^;a` z&H$9ZoQ_e)n@#@)!fhCAKSc>N02;~#Vh`hnalv{1)eKEu0q$g1TLO zTQDVz=$p0@sgS5EH@KL>2EMMkwb!fng*VH}#|KiejE1w}fH*h>To}zS?IxImJ2Wud zw+2nh%RH(COSoqvT1|Oa`$`=Al^QD&acp-MDU(^gy9VM^r}VIL#2!`dy|aoNPk57XmOmmMwNSe;sw z4RAw`>AwV@;Q$0@U!KWMy3`%U%td^Jd(cr;j8Op-4hHOd67h*F5a%J0(yY8>eZ1%N z!s|EbKwd|Re4B-=q`Ncgn7pa5cw1F5nnQ&hi1sYz0M3)oJ~y+lgy4RM3)mVcj>|mi z&i++)L4b=B;#7(2Hnel(NqOfHe***#7lvfucwc-#sauK*an%QW>sJIoeJElvy}@_t zW{)gY=Pb0P`_3x@lOW@Al|_h;Ns0 literal 0 HcmV?d00001 diff --git a/src/diffusers/pipelines/versatile_diffusion/modeling_text_unet.py b/src/diffusers/pipelines/versatile_diffusion/modeling_text_unet.py index caa287479818..c89080a59e9d 100644 --- a/src/diffusers/pipelines/versatile_diffusion/modeling_text_unet.py +++ b/src/diffusers/pipelines/versatile_diffusion/modeling_text_unet.py @@ -28,6 +28,7 @@ def get_down_block( resnet_groups=None, cross_attention_dim=None, downsample_padding=None, + dual_cross_attention=None, ): down_block_type = down_block_type[7:] if down_block_type.startswith("UNetRes") else down_block_type if down_block_type == "DownBlockFlat": @@ -74,6 +75,7 @@ def get_up_block( attn_num_head_channels, resnet_groups=None, cross_attention_dim=None, + dual_cross_attention=None, ): up_block_type = up_block_type[7:] if up_block_type.startswith("UNetRes") else up_block_type if up_block_type == "UpBlockFlat": From 8a6f0c9976bed8d2bd1a3a3682d67129359a838d Mon Sep 17 00:00:00 2001 From: Patrick von Platen Date: Wed, 23 Nov 2022 17:56:41 +0000 Subject: [PATCH 49/49] finish --- car_variation.png | Bin 494266 -> 0 bytes 1 file changed, 0 insertions(+), 0 deletions(-) delete mode 100644 car_variation.png diff --git a/car_variation.png b/car_variation.png deleted file mode 100644 index 6d791e7bf617d8f3d4f26891492723a891dd771c..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 494266 zcmV)ZK&!urP)G*<~?K~VG4X#|>Y~>jNp^ADd zD?3-Oo)3oSCxiKHT24nv8n;_XyPY2HA1D%QXF7_!1tf^Hij%b6T|qE!zVn0GboTN4 zpZvpr`n#QWKE4<{d-D1I^GEa9Tp3dp^F))a>MYAhYo)cf4pFsRt#+rCCdutr@3+#G z{^kwTdT@Bq>E^9Y-tP4WCxfKBR+Y2)c$RhAX=a?SfAXh))Za|%V&Lm)dVI9Dy}5Pm z)vIs)FzIb0__?2(b7i zEWSec&wupIf8-ngD4s30x$Fqs|XW#kZt(99Qgr{5&m7f)7M@L^CwYzln z?Q0iLFIH~fz&zRe+0*`N`{1k7&FibZxB4&MKdA@h&YkvrR?f!$_A9sk?Z5tfJ)@ml zTkF?SqUb;U(Idv>*|9%7E8l!=<=%C3=dFI{om*ncc@ynkb)ddjP?|*+|=l+hA_0gC6S>Aba z3ZH&C>-XgD{rJ5fT{}A(9Xvc++uS}onmm6#m=}=u)>!1%e{}a>{_DSg@Z~to^R4X` z2Yj?QY~^O9pPn6$*H+RO2jgM_t@RiLuI;SHk$LoRe>iZxmDu@UYfjs(*}PN4wzk%QD2bA1PamzWu1u!$q?Mna&pnHvN;`RPwV$=Rt^UgS-tP6SHIuXj zqyG9zr@#K$mmlB1apmItXlJYU%l9AZ7muHR@HammPK)_@7`}0~-oLW3+W+8h#vG;ouyJLLopY5}MhjQs?eyXTYEHF@JN-IR+MqEC#dO;}Th9So`-ca{WHFhz*Agd( zFOIJ6?DYF|^Wx!P5Dvlu3Y*sm0r{3C>bh96BQ?lg9MM(3FNfC(mS(K7j{17G?G|HNgUfK7_GSW&T|ySkW{y zOg_}s<=r+2uniKNSRYs-Wwy3fZ5^t)D{biZsMlSc56f6beHk(pG^Dx@!9hiJe|&mXQxl#bZ@9Nh3B6=OjdI6GQW7bp6Bg6y>@eZ z_vuA3o6RN@mFiZj7e$da(p~MI9v>d=74>>gQ5=}_td(S~>1gaEl+`?rBT;Fh+IN!F zy1+gN29xTIR$RHsryu^u!11@f`(A{6F*xQ>9*wH`e4a!}n&q{tgR?@|S;Gsx8UpHW`-+r~b(*EM( zpUr0Dyp`sycAlBh-Y3VOe-NVV&0qU3Rn)5{L+^u8Sj`IWYHQDTUi(846hH)CgsrAJ z{zf6Zh>(j9;W8R8s`JZI{bd-%CYUe7=5j+#-jHPuc^TSq5m=X*V_6%bAON!nHW5Mq zfSXoe5m}e-68=W?Z-RbPk}tL?a2d~=f|0o?#b54|xhXZjnQoSC$>L<1h`%^Iz{@kb z%sQ8Q`_IaMSzeVUZz2l|HrEqnk^h7c7%#(72u(y3=Kt&-0EjRF2muHSb5rCZDk2~d z4uO!BiION&sDelc-j^6*mDgG;Jpu(MUp6vSipMEUR({fsnU>5*MN8m~?0?v6A$G)!0(TrCu0g+Jvh(P!<6^b-njtDk) zg&6=zfSYfHnFRolkcgUxfq@|~5CX8UNZ=-Ygs>FkiM+fQW)h 
zkpOVuripAS9~2f5LSSLIP%e>RC)YV&qK?ezKR{KmWM z&%c_oirU@Y;BajEX?Dfffa8g%R&umgT)!T@^(!|SSfaL<$y-0TvVE<7b{sx=JRgka zt9O!>>pL5-toCm9$Io|HcRJ&fa=160lD_pyckM=hd@+k7h@*6JI2#_8{cBwbYGq?( z@97v*_2&1tk6&CIUyQfYDAM_vpUuQoq46($&y()?RGmt;pvMPM(cLF)or&#xSi#twN+UJCq-39 z+Ds->B<;OCdKeYu%-Mo5496oRMcye#MNvTrN(n?-M5?ONTBlJgf^|_wMyGKS07k>v zWI9C%>ucSsSGKoz*2|(AjfQ*AAL}eXzc^%Z*RS0{jZtg>T)P#wJFQM@Z9XgN{r!Vy z`(A(rvi5oatg8?_+R{Vdom+QdS?3DSqK;iw)XSi@thbwpCi2aIB(T`PH~v``*vke2 zu?d1NOZt}^Ty6uF0LlXKc-co>W(#}?*$6Zr+2!v6MWFd(S&T1^^zsGGK`zh(p_ebd zJe_4qT7nKDf&jqZz`|tFrYzE*EKU!2kty)<`YeE>WeQpxz+#t8{fmMiEF6Rx1q9{t z5-iasXl`VXRV4_P#V|5C5!(R2X;A#!NKI4rO< zUOrTa2*6F_K(y#kS(Z112x)<1A__0=00ID^NB{x=2#y1L&%xQ+aWNS^?RPm|>8e&s zMdRY@!_6Hs@2$kEt^Lm?^-sR6t8(S~%DkxVUA;OwcQ2lde*QQ0=5-jKmWhF@uf!Lp zy!)((R?ZV8ZgR*~=xirfUu$FAe)!38QGoUNgHMll?sh91t>1WUviqdID8Kvrn}-kQ z{X4z>Ygg%ThIYRH*}>?3CWfo@eu7eLLOU?oS5ATkqZ-jix{T zizgw-?w~M{E~|P%w7C|oUFi%@W_9UeqlV}6+01L5=qU08HdLZOEYGGk&J>bjp*W7q zqBKe?t=Wl6b-&d*8PCMa#rd?zBL#5x*7f1}xDIxEYc+&0noMo&YcCh47jY{QL2I3> zMQ5g*RVIzA%6jjMs?tc#Nvt)Z?yj`0xAR#!oXwLYzOubh&F5e29S4SXH$Oi=w{>y2 ze;_Ec;Ygc!KAxTri%zEX@#(pu98izWM&se%Ge=pRM0qqi8??J|-pM@&7Ez2KG8_zw zd66Ws3wBz}BAc*;lk;N;3}OSzWIXh~Ug`Cs{_6B>R)@+&Q4+^#l0-&doSnDYD1az-3f`zV7En#(<0U2o@XZMJ5{Y^F_cE$2ff&4uaNmgLunggg&*w|nX7Qfp zt7V0|{1%rb^z!Wi8#HFIOTlG>vW(mfw!_OReHrDUIf{lV;BO){mo+*P!2$(Z07T7x zmIdna1TJ@u*lZ4onhh^6NW(KMird9zalth#nuTv%j%Au?_JbmUgH?pw)Um=K+|S{qv6 zRbsReMpxOu92tWV_`o69z%DSyNlcm+Tm)k90Tg*_mCi^Jl7tYfXVwS+Wo6?eR*FCv z34-?kbcvfaxetk&r-T_r1W3uE!4st@_#na}ute=agqxFH+)W7}BF(uscLeG3`D9^4 z03bjR6e6{FXq#q^5CD`y7H+Zwa{v?u7J$H(LAEOe^9en2xueP@aowfXp@7((L|Ihbt|LV0H zZ}h+T`T3{6IDO;o?u~n^{ms@Fzc_@5e)-99<>9w}_uAgUSuuj5DxKAnsh0y8W^qn8Qa<(^a zt#sG#Kyr4vaj)~q&!4>eyKk(u=+PI4&z?>C>l^Q0UCX+vEcHhpKYH=y_`Ub~5UE?Q zY=8E_(|R^JIXcO-D_M^x4ztJ*)m6E<*~`0X|IiE3Xjn|9&P0(QPK)_;?g*ikM8Xaf zhURHSn$OF+u63%bqS6|Bt!!M(qeuaZ5VzZDx6{dT?Kmz6MVv*0!C-BzYwIe_BI`T? 
z7PWKMHlK43;;LF$Sfnbe$QWjsmE7re=5^^k2jrMcaPngBx%F(Fpwg{88;?hx-Edfq z&&yVud|ezDRjbpEjnZsglE<^zL|7(g!#K@mqD%~=8ANPlkswmaId?vsk1s~uPA3Ez zj7C|WpP!v3Sys+wQ5<0ibqHCK1OX73mDOxoSX->GZz$BkI}qr1yOY_plcy*SxT(E- zuh$+9Yv)6|ou+YQlyZ)36-b*90uX9#5XG?%wJ(cGCnk#2yey2$zxwK7Ih)kh#tCZD ztyZ4p?ZI$(dh%pvYopU?cROwGt2m2}4vw9-{Zw_bXk{xo+B<&oVn2^N@BJv>xO1OF z%-RTO5o+N42K>M!81oHeUcSMKf}r3sA})T{FmHn22#90 zUp4^<*wlBtz=;>zY3jaZ*$uoPk1z44MRb07Y|FDiY|8CLt;k@##V#|DfY2oqjNka}Wt;E~LgcbVU?Ie&{zL%eK(dH%B8%<48fW0Nu7+6pUDKd(I*$W7J?-Z%P1cK~CQ;;(R zAPLM8SV(ga;HD%uKGYh?18SuOfxQP1Leg5*bqFEoIP%W>z)I=htTxFdH-v~tD3|%0 z2;q`x=Y|?z+*gIdjDU*--82Er-a3&$M9L@-A)|tINU9-LLSRH(@K^*O$jl404+($~ z7EOo%0}3!NTR-IBkjU4y3p}spM5fib$h$r4B4D;V2~W~qd$w00vx$9GOz4oK zDBHMA-y$D*e`PK8F&^$8zx&%a>oYn#nj_)WE9Ukqn}qnYKYJEuwB5#6-{{XSN{#-% z`QQEekN?$QoLjDueg2F2#_HtiM!O#K`Sa6TZ(TPLDFm@W z9Nf6KeX>9Q=w}B>L|^~%{OoA3aeM3Dd-onbp3IBMwO2bU8yQuFiPGcUi@-B|C$Gmm zncDSrbNzR=zx}=2hx?~reDP>=YooMY#I*X!HElj;Hw%R5R(ma5;ls}+vpHw2G*zOz z$~&lo<$$diT3P070HZtufS4$XB11$hpp#fjbmK~YGAqJt8l@2%s7hz+&~EikPLIyd zM_DU=@_4V?&CUm-Buk2-?({l?!PM5Zvoi71P{!?wes0)mMK+MR+F2KE4{hhot%%#DVQWNk=fo{ zn~bK@*+sh>&!+Pfy-~U->RC~qo(&?3vm}rKZ+U1ZMpHR0^DH-!CO~B?Lh5&VNtDD{ z%ptT}X%@$69L?ubTRTJ`@ll*96K!s+9-o|4j=6!Z@>W03XGPiWCG~6$#MV~MvL-|& zJ{a~v8Df@lUbb5q64Z4SMm~?D*|-46Ec?i`pFcfZ*;sk@{9t%7xPA9F`FdWv$Itdh z=ND`1y*!O)i&?|ZoZ+jZBmJ#7C;0921Mkhb6$Mp@MYoj z5{7Ad?Iz4E>aK6J4h^oojKs@Uzxf82_44u)w}^Q5N@y=6j?qyz#_8DV~D`P za|j{=tneimCjh`9fG{as2!%wLnFvTNC=mfhT0SsH&@>Ms+>pyn-@zOLH(8%WkQ(Ag zLhup-0|-KrX4A=VHlA$0bziryqe|2Ev-<3rilujL6|B5?a5y-X*WPQT8?Cb^^P>kx zv7zlZuN;1{XKghb^Zg(0Z2s=I_Wu0+Vo*GNQW)f~KHL9|-@l8$Yc;!lW9944 zVZTow|KyYH?aq~ZR}P&4G2(NjMlHL zISJRVY}C&0zc|}{F|5n#y>Io0v+~o=o)1QR;ERpb_3`s!Wj)zh@9(}ia;#jX3JFG& ztGqJUicIZ7WV8h5y{|oIsRmEZ*X!$T7aSHyM!UKk&1WpNh;=oR;!Zo>+Uy=2?U86g z9ho?I7*A*3g$8JyPYM!1p-!F+#zn8!3Lzj#WgUvJZTd{XJ&T4y|%>L^x3B#g|2!FwMiNn5f8(p5&S zZ)|RCZk`-mgn%k0R_JP9R@K(pdJ-olQr@|QbSvv>MM)gTX*`{qR-Px3niW-5)dJE^ z(^7aopZ0ofB<%KAit41Qm?#0zBDB_9EqqnwWy^y}$ll(*t6jka#=C=FSXmDM~pSy5IS*RDQ$ z_^fhxel~u$K2E#0fzvCuUZ*4$)WA&&pe7b9BDTO~O?xRy0U&t^X*YVHCQvs>#Ui## z6U`gY0x$c2ghsme5?GSu@8sowvJe(EwSDvPU#L8J`LSLiKCsXT0?1{uS=8mrZev+N zqKFq%O+a3v{7ZHiupt2OGL(Y|A_;>4Hv-OO_wusDzpQO9lfz|#Sprd)*$tOS#W$en zz`{!z&_aw7L@}aT=@_>xBad zKvQ3Xa0n=12n96|2n&!#h2X?{ugNe`;K0r!Di?gDw6iWU5eH@#QUHK~Ju{P1&IN6h z^8!FxlZlc%Ybk9);3gP1l|C>DFt8wzfCzJdfC~Yd0579I05G5uQVK+x(-vH)bec!T zhoBm0pm{<812c0VMXJd+O_t>4(;nxL10bWKPCKpYiH_9xyow@3B8o@}l0xT$iD*33WQLE#d59$P%vatUh~oApoXrP7ltuJ4>RI)6=S$S9LWWqteFLuAbK>iQ@!2t@gN8 zPZ+wP2p^{kjxm0Y=g<^G%R zMk+TNzxw18N*=6i?0o*&U#xC!x#`8`&TC2DZLe;|CNgOUAVL)ZY6&0THRdEHL%UK)6uwEDB#-^27*Btx!s zdC?X$;h!4dXptkR$z{AG2^xrrnzmqB&Hv{Z5DFj?a{#0T`Vt!48JF>QnIsye3S1_| zM)I=IDPBsCV0l3rY563y1^;5`={eRU{x1FGMncjT&Z=AwnZ$L_}`RdI4-P zFwDjSJDrU$E`kpTs;RxOF`=zJ3W(vxW1DvH{OjF(bu-;udHB=EZ@zQwt4|KSr>{PqeD`V$@0etKWZ3tFqnGpZ?WTK=!9|Qh|(x= z)|Ex!fUD94v1L(ei~=~-S4om|I~|3PbmD$DiwvEgb^G0pF_CpPN#iV!TkWnEvcc`` z9Y#r%rkT+Rh$jWJ>747>x;oFZsvvymcedlS+wO0I=s0irz$k!GETCioC?E(yGdu?P4H&4=VtivxfN+6b zEdCW(R>V#9zXW>(8?SxR|C8X{3Hk7^OzWuXFUrivN_=VkO?Vpc56W&&X$Zf_8O z5CTFHWDx;jL~3^3oRWJDoiXkzMALIS-J4P;>E7BYjvpp{Eq40Rzxi@F zihujR|FwGZzEXO9=Q`M4+~pts{&#ktK7aJ_*NE}av$M%KeEI3*pZ)K@Yr1PM-v4_$ zo~~_ElIFe56spqv2hqqA~42@=uDX8-W8{QUFX>8wg)2`pE)l6E&o zv}Z>L)4}wEpB@~Y*49BgSF63X{_2gj_uZTfK6-dKEW+0Hjq$MPbh5greGPFEz4z_cUK||l?VcT+&sR6P z?qpchrMHl^<0R9)UbhbR;(Wffy^&_7oEKRy-`?5z+0Q>whHl>2h)JeJDOS4Oq_n|W zM<7b6R;FtooDac!6Y09LBA7%G0JzF21Wl@PHi{z?$8}YmpU-L^3@TKJ1e%mlT9K-1 zm*tU(l+pTPG?P%~Y2sbbMs%!or{5Wz&y-Qq(R_}UIf&=1n<5eT&`P@!#CcnkWqEW& 
zpz?OUvANpmCgRydF*w%B1PV%P7VxgFJbULl?aT${+RlP=&Q7QEJWnD+g4D_)=K-|t z^;d7*zWe%{-#tCpofdU9FSU-mt7r2%%d*;5wyu>@{oZQj!@Q_AH`bMcqN)@knW&si z1t>O=*1Fs67>z|`vv#(+x@PP8VE@^{(TlKyV2w?|L9MD`=9=^ot^7PhiBjZ{;#*!H)8S+KK;~sr*({)J(u&zK6t2$ zwvICpfI^$NOHn2Qs7-_TH{j1Vn1Ds>2VAJ{fnV0%i!#3%d0kFYEu=<^Y_Mo4WC?aR z@v7OL0KX(!mj(8s+h3-KB`&heH<#gaksFq80=P_FBB6nsn!Lq}SzG~vru`9a4&)NH zY6^Q=a>9#4Z)T1cqp2^IsxR5`CiFC*cf)ot2a&J-g?hmOeT4^J)X`E_MQ(<>dbXFwq@hibyVx4&wh5iJC3@o z`JnjlCk3WyJE!T$e*Do%Q96?px23#zx?4@FwR=}?zjf==pPfGb#nZF%B57scf8*7E z`G5a^Z>&W(UR~*Ltw-RTCV?I7}!~KdxJ$o`ljEb@-O78bspM3DF|5V?8ZRM+< zy=e7X@7!KLcvb+UX{$AQarBG-@R9d0D(&;bahmJh@%-&?UDu?i!`c7m|Mug+Ve5M9 z{@a^t8yhd49?c8?bbqq8=0v~+7Og-+r{8*Xco<=1(yW{psfpKCTJvJw>*n)u$x3D3 zef{v6iX)9==MEW*xnHeSZ>7`j<*O_0(Zv{*%F}o{ou{ot7-}0xAqpXo2oNd(aMlM8 zC`v_20izHgm`E$ZIEk(0;2|dMLm*NnGQjLYsF{_f$~w8rFAi@ocFzci$Db*q9jVw9-{w*RHa)i*u-) zJv%)ck0zaNH%+oUiD$E!t%K5_m4G-wa?TCMVx2T^92qiIO~Pwer+EKb@DP4WLc3tSDVzGD;_Do~IdV4jg!1dB*?&k%z-E5R4}i z1~@xE6A$-axjCCoifNhjJ4Y`LfA%MT`>h}T9&`LFKl;tT{*yo6+kJ7dw`&sfcYph{ zyq*7}|Nj3_93QTBH+Js7fv2LH0z^M>=8mEMp;N* zmZ5ivxWfVnYNB22gO)0sxosVX4AtdJw#9 z1e$}R25Dls6pI4U%XG6;crK5pNnHy8$)bc`qWHYX22HA4Fw!DINX?|&C08t$1$vWO zUP45Rv>?#LI#vJ>8giq7i9{GsU0#W%8Inc%z?aAv5iG_Y5P=y8fIR`GY0T|(cXF z4F@(^nzyn%Z?{+0y4%}2GG4$K#nviRydZ6uniebsSjajT5KWWpSh$JlycFcZ65d)g zhAarcguo&KEYfsT0z!m~w2Gt*ApkInF!KUF5*8vA&;x-0k{|<60ER|-3nFZck>G>( z7pJG!Z{13hWIlU#v46IbAil8DGeJ^1O#*|_?>|N6C)quIggWHOv@ z_D!6-Tfh1GY1Ul(zo#U;mrgxccGmzH;-`n^j%(R#pM`fBHAi z(_U+JCHeU8K56I4ul&v{)qGmpDr@Qac$v3a``n69xU}QKMR<;I{J^J`*5Vdu415IXHvV3p;$D_k8yyWRFI ztG#}w)jl{nu~pC!&5JtAGa+4-bwc{a&6`JuX9ov|$n1;K+C;}m#8}oY&vK=yEIf!3 zA!c3(NC@YJj}$6p@;s|t?HtEx9GRG%l(rlfg^40S4uZi;)-nKMy${TRSv=byMw90d zC2SC>O9p+6afWN%K91*QK{*q zD2pPEwTQcYZM)sco!5E3dgt|f+t)TfMOnXc=c`|Sgi4FTCm%m}>xZwt`-AUZy?=Fd zu{#`vfAo)k&!nxRuRi>n|M2JU{>poGRg+3J)={F9I9^#_>u+q2M`L56wA~dFKo2?= zKoqg3L8IBDokKCwv7V}mMd657wY10N^;g_{`^T`C3Z;T;dhEc)B z7Jvol63bt983IC$&ac6WxG9@);UXgo2&O6W7nS)-%u1FsqKoK{0Nh|ti|*!f7z`Jo zotIEn1KA@0HvFnUgBmds@uKZ$P?4}uyetp6d37W0Z_b3d(FI+Wxk89QAReCl&~nx3MjCshc|c;M!@5bX|r_DMC#1B+>DF zUX^88l%=phEJRWiWutqGBZc5wSv()l%W+-X+69ghQ@eV8HchAk{`^1P*mx4P;;rq~gS~^zt$tNjKDZ=_vN)R- zr}Y>A%fH>&+0cpJKRAz9dq6hL_36{S7cZXVEAg{OPdBe$yL#jL_51HW`}DK+ zdO8`OOg_GN@3(&Q;_&&|@o~SMv@&H>h_wNYc7DtOqP(TE4b&;3K~T)XP6(R0s^(@j zL0)h%T>MhCvUE^rWE$L5$a1L*!li)!l23*Ps$Z7>FBPe93E?kO#W%|9OZbSF738Ai zZaNUWR3!;72@IsAqzD&1gfvL;vX`Kyw^%TU3({ku-hm|)BM3-Hi;9(58lqQNXlY`= zj7nV|HWCOd$z2vw3zM3K@Dmu8lN!9Fk(+CV4fVOOFknOkMDHCDDrHQXS<9dlC}n*B zM&JM-1Q>jy_M*USNE=etSs`koDiD!Dt;tze)!qjd(h*@CXGz;6X%c5yl*CaQCs`cD zu{KeX=V{i8k|c^#t+ZueWvsOTm?Wm@zOjMJ5P$<2B?#28>_Cf44J6BYACVg~pyqA~ zFJV9d1O`RK!W@7F8}67cGdebdcnAnc61;#YrCE4E-YqtWQR+S{8^+-HQ*xVnDxDyZ`>KYwZr zyz||yz4PJOQNhQu`*3`I688T1Mb?S8uB@J)+Qa9w{pYhc-+E=G+jrIM_x@Ku_&5LK zUzd~7&aI987sq>hhp&C>ov*+6_}Bi6e|EHcuHtlONB{f3JbLxjZr)!VjjCzs_YRK- z<5|{Ux%c{;MKSsG(@*>T_LIFM4PxSG|8S6X>UP$B{Pgk4O8@Td>sPK^o7DAZ4}bFd zt*e9c<9SiY1$d{ryW*0oZNlo*2dZjs;*R1r*XBvn&vC> z>EQakjs3@8AKbii{r>&w**^I4@kd_;b{N9x(}z1Z?mzqLqa;u6zIk)Fd;HZe|N8Gg z_+^r`-+S-Pt2eHn9zP^yqKF1(gSxKQwl~w>wn=*IgVG&{`-*bV5eK&@ib*y8im-51 zXcRaiOR8WI*q5d2Qt-K?0fd*r$tH-0;zi}eV*(CvJ%qE+Rvd9?= zPl?Onf`!)&dq9?jt0fR1=_WDIB}lcLccUinGZLH?n#LB+@F5OqwYZ zd7%Kz%n~?40tD8gjW$LH&nS(efJqY~g}}~BKqjL-D2#O8Nzx>3_j+-hMroGiS(3GM zl8{kJmgqE6NG3})86+Z7!37qGSVZYKMrWOKRwI#8>^v$$2BQrkumD3d2L~J$EglCZ zBEIBRpt+cr+{!}Ni723eeGu`k7L>-@RyUpyq%;Br762~@w6Hit4qg}mByeD6MGPRo z3-YBAV=*uhl2Spuj?C7ztG%^6k6KCW^R+cj`T1e- z;?dxIe~{&Byf<8LUstQEShJ2PY9;sI&YwS<@W5W($yZikwEuYhd#{6s(eb#w)w%NO zDkVS(y!GCV!Tx!>o8Elqc4-IOw^mIeXL}>j)V-ST{?yM$)#_HioEE$L!w^)jZPGkG 
zIk`9=&u?AZ)Nil$T4^YXwe>Vn98A>S>TYU&@bJ__e0cnHXLU7>vRhZL%%)|(+k>@q zbmsr7fAWw1Z(C(U}7Y}XaJN<4P<-?QXtJgM=$A@2j!OXplozD7AkGV?L zlxYEK6as+d=UW6sI3N+1G6+3Jkc`Y zkRnA&6Dg$?01;ASLeuC~nlUs0Pe=`fgv~L57io;oP>@N8C7>+fvPG}3C|aS>iUOcg zi)6GckV$D_Mj$|p(>zL3Su{Z6g*o^njZl%O2D$?Yf?At6BBKQ`0wAN4Rx379oVW6J zC&@Z#JCEW_MJ7s8QiMblMM@D8DXl|bA3Xbj-m?Iqq9`&+%-&n)=fymWGovF$bhTF+ zg%B8m_XwzUgblC*0t^iofQ?o5Qk4r$^&kKM9K4`-Yn!3u=A?-bRnxvmbMbu$&{*ve zh(HK#(PIfq5JVwVA}lP;j9Q~6LO>HCgH+Z|r;|93Z{54OfA|z5tg1N44g`-7saJ8P?l`;*b>JSH_bEnofi*4Et>4_-*wx-xnE=}#Wz8>?S_a;`K)iN1CB zX3~n!-+z4cIvB5CSxHyAYir$) zADkvyz3_vzUi<6A0~4WlFdU9gcK@WmX}?teo}Hc7CN`+pI{_3g!H_XZqk=R| zE9=4f(t1tg*#eOZ!8sA3IF58+1u?U1ZKl@x+A$JS6Kz7!aT1u>vx!v}Ya*=cTKnK_ zuq#PR=V_Lv*4Ey+ez&8QYC3cPL1R3J+4-z4T@-6-9<>N{Y+PVLoX=~mO_ryfJT0ma zLI9CsR`omW06cf4^L{kmqpNv29Zj!nZQZDjK7-x?SJ~|oA17D>$$U4 z%eCj{56g<Bz38Rr9M5dtx8t8wC=fHA&K$a1IF$TQw$P{Um00@mxV+ran zC78|okekHNl-I}-5D)=*=@^4V4Q90Xp#UmE08y7ne>2mHq#G$`VD=b7UX1b zB8}8hST)hFnNtN>=CTC~y_n2ipjwC`OM5fmW_FlCK#&DZoV5Gh^de8vxF~9N!5EE7 zS?3k%G)5F!Cm2u;hTw2o(!A{bHZwZ_QA3p6lr)9p7zZ{R=;`h+CRil~_;M^G<`rAz)F zQ8*v~dnQ5?B}jxs-Umb^tvIk+5G{h-43Dt{5#$AC*6fRyYy%*nVhkL_OVCDxhhkn~ zVklO5C&@cVx*G33Lrhh>Th3>#m2T@wK6vr4z0pMulhe{f`pVsI<@vAvcz5T<;m*D8 zz2AGs=tN1hb5iaePTzTNb#=8f9Lxvj)BE?f_8)u}udQ6^xRbr(ey9ED#qn$3+gaQ0 zL4~K!jz{b6)g%bAi&9|Bm_u}IXgdF>33%HqN=LA*K^j#iTU7@&$Wtf-n#Ml!CsU{ zgUL)IVKd1=U>!Rfge6UrC^D1DysUkk)QYFBvrL@WmP*H2+28* zO0~PKc~L9WEI!i42_isM+aOX0ONx}zz@nLbaDGxuJ9(=t=CgTaZKX8;w?Ih0McS

)W`S#}8#n}**k|2S- ziq!hn=43bl7EvlrQjcadpQ$7g90)n!%LnnXEg2ZMuqZ@jj?k$v+1Cx*hoi@kt6o6H_R_-dus z>h|A`lh_s0o40rJl}+~6R}N89gJ`PrXU~E>Yn?qz;=tPzS(;|aXLnHNMuSOC$4 zzzo2M9J~|uBFx@0~mfUJ%E6-t} zH)0Od1Wlnv=LXQU1;nC_23LWvqR0rMNI+;tW^tkHL5&I<0}Mb0ApvR%e89_)xWw56 zs7Vf_D2nntZ+F{CmT3gGU{IMPHL=b*?N+y^lT=5Ei4%&VBu!A0AgV}_QXD`)GeGTv zMl_Lj970`_i4g!mDMIGp>kw>cU`j-VgaR4_NUIRUD5E1$#vl-xSgVHV1(pC18uKD( z{2MrcB0yjiYJfm~dDjpLAuObxgiSXo92ylUpaMkSI6spL3?Sk|Xc~)Vatu&01Q7yh zF1>0jAC~!4kPsOgdY`@bwI{7S^K>vbQLEK!D>rxZnoCltNs_COBzE_~^>^R)l^^X- zfA*8Zn>R6P_lAe_?HkGUH`ikfv*UrmINw-(<9E9LZ=SsKt-CLtowqvh^uhV*(Qt4w zTfNbK{k6?|Z-+-;?hi(iznzgo)9n`Je!H(pY`52r`w?iBuC_tK>b3shi}PY$KY4II zn}(ngTSF^}-gxuY_1o(o{_Xpnt?u{#@vj^|KM^!w()G>k!B@v=nx-kW*S2-829xvi zi;LR9>FLwNqEw|x=LcAw9u)sqLi`-h(*#d(s= zN^ZB}>2wePJKe0??F>hg$zYP_skaUT*LD5*N1u+z^Qv~jyxQ+-B`Yhf!Ejbgr;g?E zgOhGQCQXVo5*ibwQ6%Cy_(W?FR7hy35p=flY%;5}mWkphO9T`S#$yQ#$jm&Ol~JrC zt;*UbQNkfO6a?|!3jz=lXrr`Ul2}MsmBGXqnBz3oQ4$x^+A|lkVmg_Wb%=~MK_^iH zNQ3EUI-a#U`Oa2XTi@%0ie+$iA(rDfPE9ePiv~*5*Ta zQPwl>tS_yK)zR@uJ8xG-xwCU~=lZRK-6zGf2W~PuA55Y&W`eWRvtB1_rI`SQz=SlL zPDp9?-YA_$iLER!w9|Ibp*D5!O^d=xt9HMe#JaY{Y|ce(hojE%!G2x2Znsrml!M7d zIoBrA+Z&zfymfGJ^8C@GR@RyR{KL*_w!XD?bas~H+4)J-7O{`>SXIj%Q-v8;tM-Ssju?=s2`&*l@y*ocT`1;}kSWb=)*S6af;o08) z+2ONDncIAtcW;y>^)_#2-F4C_l6DmX5>aF03lbUws`!NnADh)p7Bnh0*!|@o z*QLIGF|fN(mo72^Ga?F#1a4+S7T&`g1O=KAqTs}PVef6p!Fg8&Z^iq@p-Dmz4k+9R zTo)DvM8bloNi&N8D!@is+31{LSrp?^?Ih4x2Z2z7{BV=y1Ofb20tp@v0N+C418307hP*{`dpsD*s7V5>NS*pN7C{Kh)1f-HITiMzM=hCE|Caow< z^L9Jww1E_8YFG{yFh+xFIt#YefiP-RsMEZqq8LGtv@kcSqb9*2Dnw@Ru%JSarAc57 z=1xGQlu|}pSD7SoOu~VM#fL`ez32m>nFRnvN|+e{0wV$lQNs`m$dZaj(j-6_Jc@V^ zrdbAoL;?$j5JZH%YZRvn2?>=}AdJ#X?Ew)sYBD0Vc(iEYzlsD($l5vx=iOWsot~d} zR(qqtdDe+qo3{ps4^|8zrSqrz_M%+BwIjv(^T*GA_z%|NBuldV=yclawX!U2{j#%`_BVQjSuvUxj4MP2G`^d+)SAB}r!-CnzOw1433 zd~K_vG*PN%6W3eKKm6r~{k6QRO=0QUohzXAkN@=JPTSmhb?w&Gywy^VzCIpLPJiXS zYyZ>#^w(cME`R$suKx0a&mo}mbn~_QpZ)x^$v9Z6wl}YgMxFrIH`YG-{HX|StgVfT zu{)mwVDKm&qa-4v)FiDuKRurGSG$Q(M8FWv&qqm?4=%=?UZ>s5f&^q@CgD&!m!?r& zvoVH)WLeBC>=}uoNR4L&k|1CjMb^4Bisof8pH(^{6B$9U*4x@Er3l4^;Js8;U0E9( zr&$syH7UwSldBuICyHW2S|yRH3#-#SQ=+Hkytb~kzACEj#(I{e0&sYA6c7}mGLiM} z^!$7_nJPo0*(^yk0~A#iytGD`;RuV1}k((K_EAEB_Os0v6#z{>mJgGWJh7{r@MtgS5+NTL<7gh?^cS_7en zP)_TB1%%DA|ln)>L`Guq&ZowjW)qLrImLfu$K^wQX=B5 zYm%8FL`A~D3J{e@P_)nmBhi8YfW=yvxV!+u0!Ug%07@8$j4(G>Qjuz=1sheVEEauP zplJ+^v2^3DO0w{Q5Ek~$x{(5?OkK=t&n7izM~`mYGCQ5t%H7x3dK>=5`{Yg`gzkH9 zyQj})=k?v!Zn}`L;Dh&{ogdGRU&z_{`IYVdJ2y-eTaBT=q3^%d)lVm{efQ4BW_pX|3g+2CLjwC=5U zo;==v^r%i-uyb?mr$2tu>X`5S#(TlxXP-T|*c)%&*m?Zd*4?jM3M*iY2KV4aA|JHBad;0aBIZ!^(+2QoJbo=g|TYvrI2a}oI z-056QXIYXRo{ovMR(LTO)S+&*qS0vHZs&kl&Z_OLjaHO@_4H}K-`?5W2tJ$-&wy0k zZjEPkuiKs1w$sjlfGwBK&x^9%)ty!{nobA>8PR(M&_H>!qGlFjbeg0L0SShqu@Dha zthHk&985&)K@$N$WVDV51ewJoN+DWno9P{9Zv%^?DAoYz85mek#&d<(>bA62q%k%z zDyUuUo#h|`1Y|%kjoYofdww!(sifQKwX*p5@ML{st<}!sBsFm~omEj}PEO8G&xglH zM~ng43)sk*)s*6Ihv2~7s+j%-G z=EA5b8VtwG{_J$DWBp?Pu_9M_b?f!JpM3b~+1bT-Hp;qbmPd~sKWKOR4#U-(*Kgdq zI-Sl3=Yz7SPEStqw%NJ9e*4yKw&-eU=lS4h|M^#6TBf|!P0}`(EZj=s&D*!k+1cUA z(KCU)c6TN1)XHc&w|Ae1UL}sWke@gg4c~QrqN?CHV~~qDu<=$>1mY2d z*(=f_qKKFUAOv6#X$IDo7OV?U15lb=`f}kJ0tyI<5Srm(3?ige-d|7i7OJ=zmOus{ zTnJtOkQ6Ee0T%16^K}(M(8hq)Mk!ZU^KogdC#BL>&Qkh7d8ccXQA#(~MMOzt5=FYv zEQJs_1Oh;%B?J>0BC3N2BxVnYQ4(t;;SgL1-XUTmvC>i0a5Au10%t z{EO??S21-vx38^itv~;KznoTgzjZh2MrV)CqgK4TdqEMs^3K}mv^+f?Z)~^T`u^Sh z!_$Xf9lZOUH+rp3)X!i1@$lipsS|@y92?_HIXNA)bNyfbi{Bca?Cxx*EFOV>`SJeq zCoguc?R@#!!RHUE+c#rdxipF2|JlR$zIDZcDjgSk*;h#vzAPRfU{Xmq^sH` z2CLeE2r0_rq_WoAkmfM}&nGh^&48||wNaIKENrwUT6A-DWg9bEr7i4@! 
zL#MS;+iGonV`F`zo##qH6eW2ppBJ;wzxcG~FrQ3}mOO98kr@oedAFksRikmFqsmsp zgXv`SR~zdas0onH{Sc!4|ews4vu$i@Q3>+>ziwPyGO%OML?*Owc-4D zSk2sUIB)ejyHEGd&rWXNzTWHf_YRL{LmRN#yq;}cx%Sa79=v#Z{QVz&|Nd*8Pd|0% z7w0u7uha2lxcB^NoS5ycO*HE0>B-d{>aDK6@!k)jc3+d(*t~J=&YcIJB*%x(kB^`I z{y+KOOeXb5qhBU*lqT8i{76EQtgJ33sgMzx`b&To%Q(GU+ykHi^q0$kEWJFHTG;X^ z5(&7_-COpKz2)H8+29>}$H4(}uoVXn5P%tJvF;^cAcVj{JU0$;id0yvI|+oygtBCq zanYVEP>u!EgsKtzEI^-yOaEfdXu;cJqYGLjki{Tx)40IWrLOr|U`AnJ3Z9XuSwf~+ z$gt5hF*WO#HSTXLK-f5%0m8!I9%a#w&|*5l2llqEKq(bPE&u^K5no#dCPpGfN*kk9 zq&RqEunxZFK!j<#6J>c|=bhJa(k9Yz3_?I;;!K2483Yn9ii}D%Ha@b%20)p}NrfoH zVHwNWJ14@*Xl#~Y3E~3_AtU;_LL_Z8pduwi!~uz#yoU{-*BAvuv#@)^0xib?M8c(; z_vMIrGatQZmq^K_wip)RArLCk0t77BXmAk#F)}GjFg{ogpr9z{(~H^0&UelSF9u1W z%t<=OvI3qJre)gQ^my+u$$Rg7=lJ-OcW={@1_x^)J5WM}ss?k50q$fBoeA+?BKO+uzt)5j~fHy(a@ z5(MfhV~O8+cY8dY4nLpl+~`02@MrJ-)@yl=AN}M(+R@#v8cph10i}iQog~Ryc}K1E zay0zozkcF^VlhWwp6Ga(B(aJJmDF4ja+W77l0?+*w&wHFd9PSiQ(shNoao7THky^4 zR^~8dkwjWernQN)IEnydUODG%uiM*NU3Y;ie;2?7US z6gEqY(i#XNQpDb|q>-5w6(W(6Rvw*Rl&uy?K&2EQ)s-`mi4$!d$4OFEMWi4IS{HPj z7^OA)uzP%Ji@GkYR*|g=MDbqYD0LX-RZTRD5|!r>2QP>U>B`Pl8fRrym?-j4C<0&B z)7jJ{hA`OL0U=1zZgsqu(O`rGtvoO0vtW7Th9S64r!C$h(sVSct7>y?vnX7YBuXn+ z*Kv|+O+LtIG|94*5J5t|lE=mbL0<+hT-LRYj55&9yS&`!`>|uawc4tgf!ql^f@NO|dlrV^g&QGivqlRy%<5uLh_Equ5awoh zsxkUUY}O`R$U=acl5q(@;LGqIBuF5DMzgdaCh!vSU#L|UvjR(QcIiLSI2L^41pxw~ z(B+~Ejr^1WU@_1@YGDU~3b66bK?J0RdKUJC2o2r^BF&%x5d;K5LE{{I2%Xg~su%-P zahfy?2PvbXL@Pz8S-`|GDdRm0C~h1VXraPl_TD<3 z$G{w%6#%6T2sCS!MUf#yA_^gd5Pa}~Ss9a>*qg-HWnEQOS*J;?k_el{+)*#1zVK4? zsz@;h_Cago0XYQkd~+Do*n~H$UM|LGkdzXU-~)4j1(^T>Kt!a7MOeT$b9tmtCg}*VhbL6REX3SF7VP%0viMl2w%*zBm{j4Zi;ezdkz|nKT-o z&!Px=E4{s^2+2+J2$Q#eD&n{gVX)zc4f6| zM`1Ls=QDadE=_xQbZ~tC?JH@gQ%=g`-EnWT2x;k_S%z~D(fm%QDC^bwQ;gr z{Nca&!DqjC{_!tPchvZ`Z{3|wm-hATa$^V zMd56{k;YjokD_E&R`dDHL?(%h(Hc-h90%-nb1!~2sw{=bm}+igokp>WG}X0lcL|tD z<7_^TjY`v)6d7&21R<}Kw$^oeoj8u2b%W8cl_gp=3#1V##YlB+qu4a8-)vU4+ig_k z>T0K#-53uBTB~3D^!=>c?XRq#)aSjGZdsJfY!Ypq%i}yS%x1I6Xx2JDVdv5~-Pl|W zt~z{iG9FLYzqR(g-}=#uhmRz{{qKBtyw|GBiqdvwT5%HhS9*^hKQUz5Yb!VJzIx-< z^#@-(I=>j}G&(;&olefK-Mo?I`NjF!;Os1ob*mMPrx$yVABbRYt^eTj5B3gDyZz0L zovSe>H{SYw*4kpD#d{Q#W;u>#_=jLA2?_!X%qW4`1A7k6+nU(}2XE`(op5095JiG# z_99-95H~IkywqmGLheq`7)dfPJ5Ne2io%AN1!{EQ0=)1x0-W^SK;ChM0q$kz2t*5bi!gvFtq^qs!3V~wE)clW>&972si-V# zXDt{7EY3P%Op+WJn{^8q9RMOJ0v5kmj4g^AbAH<}Hz__9(+$|zK*RU`;NOf0Atei{Ivv=$LU9mFfGoB4AhZ1h>7Vf&@I zyn;wXu+V$~A~p*w3yXjv07g77xt7s``~#J>n9Hf-}wi3rsKWMl_-MzbTa(IKYZ`Q&kle3*N-OSYUfIu1>+>E zXJy*SUOXMlOPN!VRM7*{|3vrjfwS2nK1gR`obojrU!EWMdu zjA!%8bJ6dr*Y9uLx%Xxoxo4m3-+udccY8QLw9^Ym9RBoAe>NNzI?4|A2Axh`7NE6S z>$H_lXO$~zTRAs4Ec@M98C>02GdeyxIxUMDD_1+us0bB_%CfZE?hQvn9h;(dWmQ?0 zUMF)QRCQ%c6eZE-##-?1;$lKkT3N20MP-t>_8}Nj&EiA|qEOe)A!-)Rk|fRI@pwj& zIv-XFpw(7oS$i*O7PDs}g@i_F6q!y+TZ>6!2Ew3;u-$3Ri+PBBaGprId8F4jd)}hu z-~({*)A3w{j3OT-Zl{Ra)srksw4&K`R@YTsyL3`WCf3S$JPzLWdY!t0c_@PSN++na zj-r@EVGMrpnO8(E@N7O0j$5s~aVZJFEI1rZ^0XyM8plZpfwTd^ILX?byn&dN(*4zb zaab3{RL2^`hXAqBWJmyu!q)X$zB;o}7L9$;0jKtG8~vo<^%TZvtuAf41v_ zJ2`sM>vhjAPOS?>c;n8^$#|%=>aFyKqtWKtijJoJm44nzKKuK>eevvIYiHYi^wURQ ze7Uo;dF|HCE4Od1Z*1IoE5fajwXZgvnP{Lb#lBbgzOnW zhuRvgXT|jS?*8>#H!dztRFo)WF&M|4-pchf?TfH@S=9JTZD=E32M}N%TrJ*uU$M6m z>fl{)RdANQgXPL(O<%9H0Tko_1QMFoL6g!F!ZKnug*WnI*+&Ak(7OZ?2uw-|3nB4R zQMBY$!=)mXg_%7ru&Mx}7reCKr5QE=EG#?0MlT4NG*W>1lYOXk{T1L?A^- zu2X1t9M4HGboiK`^QYQ7>$RZsJG5{cH6Op1I z;Jniu8tN;`S~^M)wGWI)CQft~6A%XpAvjxm??IH(nj(#)oAG0%P*5;13Mdk=9=x-4 zF`UdmgmjERah#C|5yf$2OUr=~DN2mDwF@E&lU6GMsLLt@>%C_JC4kWA1=RwL13(l4 z#Xhiz1lHQ%WtJ6a*8C!+sqy$jQA#zSn=m!GHF%E#q!l6|k!Mc}rw$?#9~KM9>o_*- 
z_K#m=ZS^a!zGLucWqs1#2&2beh1HxAJ={Gj=g5#E+Psr@Hdn?E54)?~pZ)o8JaPBG z)B5aZPs^fM-|4PwZ(3{jA0L$xpI;37>rpY0q^&pBdItyd$%R9mrrmUBXL~gAAAj+p z^!C-a*G?|Ze`RH-zuiAO-2L(=`)_>zh8k3=wwjfl`MUP=*=#bXJi(Q#tDk)KB5K7O zE9(y*9XiH)_pko_`v1^i3 zZ>{z(hIM5QN`|a7xPD@8d*`iLfOnG10AOVfhS(3y_bb5YKTb_*zRwg(Ggkm-~ z8saGOjrcF1YVciKs0u#?9kP`D~dW36yfT4CsQvtBEm&dM}S z;uz*-EfjlilO&qVXDgj-_u!<{ij}6_!=Wzbo&~oy*Di*m$!MyP@;ndTB~hd_Ip-Bd zgTbVoCz=$Pq@8yaC}gs=L&V6$%xn^ajU6T`g=A|-3d_p*+6yXMlu?$ND6)0Xihx-- z)YUv`<(@gP%*!%Tx&m4h3rKH5-jg z3~QS!j3G``F|W$`JZopZ=G9)SaMWGtqDhBI|L!EAT%{8 zE!HKz1Ya6T8Mu)aHY&yjHeNy}z|C?h3&#Nuq?)Nz1d!!2jUpoK056gK#%PpQlywgAczM5VUI+F0>OpG0W&b82m}W3-~%%g zY7u4*pp+mGMWSQT2{0?8fC}Ld0w@Afgg{6JNeL_$SyxI+tgq@nV&VwSX8&`2>! zC2@u(GA0rT3S;M76eSX%bP|Gdwl<_l5elG3CJZ4cQh)*|sI<6RfKf0s=)Eka|CG{A zvlW;F2gRYeHv*_oEpI*W!eWU5ks<@qh{i;O6_L{MtSaj&v^%YvH`mv$WpNs}vxDK& zpQjtKz)-g_YU{yixh`?=3@To^8jp^P;lX5mHM#q(D>9p1$n8y8;m4}}^ z`{+L%{Nca&!C(HH2gR&D=W=gv{Kx&i2~-zj*j!cXZ`W@4x$RzxQwd?faYS z8(*Ltd$X7hCzFe;0}S?OKYLWos#o5+Iv+$wliAmyYPSaO{pvfPfAzRzoR`&X z?$RVLt+&p#AYq~N;Vg~xoS)pey7prCw&S;%1BWXDdFR}cn2&>>o|#$B=yeD%bE8cwMK-_YOg3OrLog%=V?}#m5>oKQIu&?n1j|zk($qn zh315%a#idMvpDOG3V;~LNvGW+RD>#cpTtq^VKN+7WtC^Cb8c^U@8azAd@$CJ09063W+x!O+0oTh0firGt1*Cjx|-@SHw=i=nFsEba! zeXzG5Wof(93ZXtcJQMI|N6#O8{`AWA4N_({EieQm7QxAA?1Rg)DA>@>yS;9&PFPd4 z+FQAK_hx6Mmt^hTCy%YQ2QLoqy#BV<9bo`;C;<`0TU*Y9cy^W$I7nc}!AtNGghZGb z*o!z#h$sq)L3pvW2uec&(jurzV{9>QZ#t$Z!@y9p;w|?Ltw%RPr}s z6JR80+&!D@ps3MU1_=%@5e}%-A|U|Ca*lP$?lx`B!VHe6S$d2YY9eVkN@fWVT<{hM zh=?=<@rW1#OJE{EAYs(1X_EpdLuFDXd zjuR6l&BC}1eXWTB0mXUet#=44ijc@?V{ItwvMfuZNNG(N1t1_=s3!peI}k(;F4V55 z>bh$B9`+$g5=v5~NDzfl0GUA$C5Rw;v9_w~*$jxnF8 z6q7P0cy1OkMnn#N>8IB$F4G8NMX5&r!-OaV;+q9E>2i!*n47sU1{UVv11dyPf+WBV z=7uPMQrV&`>x;=Kjn&mxzDMe0HaJP5iRq;MjqBynb7u5hZrtc(-Sp(?u+9radi6luwair{Kvnc!?}*!u%36ioisp_8D@X9cP>hHZge^;{VTUt zzxrhFlTXhW;jOo>?eAZN0HYDFtgH2Qw<(v62-AOG|J^|wPXgQJt9qmk{jcK1dv zcIVqS)=zgwDuO@z_wR?f3};o6#P?piR!*zO&(2rZ`_9!_W@d%Gn3QH(U7Sog0E3!N zOV2QV{G#8>`Vn3X<|xWKr(@me=F`${toJS^L+hog?A!(>(1c2vD_2*nqKUDo4?1Ccy4kup{7>)L4*2})TplJtAIv(8(ttplJ~WpSQ)Te;Hr`#o)H>-?-@ zQi`I_OtUo2 z(=5+ONEu_jujX~Xvj(CFF;7!es7MK5W$OTJ40-WKC&#vS+9*UAO$a1JQN%3X)zfL& z%3IoKTe+g}bvf^JyW@+I1nzb^ab$p?ET{XsPoqe^c=~8Cn5?d^xw1MwK5lnAc{|^@ zx>-zSF7Wy3fC#cU4mMbCpM3o!Z|A*U4^btpmiJuMrPgAU8C?wKv#Af&bTknJThzDj zUWub)V}jF%RkOOG)W!At95;D$ZGg(Qh307yVW@pbSON&rrgP@`FxHppVx;%4y{Xq;yn z@*24r3*$vT;HK3JfeC_eSS*SFEWpUZz`&w#ktc{i*b9f{T8B(p8BM`8hX(`<0klam zY5^Kn5h%qzG}0-P}-<@+$f6ne# zH=8m1u=XYWkP$Z{Bl4cOd*8L@9OL)2?B7xQjpp5}d%22D3Jg;vPe*fq6p)2U7JW7 z5PHu2ZzD($ec%;l6_nELCjxD_NFl1WNf<%mYQciYF*3*66^$$3`trM-E-hwLiPPeA zH#If0PSUU>A^_rCcL z-#~!nq|Varr5COofB&xMc>P9q|F~Hnb{y$dd5!((manzw^q@ z_3cl7@oBfOJ%o8x`T25VG8@l*s+dqsL))S9I0Xt7x2os393Kp(h~ST5(| zr5}!xx^hb&j7A@*39PizCSeXqnh6v!fFTMX-Z4ioR{18>5VY0ZZl_q(q`}u-nCC@B zB0(TY?P6AehIih6GtctF!)HN?Jj?Ds+?y}U;4$qd5kk>~#?^^Yjf4bE*_6v_HYQZLG~SX6aWd+$fVC(|hs zjn+rg$xHzTi7~{wae11~m&>qwFxp-thFQ5ZT3ga}T^F;-a5PG@p0FGp9F=9sfN7G( zSQm>%lNt^NiA_3rH_@G{t`&hq9-mFRon$hZYc#G35+F2=$O6Ra`EoGq57tLj6CXW# z@X?3&uiw16xwUh&w|8=Q=zUC*wJ1Q4QW6kR~I(T0g>4& zY?GoQ5~6FGqG-G)fTYt=wkt$TfLf>2&9Y8Y7T)=8r=v|0nNdKYs+&czoK?k45oJkc zI!TgP0rX8RLTD6{Y83}YBPk%j=pB#>bwsVSwSbw@wy6t&STAPYIGtofiji!dDh?#L zC`iipvf(pnKoV34R4wJ%Xo5@X=D(9jZCR|^sFvmZs=s+b_ig|$IGHP3U4DJz(MS7F zK0DmH(i4amZ>^hXLg~xH<>IXFZZwml*_BtX)J6UNpZu(J>TBPA_x`=z^V8jTzjo{P zM+YB&vKtM2=O4fR`Olx+`Sh6q9FL1(4?q0*hu{63F9y;NK7Zotg2=9I^?a1^x$BS8 zMHTlCnz!HR_By@wwfwc$>B0To-~NYR`N0oASzk}b<4M}l7uGvs!f*bAucL3CoSZ$r zv-|mH`=x`)j0eNCtisNP^~rdNRv#an8;iRK`>5-yw|3aXy@QF;sEzFndNK0p*=(|? 
z329zb-Nc$yjn=yP$nHHG=d4(qAPz^p)AK0;Ww~7x^N5_M#u$}|?&kgJteDQK;b8d1 zFTQ#A&S(42_CQB*p{#>KP7)OZYt)@Kdq?vj2WSI@j^t{p< zRXwj87sWfJbTMBr10ey!bUX=Z(;xOtVu8>(m*=T98UTv2&&hfJkC( znwq+DN}<*ioS)9-K1L2PX(!+)d8gx>AROoOGDc4dNue<~7!0drp^To->pacsszDG< z#%kk>dNyAWfXR|-SqMjAsj6~5M<3#HJ~uYe+BhHBvo*Gp_v)%)FU4XJqubuz+}Ymx z<@+B1V^d47)Adp63N>j)5RbiKZ}37tAFl0e@02w@_-sF-U)varHhQ~{i`XPp3BD<_ ze6X>x9)nax$=++y$kZ}!6ykG(`YN(VM61&=D#6%N71{~P>Xc8bi94;(`E7iWg@s!x zT9oJ07=%@gR*;YdnFFa7(g`FY?M}6w&$E}{IC_au+HsO_bdEV93Q6!y)o@b-X`3P{ zr3?cg8W6(Wm1PKBh2|?Hidqtp#P#6IfiUL+-+PWAjg$SSo+ZZSy za}*I$gvvliJ~9ZA){30=zOI_uMF!yLf-4uZrY;a9OFLUl~N<`_^sh>qSM2Kx$9LVTar5RBKA|MhH>-HT2L|E6AK-{{p zWfLhT&qiOk3CH_S-}@}jhUgLe{NVFXD_1@JwC)a%51)oBSIVZEvp{FN^X%hi*t@t` zo>sBke{%5Y&!(Vx^To~H#r}4;$9CzQTifh@;VUmKruF!A9E$Ma&-VHqMZiD$$?W%k z@7jO*{!^9Jx?hg>m$Rw3Nc~<~yTB}=i6Sa8UA(+;HXfgy z%?AC%IB$$eNS{^V;?^ip`sB1YIGzW@RHd4zyVkK%T@F-E0LU^$!3Bt(N@P1&lf2b7g>kkbeaMRLlj=| zpPzHwgjcC{Tr~sQY$!y)m1W@hj0aW|jrQz~LZqPG53UmU3=)Y5Tl{Z~Eq`BtK_V)G zC_qFKy{|cX21bc7G#mr8aZG8Sxr_kB5lJP8#;O<{Gf1qNS%`R*{ZRlUNC2XgMkHuu zRrEX*s`RQC(7p*`)3Q7XHL_{(2CNVvpaN;j3@zkYuOw;A5dlbRmWZU$rbH=Y0?G;+ z5Cl}xw&(&NArK{L2Zj-arLH780ErTnN!tOHG6qB^S&nTzyPY!u00}>LxEEC+^SP^Q z9~-S8O|mS{5|c7gj?sId)t{%dgiobFfW-qM zFp$zn21o!%I4GqQ=@x4^z8+Wq7V=zw>#tP% z&yJ#rXUF561a$1Y`h^{>CcF2a?Chjp|3Cj%Km1QWeDdV%;S}C{bMVF&tt6q_0j-$P zxym2zoui4J;UFKS)AKnC&L-8xOY6V>oh!TdPjzB9w`e@6A3QoeK3y8E_n%C=-K0On zv+;5{tN-Rde>Oio`?X*FQuW2;mp{HU*j)d`XL|`JCQt~Hr^)$vc4cRDajUNs#=xKa z;z84dG)oX5O{}ZjbXs)#iN++!I}-VO-}{xKj(_k6e+m)a{rW4&@zYP9Zmn(HxcR~h zuYcj^AAI!5?a#BkcXGB6gSBgvPA+a=9Av#$ZoOOv_xR~UA6RL6uzwob9hpd?&0^{D zK~6x<`)+@*EShCq8%^5i;9XM&G_*17^ixyLPkXuU^=z>ygA2Xh;NjD|s3_|VU%a|~ z>%~`*>@Ple-$mc+51GyBY`m5AUc7ZHy7HZ`eBN*H=bly3oO^Cj6 zA+a{GSy{RyHK=r`gY!*@oadR3vU_m2*6+H8>&9EF$ETBv7k73pTpFK_NjF`qG#MgW zEDP!=Kx~3PJ~;_4UcLg^aD?cl^ONyudi~Z`x38Mwe72Z%yX(u@3DQl7po!a^TT6>Z zqSn*Bs$~L;tZ2Y(j00NI3qB``3Lvg9{LCyICB_vsKp0v@I*M>?Y5UJfI#D1>48UGu z6h@599EDdy9$*HE!oK~nH0idJEP$jyShY$Ujl{$p84+S^vz^i^WmYtC5RPr=fiLA7O6$Bq*WTmv2#H30mX=|s7B5j^?RSIs&cL>7N8rh=|izZ{V=`f`Xicur# zmUM>jd?ii{!b*!xj5r6O0*ML{+bI*HQiemcX@W{45OZu-DMX}D17VV+1R&8ZrZYfJ zj85}Bv8hfI5e_agfMV#B*erf7RiO+>e7c=+ithtZJgJLgZ`i{mQ_{uLTKN_tr}BXyOOkp7!e6k zRv%+K9wbm0IY?Z^zuK;0WabzVK`V?Q{QXLRkRvOlbQ?Po7KFr_B%!23b+I_UcYFJl zUi#9oIhuuF*Dh^m-O<7Qy)-esY!H{|^Y+V08l_q^mtP#bdE>?}fABcEcC_6`|fx-mo$^2BG2XZTN}QPkDs1A zJDMnnPQ#TegUVgGd+(&KB62_Bd zbWsE4*#A?vt4TtW7bRV-oB$~FB^3K-T(A&x>kSVSHJPzPks!cE*f|1 z%GI**v%Tih)!dVw7C~9Oe&fX$f^Kom#u)FNA)FSqF_fp7QWRpaiRpAQ-LeutA*hH7hnmVL_LI^R0Sa4(xA!>zHStDSlms^ua2$@wV zwyEl>tO!LBvEce)vf6jcp~!k}#e!p_I4_K0AuwJKA}YiqK+8-iClsmeO|HtPbG^q|vS%oodQ zSuPfNni7yTsdJ&OVpGnFW!3KuMflR?%hSnhd_E?nv)nXw(^REW`sm>4q?nu?pUkFb z2fJcBaP;)?)7^tBH?QS+|Js#H(3JJEtf~?fB(_ULG5U5$vs%dtaNF+(L>5p092uAc zlOkAAKv$Y?=Fm2TSHf2b?InQ#C3VG$5ffGb2q>og{f zs6+%6wJv!Ej(@jBTseM48`D1WiMT}@qiRbH?G+@D2y;}}a^)f+#3llupdxJu>F|2IhbvM(;Q}?<(JvuCAGb zgrJntMrXs3iJ2gvfYK&%M8Ft1uGZ!7ob8NC6R?gF2!#NXBu$b`X&pI69~jUk84J&= zYX8y02lwutA0Lxe#9&g})JtFcs%$Ff7DX&~7fCw3cjw8aUwriKU;mAl-+WW&DTfFI zN+$|XoQtlh>Vl+3HbE&AG|E)XvZ>1~P5(}v7Ft{=2uO@EGAcp>0i%)-Tc1Pwsz9ZP zlp?|w90BbW!m|kD3K6uz3W-Dp)Y@30G{*If4PVYr_s*5AdV@`+xH>s$&ZondZov%3 z&kl=e-MKLw3`U>*BFXsVn_l1lYjD6(*Eq(u?Q_^Oq5Pf7Nbi&g%QGd{mPBm-eT12 z>_0u)xX=eo6vFn!;isP*X45Lq`_`z=Kwa6{JfGFa$CIYyx8J@rKAfE%M>6oY@7?2g zXZL7vasXMPzW?6+mtVedd1LGG?zCyVN%ieVM^%s@abwuC+K9%LDd$)0u@hplzIX+{MUf!w7 zaO2vw-eCCb=-K*Ukmt#=nB-lPce=C5LSjg>%!5a*dfk2$?)3YICnx>&ooP`8z)<-# zHCkMfCa869&_zwR?>}&^M&eOFJDbj`GLojsRRA!XxIg>zKT%|iF+07rI>aDy_3D+E zUVh)OTOZECI7S(e5SSXdExS5L|! 
zy3px%1lmvr_@;4z+YzSLRz#R3v55jrM1$eTSaW_lPXS_xCQo8$(oPye14@%d)LJLj zH%P>!Rqg#^S#|Q9qp+|sMky`go8TE(n}h)X|3Co0g=2rv@ArFIlAWE7t&Q3!_6@V7 z)>xadaGIFuJe2j^>dkJq>zq%L?EHKJEd72j22onCZ>=wvh1S&Txgvhad8?6qZ0)!Z2Pzb_I zN)a)TZo{X{(t>QHML!dg(xg@E7jC~WNE!%G830nsGH7N))}m6(f+_^pkc1FisOEKD zvU3O#MT{byB!~%;*2aiP@Ks=DA5D@0ph-}KNoj}zq+q4x5(MGsyek1B8I@$YQWga` zdH~cm4I#|NlY4jX-T(a4;DT#Hl44O5E{1AR)pb}b>N-S|CZjG7czirx{@L#! zz53=GolYmXhzhOAEvc$>uBj*GGD-R%((CopG+8bu^Yc@^xkaQ`y|?H^BD(+tq&0wq z7>TqXDMJh%Kw1o();e^^P{D<>7bvB`2NUBun=G{>_Io_^-p31aCG^`MrV-U`|Qc? zC&$;`y88NCS5207?1G5@@{d2ba&vulTqhZ1o$k(sjb{gkZ@+u%3OaEDJ0fBMUl$pnI$&!(qWU$``1`nwNyceaNA z^1u7XKlo37T{U15v)029MFTeMk^rNTO|ta#q&YsBK0BH(OQ#Jv=dWH`J6kluTsLwy zUiAB&$k;@{=)DiQNv>VpzVyP2`}^m2?ma#}Ih!4iuin@`-8(GH#mTv`xj8*s{_RhG zx-nWWi^5pjMJ=RWdHH6VSRyz)IXoF3D)I^;Nz&zV8ADW7A&Eo*Wd_5M03wVmSaqXHA zKq0VqM`sIb%y6*Q?+zZ`e*`+&+R3NWaxyM;5Lw$AXi8Z^U5Ea#ml|C)KCl>Vx`SR- zH+54ht(7sUO$}mdlDcXVtvN==oGBU(286087eth&320mvOXg5FHAqCK(lk>yPsP)!p(MF!Dwyg`t@tSMj@gE0wmQ2IH^632`~Y*DNI~J&V;BH zOt!jt=IBV#N)QYzu7T*0I5v&21Bkfd)(f;=7zCw=+UGE>*h5OS7IL6=e~82^q;%kj z?VJQK#s~sRX$D3`3PF7Md!Y5Ixd$NxX@w&zRA-wuL2kJOM6!~ctcVn3lv28dd=Tl? zx>A^1eE^|SmXu{gj&0-}fE1uI)&MC06@?HrC?lviFewn{C^k(wTQ27ky)k4AT9Yv# zp$0{Y6evQ{NGgUvB0-wq8dohELI%Et-Rx2q%biw)H8|R%5o&LbsG{gWPK8no~ zLO4I0eEN%@-@kj8P*uxH8`CO5W<|lCMJPlg0U;6~7>gWKVv^JI=^y=Xf5b7q{`R{` znuS)*14v{vX;+k0by^jbx^M|e<#{L8&3rmd(u|Y_MbUY!wN6r50V2qxDJiwW3*vJq zDJhT@=?~RPvp_=agMY;g5^mq+EWixWduDdtoy`tjdiBQoRY)as5& zOnFie!RCej?BpC3T)eSao)wGJ<>}FK^Va$cUw>I#+`E1H@a}Q6aPy^2WZJuZ{)KmL zeEg$F&+Z(a&0s!U$tNP))#_TAO8H=-CrJV?_{S>W}iNuzwxD^BB?!%pUj=7 z(Prm(?!p^CatK)$z<9oh>5A#KQJQcWu}kj@FB++I@279WIs;kOAYM zn@#6_t!p|4o;0-Y0qm86-QETJ8SW8K2cXl=| zZ(M%=?#Cfi%fkKmr$5g!Wwh#~S<^JYf{2XK7+rhEfZ!woJl%UT7!Gf}^2%g-GMksX z#}kwF`=jmci`S+Te>Oh8aCKvP`m8F0wLML#>1IGWgmCT3_1%L#*SO&Q(fm*XX6x&M z(k$y~xzIve7cPv?$76l0gKO%d>JAkd^vgyACH+pn({s^RmFwl%bWx1Yrrlwu-|wf1 zS?^yg+RZI!Yg3L;l@+2Wg^9J^2OpwQnh?vn;mBHx*0@AaLJ zid51`GE_}%LyUx|Z5Cn>AtXdn+GuT*2-mgO*0z{(@0-SBMyWJKj;& zn7XMUMhu9CLi8bGl5`2M(=$b}gf^BAIGHRqHk)3zS1lLb`!q8l1ch{bcA`vTNYxD5 zYNK_GjDRlq$dRM3v~A_5X1GD4&l&wvc<5khnx znOOpIU}gy(KtKYpUzz__j7|U)B-J*CR$0*4NNf7v((NF&utwe;muSw&hItw^;?6h)|+Fvhl!9F(Tk zKdiJR5{?{#2M|}Ow8f1`y99^ey>N&=YHbqRA+3XJqy@u?nVg;8{^dsxZ{HIJ)KoVO z_&8fujf*aVFe(r-deG}U2OLqFVDM~Bb~;{s|3CkrGa9}0$`^=KWR5^8$x;;GxN0#& zLshxVI~`!InmU4{Sq37;Sf#8&0u)$gAfX*Wr$%ii-kU=-%a zq}n`LtB?dDB~9q0oo>GmI-qsFnof_WuB=AeS!bv3m)zZeYEgH(Th!@>*qoi5OwQ`@ zQCPp4W}U2V>Q8=r_R<^arMGVM2I(j7J=wW&0bY7(bFbE(n!Eeo`!8Sm^k;`xUcRaf zs}XH)>+~y^5@RNnd+BEOkN)}V`%jmTKRbN(th)Z%%ifJU-Sl*S*&mqupNzNG`@=z( zl)U^(uW)*_F&LNe;B+z`$CqCi%;)nz`-4Az<4Z4FYac(EYYM6sLUsSnK^18C>G}Gw z=f#{JO<1JUG5ujOna<`_IUM!22c4T&*N>L2ZX7e79M+b~wW0l$uU!A-C;K5{)oC)T zx=D97pOcDRt2sttc7;!@icyZw&XPpeu2u@HPL9qF%DP#W%W_#JiAlN%_+U+`_Gw5}r zcUfYRG}oO$rnM@TRg&1P3maCOJWH}HbJK+~I_vbbO?-$|Rkm^S+Bft0f;k$km*or? 
z1f?oUrO;$n0eN2s&qk{xF$idk0)nb4V+`cQ7_EZusV1;60@9@%jNRY#bLjn;mNrVrkiC!VhG&p z=l$(8IN#KjbES9n`RVc6*1EXHXo$5$9|yx83fI1TynDwaRs%6NlSzSy0%kg?0ZzS- zi}}(w%Or)x_+U1j=bfQi+tDf`H0`K6N}v|l8G~>Di6o4`;u~&(Wb7pdi2*o>#5UUB z;+By_Y5n)CAYf_r(9bCh^1Ks;0^IJKaYczH0tVuC7=*2>5Rd>{JVW~~ifyEvaYdE{ zk=6L8edrLOFr%PA%lK?*Z|%=OfD%JM(wYuv#){`z0MFb=YdI609{q*Dex9>U^44TDaZqv?u zS(K%(14qF`EAN9*Mi~G`k672iD2N=1Fi~dj>A}x_{G;`?&GqeV=b4O2(o`ZVGM&ND zBsQ^Gy)3mtV6LmuJC}C)QBZ4*E5v`x&lVqI3}J;7VMauy6vvj4#0U&PEKIVpNC=Cd zaO71>L_|nXKq*6tnh<^Mh$!v%w3;`u=xlVuK@CXFyq+B`MaAi%fBEZIT@g+XCJ*l{ zBSB}h_UX^gJN?1lqqB`I+PQMsomcz&=bipwxN-6EAHLTFy8Y4N?jgKW``5pI^Mk+o zSgEw~{N>+xsXkSI`0qbDJdtnw`ub--**)B?KK)Fv6(GrF>o>r(0b=}K5BAj=U(WsXg5|HJxj2w%y&{_xY>!wCv z4k0q2mk6v-oC}FgGGm$$G${@eIj|Q%iXpnXQbuzuMZo#cw^|^E)-0>2)9VHAIY!Xf z9d#6{_H3$@S}d2{PS-WAk{F^-tVvR9i~;~xJKr=wiWG@(mL$w13nzG@exV}ELX?}iuly}myuB_Hgh~=U%hLDsn*L5U8@YJ=PVp1kuqe*;I zx^i){zqfy|d-cZ6vKaUJTX}a)*%X06JVzD|1k4;HG$N5>ly;HJ5g=mZR`AZx!M|mG&4&S z(ne{^NPu8an&qUL9Rq@|sw~a3ZYM}Y5Legp*?2LX7Nt`N%A{$M^oN_B;n1czl2Q;M zGLRskwF!Wf<$N(6=e>dL^;<}W(aJ}*+G?EuQC%%#h-set81^1Lx_A4IcQLW)`1H)A zSyj8~tgKuRhCH!^;3aBh2*q<0j;U2q7_Ddp!E*@m`R%)}y>@?e;ZmB}Aj|+dO>~+_ zP+8suP^f*Hn6zrn&W?*^$D|#tjrX1y0EGb{3gZgvthLspT2UG)3L&CGBMLy_iXr$s z7qn`Hw|7hwVGYP?CE9G7y1U-LaPyj0d)4G9)sXCL#Mqdw_{DN~(SXiST$0&(I^VlH zPHcYR>iVLROM}73KiaFx;?^5iMpw5!`l~z3%D?ly*Ijg3C%y5;l@I>l&h-~CQQgxg z=Qi!X@%1l0_;_#sL8%d6d3|U5YW1tX`TEh5@s-OtA$)Ro&)4BMe)IBTQeJxTA|U+W z&mUggI=^^*3z7_ob`RX-bU9ld^m@r}{qEa8{o$Rhjjq=Ey`Oxnb@G4u-~ayq?f>x~ z)&_&qV%{`Tm4FDtLD$Rj^k8lQZ(bW{gBNy&U;XlHr_ZK;@aI22o|M@j+1%-!PpeK& zCQ+mHjiSM=owUEX^Wo=r&X-f?n{KZ++}PS0bUu6h%yzQ~tL`R)guok{YrVXGKAA0- z^WpkhmiE&`|JJYl7YFADfAv>?5`sS&&j9f1#fxu$@k@X8qxX)FPn*4Papyv6aDC^} zI6ZU0fBcJ&u3f!|q=;a)tRL?kPU~1T4sCMh^9NhQ^y0OP!SVk2B*xJ14q|A^qFTga zQB;T�B3ZNtz`&yJoSNXI2CEo;-U{R;Av&uvX1oQB`@Gluhj$&q&S%5b&0d50ACg z0HQTIA3O)&1O}eYriwIh0FK6}JT)NbgZIpq3<_6uohZ|U03b24h$M*uF)B+*Y41I1 z6(ug01raV|=^2Zn5D=q~MV8CL8nRjl6mt|5o9GZdD1?ZKwL};~XwUC4t|*8|s0F=q zB;-P^71f?43WCgvf{Hpx5>-gA(^HxxhCEBtEJb8wX{si2NVFj|C@>lhDbl>C?(aUb ziDnmF<2t>L#8_3Ka?7yvzNu4dQe!%6nbr2>;CN$uXpN51FJ>hY3Q&x$o9EgnLeZ#0 zWTSLqjn+6A^t@|MPmd4wA5G`e;b@?>XcVIyz=E5S8AFH=qr^zS5~HwtUfPd{(xTN` ztUu^>4FGMGtgZ0~YCy)-x(@+>h?t+3eA>VC&yzROBI4Tj1Oj5)B73e5LF5<#P!SR7 z;2HpGfsugn9M9Y~*YNMa|Gc6sz{)hass*>H6(NO02rSAV12V8u6q$(>#~1}rP#cBF zD86M)E7YPWG6+C~Hv5W*K+JVLt>+V?by*h5rdhAUBEkFV$ z7UAJ={nFKIH(z*l>*AFp&rvZ4jvN4lv`La|IiJ=z%LW4y@*F_GBoY^)*D zXNO0hefZJ&$)wjEl!dFBm~`}FHm@5W0_KS}T8gC;giNU@;$t8oXx+@Dh)^jQ4La4b zd2s*U3$MO8*w~T?fS{AqWL@u;5iw0tm88vbVbhMbiT57bO;BsaRHTSRI0TLeN@-GU ztQe6nwsBh_2@EYBT-qTXGPmdmLIMse_^z~2j8|z7Cyl~}9j<9{PA4%7<`ak?n@1epU{rKR;m)Ab|WIS9? 
z{-4KxJlyK-?oU7a<^JUtH%j*NbKZS&vaCZ!@bZi6to-ib{7dg#-P+WjeST6b>!ys0 zCPWSl-0Ngj6&T9GnXi5IYg?B`fBI*Cu_$XFoDgZ%|G|%bQP=U})y;>uACfVHL2pyi z2;>D%&!^j4m#3^7fUx~%KEQVP3yCx)PP(v&3#M35|1zC=VKg)u_#KA=xc+C*2?jZ%nS zNJwehxPV#_A}}RMrc?roDp$?sGes)V2!xrn#*&0MpU*_3o97@HBS+>e)kN6%&=j>c znvm+MF-9fUs&0(h?HhY^wA`GIh|!`s;x?la?KJLhQk3!SS$-r zY;BSxCeKY%hZvop^!kJPymHQ!%LSq!1wzu=RLezo(60~18gabfb zHjFVuuRsUjEYG|Tb>+&+nM7Ly^I4U3Gr;)bE7!_d+3j{1)Y;+QXmi_|)Rm1k8bxA^ z!Vv%zf}*zEO6?WDrRz&eL1>S2fWVA^?U5g;4cg1|v?w7l0ZO}CkhW=t?aZcqoQMDl z3deR_3=%;m>No%7*JYrKDIf%9jYI^`tu_D*jHrxhxpd&8YnJ8WJTd4SPl)~D8VeWm^Zlp0 zyN@1BC)1{?nz}JIsh8DkQIvIf@ZjXm-N*OtKmGbwzjpK0*So!*C_zxDL_}p+cLas1-MLh_T!0)kSsp_NNEW_KmWLsw^v$rgf0IY|7fFNk^gP=p&;| zj82ol9+W~L1|g#X(6MOL##ra$<0pILvy;tBmt*5&tTmDCcKoD7o2on)7n2SwW(xup z10!M#y!sR1=N~^20aH8XCypUX8wy%EmpO_+%l~1DjKYFQt!=c;>$ai>4ndfc)M~HB z=O^G3QX_FsPM;*&?k&p$bL9zT09{@Pcs z{`NP&G1}7d{rgWIFJ5}h6eqKJ17WAyes%ELyH}R;`RV?dHoB~qM~{!6J*l^@?EL7x z)1p%5-lMPo?(0cfEoc63fAFwl^f$iqW|GG&vkJtZ>TUD~YlEGuJ0HGx|Hh4-VtV$S zfAqD(XOj(yWU;x6h;JMhAP_`m$c+388+ntqb~ z%YXU5d*_W`5tSVNS|fmh0;_u4mcW{unWs_X%oAgFSJ%v1n<4) z7(02|>Ew-bm2**;5Z6cjB+C}_Idfbt7fs^`G0T(0BvsjXADK~UjY4G^L8z+hpukw$ z?)w^)ey(JscHcPobt@3 z78sN=9DSB|n!2G}`=$m4(X2^(=gMUjVlb)hV zsh*7??^v4|Lz+}{-aBqjb1m>0o*UH#1XxgGOVw;^QUEO&v)%8lIQb&9ig>PqjV#hq zfw09!BGKQS=ed1zFrxsWVgP{%KuQ53aLe&;my3iFBIAnv*|wS6RRj@oly+Xku}z4g zLPCXzU`PQ#2;Mg?I7gms*1^yyoshCfDzV9`04yAWCsY6mVxw&;0>&6rw*A3^TQ=o9 z(G)`CLL81Z0p;xM^x>V)o;HR^>^-j0+rj-i- zF$7NtQ6jN~7-IBgy+qg<-FV&Ou@9VOiH-^aBO!RU-7L?O>62wWkB3hu*RKu_j_QMH z^W;#jUp~9^TDF{rw|;fKn0eoX@qz#Ce{pMh7A)Chn|v8}E^U4A$?lKe`~0_l_szl` zB1+crpWi*r6i&;JZe8n7pDoT$>)_&KA=kHdrYA=yPsdr;Tzz@RmDRJyd)?I9L4Rkv zI@sNv9+k(>=HL4lzjF1)=*8D&I?DRSVry&g@{3ph9Yfqm&`@*f4zWtrApP$#apWZ$hpJX-xfPS|l+)SiN39r9;>*t?-a`*P5t*!Mu??vacVRpKx2+e#v zJ=lGmq{(D514iG(L6*P#?mMSP=ib#-g|RAq^l4%#QK>S=XHx=5I%&<7w2lo4!m*!q z0)a-Abh;8_WX!wWc~K1d10AC%jV{C(2?T?L>0+jhL4i)*4Iz+gymNrkR1QEw^fBs! 
[GIT binary patch hunk omitted: base85-encoded binary delta, not human-readable; original line breaks lost in extraction]
z9vPTafWrMw*K!^O68+VicwK`>d5C&qLbmuhtq?b7}mZ+?Ged3}1e z_OVaB=g7>VT3i;v#Q8yzEx&#Fjc1-Z(Fl{>k0z;GgJ9hMa2a$jfky1#Yhd#xEO;Lve%Y%Ym zym#Jv@A6E*x!h)H=4=KmNJvU4t%`C$K@{(mRtR9iNGYR=GG|mp3>%BO%_6v_2w4Qs ziAO|KIBZ&F7GUkXWf1QncNPGMh*%T_S=zPU&K^8==H%)1)va2ya^b-TUw`>!m${9F zjq8{1Dox5j!72p+6X@L9augzI4`_^*AQa~^;9_g4hHv4b&UJVnG)krM`dFh@Q;N{D z^P&Wlkh2~UsA%}um*0_)iY>VpQVIwhJJO2Ch~&lQgqe#ng#z8;r06OyJhk_N#s~<; zXkm8D0*;gcX77OZK^25R1}RW+ToMLBQGniyIG<$MU@#b@X=(>)ZW)~`mJNlsAE+R^ z?L&>k4CSDr^`$6)q_LUFkpok6OG|IxzLO&^ukPGhU9ZGt42&>XrrpF9Yia@)D$SV( z=qdqVPWt^po_JlCG)p5iJByo(-+k`Hr#`j5y-O#g5|^@!N>MyBb>L+#C9b!yusk(Befrdy z(3sI?4Iy7&xb?Af_l?w>%e$KaQLDS{QifbKsfnqBQFE-7d8*p1;y> zCt<)3JbZfQ!01S$UJ5A)2-#f_`OV+_oma11Sz6z%O&vLO`t+@Z zOFOG8ESMp*cRCQxsIC$j-NjHi68vr zfZUgU?YFwUy~0bFdp0VXJ}}wsbUU4HU@9XMqc<+S;(P}n4B~QVv?3a9)@zN%UaPge zwduWwdo~0_t?=#VFMay+KUc0i_x9yC45>Jd#txk5_jl7|kY~AdIRLQ3wd!iWzqYo% zTd7p5jpnOwTxxf_@BQ!xfBeUPtWt|@rsn5wUcPfkmn+@4?f zq0ju(Q-A!-%dcJj#lQAf{=wh>$N%sD>u-0rZZVP;3QOh2=+xHMdZpS7%can0pSv_k z`g`5O7f&s`bt|rnCcWJ}_2uS>OS3_@OQ5`XpSb4OL}NVa?riUEZv(If-570TDL1F4 z6o6w#h~v%ixG{U>)$erE4kMMSmD6W0#Bl?3`ttKHIX5UbCJm`18Q3%`MU`Wx&uq8a zw{G1~EJ0BEU;gY*PmVRwTM+RM9n0oc`|8WjUHQtFCbQ0XC0^+EuD2Y|ufD{Zma4cAW_KRKz z0KuZ0PedUADl}xY0YHgC5xj%QE_M}cbI0C0n*oT+9e`M$JMXQ_#TWmfNLqscDosG7 zQ~|Fbq@r?%v_=G_6@mZ(BIzv4g-C}%5Qal<5=d{*;oJ+TBgc-F!sv|~*B$wHKmP7( z*RHnqwiNBnFDxEBJS8AZsE5lRa$+rWtTeM^0C5mxgWjL?KPVu#zx$0;FwqztsW;*% z)F9ro00buVS*{2S>fyy9?8PGtJBfe`-Xszcd1doKac2`E4_WYqj{x`a4oVRe^#!B| zG+6IRlXH#>pf(n<5EvFyWL-1-+b)hM^;ysj?NxCao_p3_B#D`ODPcN0#E{2 zXiyHs17rXhLBNviFJKg42EHVmXX(O=uh-5V>n3_-FbfzP$JCy|5IYKXu^v(di@8v(-4ty!T)WW%AGenLquB&AqQa z_0%7I^P5-SdSxsw4_X5g3LrYqhsW%M-gBtNm_XFZZ=gMw=tM8yi_S z;Sw6)_r2$R_doGi>eK)7@BYP$-+Oywd81T`w8bpxGVsUV_ui(Zhm9-WQf8Hns}hp$;p1VGf1<$kBTwY|5sy`9?}0fHd37D-SCx*SIcs?*QE^|k*I)yj$j_CEKreD^8>2y+ky zq;#I;nak#GUq>ZVGY3jx<<9&(dwJrC4}RpMpKMNyri0|mU;g?FFMJovr6=F@k>|b- zEAzJk(#SqSOj_HozxezQefsmi{%`;NzxtJ5{R@Bjum93N`X_(q|M-7(HWoZ$p4;WQ zc>rQI{Z0pfftkgF&-Zq^R2#`X9l7uE&86F!#VD*!j!m=8#7h_jx#QG2RcjV}VgcA~ z_lb0WZr+4}5R2gU){gk=>7Wnb0Co;!NG(#~i2$+R>fFA1`K7OYd+XX8&Gyzr7>L+b6z#5UF5S3v`iYMU z5IClSFN4GX72v-Dy38W`okJ{m4=sTE8-ilj4md0ty7$k97R>@BGV~Pg|0yVNk2e4S zMTguy#02j*H$l;kS^Rem_l$^$qJ$tcA%KWf5v6DhF5E;2DBd%Ow;qP(V0O-ei1*$* zYrSLPA}lO`NN7w*L<*HgrMw^!72yaFAS8t(z(Ek*ci+WM(!Fu}>Ia|vP`MHxKXG(p zb)nO4t#7VO%r;AvvUfRx3}qMLJkR2CwV=0|I0`#DV>^`AfGh|D8&FjfPmE7A>-ErR zwjNl}i3viDWTJS8q`CteKnBgg#srfG4(f9G#^U@&r<1YVUEcD5svPxgHgL(G zFdE^0M-2c#VqI?gxpyUP!oVniR=W?h@!a#@J9_BQwcFQ5$43sHIC}T$wQiD>Ky|?h zXc#hCAqU9;EVBj}0OSBENL(Z>KxN=~YrXOPm&^A*e(S{-N2jMtl}0HrjatY_FSq?Y z@^8HP=7k6DpP8N*9Vt)GHlBXw>T}ON^}Y{%;+s!B+iI^(H_N^59;k5Q(D>@w(&59= zU~g+-VPR!$A@6O)CVlY4iSy@=*dSZmY+t!^qr04nDl@tGg_kq^G-KmO-myLJ7t6ALU!KU-T`+}+J@-C7!Jj#Nta&YM@EvzOH- zlAX?8KjVIOWOD4}{l_19?_sZ>Wr(D}Y|o<$?r zY>)z=_uNlY&mtbOJbmf8=PKoxgln}5K%E^YNh&&g`^p=wUMmVig?hWSm!&z7lq0jS zww!i5{chXlw#Uv8SZ0MPDwPU2m?vw}1N!U;G1?cT1(QCx7uDwcFdTeeaJ7 znhk?BI=X)O?LYg!{gqGr@F)M^tKS0kYk%!;{*{08FaG`i@pu1LXLr%DH;yJI4~sFq zb{7?ppc7vy2buTncH8%}jkzVGW62##==&y3y?C4Vy|do4Mx_a~3dk7hbvFx-D;Hge zh=B>1Kv!zHSR)*2K=6TyFf`ML&VKH5pTGY0TZ=bt#m$Pao||91_2!Mw|LGq-dh{T( z&vWYq1f|y=+`WAH?u*ZF-MCuHI)^za_4?fH^^`9OY^K?_{__`4-uIZUjS1Qz;E-jX zl(~P57tEgm?eHh)VM8|ZzS1rt`vt2*g&pzUuK~dOz=5HnROE{pkr0RF(n!MwK!TzZ ze3&?cp?&n;46+ZTbRxxxw69(jEg@3$dn=6yS{p#naBmJ74mdDeRtbCWofi~l?{c5I zL?dZrVU{?KgD`MuUIZ9(=cZ;4k5p^DL3eKc&e7S~GpCN9Iep>f7oJOc+0NGHAoX!n z0zyMZEA557Nba2=5@IgSn6Or<4LZ935P-@72si{aRvVcb8?V-CY`q~(p<(C9P?l%j zx!mPmyvq_K)J8>-QJ@G(D?`Ru=h^3iUZW9V6aiuOLwl@J1v3f7I|d-7V3_9?A_PHW z0D!#r0BRV=Mr7~+;Jm|P(N4ocPC+TE)Q0jJfruhKtPw;2$HjKuM~XZ$mLozn(r6B{ 
zEa|6do(_`!AnWI8p8*K42e|+rpFP?bAKOW~%Ue59xwN*tky}ncpr1SNu#bos4wATE z-Uxt%?75w!iEg>z@mH>=er zAARJN=Pq4(_08Fn_doE!g|}{|NxE05)!N;3eQWRRh4Y{K;JZ$ZHS>P|-S?fnbMyA~ zOSk(wgA0#j=T4oNm~C$lHl01x?d-ky$Imj_r91OcSQ?+5JbUW!1Lu#AjW&E)HO8A4 z8jWB2JOAK~%h$g8`+xAlrRUOiJ6YS#OmDhW;(mMK?VV(|cjEYcv$IEzOpc8-t0xZ~ zoH{n^mHqzrp8MuEp6d4#Cjg`l967jgeR2Nwtxl)^fe$_M*r}5@|L_|hI(^{&#z+N>CQwjs6!eo%QJ8gDBb9#DXdE>(Q_qGPD3S!pU&hpM6 zPZ&VF6ZXgoz`7)BCFwo_v1oR;o2v;x4iM;4E2&JTLMekY2J zV?mEDmqD+O#MNrERIY|W!vqyk5h=@(W`j0pWKm&Y>gAwZ3F@_y3WG|iT&q-w7}*Wd z)I(Mqoha99>$@=mr=Le*l zb+@v$9c=f>1e@L7X1_P!@*98noo}D}PapYF0C4YK5GkaC;W;xjJQm+=``*XmUcvp;6Alk(zPEZV(!=+rgW^Yr#~Kk9&R9ed zCWSBrlp=tzDA1u6DgZ#N-6J|y$67) zshQBw%Enf|(;8&CW8bJX0~O|OXKQ^WO)?#XK@|HzrnH~{3Ki6kNJjt}Xc}pZv^(t_ z90JDxv5HDTJTo!TsMoa#DF*Kal(bs?UboxncY8_N&k{z~A}m4xq#+n=ABM2MmG;#n*C(Z36J#D?l8oIwXY5qKsn4Lq8OI5mZVQc3cvW z0%d{*<5)cLmLgg|)WPl7gbVUhs@o7YsAr>`d$2K)qManQpyFGBc zF*DNSyEoo-@zL(uR(-13Uff-#; z%5H0RYHEFPxtI1vK3ci&;+fHrhW5VG@2@Vez4q0&?yM|qZgv>Mpi-*G@%HB0{Po-Y zPS@u46YqcM!|%SYytHum!r?=WrmXKc2dJTme6r+kz4Da*!1*jo6thBN=fXf$Yt>q_ z_Sql(ktZK{LR7HZ?erX!5c|9Rz3yObdv$$hX^^xeNp^O32fdCX81(u8n5S7Kj8U0E zGH6sP&9RBm@sZ4>d7iX)c3qkQz|L;VlO%(LLbX*_Pft%i_TWP! z_41t?cfS0k-|O$~)+#k;eU|3Coz1b)27nd@1cl&dC#M{myx+O{_R>H6`~UFge*V*6 z{p#0GpFIB2kA3_HZomJP%a?xPXa2&WMvp8o(z_D?jNjcV*AW4M?IqCIU?cKqkwcG6{*#Pd& zCC)8gyS_Si=l+Kt2@rIs%C&HPdFN~Y`9H^2Kl;%p%ZJ8SZ{2wIzkOk6Y3XoTb7!x$ zx9yW8ZRc4h?Q(G!3Ehc6sE^Lzj2KfCf2Pfb4e$YysE4Hsi3A!-&0^vwF zlyUf-XBfY8a2Mjw&@(y&$=;KEV2C`scR%cB2!^NskRCAnMd4nacsO?KXN;tHzwR4o zhPTTQRkVLg2n_RO1<cYbCdNsLjm5lWQBhtE(F#GCgsyRvT$&8-+KYL|pEy%_Im&87mM} zqp6WbfEboaaabxxaS+AIptF8=dsUFO+PmF6?`2sx85I5Rgg}CYk)H^`gLmA?2b=9d zq=R}Xo@|U(N_9pyVNi`DK+t4{p$X50&7X#dU(tv~4J--?a-r-Ak&cuAiU>0S7?A=V zA>sj06wJIY-4n6X?J%o>uHhFXaPL(h`_2LoM9EyXx0hFzjF zvU~l;#@(euXV3JLB<&1z?mU7+XeFIjUV8cB>2n8<&;GeT{h43>7yqrlv682Q`;H%o zYUbht4{$FzJUM;pm*`ojyBzU}kh;;?tiyz&W>eyL+u( zZ>wW{7B?!6^X0{*=bn4^t+%hPuI{QhsMM+^j?2|%duzA5*Y_3xxLGehd}Q`>51pIn zcJkcECYD~$5V8`()I@bIjQ{ZZtAkc2BD2#?bYMudW8aJ-rSLcY8~XHER2KB zz)#f1Mr+d}BS*^h=8>Zpnqz9R*3_s$bE#a((t!voFeV7tdY5PEAVI|0nMoa}9IZ|B zez%)>wu!au`u&6)H_CNW(k4_ExZ7>*w%Wa3$2u$CB4L(sq+(`FJKc1ULGF|`^=hp? zT6*Zgi>6Wa&do2Z|L*Vp*X_+!09CKmPoF+BF)``}>2uFMd+o|q1hlzRO0mzDm+#IV zIH=-go@SLOtTk&V&tJHG`}WT6+JE?!U;aP;(LXqP=DxY>mww|n|I;H6JouwO^6@)! 
zbI&~U?Xl}`&&{p3yDiCC&?*qpj1npV41gieT@Ij2rE0F8ZUT5X4a69@x1+J{0fud$u9kKa}qY^{@|^9Kt~N;bK_S zRNoVFMfSa!JUr5A*w3}^9~zoRi_t-fGkX4?!8ndJ0aXAj7*l5iH&e=3$ z&hypn&CQ+N9dGk{0~G*?go@Y;BPbDrBt#mJK?dF@oz8BzXUL3JtK+qX)?ri%^E}mo zDr_%B=Moi-d{kt?n2Nguhu*GtHujo|v?AgW5fEWOM=B)-Ct&gDA+uhE3zrWt@CIAGAfQy#){g(so zfAQZ@AcxNnK>$V}1j$7@Ng}9gr7+E{wGu|gw0CaJ-K0R5OXc<;(|{>>1a!k%F(^6` z{r+O0(9XPw17e^IxC~bHcXwCrUi!|{6Ne6+z5o0hue_Xy&$}Jx^RQIAcJ=aio_YGm zr+@6jAADD5H~H_s`{jjeuipRI7prF!h>!K^=c<<0A36Sa}i@|3PSfog2t zb@tGEo;crYw;J_v4s}!+>2?Rp*H?VbSuP?pH8%C=z#JD z-|mTbr9dAZAOG;9XC8>uXnV`#xyhUkbe?%cm5UEd1V|Uhr{3sYyZ!c?4?gjDYiG+_ z>q*BZIygNW39q%cuf4K?ghgI=Cq$o%AeblyAR=9ks-<$Z(VRYX=-9;6WEch0(=)lX z3yXJlwziTaDObwPYQ0)5m&>KNRH|32_4-J2e99QT+3Mx}v>Zh`g3zEt9382iI(woV zhJxtXgLk0JXth?Z#tKyCn3#Kg+wJZ-&tr|o#O!#l)m>ZPY;SH@mj@;~a`gDj#I(`0 zu)OrlbI-agi%h5#8KqL;t5>d`IemI`YIb{N&V(fk{QS!=wzhYbg5A}fZ~Wfx{kcE; z*Z3_sII9jH zJ4FfwV&KK8j&PdqZ%SdgVf^1@^)Q_ZRS9~eJ-GH#a1 z`DA|k=2yPWsW;XvJ@@=8|NXZg`GvoVf=(FO7Xi}3)nDibLm{D9m?G_Cp@;7=W?!sn z_g~oe0`*9~AZ}(nA~3JAVveLxowjc)!aq%vQ`TATUHr z5GmNdBnll0WuHY{NEx1K|0fPN){GjMnLQF|J%mpnB8$|k^~i+1UTn}d^(7VS+niB^O&fmVayS+6wHZ?Lbxd68Wy;8~-@iS{(=#^IN0g%g;xH(qCLT+OR zEP;jqb@R~SQ%88Ps|z^-7S(fJgcJdw6(&Ik0K!D105oV2PJOo0%6HnW zN*qr#M{AWTky1n=!d`;F0EqXjh}a9L;g@3wqoyGQwBSpKMif9rs0|2<7aody;+cg# z6t&7!NPzqPU%;Xe(s|CUB_v@+Ktb@pLO_bb0F9`Cv_+fQEOqY8frAqhQ|&=Nti&5j zOFe)92^6vOAiOUe<8Ti(yb$*P}Uz% zyO-GvK_&aL61~$?HY6G`(m+H58r(rnb~$EL@EGTmO9JBL`R)pC@CH3Z?I>B;t9t5$7{*AA~NuB&;byI=*1&~LS@05 zFjOkc1}Tv^c19~Um}a>O5MpgJs|pc@7?_k+m3kc@n3|YYCQ5p}tk+HFHs1Zv!3x3p z%HH-XuQ!gKuN*k>$Y*~z-Rp%!VO)wvVyA?G-JrX7_15y!e|+=5eZAssInt}G_T}II z%Bhchrgmh;<(Wc8L@uJHg?KI~MH{yWI8;MPy{HZ&6hINZiHq61aDwu^T|}V#CR{6oM0t#6+=c5!lQ z{K)B(b9Zmd-Mu|AelUzmh(uhpErTF3fMe&8OJN|uiVAWkAOnfy))lENP%cnrbfom| z=DC@P$(Jr)o$IWNI0G6H36Q-gz#@8XkkAW)Xpj&o1Iz_FVv~XErfH*EYF6s?N<9uj z0LijUA&PK8X9Fp|a0Dcpj2BnTO@zS0R3y_xh@kLD?YA|y8EXKYr$! zQl;|XL+77-;(Zh2$A0s-|LffCTN`Vey>8N5lQf6g=vap^PjF>rJ+4lSOpM*AR|E%Z zYjdspl7oj1uD5zUwAHDRoqjLlM7iFT#g`yWz0KR*Bn;!sXlFAWhGDrZ>}us^x7A6z zojmaBi08#+L8534FN}j5Vt7yLkNZqqEc6$DJK&brm9c z^r#34JPQaSXsy{f1Vr{nqtfMtw>Pf6Hg)p!&Tf0q>jjmPG2!&=;j*a>Y}*hr6A+*m zP@;I4CPkp38{D%^2b{U1r_W!%a(Q|FHi9j}B?a#V1%2*Cob_V!EpfQl+R3_!_im7o z34(;Juw&+Kk`MMe;+=`YnF9wOe)L`ABgZy2R$qPN#m&t%0B~9EvJ8}3U*&GM&o2LnTPyFOd-~0CF#{5^l{)M0V{GU769Df-=D-!WpmJ%R~PX;L<7^Mh-J!{g%^Fphz zUOu*!EW$n#u|R$agCc$6?0tF4cdx$^N5qvF9dI41X8@>H>?mh(p_qF_cmj&z(1^WgfUp(S@C^yLd^AJS1_^t1G zMTY-q|H(+j%z#iZF94Jvk@&*$#DXZys0dIrg7=;aG=x%GSR8;B)J7keo<;F*zxC#Y z2j742=-IvYmsVESjvqaG;q=+>KlAL`$|@q1g9<``$}=z_F|%j3&Ix#BR1h22OAFU9 zcoK#{DG&h#&ZX?qoCN}XV50VkCmy>zzi?&l_Dw7|_4_NPVrVT8bud)8ii%@V0DbTsaLu0j%oAt*%z?$VwXnVYr+(~nZ@l%! 
z_g;Nv{pt-0V@0OBKJ$kzOM9)_*kn|Set%-dJ5EZug@Vdh*eWSFg^mF0E#rRw)Q{Xfm5QZ$o9mD6H!^4$9Vh&pIwg zr8ow_-BwFOm}N;2mKycOpp%tqW$^lqS1u)4>TC~v5{G8|@Z_<{>e<5wFC3dXDD1bl z)ppA`N6d)I18`o*rNCYUP*8+G0Xy%iAotg6*Peam_{E1dlO1n`!H-NdN19WiDGhAb zJC@>AfrP_JTHU*{oj4-R?2QTM7jLyUSN3&aEFwT$(Bg`3awSEfb+^?E0u`#NGCEJX zA|4UT)#}NkC*o@T){QHhyL;V1^8D+sY^{AgilTOVTav7p-%J?S8za?fsX8*Uw!9KV zD$fih?!b`)xy_WuPOE+S`j!5muOSw1J)>hJrCZ&-uv{`G0=8ZZ5D)?~ASt5*(s^%<5FX1{T73# zdn6bn(#kn+RLp|i-E9EzVihVys$kX>4=*7<6j}w4l=m>`yXIK^=#k?L-0keeWXGn) zl-*4_Tpg)at!v%5u`u?0DTu;mCGB?h7H{>Imb&XJt+#KquHUIAX?3sHPg1W`%QPpqMc*QC#BQKr6*wiX5+Zo<*qO zED;za&jL2BmZJAuIDPuyp-Xpe-&~yUx^LW#G4x zq@5)7_V$s{iE6nK1`#3Tc_uz5MMP+{@;)aZ)Cv#?g%oK-g?j(yL{zG95EWi2RK(tU z&jQ{T$dWwwMbBH&o5Vs2Jp)nEVg^Pj(4r6l2$W`#EKNaRVsyOO99!DlSl!y|3^D*9 zRJMT74n_as3roXuzIY1UdnLgBTIt?(zpn)r?yrPhsa#E54vDP>Y!ST6!YBx`%&|it zzYnML`@c-F3NEyu954dt0ux{k(w4!mKmWb)xN_mx?DiYU@v-X9f9^*<_sP$_^4bfp zU%Ilov6{4Z-+bnqOIKct>!ah-llQ&r$%&ELSncfT6SKX&ZnD=|Yt3(N?dy6Foq@Xg#!&f^f`DHAYrXhiOWXvbYpY7-|2-~ zJMdVc)siJbwO>&p!Lz|Bww> z#G^7(Glzpp?dFZkTiaUzu(Q5tmzMw_1>Bbk0U)xO+um5)-cSS!3@L&R!;(f-Z8U9~ zRjZY?^^LW~HA2X34$O?;Jc?nYEY!w1)(S<~3o$AI0n|DKqoS>iQxX15Kp8C2*|Nx5Hd6} zT3f$+mq7^76}K>w7XWra6gcaw^Th|7JSYe?nVH#{v5C>#<;l)g)Z3npa#-(d?{wQ? zIjYtN-FAC^e*3`9(oVea)*I>EweI{zZ*k46ZkGox?WfA90oxR5R|LES-fvyLvhnK6 zjYl5N^Nb}E&*+J*Bc+TniE~I)JlmzX6;P2PrIlC^0ac`h5J{=xRqvSy_Fo12sWI?8 z%##TU0kEL|6B_(5JVrnWFhtT9-B|BT1tQ49-%9QkmJ#kdJd4kB0W07lq9()X0FmCY z4IvT`h?hdC7GZC_BBeptyF7@3dc7W(Vk9I{p$-qt9_qKdSKqu*ypH1_oSizd-EQ~0 zow14f@zbYXd+B@g3pYtcQ8~`D3~)dFAmTiGFJ3T+LPxsLhCsjzgF*lhFYKh)VrWIo z&Zmimv|bKB^uPnB4jsO7`|h3P`M%9GP>zU#hDZY`prI??iv zWw}DDv-SGa*hCbTNEPW|7V(4%ho`C-G8(DKeHHm>QdB$u-Z^iHP-_NZ_5#A5S!jouKm^Malo7g2HqL}bJ;2tcey=w*FvU~F^*0Qb7>wY@D1OiJb66Dr=fa1Z@o z$p6zp|0n+_F7W+zRgn@KvPl54+;);QF4R-!mUgy})|*N06hX#P3zYIYJFsE_V~GTGhHg~X6zx47; z8yhRw?Rp9;)k?+|P?Shu6e`w=y-S^!POqo5?sL{^I|NHAn4Fz%Hk;*gH3)RMT&-6t z&C$`3YPDVt8ljFP(|NzYwYvJutGnso#ObpK4$lP5v3`Gt(bYq!~Ko;df|+^v_E?ylOL%au}RudfKZ zofhSOXLUOpB;WxxB7hb`rhtRVi7AVAV!UoBU@HV9ph2`EGd4caY>q4~u6+0Dr#|?B zk5(#&dcDu*~5hbsl6%QrVE$0zyDT$wLRh$4)VZ5A0W$J2D*>B1yTZ@3@Z8Z z#@e$ljlJvLgfQq0ymh2W*b|}-0z?tV-m?w@6&PeC=slzNRP;zYD@tin1m5lYg^HzO z@ts=qhYd$XBr3jfvDh#VY0X%i^`Nx)VjYZBn1Tw<81L`<3hAi`i~$tga0rTvlk8sh zY~TL_#b1ZfHQt}hJTL+X4mZdJv{E#CCyb#9B4Z*GLGiC5lqyj->164^5Ui~&l^=iL z(4nI*fB*S~`NfHehtD26cInj@uDo^WzWd)_DOEQIyBZ0K8gxQIaO?|!DFqsYL_9Mq zBmrd4=(#{saIu4SOratXmkiM4hbGE1;}1Txed>)nHx^fyIxf`!Oz0R4kn^k%1EQRr z46!zFuji|qJlu22UIldl|wYD0HTx9j(w63N^w|kjsVj9 z+Ily)0BTrzEPtXXVgFpmdw0S9Lbx!84dVd!Rs!r)4gO^l6W z5Ra(f@R6y6-Q>IO?vBn#}CX%Tyoy0yF7dr2=_Z^zrK3P8iZsVcjK)wI z(xC?M;*2IyinBb+lK$q};_bD&i*wz(H|^$jWUb3{1$0w=@6mUiJ@@zr$`j|3QY+ir zk~CEwl{f?(7B&a~g4W2^X+Xyk29@0U35Zti-s~^lY@9v4v^dz?S}m1MJaF;ix1alF zduKI{%7b(OP%4GBYO_8%Hd1en6XDYQjV$Yp9yoOO?aKi78h{u8y;y)i>v|Z4{ay>5 zzjpz9kdOV?XTSZ2zrDS*06?USQ9A8)?|pS=XO5&U?RVP*3c(X7O~H@;rN93DZ-0I6 z?i~PX^|O`ruL0Z4dPK;r0|J-Wk!E9SYpvbc6U7Y53MhswWh7T?*3VqHFY)Q*p#x)w z4{a>0gVL39X=HNz^nIs~9yt-$>Wd3Y^LKBRtD|9D?WNNjTOCjuRV*&`zzhz6bl80F zCqMDfnUCFEdhX@#eq(!ijR93Wa_+th&wlOq>!Xu{UM9H%@gQ7l)JwH$qf%wh>+72j zz589utD9TPJLm3yJaGxyEa|1qN<-N0CEK~;-OX)8<$_?)&c`RG&YZg+lTr8*Bk<2`uP&@ydaKgr0E}mG&h|iw)&r3)P%D6zb0a}{x?Ei! 
z3>IF0^=x}Dipsq~+vOQDu(M$pW{LIe5W!ooa;<_;E6prgYh^T~Kt_XrwK)+HseJ-ueY)fd~+oxhRs^za)rRSUD&}dB9@1NNPCQ4^MMJk)p$sFNJ!H zI82HS_rAq*-Lvz$`8($?oF1*$%3-{^w*JWDST4u3LE3~a9~32T-m$g0cOel0Y0r#6 z3XqwHn4Kbs=2-|pA!#J`AP^{G&aMBdckj$?uB0wifJNdN5eS%| z$g+tVwycN%Y5)Z)1#tj7Ub@@6rS09Ba{1ucR2)}ArA47*f2*iswh|E@@2FIjT z;Wz>suI@wx36Mk)LWsiR90y_GoDaevx6ZOB0n5z7!UBLg2y^iQqLt3j_j@%O+s@ew(EiCk6y=Lq_B)86K@028CZJ92?~CAV-Jt3;+M@p2rvT&00t-(+s%Xk%AgP=?eBc!55NA0 z-}v_U)Wora2R`tLAH44a@B8qd`pJ*hW}+(OHeb7Y_13qae&!Fqv3~W=`tnBm%Hnkj z%H?vUWU7^tL#-?2wRUSD?wVxU=u%X!G-~B)tx~B6sv3nlP!nMgiFXdbf%93vm-YLd z?jXt2wCnP2((Pp3mhbn%B#T|v%w0)v0>OCS&^ixvBA8iu{rStc-dsEO;Dg7`Jwat< zdB^36Vn8n-PDBVnyhjFw;zd9_X!6}`q!bIxunk(A(dCfy$=QJgT%&L?+OKUKW;W zlQXAww%3_x+y?EvrG9%CY-RzTc=BU!y!_hY+yWUB>L3c@JR6jvsxcZUICuX1=)|G# zJ@sV)1Yiwe2KeY>PwuQNtlgOlO&FFUVG;JAKxwkp+Cip`*;(5f8y`11Qm71SVex&8 zE2GCRp8D8FKXl=d^W{n@%ksmA5C8Lj`v1FrYbEV>j~+R6@m5n!Et37yMcW*Bp^Z+H#vs$e}fJvGt ztqf^xjIlu`!Yn2TwbITDX`@IXD)B;Uc%&<(ffXqu#Y{k02xY@jpm=x_3M&sq440Hv zBFGLIL|6z(8v_8s1T=hwl2W+8YuR5Zfp|}%h>%FJb3-kO2tbr5@mR2on&IPTB_F8x3*iY z?Q$uKN|h{MH32dxAW}v_CgKG&A{F!rCZ)*~6YmhR2F%5a01C^z2zo#v5p?1iL~E^z z(O}S5Wp`}4erWW;jjfY67Vpk&EbrNY28hszrdWd1`hw8v3r9Opu{Xvs)1~Txk&#NR9t074n>%NW2_s_+3R^%BLR1sS>+S7qZD*cS_KA2E zX@d|N9T2&|w2}HoA|Y~le_$c^1C~@&0^q?XS7XxuT9qa%qan8iz2}6z%hst$vqG;<~(7s z-Pw8T4}Rz9r#`DImEQJlR2eya`s{13zr3}#8&<0QuD^Zb%HGE6*6O0|Z%|xcaZFb) zWqG1>SREgoojoPoTU(kd*Bj#_v%Nv9*B|7WRTu{GNJYna)<1Lpv8APJOLMOy7|}|E z{GdmQN|iW{>f__H)zPtQZ@rc#wwPK{aFx<5L-5Tnd@&46SSov$g9jiW3IZdV0s>H> zS>ImEG87KWGw9!l4Q3hI}09AP7dA^~QLeZ7ihZ zfL9d7^=Rhc^yuN)iSgN}KAMU9+Hd{F$x~;;O1Qncy*0nU>pS7VONLpd+Bw!IkIug9 zXV$KLod&%Lmugg1r4_S9NB|Phsm49;*m8vS+|1Op4>a3Ol0g*Y{qFn=ubjVlIvXS# z8!Lc}f?2+9^v!mtN(TBIYXYMR4-dw%A zmvtmp3XuT_fdEJfT~$=kA{6`lVbw5|OO@$~>7xgaOidlF*2~%`?^#3 z^mcFGy0x$}zrD5IZSNSZyz|a8c*%%!fmo9=+GyjgB>=a7=)-=jg%KHqWSC%=ebXAC zFb@lG5Q;#fTI1XRga8&GgkS*TJ+`;{x4!f0x4-jdIsB!m>G9dgk%vF=1xbDCj*vKtMhXa(_J1*KXat`RsSkeeBbFyIaPmXU-gd&wD=jN8kSH z?Q2&KA3byN%=y)kdZ)ZjlFMZGT?*M=R8DlJqvjec4r-P0eWY*3t-R*9#0$6~GfQ!qM4?X$Bv)}kS zpmKt?H&99e34sDKBwm{koL5RGz25rz+JW(jPJid&Cmwq5!yi0%@x;>N`j>w13*Y|M zS6W*e&SnVsJbdTQ^7^0p$)A1s`trZ~m0#YtbqB$RqznLe2RrTlzx*eE=U@JtU;pf9 zf8vXO@UoKZ6adSr3zv%Oodyt28pa{e41_%b6)p3EQV31ndPKZeFzuZX8pI*Nl#l?# z<(Y^B(7>db8Gr_97V4;0t2dkDt)1P~l?{1VrY5GMQXO`7`~7Tvb9LhEsUruc2WhXH z4<;k3#T75Gw7l4;k8f>+-n(JofZ4M+mfYIZ`dAwv0%8>?ajtl)7D=@Nfy&|mDEB#Q z0wm8&gv{bO50nOCBA+GQATm?UaI|{*+_8g~ZroT}UF>oSL!$+hNhjVb5E^PGVubdK z=0)!-D;Z$s@|}(K>gLY$L~~|*s*EwPWHxbm9-7cZp=S#~K@?D6bf5(QhRN@>e(Kbm%rCw@=X z5fNa(I4+;N@4}Ho$4Bb5L7ueQTPs_)U7lELdx?tzt#lBXsFx*ElVc;J)vIr%NjgA~ zPN!cE%>b~kTs00Hh*ru7h;t6e0u+MMkaM-~MJUS5iq8#!kciq}M;FT~1lB+bfPn#^ z2f5pyx+2gy&>&0J<`-X{zw^cKza0G2f7WbN&Ky7f(8CX3eD9M-F5EZv@PpG&J_5!= z-tVq%ZQWh$-MF2+H7^VE?bXFjyPNHG5OpeIauSz^ARe3>7OP1P03c2)#GwgS5Hx}l zArN9jU@b$Z69G8SRUkoCLNx&2U0HgiwQ=y|$%7|Om*WakA970~>_7vtFY1^8lyYEI zpkgP7Nzc9b+``1@)Y*$kJB=gr_#^kf{^s}Z-n!bDXuj+06Q?d54L|Y0dRz&Ea^?~f zf{CQv>a}+E2ECpVBjjgCn^jZkXT8n6_WI_=#@c$f-S*76w{N|9ePG9@4~!o?cx2|t z5ihX5y3pF*N(Tccn#rVlJu`Bs9K|9KYOa^#>uD7w0hX z1b%I0t^DqH>o9!a$kF@Hoc!KvSAPB9|I3AiI|{5qgCT)f6P9OZj>eUdr@!>Yjq6tp zh9(Y&m@plMNG#cxzW85%;DbMO>fGsezjNrwsbBchzYr*+qKHs}I2P>|R@XY)y`8O{ zYPHhpw=XxGYONyFBUjQ`>K4>7duz+nZmSw^`oa+wS&SX_oZ| z-R+&-EFJWEour>v0-X(dS>}$Pef+`4?n`&K?tcH3-u%)rjicqLzq6Hf2jJQC^H7@_ zm};OQk=#Q9U>qh)ec&!|6ozNdoIZ2*Fr<6WKKJ$@VF3yVW3775t_aI711SO_LS$e~$`@FCM5O?REPG<{?5QAKC?P}`^tw7At%JZQUE~~yC?FN%wcJYwy)-#~@_1u%q_wtNE?4@!L_~(3 zCj=Dop3>BoNNc52pJ^Hb{Xq#biudBZ7snt1EC#@fC=G&CSe#faJ7Gqp%b{_cXM-%w za%I9Fc>G;!TgPtB-Mq89()Aev5)lCbcmy^erU*O$IsmW 
z=13_L0GlN$#Ido7%-W4kt5DuyU$$epx5yt7KM=$N5C$iYoqXWj`LVIFjoppQSMTm@ zuLFw9ScJXLlhh;lz=WZRfH0|4j~_W^&z-$_{c_q*34xK4JkJ84|J z@AG7NXd(TdR|-%pqL~0_$R;8WAb0dJ@_R4Bxh-AySwS$Al+E)TwiN1 zuk>%+>2GdIt7Y>{a+?eL)Ouw0);SN5_rNP)1sDT_N*R*E)?c7cWj|s`AkKM(K?EEa zGe`%km*3o2x^?#Cqm}7V1yV%22?4}|ATx6@D1+BTQR*ITPF{NEMJwja0}r;g+sBU| z`Opviz};W}&#!&&yO&>k*}1&Z7`0$$rjFG{YGGVDbl@QB`cz}QQJd8i<#u;-Yrsu9 zID7ESg$rR2grRY%MUj5e8zd<>hoD8kk&kO-h0yM__gdYwZ+ipY?zNPVHl;u*&fB}Y z+r8W_ug>4SGne!Ro!#v$$)Y$2qacdn%;x=ei;>(Q0kDfpb6J+1I(k%?e)lWC_pg8T zUn97El6kXy(ZB z#_G=Y%H(K`OvQT4^1Rp1bznz=T63a4J2G9X)OUM3F{|EYcW!;H-RkVM*Ed(Uwp;5- zzdcCPPIoKIt+zIJ*14?k5Dg>gfAUiZU{Jdpt#wLQfAn*oootkEKKoLCeXY{o9%C2o zv^raD%93#k0;j}DOl*7IE%XU{0g6QLLe8MZt78}5`@n&N<>gD)FJHYoKS&xn3I*y0 z+2tbGm>8>1js&&ws8I!%XG!MzX_h2DAJ{B$HucW4XUobvuXPE~mn#ipG!)Gv-e;N3 zt<7yB?E2mA?q-r_nV^c|SZkFf$;il%fFb z&m(D{=cot(!&1cFDzA-Fc`i1$+L(frVKWjx+z=Eff6opCMLZIECs1%p5QT{p5fPxu z(u6btTM&)N-g}+)2PUp5G(ix6)=^XngLrIea(sO9#n)aF4$j{H*v97S;_B-B;_B&( zXP4JEZrxoNsa796e@2(9Z(VwQasKXmKKS8lZ(QmP`sBTUD@v)IV48b0TA4tk@4crX zb93le0z!hKnGAuYFry(605W@FCO{BDfz;)|5=Nmxvce`xP97dVd3a)PxA)51S8uH> zwtYeXB^`kGDB=JVfGwmTKs6LLP=E{iO;KXg2Y+{aXKj0Lx=}qmHQQ{AL{UjVGMj4F z1>Yjjn!N*5NPvu5DS-k6$5^042$eF{I-?XSR3Lfg(F95%kg_%p!?4UoHzxq42 z+hVZ2d=Qn&^H*L9-60_7{dQ~JvCPgKYBtBCh(;%>Q&TgIk%mjNGzWH;G_zo}S*wLuP>-58q~-<}UWdmP%MJm-WVm`3OlpS$e%?94Zw`NsEOeEM%KUL&pYRCK6% zt$v#K(j+6HP-zzJT}}Zxw!l6rmwcWkY3h7la94_`1QOWX70rW%Ly>?Oyi&${k03;- zbRYr@a_Zy*AA0{sc9zx#?T*^p92cG`>CGGM+~y^qPzNEPcOV)z2Zq$atWv2Wite`Cm9Yt9G-=X6_4=sR>Y>Np z{n~49y!OhQpZwGhA3lC~b9eXE7hfcUHXD5JThE&c{Ni8x%l&Tu`>$QN6&os;tDiKj+ia?YiRpj0ZC;|W|LM}MY z3WQOk%AG~dopa9mMy2%0C!g5r-hXp`@!H(2jm|a>T`|SaVgp!2@f3gs5g-eo_i1Pb z2tZQc#a4Tx)gG&s4$n-DO-@(JbyVWL_ue8#3bj$>ya0+;%Cn$Sj=d^a6RO}%C=nq+ zGFqva(j2uCg=wCIaTJ-j-RmbdDyy9)d#xQ=#zw6GcemkewX{0&v)33GaA|WGEjNf4`qdgLi(E?rp9Mwg?aDa>b?Lfe{q|<{}6# z1s;e?z{3!&LXr34onBv~^$q{}mH2Btkv)Dw6e{vWWB-0dTV7lU0ZMM?14=+payS%G#W3CKq^Qc z(1}=U3)M>zS!cIz-fFL{&mKHduZ&fyB?sAXE$)#S6e(f}73eITMY$N)pZ`z)^|Alu zXAVxC^m+E9pZVd%#pUZaUPaOMM!nzZyF5j7k)~#SYX0{1o@W*}boDU<(8}`S8<$?l zoi$-ts+1?DM%%5O&6Vw_6qTd0fbVxZndMr&7DpAWaC~&~BcJ$e;8g z&dq0@{#HI{&E4rZwi*bC#Df(Vnm|OX!XS*gY2SF0*{pcDB}w101wf;<^9+C*HG>rM zI3fb5{m*|<@dO}5K;DbxfN1oGe(1+arnG+d%3yg#ZEQ4wchaGA)@CQ#1ghpyc`=MEJYH{3ojgHPM|2Puv>HGX zFM?ixNGYX3nP_Od5oYIrnZ0+Jb1wIJ?rg^1@`SxQ8ti3rKKFp6XAfE`XM zW)A?K#XIYTozIj0U@++CxkW`g+uKnTX=9Rp5|v`DlnFyAyKcKjBt{2_sI?)by!Xzz zg2thPG|&MvC<10v=+cPfaz|QAu`MP-L7Rh88jF5JhBWOHfwwuL*ZqE1kqW{2uxo_MqPe;ew&oP1C#*N2O4N6_5aWX6x;+M!7gMNGUZe zX+u_o3cwEmH6lX@J|cj4cA*ZkUIv05iG7x&ZC`d{jj;B>@%zsly|cXf_LVEEtsM(q z3k5{TXu&H01l*@<1BxJs0%BkQ)_??*fM4nkHg4Ph@0J1p$btXlpBkB!d0Y3BZx& z#3K(r6qiabz5MFl=C)R3%8^E7t+sbssdGNhlgw(RG#XV{NJ*@7?1fp{+q*8c%()D=zQfYId#XC^%R~goA}C1v zKbM9)M1djY#2X+1A&?xz0c6NUP&5ZGA^{X2YSXrU*Ta0T_1c}i$}4vxgjzXl#Fb{P zIo3EhGc`4NaI85yZp!h1?o6LQe?*bwy?kS9>-z2Wn|B6V zn=Z{c&yB~jR?7JR(Ss|7KL*HIJYYZLUbcO!yEoFDJap_-sa7H+uwDTIX%InX%bpCz zxjP-zTjlCgzxm%j@e6-;cItSoY<})%e&Rpj5v-R=O(`QaC z&CR9OY5-vyN6laQ|Ne{fXAb+k+v)CJf92_q{NShG{Wt!`CqMb|KYr>vuf6d6&c-HZ znPci~1H-Xq9`b3E(Zhz zGJ!7Il(f=<$kq?TvIGJ^A_b)$l^(WnkpUA&Vf}$~4?q6MgPim_bMwiao6SMDX0z_r zrY1pUOb{lTyjH#xfKz*rmVr*q%#I$IZJeIm+g-l=^ySsfU7H6;20;SPwoXfm-BFxO5 zy|&&vYiv@EFngWosDFAo@XDtATcX6rWU7n@6^MfQEq=VE1rq6ws zTSO#Mg3Jt?o9ng4NEnr(I1Yk{5S$wd(@JXqkxYa^2O1E>lOpB3Go+ExS>D}T4@zOJ z)(B0cliY=2XaeKCjdfJ5SCHWPwYeYo&<7uX?|Z)X)h~SeTi=W-)v1ZvwcX8f7@s{%h_P6;Wc z10r8=?hsg5p<(Zlftis#Y1&6_N?}JV)N${*6qww5tyPv=jh+xIwK&bvK{rip7M4oq zj!hq&99>=Cyty#HvbL76BLISeCe8FG+IorrtpMmDBY{IuIe-AJ4f5^lx2ks*P92;* zaPVlg-ZY`|>{u9C7)5I(>`{n-L1{#^fLbX4Vc{SQa_52|U_wSgFXd{Lt!(bLhj0h) 
z5edP=J}?kOgv@wj{NnkGVJW_G^GecBgHj~yeD1e;Ef13BnL_2+W1vYX6NZsC8Wk!) zpaK%g9#Wh1x-CEqOC`zj+;K0>ve-^mtLz*qbcGcdq}Up&;sz=Nx&JACEC4W+#{mg| z3ey%qFL^Om8p~3?F%lpPA?|}p{MFEfu01;CVjjVuK0W%{NoO9|hw7Yh# zyQeoh*4++3)kv$7IW$_I9IwrcP1VQ8n)TZF?DWXgp^@py&83aLbg|$w34>AxjzWOMycGtCKs;awssz-@+{XH9((g~t9&U_{k>${64~_sJ z(8BD|`7Aek*(1%-B**7|?RVb)b3ZqG_}I+k_-8)*xnJMfTw7a0fZXK>$j;|>fL?%< zDPlJJ(BOW*2c8MASdtZxnF1#u0s@A`?}tfIDP>f+w7%SHZvzyG1;{d2WP18ZE-W6h zjg{p>Gto+kh{59eW)bl0ZNGQEZ^M?Uw-PkeNBW9!znn?ayT)#m^HxBuSx zz54v?-+wODy7-PoP|O7o4Z|Qz`YACna(}OnT8VQY!rnV)slcJ2vCbhXF2;PO zV(

vIMn77aQiOqnb=aAs<1`oyX8KmGa7pFJ|YcI|R+Zm!bWog}aun}M|^4xKiZ zbPl9TWpuc-xvoKvR-5M^IG-xM^U9kmYs-oC&Ick85R(-kjQ|>=+3^F%jvnE1Se_gY zPL3%}&M`9}Gkag;e+nx`f&2szLL^m)wK$~kBM=dwkWvU7KvXz6y!S*X%r18zEG#z9 z*t4*B?}`F5Qe2%t&WIJys%eVX`_q^gHp*^+qY>LXhF`>exCQ!<|sr?Yf~52A}B=K zm`1&!qtcmE%@2I|1KLy*Is#~v0vH|^-dYnyHur-pW0t_^NCo7@rM+qto<1^he0qFiyLEeE;m+!6 zCr>iqkW>IVXEp!~n1k0)Fo1@ssC)j%2to_IW8Euv=dLd-9h#j!e)QPb=y(`JUK|KJ zZxuVO0{{_4%w7RHW>RE~@zzJ7Vb2Ifa{yMV^?o|o?RSf;w>COS2L(Tb01y=mPfbo8 zo;ebRdU0{F*Xf`kpWA+?*Gm&;^FfkmO`e5Fv1bKZp+*8@v<^sVGg7NL5G9o5HVmU` zwUVbEub-*EasJFLI}D(Vvpg~Ed(WTl+Kge6fv?WL_G ztE_I8VQw_klTm$Ubh6f*nmjn$oSGdSt?3G|aA#vTXC9lKoSdCbc6PUx7q{Ph+iz~? zgFb>&AQ~VjG8d?{K&*8{V zAdVw&ocsKDfBUy1ihuBvANwnR_Rl}_%1g-dp$~qnS#RFCb@w-Z<2PP-`S}P0fp&LW z|L#}+)l<(t`)He-TkpG10Jy7^Ac)wvELR!=$tn9gpJKIhW_&IU?}P z%xrTD0Bl`u9Vud9?`(06f&g1*^E}V8G|OzBDP^3u%IHd?UMa^(lGkfB5UUg^RQ+zd z-|J@1wp!a^5SGi;IIc9uMx%0>SwtX72K`Rkvu)Mtjm8*$?#F-r)YkA9FU6$7BRmt*$(~~px(sW6e$uy#h&zv|5EZ(zGqzFSr2;g(8 zfS3T4QAXkLT`2{LTY(l9_8f-FrJe%FeKs;Ok_-}4+=z%us~|MqTY%z;tq7H4YIl>R zjg33=3!B|N3qn8!h(Q=+KOz8bpO8$5ETRAmA_!^#3!Z=jfSG3f^qDhb7o0BJ&I z2@s6{0mpy{QUzTGRaMbsxqfVF=HSe1V`QXRZ&c!v2X6x@jWx)i+f6$QOS{)^wdU?p zyUoxK00@v!3y~8wK!92;C{=6YBjrkkIVXc4EFM`rA`_w#@M$iNY#8(#CqMO>AN$--5M`MPCaR;$%bWktzhxpg%~R6CG2fI&*^H` z;wi8iGv`8sMxZQFDAb52W{;e&)*2JDGeOz*u+^sY%I@~*&;0DepZQd7J*Z+FFVJ7C zPy~dA!>XWyj81n>hyY&N^?cw|SP@JnQ%SgF#y%=)f2qgkc@}^2m{m_3bzB+}hsV9)QyTfil1V z!mj8GgCTlafHe?^Ab1TJAUUBNcz&a< zGa}N&%z^RAQ5~49pX}{zxA!_(o-wm{Pm02j(j?tXvvRo-MdUK8APyp9{UB%RCW))m zy-2wf_d8v75&>Cow13d^@UdU0|9H=rx3A$0{X&Ig#QUoP7`jM>6c>ncB+#QK zbm)z_(^cN@uBgGZ-=P$&u;f(?=Rphr&3}JA=4d3ah2^=~I=Xhey`7_O4$`*4A8WH}Ch9 z)<}vKHY)XzW2aOcAR&NHJ60Sck|x|Q>hPd|v;su{((I@2pE>dD#?nh)`rUrgf8??E zJ#g{9|Mjo^wJ(10e_XrvHghIgYk{yFmZFFeNogGh41&xAaVaq6a(Vpd0T?9nS8ioV zszt|@>fy6zthGzG=Gwbk#)Ojxj%_U8vDOwAas(i40uz81k3u4V+5{%!T?Q(=yetJq z#uv!ZG)Z%p8Vr>&?A$lL{q@~e`%nGs&kU08wHsT%{a^mg*5YDhVgcuJpJxLdMPMNa z1MA$^|KNYD-(5I;=7A?a@Z@j(-@khM=9Q2FE8crgI&d!66cS>dTLurgFWQilB9Q{J zSJX*Z0bnZ#T9Hz5tvWGY85t)oXGtyXBpbaJIv>QN&GE+A^yuV)=>rF*jvp95GCMjs z)<{~t@BhK?cQ0RUwv%cYg3U~&oTbgI)AKGhfzhQp=s4~5g0M6@HBoPr7G8b%>hc{8 zCIQea#)&4<4jM*}SI0)1Gc!j{nRv1hJD)GzxVG^9ggAOK!k9Z zh8dE#c-XO3e9mQ`utWQSuVH%>GXNsN5X?+!IQ|bwZ9+)JH;x#M`@K#`Fhuby18}cq zaCpbC_#wYpM0{bM#Ny(y-g|a=YIB<2cP`oKmJ22>fuKo z4KQkVH)oH|%&F$TV#g)4&OC37^fC$I{DS#YU0cZdQPyi0t z0OlY{SRn+02Ez>O-0J$u#`?jTnG5GH)T>Q2fS$!+pp*yl-lGCdij9HHp#TYahH_lK zxwPzn6&fG}J6yaXsj<fOc)2HJkN8R_46!qd8k#T9Q)il zDN4GBdQ7n(9Y(6}NkPL9KylCuzVj~^9BCLrXou_KV%{YNQK%G%0vvc-(wda=0;z}- ziGi3A1#AH`L{xxOG?@aIK-IxGa2@~wJt2cQWGx5?ffSG}bPSdx2M|Ofm;&@TTej)- zjlJr|wHn07#%E7WpPZeXYEB*SBLWqQP92;%GTmL6vW9y-J4oV6Y4pG>2FBYQ zz)6;wC}f`&YrlP9m+~wWDsKTmY?9Zc_0gG`*L$5C-}qK%ZSS%7fBeX?6QBKwpML7u z?_PTI<=$?G*=DU?2fF+3LG=#d7fGpT>GEBLf(lWis+d%=Yp=h`%w&SfXp=;Cw--%7 zwPpoedY{>&mFpwgi+aW&9kZDh1mYeXg#?FV_*8Mv*zT=JI2 zG%kgQW@a0cwV9cb6US%oyYJAU>6yu9O{cK6x_0 zQp8>miho=@h=kA&dmyN|su?^Nnqio9(MUS#XN+d;! 
znP5N$(Le*8yYKDWH^<5+?Y-A}f1Iid=mP=vce|=ieRb>XwbpO_f)oM8s(4$dwE+o8 zDm7&9-NZO4^45`3&RUyW=kq*HMZ~#0i!=6&D1eM8&RS+UaOh~354brc!n#?j>A-kU z1SF2D)ke9r<3R&(nkTPcfBn|2rTgCY(1Z6s_?>6IJ-dJUAN<{a_|j{yedY6?pV`wK z#NDlpH4|W{$%_xe60>Ve&aADi7E(;dUJ2Ffv(}6EuGj5{lq_$zD?!+7)FvCX$#N~X zcDvu#q(Y-2t&LJ5?3_aZQc8$4L9rhb0d|50SU`ZZR^D460A?@1?6pyX%&1U-wXR^d ztH4-ii9~5Rw148zzNvHF6LU)|a|;U_?N$a}11Y4C2w9v60w@^$VgRfZycZv$@@O)j z-B?`N*xEdI_ReF6k5{4+h3GkFCJK~*cOopJ1A~E)I7U)I5arf;07SIbib7xz@%36` z&&;fj)cR&?dt=S{+&YVh){B6ioSNOg|46M;?e>$o%X8h{R=Hd{as1T&{fBF&f-2NT z5M7oIvMjlI3ja>!j1Qxzwr9{%F5>Q ze4fUgc59H@%zDMl?23&Zx?Dg4(pd&Muj;cB83DL^4*yk{f|LQp|yZ5o2o z8lpfWDVy0ePJ6w+(II+fC!|%NR1g}ijdht5R$3XQZJudutOq2xy>RP4{Q5sXeB`J$ z@#`zu)uV|KZ>Nhd0;fe((4HJEBrX8%+!tlqL_JelY89ZEWtO zvG*R03QeSS7zJUaQk|;SkCq!_wNhE2;j%RG+r6ZdXQ@jPpJz1<^}RSfULGHB?BCni zcW`?D{>jEzy%dxI#CN-|U%fj2y%(1+TrgYPXQ!ve%{XSPwPwdaiV4I>FgZEVXe6zU z_d<+OrIro)t(7fKdkCrmIsqF2c`ypi{sa4~6H`HBR>GM;)s|M~U%mCp)3>i(8)Q6o9EOX8}n^|k~Jolc1z=-l5oDRZ>fKVw#+6$8kl!-!J0)Uy>iC15H zJ&3A5^syh?++2R{XZAXCtowc!u)NUCdr^vsYFC5jw|)D#()VdTUCVSFfxNU zsYf;ROb9Fvoij=c`^`?P)$43j%MDW@0HCy{N~z-#2#FA27%7)|C&ECe#9ODa$YmhQ z;=OmmDB?ZqK!JCF$X);>XA3L{Q25&rpjJvLvesGag|OBv-#d2d&eNwi*0-;l?|S6n2aoPM4nbZEL+4zYr3@L73NX-$R+n1edH&ny&p+4N*#>lh zHvOcd$dtm6Z5F58%kpZiUM|<%Aal;uqG*sgDn#NS83=$^fC4P!Fk_tJJ*+YD$D2hy*_r|iBrdppPkva z7m;?_ozVL7)PxF5f4c=p;&VY`RAk5l0yyWHL6HC>F(7$w*`pURii91ftOovYIjlGL zpC9ae@4x-G=HWxe|A8`_KO!hOiuDmP7@m+Vt|`8e)I;(BZv@Pft!x)oT@FGzdHE z(++I!bo<@b*5;Dy69}1rrW=*!?Bwi3ZF>LYp1o6()1_*Of&ex6EQ>okTXV0zws>Q1 z?bdv}w58hlK7_lcX6iEs8bLLWQ$b=tM#VtlMeC5+nINi!k?UuiIip~EWjRS&HBcd- z2CPC^AP8#{6SI5A_Ri>%@u2$2^7Y%x-@AF`T5qeb)y(YVnc3PF`b#@`E}Uk}R#=$5 z3=`2um?sEGv8x|p`7mtJhx*Nebu}E23q8Xytc!}j9QtCqLia>P2p1s&up%WQ&iUdZ zL`Z@T3-SoLg8GhtqpOo5E^>nb6`CQPg2g#&;|xHElnR0(ppb^)G@zPcQG~qr?7g?n zTUV?P(=_wmYCsS~LkbXuC@?0}Au|GiR%Uf|<^3OeGD|w2{p{z)tK*Zky0yJ0A3S&Y zsrUTpAN&ttY1v+ARJmGCods}VRPoLVfFk3Ct#z6qwcaHuBA8Ht=h!9HAS{*2sm)e8 z?X2dTrLuTxt!*JdtsITjtM#ztEr%vRfYh@mCWRspX>v$fE5{P*&@$}Ct%COefS>?H z5P^Y(5w*9Tk)8KS8}S}NY&>8>QfANovDumXcFvqwT3WfewAAWs_q`(kgNlI&5g9dj zUD&`CKnMF!ktJi<-d^79b$cgIoH}^?jwpx-0jwaR(u#)`mWn`!r3#8dK}1xBFP(p- z9B{iCcKZ4HMsK^nk+?KKGtsQyci$s_6>!Tn1z5ns)S6{p6#rFF>VGM!5MBe4S zES;`5mDVN*4N@4E>Y?tWagtgGR>0>l%EShiA#5?s94-D}00o$adBOky0iu_JBnm^u z9Iz4uPXx-c17JkW0$S^cF!f#wAS$CxS#ZEEWp6<|LT>X8N>*4PB2@?zW(PnX8O0C@ zvqsQ};Khv6vq1z!LM*K85CFV@FK{X{PNa{1tGhY>^5(%SmmfTF_nEVIj!#ZG?6*2Q zB{7x8n2j?5#nuUf3d(}$FhvFiUvQA600luIp#|@qR|Zjmq_*n2Z>u(D#?M_|UHIN- z{@|kys6*_OV)})9%-g)mMhYlTIy!qnd`~nupRJBs7HzQs85C87p z?`$jqXvDDDs7=o7ojq`7&&-h^n#huVY`g8;FI-)pzcA<0M4SgwL0GDWQB*SrCM);d zvG2(K0}~Td)kYKs8VwkM1UMfg@#bc)yR+Txv^JMF*EZVg8}U{rjFV{$PL56OJ=9RC zk2IyROXWJs|_QhInN6>*G(wJ?AP0+9k$JcEE`l=wJg!2^PTFCHsH z^~9T*;YF^P*2b6shJrN_7M7vO3nPgLk|IK*6rvIdVd%twfFdj!1eI1S0_-#I0RdH^ zkitQp*DGVEPTujguYF}{b@RjTeb_4X-S0m4z?dd6(9z zV;+3&Q)jIhtrg|Y8d5?WgyBxNZTp#LVdsrfwQ5ue14Y67(pqHn?D*t#bIe-Tjgus6 zcdL256b8x#ng~gNloRW6L_?sg2M|h<4A|%5K^amAfZhv|AcF{cFN%nnh1faKhO*QW ziZ*CWkfk{@$9c?5QQ4e0dGzSv!|Pi+x0ja})>isiUjRZ+8k8Ct<0z1VLdu>Esz3$k zUEE7AzV>?D@1HvNkcmoU$O zMr$F-+9hqq#vAjC*T?Ep=k9yxpZtIR#UK9OXTS9IzleJ~0hw~8+Ud5kEX%U2pA7bm zjiv2goTuf`Xs;%Msw-n=^MOs1Jj*!u;Qd=X^>`R0Sx|#e4dq?MSud`zjRO#%04Soc zqyrKm1m_q~ymw9{2bDXtz$5Y~WT~|%LZDHK1PmQeief=5Qo(c4KnM~5p#U*DKn+AJ z4h49a&RwL=DnKP*g*AXhF+f5EUt37&KvV4Pe1B{0m2bU#>BWcdyYJYEyBpPse9)D6 z0J#fHNC?(tK2O-?3V}!ikarmXi7zm4@P$iI7b&K>(#7(yFXXSZ~%CI88F)pj;i-L6{|}^FAzx2%$r^ zY$47qNr*ZMYU>+YI*b~V`^P5tD~h7pL~fIICvB}SA3kvM?7fdpPtBY-a^%3FgXKzH zSiHABCsdw{a>xM%CFKJR+9+*E>rkTv0!#}@CKPrX*+>Lu2w!ehC0|$X78q&9A^bj5<6hn~@K)UFk 
zsK_-H0`a00DMn2y49W;-C~UU}Pd@y(bJ^FQ{?>!{+&A;{CrgrNw{O#X+1}Y%UfXJwf@&000u!hpDn}6+qgBdY5#%=S#yw((%wAlU zXMr(NP;p0*Sx5|kp4odLBJV{|R3W(FoJ9zbii|XC9XlPE!w04h?3q5>K6!g-WqxIG ztG8{z8o&}MjaUeIir+Abgn(tG6Ys6#`J1;Z^@$^A?gmYqyF$)GS`m>p%7M$`TmY2P zBF-RAPfx{ZzPh%SWf?GfaMgNqcF+FaMvH{vUV6{n=RW@9A1jfo)oV+)E^V!Dk;bk5 zc6qvK8sp2G_M6{5f8+W^fD{AhyI4hG_vUi*zM0Sd$)8@kHvjWK`_qky@jv~OKT7&- zU&sZhGDzZnl0b7R45K9LYv8eZl|)$<>wLzQelm#Dw3nnQXN56>8nrvca62TNyy+Rr zB~Xo^QY%-R%@8QT$W~;cPVq`(kx3I`&2jwP9^uA8CZjO?=5&D z2uN4MfLKt#dp7`y06i)@Y_^aESc>V4hP;5{%g(F>0m+~!Bw!%|Lh=kb_!1h>CP7}z z)6VnH9+_LX_vE=#M^8-b+Z%6hHr`NADekca7xzyOR4C_oAT z6pM>f>Bk|`kuW^ksQ1-G5;W(fA5NWdwR;vfUff)G-DO#p=dIPgbumK+e5%Q4Aq(1O zmYpC{san<)AyBrHm8!M6CjV97zP(mQDf-G~) zf?5TUN&4yJ^il{`+C-+Lj1(g?1F-cHnUXG*qDtKh#WqIJMp3DxNohmM zIhQ6$9;azO@L47~I?pb5NpB}ha)zAQ`JA0t$OQA;+eET-FzEZ#0yEhRodv=G=tKzB zs)A;v98Lr*#^sVbWzprrX;y599C$Cl1+Tz+&@eiG6%YsrJK>Q^U7-R1aaQTTL|V#w zfB`+YAShKUDpW~#^}>ahZgtiN7#ul#=+LnvTb=E1+*){kePf}WwVbm|K}l-$nuxl1A6m8 z0&0XiK_R;=><~t_2{NiIz#!m8QvwU>(S8~{5Go)R!J#>e3}eJ_*Ioy_cOr;LN|P>7 zr6Pp|i5Vm)1x71D;>4dib?VmAg6|LRyZ@o_M)jp+gH#gvYY z_V#vf5aiQi&8S{#wFf&J*Cxhldzxe3R6E^vn&ojhDw#a8X(fsRr4b+qbm}?JZJ zdi|dD1#-d)=;-m|3Tb0~HR;DnD*>vO>a$ZbJ6l`zN;S>=J@?)HbASEkuU)$2t9pEL z!e*JNm&;)oZg*<6@s+KO?|k=#Ygf-J#Vi&CGi%#B+cryX+^RqE_~Xxf_qlJ(jz9L~ zQ@w8g8{hoepx;gsTQ2E1NqcGRfddm%OrQi6VQ6%uj4+CDvsO;itUpM)Njym7*k&HQ z9VP)3qd;LC#0c0h)tV0XjZaQBCaYmss+P*7a-|WKgHT~vdB>K>fw$gg-ZFb@nI+E$ zHp^1$GMo4E!61&4Jc+X;b7|`HF6S9r>joAOhV=&1LXATbkY*GDL_#H0xDTTALEvKmRz_vQH`bQ6w>DQUUpRN?xjp+1llE>fP@!Q$RH`rqg(1Ct z=$TsNISCiCT=7(Bq?rPR>{*1J6X$Ty3#=f_AC2_J^0mx-b-X%OF}3}7J~(}RKUT(7 z*+wNhHq+2SJw@H>#B7~o0Gn{`^V||RP9e+CD?$$$OYYKsnh~ab%*6Zco>#~~PEm}| zM@J3_ggpZTMnT}bEN|M(#u_AzQ}mMgjB=$z)XIYOv1H&~4y?TKsGK$3xT`d~oKb;K z6NiXd&YD3)oVCJ|dG;1krw!SD$7e~-o}G2#L)2Pf#aTpP@^zvpQjuWny~}K-%E|+` z^A3vEL0zhiG%uBq!ZMHn0Z>I`E;>&_#nA-}Ob(ff8USj@kYr#H6cC>aqZOCsjGz^w zCg)rbRqA0S2x)!!;??UHJH4H--gx-^@2CqrH+SpRmD|s3cXo1b6%nBVKQTRCuaq=K zM7oHI9$BFjV2>0TAV4T12HL1|ANm#zlYWb%ABSj=0PHRa1V@bjVa*txFnb2(ssM6$=6ENB+Y%6)=K4awd|QYaj#Y` zWzH(WL6)(1#hRHd5>gSKLr5$lRLEWhm_36=#Vn#g871tQMVO6=0K8}KE$7w~Q7Me} zADY;|cjB(ocP`A`ytT5}i`$mj07MG45Q?|#HGomZ0dza9*7|B=W|Bq1a!D)`pwT)A z^v>3HFHadn0hOwyD2i6r);is;0`gX(lG(riNV~Na2Bkoo2Oqrq$3OKWU;e_EZ7v`A z=)3d8sW6mmkR)l6XXVUZo4bAe@jO_Eas2IPzBw3lQR}c$?%8}N?o>lm+6a46XpANzh2$ItI;~-I1pvUR+*CbOaEsM?{4h#ghRM6%cI|3MwL>I{^-&C<9iCnQ|YyvUq1y zqI%ORKU)d!(KRC)tk)_i6qv;a4ul|t;tQe~ij1s*Js}~8^VS1{I3^HrIXYkjD^?2- zmBOeLgv;}n=I1YUljP{V_w1cHu(Z>8X=&-p3yZhYw1>*$kn$p^%GEkUSgtn2OL1)h z5g-u8qTw#ieJ)H*M%t}HKR@iMhpLy6K5SPHhQrf$v`!uhY-CqcBg2USv0%bc(P$x# z%(q4t6F?XyU%@ciS9X;OfWV#+-xM5(XW7;67fePhYzJ_-xhVYli%3WjtrZS)L59_X z2XT_R48Y%g?w;pfeyP0 z`Ac(GwpUlTx3+iMJ)?tCZER(=yRdZO!0f~wM-L0)`s#YC+p&W|o~5FdPtiI*G11JO zV5s75R*F0VibG)!T03?D1>Oq_qf*QsX}H-ZQr5bnQ0M5ybJ1etE@fr{6h=g0cByyH zd!tOXQM&WK({6k6{B@paNM+ittfD4>zqB9R9ch1vvz zG)Xyi;1%e}$?5(e-q_v*X3$z|JvlM8z1i{(Cnm~|KKA(A9=i9_|LJ$P+wC9yvT!B@DtwB^z{?Zr$EqT1yA1?ZqIZH6dt1AdQ4dYelHF z3Y7}90;5S$5E5aiRj623NCPu~2kY5!&erymBu~@Upf^b3R-SgVGzQxR+Xt6hpHB85=WI<9Tj@Se!myn%rQ(%jFp=;ih|InTCKTnW}*}Y ztxmSp>n>ipbp7R5cN%)4m6f#yW)FxAph%Fw2@qhhKm$e~v6ej|g2&-+0NLhw5)XRT zJ1{{RgBZN`;q2t}zS+Ht=U=&Y^JW&nxpzDghULpEi}T6grNzZdN$LQQ1W+^p5*n=q z6zEb=DRV*Nl%YTu3l(o6s|Eo9A{cS~i-Wn)b`<_tZ|;L})b9`R7#U`|j?VjG36R|d z?lAfr2;L$9lU=>dXr+L|Brh0>bWld9k^qpxUA##OX$=UBvYv`-%Fyto*e~O*re6Sb zaanCh*b^cFc#&GO2>=%Pjf+9|9%~pSq zB$-Dl@9dOF+cX)kHB;+}9Rh)K-g;$}&oWYqS%^q$dg;>2>S{L|eC_K` zfB1dxp4m6Mw76lS3X>9so{hI!I}C~yFnbneVL}37t&KN)<@(jPzvnSReD;OsDGdMi zFa4b#`H4@gt*tIC+!A2csvoC=Fe^u4=AGD_%4MaD0^vgLKqPtLE=z!7jER&20nc3Q 
zqY#lmg@lx6^-}r3{{7`z$)<^Ib>^>KS)IR?^}8Tc4Je92>)3j>9&G9v5EVH9LmH6` z5h@C_DXTCtQOT6FgbEap4zxl;WefmIQ{KXNh%be=z7LNt*QYxRWI!ugLgh|!ip#x@AOC$ zdk^461el9PuwcgGkn~r~>gbC#9fJ@K{Xh@}(GBNy4}!!J2mm5!Qw~EWZX3;cF6E}E zX*hfzA3T|lO&L=jt7gZ-U_WJb?ZPa}Al_cMQ8l6Rh_32Hh1IgISpoJeptn$Y0-t5M zP2wmj8^C(K(x^tHh!NS_Bu<^p$YFDAj1e{Rpgm}{l1i0ADutvYWlX6Agt1SAB~9~k zqf9`#a~iN#kH#BK6=83Cdue{o=Kko(lY95hR;r~a)G7?w3Mo}Bm4k?K=dw6w%uygv z3Qd^#RB07xT?Cf^hR!j3^xc2-nJ>Nmh0ivyERK~boYMpVbwQ0`DR!qQAjnvpZtRgj ziu7-`-sh;HObP^$ofT|OHufDjJonNM=C90A**x_2`<+Q&n}7YKwf3cUZ!7n{;0a2x zG9s-Np+ta@QDIam!n0%u^A}iRv1=6(K^mUh0tB#|MpxuMlE~27cu14jT814vmXN+B>2?2qiSXmTT$B}HO;F1=T zgw|Sti1%C&Q8iLz!YHbS(fG5^JooS;53ejQk2kAE@#p^d3uBdVY5sP(GUjaloB#E< z|Mh?TwY}5hw-#c?oV|PFC!G{N}!Q~5=Z@hlx{A+LIY0?D{Z&z1$78X`3QEC6-y(i9` zVk_&bE8Ri=#>(1csZuXRIWh_-{bYQs!6Jh+4bd==A|V2WXsuQ34F#_7$42l1kui=f z33=pV4dc8AE*Np7G)g9niU74XCeY4i`5;qKSe~lAZRU}?diUI(zkT!S#dg{OKv58j z3ppo3VW&}{R?a&Uny6gb>9lj}H0VaN+3)uIS&V=VI3km!);np8Pk!)+KYaJe!=3i} zrOVfoEFqBRp8M9Z1N+CvCmNHJo15Fs`gj!fyZuB55h`>b#F=Nu-lHKT41%be^)oAj zm`q^K-gR;ixBsty{jWdu^MCzaPd)V~fBf0F*Y@mtSt2|hMiHkOATonu0abJuC;-X? zh2}(n!HE_SKOAgXL=}7X+=pe_e_&6&R!-tx-0Lr0pS!&@$9YmM8-qH{viXgzLFRG+ zCuj>NxZxML$m$dUPy`A=69uFKQY90HT31X|2}_kQESJh*5Cvgmlqxq$6|FSR0%Ml5 z&5|_D^VGUQGKl+07PrztH}CI2(t|V+?|jYx4#)x`a7a|xh!|2VEFxiG+SVCi0N>i$ zYHux8qIysboO7C$_g)A@SWs9z0I+}v4nG$G5nvK2)&#o=3A+C*(ai=d&zT218Jx)})YB;1!p`07W^Uy2PbPMnu-;3eZ|NH8t)T(E>$*%ag$% z6`!{|+od2V*D8hYh1OK7R(zJKkUY>|{+T^+=+L1%jwUX~Q?S~$H z@aV|{1gbePChQ6?gpzJ7EzW2CY;0_zTCHS0_JZZG^q!A@@Rjd>cV%^ZzlM+nNT~2h zKqf||5P%U!t585@L=+MpdJl?_Rv|@@Wg4iTOQq2~efso`mtLN~bgMK~zxT0ITfTSc z_R4pcH!p1Wx==LYsEyKocgLy&kqV)*KF|U0a(_mwnGwK_R4i{9pg2Eq_}fL{AqXSG zK7Vw8kDBvg$N!eDU*6=9!l>PcU0zWE+sp3Q@Ro(caMv(=k66G&yB8eRj3HNh#6|+# zEk3(2l=$HuTL2J95ejOpw2U?$-Z|%NWb}Bw4kT0KV}}kN{Nm^T_;3G>zw`VLUNRy( z-S*eN_Vtf^;wN-ix_0G_7oU0I;fEi&adUBPb)!+O?VmpA*&408{g@z+N+n^B3Xu>A z2Z==x$4Cw~J8^HYy1czLckA}$t5?4N>~kkh9J~MFdmsPEBk%p>J2tK@fBVbdxq1Ei z;`&+=$If}>ZM)On+S+MMjGs7u1VnAGuJ3HDZ??J{Tiaue+OdOstaB)!Ok^1xTW7N{ zFyy=mbdqKfGawsrXeZ(kMKGpiw>ZZVjg#|J(d$D;?iyUyZRvWCXStr$U#RS3n_O{QwLKSFZw6f04 zOz;2LCqAk;-?_b_qM+OBqUTabBI=8O`NccVo*k=C?QC^6x7I7=T5F>lMgro&2?qcO zij+nqLgyTicaWw#sg3!<`8V!3e&p8T?ce?MXMXB${o>Nn?QebS+i8-eN;yRF-UK>F zABrPzn!;h*!J%d#}jdV_X9>2CH~J1$)tbaSu?aLTy> zB_P)OP$~9;?8*e=<&sKl$ry63;C4n)l-ryL7z7nqFNg&NNJg7I9v)3Y37tqW7ytsG zP;q9-5X44CM2}3mxGxoeXa=GbjE9r6u)hZAfE5sB?E5)K&9uLhy?Ei`{rB!UadkVNR;#bbUcJqxJ`}Wsv zE^WPbdH!wp-SfZ$4^2$ZIL~1~Q+J%)xc!|8&<34C03?-W<~1cGbnBiw%jQB7j3r-Q7t50Cp3{5fF8e zmQ^HY7u82&!KhXWK|&%x49jM+IX*kR_sqTbFWtU<^61%;(bunBO9%a+OwT;~^rP?k zz@Djzt))xf{`%J+d*aDccO1VtH#b(FxP0S=fVHB%aHCp61a@AK3lni|v^X|_a-O5m z4Du|twlhdK!(>NAUX(9*b_Ax6F>D6KXdKs`C7I9sbBfY z>lZJ-_e18wE3aR_c&*l$+}hsw>NmgoBOm_ZiCS|tP4dB@QY{-&L0}Ra6RQ#H42jIB zU62$lkniktUU}(d$Nc4m&+R#S=?Ci9+I@?>@F17V?RXc3;2Adgkv$kh;Iw$Qs z=S&DF77d7F2G7FYiWPB0$dU_-^6Z=y24_7x&e*4p(OG9R$r31p$H)eh(Nsbc8Pg0) z^{74(H5!$%MzsT^u6aSwmZG;cE7XM+8MMvy*y4J&wS1REEsx? 
zy>z-dcJjz+69j=${dPy85+_W+EJz?j(Qz>l6+I{kFd!6ZpfaLY6jY<4dxX)S0nh;p zh(iD(r6~2@iZe!)jvsY*owoHVI0Dpwn5T-6w>LVgN$-z8{k4~#+xNfy;~y8t)-f}I zS178VWPkW4f3~%`lIFa)q4iWA#RbKwHR(cw=e(-~9Uj*=@J2%ij6;$A0HC|B~5veg7NJ zKl>kl?b8`2zy9z3)xsNB{>8ujucr@9{`AlO)VtsF6fq(~?%aR-?LYYJAO0B`^H2Vl zU;e=R-u}^#zWZ~(|0lN>Zd|%?@!07jgq({1%>VNTPk-~J-}@iG_M?x#W9>ix{;&Si zfAhA-&i%{(<6lnIs$oeVIdte7Su9w^01E?fKu|;3H_H%rAwf_?A_5A90Te3IN*R}@ z-EKP@k3Y70z$x1ve~-t^|Gd6-Txlqd7B9PnGp35=A-1*2_u{V+tG1hrDc9)v|9I2P1m zw5pWLW6kCrM~{8}bANK~?lW=P-)?ON8f@xQm)yF3?aW=LUqAo7OK)7=TH8K$?CA49 zcxioOyEo_n0DD&{mD#h_0OCP}(E%zVz(9#W%ADkRpfw13;ck}4TidJsRo|dp`dDho5-#^%r0I&eP9azIvtCjVtAFdaO2GuMM(%ZgbTLA*$*9 zhx?s&ztvgX=m#p%h?QE&5C)0>gpkl^77_2sQ|?lYBm!D%GT_*wFexSC1q3w+f&xMn zQlQy;afNe-n9w-qGnAP9Ay$Mr@%-Ps|6-~G*B z|CPV@PtP1b+ge`K-Vc&kO;kDcS`pjKlOk0o0LPvb5z_EzV~<#5s4$XX;b`L75KYfa zH^v(XF70;ntsNKlO9AG2x{+iZ=hN}3Z$xiwthEc)EvO>ZM->1$jEt?Q;O0u8O`w%7 zhqb^|CD2UbJ?D<|)Th>4n_HXu+_TMrz4y*%60<>6YPBRfRJx%}L!0rSK3S?X8};e2 zMpP+PqHuC->{w|s&*Ll?+evz@es9p(+G!8cyyr5dq*f}=&KxY2YYHUEVl12l2?fD< zPfGFdjw{p;qd@>g0J+dzq7?ekp?Y)IILQMd3L^kAVR2k|?^$UUh{8JRdaYjD2`ZpQ zrw(TuYppMSZQ-li^PRatM(wzlRRin|vM4afT+*dDPQLNwS1!%Hq6IeQx60+l6OTSM zb7rbsYkch+-+blU-^;QTD$UcU?&`*a-}~ZMKlI@T_Z&R(8I6SIxAr46z{*(XhU;g^57dC!U zl+xNc7n)#YZTrh#`TEMn>R5H^)4%;E?|A&}wMzMpdrn+mzT_RTWm6NE*q5Gvt=Hci zYs^%u(IbyO^c(;F-=(qJ*xo`f2lvd3kJS^l2p(7iu`rWTL*XRK@H9h4(1pcJ)D}4XaVn_hr+yu*;fDjhDjL|thdAUleucsk6Poc0l9+wNaslm!Es~T_1RF5H))3%@?2j{tx}o zUqhw0b~Zwz092@gTDj8ic9c>M7y$v9J$vg4sM>hW^E?MGBCgm9lH-`WJN;6yK2ckn zUSFD9xc1U3&mTRo|A7adcOJ-NBaLRg-`ZYY zoO}7XSN7DWOVx6{Qfv1%r=}(}1>O$;lxPyp1Eq8CfKj7j0GGu=fgm;}$M#N*kMG^{ z)Z5?r`jt0~sm@I8X|3H(GtWl(K37cGO=5s7%?D*&LJ<)u%;ph@06iB3WAI+ICeq61 zwp^`G@0kfB(~rAJx0hwHpzLI6ra6hCHOx0{K6i7q$Kn7ygE&SM;$n?}H2hKzzqlfx zkc`npVl-(agIX&cB9(%mVZyp@XoGcjo)Q9d;=uWCl8A!D+ky8z$J;IryeRBMK&2%t z5srt!Y^AoR-WZ=48*5Z6l}deVtTr``09l&FI|C79bS#bm6mr}j5HNzKBEW@^3<(Pn zdM;KLEC>a{Go0yZm>VcOgoB5Jf?$!I#)8bkh+Y5?i^wS;HUN1qE@RlNHgc`v*a>35 zD7nLKva_;4ovrbpdgj=^ews!>V8t3C1fM$__u_uspWS!-;~)L#AN_~VB%I}$uT+~G z>&q{ne>uziqwjdvPyh5!o<4azblGqG=g<77|MXuz{po+(Uv1xBTG+eiUO}OTY5duikulerc)y{p+k zoS6pj>^y6^c5PvKX*pEx!|#3A`D(WxHztB32loZl`tg$|;v^fSajQ2-^E||W7%`Qx zN;wQ06Jz6<&u-4$*gG>-4l6>EIl%xCkO%~YfxX}mbVVW}Dvni!;sjBF7Yb<5>F03_ zD!A+32b5B;zVI@Ka`dhvb1S#sSXz62XM4`O6bO|70~X=vtQ4_$AOSS!y>l*OFKL$4 z$D1k$hG#w?zvWyWVmCN+mf2m$!U)zc{GI_AhrT;7EF#6f0`Qijc=td?0icnze$=`b z6=Fmwee(%JI0#22XLz`eN&^5Q6<%@z07Kk*^mN`ut_pbqj_@NI)-Vg5l8>_gMDvrD(GKKuReKJ@6@ z8XPcb+nvs!H_<4U!%|Mv ztT!^@z$gc#f(i&nt`t=2Svp8l$JPpXYjf~WDVH^nF@Y(SJb+m1toL36q0$OGduH-r zGo=8Tgs4aqX6GFN2??>*N~Jbeq%{~oLiTaDLyAZOtxT~#%(KK)8iOp`?spJ~-Eczl zxn+bShYv?(diuE+pM3nuebdvGsPWwS7yjbQfAPcYSNCDQ&be!p_>%IusyaB*hF&Y=ON(0R}=eMzgmFNQh%& zjoNqv5aLcZN%|tv8Vq_wzE*p6ZF4D3JDxKERyrz|D`BVzkw6nEQpm*2>>V>(w#+Qd zHuJf4-deEUqennRL?VbpAQ46(sHTdlkq(i{N=-%8YSgsiJbGkTID;p7&g|H8&*ce7 zCry^)f!SO&^EeR&hsP%lOzxYi?I}m0i2@?f-V18WHp?9=DO~pz7YZTB#TL6w>^CKFaOHV{m!R9ck9CZkN((S>)!4+s!@{mHHOaH zq#q|qXQEtw^1UCN*}t#d-*i|xe(3Pt+1crd=@);Mjxrxs&SHI2yw| zT)hrKlc50+54{~mflRO~`x>J6u&c}(cJ6N?IU{$t;?+jFuwjAN-O14j)TX!~ju4<-H{Pp#tSzm3hNx81f!2&YpQedFee%rGt!vk>Tz~loFFg3rLxmm1=~HKJUYje@ zR=o3Js0ks$NVZm?|o)^aO+j*t640*(?u%dU4Vk3_xpZy)jyx&Abhx()|y-?JHmT z+Qv@%#Hkbe_fB_{xn`rewXuHX+O_?&vwNp!4%PRax$nNaE?&NVbN-d{7xzvdJbe7% z-o1Obwl7sn6^+C!LPDSb$$J*>Evpkpj@0VaZns+tOGi$gc;d1920QVU*RLNsd~9p0 z1&WY41MVhiLyHAQK*tUk7@3F_YVb~hh+`lW6d@o)M%1JUAk@Lw#8g-cU6!W(L_Bx; zy#eRaXr5nRzS8P@?aNTCT-gc?M|BZz0q z&Rc7pbCy~89tti)PxX<2L#0Yajp0pn!!yDHE0;w9X0{@T{~5dk>OW+Z*Kb zgWJ``m8o*&j>)}u96C8(o%CecC4is<1q@094?j>vqzr@QBrJt_p4;Mn#zJl>#3Vx( 
z7yv*e;zxbq=w2)Ca}h)V@LoKk7w^Rr0t0y?#)Be17LZaVNhMMy$fZ3nd&|`yeb=$4 zP8&k(F%yEf5XS?pwfCN#_w1b50|&?Nx?_*CgQG`&X65#li+k&v8;1@b_^1D`zwJsm zheb80%M*1U-Gf(%CMuvSev10*a0 z&ZIE&5-6PW1wjF#Aw!x)lmZd(o_(5UX|i|Uz5^$Y-k!g{y|X#Fx7zEsuCA_JY`1UP zJOxBm)Di?G;DA~01)(V>Cp*B5iO%LnBD@;|5dDA z-Xv%ia)2RSn7I%Uz%Ebd%_YNIxSY6q$QLBz;g)s?I}T@nH#Z0cu^$Nmkx+yYM0N`i zjyR(-oKkkR1~i&FMwJRtBM~wRh|L`;REkKe`q)?)mRdX8)v1Z4m9=tI3&N;B7?5EB zB(0Y>R_?m{+&8}cjW}(6^J{;3`qZg2C(bVMVG}lNXn?f)aENI z%O+Il?a||RJ$CFge?t*|V>F{YJGKhFZtgGB5*^fb-5Xmdn#8 zP8@Bey7-@)I9jURwLj-}ubfOF@~9@%raS+Dv$ zNqfCe8|M`&6cqqy^qxowQ_jU55h42``B@NH{F})jP^itZkWmgTw?w>vAB98-p&vy% zd89%%EM7Q`LlP8cErx>L6Z)WBLk)dPQ`Jg!%qKx)`f;MQqG1I>ZB%Y;vtHK@JPWdF zR4cvSV0Cr*(9G;s?y_sEDaTXQ37^8s_O{Z#8kWF&qg0&5fgS^pIler9qhV@yo;j5zHcN9Msg!CnQ&VI0@jLD~Hq)p9 zXHjS_%+1YI#wN$6U%PhY>g8+W6Vv03>b(z~TV0>m+B@&dff=hb@=VILMkVTqcq9-* znL=O4vJhDud!!n9NrQ3&n*r9Fjn$>K#id14QMtIq^_`2I?v*^r zp}<2>JeyG4+@m)2daYTjwcD+Bt0#g=gCYe2;JsEsRIU+e$Gc{q_*URXhDbicE(dIM zT)$-r@K+$4Hy=`LG_a63%bPM47}^fWE>&k#1d0J;w0?l1h~RLUJX#@)7>mQVH=Heo z61KOHlty*p&9_}VVub&U-n1@?6B0yGP)M1*_u6PQu(oC!|gSzX+yMx`XnSy)l2QmMvq%+7);1l~wUF1JQ0 z=UBXF5v@!Z7;7_T1_19t0G)tj9+mDT?I%C@-jDvsN51pb?|=78U%ztY`n8L%ec|(; zd+==!zyE_Ddf=%C-v0i#FI->x{7 z(!22LO43{7IG&lBTe$gcPJj3V?`d`W-~Pro=H}+F+?cxa?0rX$9NF2~X^c$_ zHr4}eie@4VgE+B!_DnS!&1YYD-i!X!&-`Rb1;78Nf9gr7LcjUn|J#ilbAwK6tlY#T zam|Jm0ReJi1hQm65opi?4uDa3xGy49N~8CPphy9!v9a-Lt?KjK#tG(ryVL6^+2Q<+ zc5Bm%DF@|hEh<;rtv&=s>tgg2?-|4k3wQZ zNry(6B#ZMnc5E}7XMkQTh!CI#Fhqf)n$mTO27qOv2ne+vScf2GO(AJPWF1=0*OI~Z z#h3Q4-n{SRoqP8l4zz+i)j2uuwK7DSo%oDP<+2Z@-|lLo5m3Bha3DxZdtoRhL=mCU zCXi(`2NaDRN#Xn}AOa59dI4c1lV@q}@;tX%2pO%1o{<1mqQgAYFE6y7ySA~twR7*u z+TZxVef4_9xh(hIJ4c8{2c$|0L-3B-r&+SuX;s3y0DkA0Xa4Xr|Igk-_x{jNzW1X~ zy{@QQScw*qV)urM zQGgu?dgTln(k69jq#?Fhsa^?d;liz@#O9N;jrG>{Ypw0q;=up_m3RhZK}DXiTy4Dj zeIKt>_1f~%+wOnhJI{abh3`E>OiX}?#1JT5uQn0YoACbbc|L5Qi}fz<(r{q(|0AZS zy!mi~T{nfHD0tXwOEHm*m_@@S@$QZnX{a#9w?IaMBMBG0DIO4cQ#8CwIuZ~OE`|Uk zlpzUKMo-+0upd>I;ZLEM1~fZ|r~!dgvGikRr4&qzO?O+{)zf#~x^?T&k)zG#L~Cc$ zIW`(eWM$>HF~R=52e;N%a`v|u=4~3c+uPtdcewy7LIAXm1;Bf!jM7@=sZ+$Hl+wog ztf*i_0KhDsi#A83$i8HNNCi|`Ilb);-CHb^Dlm{)!p7~wVIW%pVzm-s1!yXkHVlFmQksU3tcoDPo$_g%Mj3z@+>II3M)mx z;H?J$55iU;a7rN{0O{>kXJvC+00fYQiNrbIn3xoH%&kz@F);v9Yn%PRo18 z?2L&7siO40J^Q}?t*39?xbedu{m4D%?*H-^{_@Hj*Y+LU|Kj-z^K%OTJU%-mN!FO2 zNqs!F@h}YIvZ0gXcw~!%Tg} z`Cz`i(|YyAdj_35@3=F-Qku2HAhNk9%}N{2F=coA_I$t7y5rDUjRBE>bA_lF2pNl= z8v&3Y0}+CV67WNg27@o!J_Zm*24uk8IwF;bcn_A9^ zANlxCj8BviUAYuK`N$JG6ze1`MG#e;^`%m!WTHq1#^~I-m_UM3n#P*YvvJo zP;iPDpU1He1A`nWN(NmM=-Gn@vq7@9y-}`}T1m3l>0cWp{ld#kfJQF7rc`;`6YqTJ zq5F5%H(YM_OwN4tsSmty{s(!Sc+biPnn;swj8BhP^&>=P#Ld~I{=bEQ^Ja;VQAXa7 z{4=C);+teo99l;ZDjE8U4y^`;dxWB0D7^WH)#5GeBL1uUG7<(-|K#OC`C9C`JXX97c^rp1-pd6F`UFbgvXSes!+!@-zA00N_UsGwu^ zLTa?b5E)iK0YOnj2v91Q$de9(L4V*TKtkMeZ1#hDKDKz@19LZST)cSx=Ec{a{^qw| z`2MrA`w!fG*V!kZ`q2H4KlJFko@{RnzWeOc-}=g*UAXYV%JRA(hB~a(8ha+jHn+QF z6O@9mWOTjOP*G^KnW)v`ICkEq=>Wugo9E6Gs=$PmQppJAE=LClpomb=i6~GWJs>&n zl~LANMFi{-#bwOoo##>zEw8SxciIA=6albjZ^0eA<77N&udZ(bz=2~Y4jwtWaD9Dp z&%vl1zVQ0>jsDV!g9i_t+Uqe`S-d?yF|q%^*!}n2y|J}9_xc-GU%xavJA3%>;km`d z#l>ZC;MlXb<*=Tl{^)_dmv7zJGqLZNe(q=2mNq{3XJ6`e+x;}2Us(|6U7mw=$}!fe z4nT%Jw-5$qd*HOQq_rU_Oc4PPfUuAPpeRj_ZLK+0Zd3z>{XFx`+x<=p>~cJq>-8;> zHi7rhX>T(Fh)4#&fC`X{*Fq?m2#7!;9zZD((F%7N5Fh}yxLFD=4&qS#sbcJt4vjJ7 zb5kids%6vZCfR`V3ZC~64zi+~gbp%CmK zFax1@U<6=cVaLo$7}0siZ6*k$a>={iCrL)mc?iYz5XzNqsfJjo>t4_Y*IQrP?&?sN zDwQma10qr?0LBP2pQS#wj@yH*GsvS-X{s>+zC5%4psA^dW1j*BjkD*@0#w3E$r#N5 zN+TczArfGornjyvtgo-_Jusan{RRd3AUSq+|F0bQu`G6-#Q)dd{pIWY zceZlpoPb9Nl?fea9aWpnAv!W7<=_x!88+tyrLTb3c{m9SdHEy8C=3tPVS$j{+EAc3 
z;L*QzPGDdvEGA$iGJCV86i*%!_lJ7mT?$aaG1~np2n@>!4O9GxMq7zun>;KM1+)Z+ z%%cid?16y*K}+F;Ls+z6AV7eDHk0F%x)R*FHUIYazi(+_vC^#7E6rrkS4x41O|z|? ztpf-513;i7>+@T;=kh!cl?Eh4FNiF{)`>zQ^xh(B(na)=04RY1kYZtAE=<|j3l}3a zA_h^Ic9K1Z4~|bxr`9vDXBSoV(c=dX9@_ixqYtjGEnd9v+Kr1>7p`6V?lUi3xcJ8Y zg9qiWh`yVI(af_kk9>0q+ah`ZaG zbg3MM7?zqfU|;doUa!*|BrdlBsdA;9+B{T-lq&3HJOhz}zkVjFF^y90Y!QW9`CD z=Z?FNpE_`0v%R^rxNz6~_dNWzyZVD<&};qRwbvTcjdwlu-n(};uU@~lzOjNVmTeG5 zp;D-zRx90i@?>+m`lrA3mF4x-fYewJe&QpaFr`wvv%Pq0esf_Z?({+vx=67gGL}rB zRN#H)ePCTqBPRp!Uf7W$c0LFVu&dYWl}Z`OyEq;U`fZzRdY!;nOj3<5@U8HlZxqBdGkX2+^QUzgY0_046CUFTBgy|*sQ@-WcEz#suqtK048 z{j6=5)>~zeC^UDTJ<3LX>SzDnqfg#lQg&+m?AsrI$NdkU!p7vmLwic4NF>)lh{!$# zUBQ6U{?`BXumAJ^{%il}^y#}QW7QX)`N6{C&8eB$`%c{X-HWgO_uu=yx+&Ktt6Q5} zFWb%|k3V685YSP8M*#n*AupD* z!_I&B!x4@DEgqn|V9`j#ySoe!D15gH1;H@rad*_%J@4PN0sL*vk5x(KI1C?zRyawerr12#?Szh2%w?b;b|~6?-y0?45kE zI`iHm$?m{Hgu)WX2?L#b;Gs&bmZy#pob%*3G@5{_<>ryYd+xdWuH_r6&wS_kH{N)0 z@#dA=w{M?6|H7BQ^5P#wA&! z5=G<J8$sRDSI3Pwi~Bo_Xebz4o9|uibg({`bH8y%W=A>+Q5qjh6!T!#w0ch2>*tcyI?>1;VG z6cliuw<$BL!hV)PDTM?c91&`z3AJZW3I%|@7ZB$ip(ryXQvo025YOHZ;dlTc=vkZOd56>>C^wDiSvyI-#>kfIq^l}O z1uf@SZ(UOwPaHqv+o?@s$9Aedws&%mDV3ZDc2Y!edn}lMP~4de!s3MsMkpc(kboB= zbih`eC-DF|up@Bn954%}HmAr4dG?tV^g=+&R15?Nxu*=J>!j1?txcx})u8OMJWUci zh)ZUS!A1mo8;x7n*DqaNGo+q>@xn(QyT=$fesK1uf8xVm`oi-seC}ByPHVT{^Y-_q zar^!g_dfdgBTA{bZ!1QDcfjDCyz}jke(mhrF1_%^um1gi^^1T1r+)QU|HeyiT>a)> z{^0ifx+&AjMoU<|=c6AP-#gat4W{-U%3KzN6b7O5{DJpB^~KMBv%lW?${+lN{c!f; zg-f6MiJ#hdY;(NXu%;hPsB>>SJvlkuEC;n}$@SycF3jEk(A`Il9buc#PLGqY&78~g z9Iy-EIV%Zi$4Vq+A2=9zt_x%U)lwM{BH>J2*r<-rOr^b@)s@?gT2!g1>l@u0-F_Ds zv?<8Bt~MT0{{@wWQ-xyMgeN2f@-}PRck{m9C6p6XV|OD z$UqrK1%7nm?;qWMEC2#^SAIHio9H5x4dfC_yQ41J+Th>;%7Gz6$YEDL}!8bE@8_U%7# z`|6E$t25CYuZ0aAhTwgc+c-_iwdUr|W~qGF``+_DEAq+<-_LWa5xo=dxwr|1Kq&(feAOf@UlFJEfZqjqmIvu)>N@isv258^-n;$M8~Cw}($f#cUMUQ%%O!F%qI z_&sUT|H0)8U;Fwue(`Vp!Xpnp_S(hs>zm6Up@2WK|H$Qw^B@1nd&Vcm{^NiDt*bY# z*PGQ}{-6Ku>_lU1yo8)np6;7%)Pl1?{?pHV?Ta+%pdvz(<+(=mLQ;q_NJNmGCj?Lk z!ayL*m2$08Z75@sq}z`NeV5+qw3gyH2SP-JqP5AgKB6iD)Y)-nGp)2|Pg;owrOA88 zISUNAX-MQfJDVYZ%{>x=P!J*lv9pYIXfPwHc8{1-cGgA&?>NqKm22#_`;t&^ZX0fKK9q% zGuV!6^|3sKK;hr{xnH>B$hoCk3)`K+6Yu=c;hFuP{>?x7-5fb1a2bH_-c z>SvA~GUbvMt_4$r#BFxky)+Ghc9LW{i_3u?)8z(*UdRf7^S0ObB7k6`vME>dBt3Wb zY;SvGdts#(R>o(V>;27(n_DerCQX{G^DL|g1#o<<+3U5|Z(fx=HAVyKFw}W2PFMj5 zkQX6sP@p+Bp~KP;ewNWz7kA|VZ_2WU=k$pCJGAMPH{qLM=fArLD8hIPre@K-4{4es zr%w?Yfi%P8nqhcbjGB58#u2zL!=O< z1~kU7XB?7!1PNK7*nW8bSJaX)ym5z<6ay0|8Pz5P)H-n1PE1dpIC8X7DrYiK2HAT} zM5MiV@shZla~G7NU@SU0b^7p~C!hMzhhKR1`!9a~TNhq^_4e(}<)zI_7cai>!ZSw? 
z?mKhp?2$94-tnF%-u|9nRxS*6a0p zx6|I(Y_azTj~sBAY_(d)51(@DTiGDCSs`zNJodM4-u~*BJ|BMIhtHn7^Yxdmz=Neb zAGzxzS=Rmi{*B8wUViE2qbJTBIe27eb2CX@V9brBxk_pJp1aPit*?IZt55g3ox>-N z-v7X{nTe?@Z`@c}UG8shUcP?u#;rx^x`}eVP7Rl3B@=*62!~E>nt+*sMG+KqIsgIC z?9*zaUaeP!K%6AKtTRX#I-M=H3=|l}0$GxIK@kPIWV8YF?5vRSHuhdDWC%_GgHjj- zVd`?N$>zesBA`Tp76D}Ik|Zx6q#z2Nvk0OH6_Nx~$rM3}%j>%>(=hbCl`0bxaoy({i7Aw+>7FxKUo$!TO^@d^|>hS)ds}w zWt|(j-QBr!e{knPTdwIa+g}Ox9+|D$Eaf8C6u=_{T0QdMou^Nnt(PnPPUjtu+}mom zb~d*ke&XDCKtKM;_ptSGJLwIa_^tcyIevQI?B3~GVpX@JOkmJUxl#hnEYO$;?tSb` znr2}T=4lQjwQ_yWtOS9Qf|6-+jd1r}C&&nUiww>(iznujXZD>sdq5*ND_dLbdPNWV zd8ZWzib`QA>!+YJo018Xq}~FMGI^Zk-e+mNc6;#$FF%9ej4Ef$G5a)gtt_qUU{BbX zjw*$--XM)5qjL(R#N&-dq)ls(G;0%$Mk5RI#f2N!+r0z;RFoG9A4FjgvKRK;>g-sT zA`*hqS}DOm8Lf-3Ek#-bf%A59eA1W@caPBBj=nhMar9qA5I?F3LwxVehwdK3FydG4 z&IhBjR(9RxkU$toLxFV<8}y;}d9+e0x@8)sgAonaz}}C1K#MM;uuK{jr{R1tZUn*tAqk>Jon!+?(UBtuEA{HFTQ}eNk@u`$TD9WCsDug; zbJAvfvgy)zXM1B|VKMLbM1WbGFaS{DmOiYJgrvb+7{;6d0v3(F2ooqC66m;quLNY6 z(JXxS?45h}&MJa1GMWgqpb)bM2GGi=lCEga&U-K7$|X}fJT*1(!3Q3=|K%56{{GY7 zzBzwsyWMHsxV5;kdF|$n$(LT-KRvte(18O7_r2qt4?OzB{cE?^Zd|!}>FUL~g~jci zo#mA^Q5Xff(P(hHJyx&PqLO!Mpa{s+gK}c~S(>tEag2y@0%RyKDj_hbfDMIKKVoMb0A;% z{FgYj6XO#f``8cpl*YztK~(1 zCM-w@grtbAXLN*8tu~`-1<9sq;yrJ72Mg_(flw*$8A*vSvOfFp6F>4}Ke^eCzxc<0 zx_8fX5Ky<%UA?`8;?q5V*RNb?l*2}C-1Ym!j)5E(ksrf8Mv$37Pz<70I?r=P1|;X03^8k` zDRp)L5_{jxvVrrxJc(_dFxqX-{~edW)VL8;ECz! z$%Yk#=~JvVObo8B`u zF)_3E;LP5M*>^tt#N!XIudd&`a_j1?t8?NpGm7?CDoelb>=2U%dOJJK zv9akr`#JW+sv6aNcPsP0Y7AH!j-5CR#03B;bkQX2eQm5(tye-rY3}1V8(6#29jrO) zibu_m;YXi%WZ+m6z4q#*k9_#4Mzi_a`8Q^#CnuYY+p9~@KJ)yG&wu}O zi(fi)$LSA!;)m{i_<>3#gw%God%adK(4irn_VcUPu6^Y%zi{KV?>P3JtR{tum>C#V zph1w(3o{hUSO-q$QBbJLx_DY~2zEQE(V_^&2VGn@0Ql4OvVBiI8GR|R$noN|X z$@$k`z4z`1v?dp)T1BV`1=x+`_QhQ%2q^4b7=%tdXoUe}>^s(Nce`DiZ@aY5Sz=u~ zNeAA>K94;+@i{@A%S^hTW{`BIz#W=YhfVFVJH~q@-?}~sHTo!uYBfPyLb5iunICUX zlxtPe#1_4Wem_f-B(pGRwSaJIeQT%HzrDFu3Qeui?6f!YI9^%ZV8A4eks#FJ_V#M6 zT;1AgTbp${ty;aI0}6}*5Ah(*vSedzEltum9tePQb};BBX{-RfWhY>?>h!xoK$4?U zD$lW6iAX_`T>V zm$N^FduT}I#}VoOO=F%n9eZfhfYUHRzCeH8(wdLxJR{;y(X+^?;Ebk^p$5Gm`w!3V z5yf-#mI^Lt@rr<`2!}v>Q7;go@L3qLt4D(5f(a_Dc{oZ?v6LXvEJ8#=P>D+S-}k`z z=bnB2<(J>{k&g$FB17>YWt4OGoI7{!+_%5~^x}iev~;jS@Ejief-SAn3(c>h(L0pBkjD)!s={=Ui^BEAlMaTM9HO%_!Dc zo4K65h1`o%fh4H+A~BnVMXphRY(UQuDB0ZFMmNdP23 zmE@K^D@7p0kQr3UdZkr2&YWk2LiZ$Dp3hdQ0J62Q0f3syBB-?)o0tr$Wkp5>C2ax} zK*@n2OL75Bef+|U&+R{U>PLR~ljD15Z(P4};oW}8bzrWOvQvf6+5JXgg zagKvZX}Pub;>)kMyXoFNGgD)=sp;m?Bm3{Z?>K={o{zPTSn*dcUt3i}&4i zPFJd&SP?G{hujbWAZ<`s95TDyVGxi~v6HUN*9Ps|-FQ2Rx6-%+dEeQXZDv^rb1wK+ z0!)K$A4rLJChu;%^z8qiu0IdAEW7Rk!8PoC&bh;z--|giGjdj@GE?)Ak}TQsY%Ckw zfInj}HdH|shO$4}K+#1(cXdO5O*hpv)de&da2eaOk!8!m!jffMQfgMtW8@eS8RLsL zzdN0C_FilCAA6tsLe$ssNtu!HB3|5k?%r$t)^GjRZ!N5!tf#ZDUy_5(&7Je-zw&&0 z?WGqsjx7wTw3s$d4n}*MhuizR(`MXGxj!tMIBjMlAeip&N(}9!b&Rg!Shp%r^__}L z4rjjbqM`845SnJjh||f;ObN_a6((bS6(l#APMFZl9CPWNfz?f8W=NEJ`OiQ8E8jeS{=#_6aRFvQ8i_J(YK=w&(V%cXw6Spg7}b?Lta{+8m6dAe&6_1* zSyo17pa`5HcSr9Wb!@H63piI+ChekitWT%KfwbK|7Xqq=bN7a>McieIr1tDavqgdmtjZxOQxRXZ)3~JaKLN z=4{#mSliZZ8_i@EYh;`?v$k!Ux)sHG)(BWg3sT#uwV{c#wryL9ZHxm~m4&mxa6FwH z)U(|C_N=WwE-nIH-CeLA;ZIP(V%C*h+HzIK~e)mq7dr;{Mp~V`|P=oe)Kz* z7Zt8$JoNC}?|JJZT2E~@S{PQnL9w^fmgHbMIkUdONGyOTO3Z{RDyY6J zCA8ibl|L;%rhJLts`o!JK%Y&-F z@bE)#?JpNiW7FxVa#dLtKk%d9w{q_s|YrEU~i^E}~u+m@HIJRCE z#nS3hwKVw15B%vHo7ZQ%mk9_#T~Qf;h!8Omnjr(Ls+pvoTRWRLE?m)X>v6icmi-(R z0Vye;5yom3+R}MYBS2PlRu2G5=cMpEH+QaGyL#gI*@!J0nHe#ffNI(uYs3&udJFv) zVBG5Kdz-KA?_QW3+?cle5+Yh;GBN-R1eQEwAd{g2*>_wHw!if8f9#6kXn((HC!`@Z z03a`bIbuLzX2(pO7;cLkO(`5OSf>PM$pF2ku~dyA{20b4wIel^m;BLenwi@y{H9 
GIT binary patch (base85-encoded binary data, not human-readable; omitted)
z2WPlUg{?}vJE#LprKSRpZ)UqLATTWQudUMX1FzHd20T;Mtl9GA9cRLN`5RQ+`ET#E9-Hoq}|KULZxgos<&d$#N zrBsBpCEvx5@<1{>sBlAQ1+0qTJOk9&w1Yqpr_MpX*vc&bi}UO=?BkSCWd(5$AD;!D zx(;}tXPWeb0{G!`W+b_Mcep1UAJG9h4%#OuZbI3%5v4(|z@~V%fa~X{lk->rUJC<) ze0d-E*w`8blS^y)pcF@$i+S&MZZ;SCFblBFzYJC62Z+N7U!amE{R&0LA%_p!3)6`o z=N5AjgGaqHAHuO<%x9+f(V~b4v4<6OKn+*IcxxXJ!(Y=0n;O;)d;9U0lXu z{YI}f=jKYB-P_$9?m9RUEBD?1OFx=9svaG+D9Axx;C|k)xL8Zw-q@R4)O-=`7{51| zOI;JKq3w^RcWD;hw7UN(x%A6?OWfZ6_BKNu7uH&X__eft!R>HkkV;JNI>=49r}=O# zXtwLs;*l_oI-H)%IKK*hM|_M~6I)eS-1H{J9&gRB9j_MUYOeev%?&;^9siyPQgZox zy9Xa(0cele**^^FcuO7Oa7(pHiHdCIz%jP#Tug|Un6*3gF3s*v{=BRdSmO`GH+zL# zqP+LzmAP^Sro<^ur-*;hi%%$C+qG=Kjxohi+^puz+&Y%cuH^a__W!Q=&?s0EJpHg7 z&du>Rx{%6P2JlB>8Et_b)td4<2A|}IXId@ zUC6&!6AUNM)V*bx!xua?{xok+#kqmGlT0d&uuVvnFUngV!uMRV%PDw|?*>BxBRq-0 zo6syI+DRZPC%O}LK}BWmOXhWJ$!F8S#>G&;BNMSExzVXJM*_;Wt6kes?9<-aPVUc*|#At4P-c4b$Jlm&?k-^hCg*dU%e{(}VBZu4B^QD>-#HLE|4lvNz+VwR z1K|#B^iqBPWk)FQ>{Ul4cUWV=FOBpg$Lj;ec|V1Me*MoulVpw z7F*hr%EAVQZdGtqlPV#h{`TQe<4@!2@#0c(BSUC$wYa;e{T*dwaLg?T zO!=`9RI$FilLLkgX2UOv!pOSEy|dLEkB4pJqP5QBeN%NXOMio9$eI+0IlZKZOl;Dj zV22L&=7x2d&~F&}`C8Y*>r=_Pbezu&Zd1s7r^YXjk09hMmRUzugtrLq@{m8s+Bx z{#-J`DK0vGYK@Pvtxq1d9xm#PJzfbq`M1I^cD#Eq+oh!g+1y;OyYa`oQgdrQD*mK5 zMs{OqbP;Kuhh%$Y-Oq%F1reTa*WwS?>EqR#+lL!r%e06Y2iv%on2uSowtvj%*645T za-;20U)JZ>qcojssJ*C@Sk12ZqbgGfjQ>j6&B54{W%?3hZo6&uU+V~wI>?t{!ZE{IEdK6-FXL!;qrGc(sU`_V36$rnG?W@cm zA*~Y*VX5!%Ig>-M?%rzLBALY*E6r0pkL)i zw)qDvPPcinCLaq^-f=L8)=-Jr7lgXio+ANnw1@s@fL}v{bzG5GTGN7e!dD^>W`w80 zac;7zSpk3%QVzn@razefax;?HnpX2t3uCAacdH>oP9I-RMO=X63#xqJWsR?wQB|UE zc96DUn5d8FUe`HKfQP>-u<3iQ_f!)E()c1ccoQ*6p}<0w{b#vOtHij8eeCZ{GXjJd zfM`wQ$i?V$Q_WTqZ|xugk{f^vtl$)M<<>~1jpRx}4+j1NGQ|Od^w^a*M)GrGkyqd? zQZdR;f}=F26qB-HAs2)n@{CX9SNb^cW@7rtCbLsxK&Y+;Boi#nT(MVN7Ux_i6evPc zS{Lijv!Y%$)86P*;aImCb1!SW83ww2swc}y1O)m)6dxA(8R&ti?ilg1?x6&(Jhjgu z$`ZVzhlvZTD9b(d^ws+Yp)fxKM-aGI?+F{a`UR53V4UokN=IdVkvBEF0f0V0k~ZfT zX-!t4o<1Vy6ZfuIJ&*D0XskBKmP*Y|KL|D{Msh=2@=Wjx=BLbMDVC}3Lh9xcClWsu zDVbp1W4hSHbEJ$!qi>cJb@LPp4Pn2%#Gr=HTrFRrZ&&vo&9oOmuXo+1N5nF<4==^K z;*qQWFx#$&1t-T01=nZgx%@#1-@+~y4Ue|pr4BX`b4T}%($~_5r9Ijcwu56e{{;fh zdqzi{Ebw<7oO5ojS<`9nc=`Fw&ddybeKU7%@D!(OSHi(z*vqM~o#o@f@_t|Sq0Lz5 z7~hq3#vIkUi$WqD#cLK`1u0IKtBDk)_Z`y9cTA1&&4Jv_wXTJt-#eBVNiDH^yAKi~ z@i`y0CWt0|y}eh|wL31BS)Kf%>c$>2n#=nYHc_8x#B4-M6dOxeBjCzcLUc`<~s{vlOU3EwJ#O5OTOW zPk}T^Zf-maL3e|M60e1LEqeb(HCJ5bW^M5Yyr~j6Gr*Yuz@L>B$|<){eYH^OT8lun zZXX>HiXUzA@T27>Sxdj(Y9D3Df#bE&pOKa-hGw86Lhoo^R_oiV$4 z1CEJ7->X$F_i8kw=LklrSSbZ`hQNvi9jU~LtOF;Yjk;~Mtsab)?FiI@najq7fux17 zV0ZPC&f_v39eqP72P-SgfsHs2tv1Y<|G6sZyR+40BdVUs_wx1qTE@tv!rcs5MrJrO zlXgjS5HPu}4IYr^wgF@b6z?X+!|(6S;Eo zBgioaQ1|3zTzDi$-**$T#Kxy57?cpe(kuQz`0**_7Ec%ZnO-{!Svwo-rIJ4R(v(&e z8$P~zm?z37|BHOb{NU3hcjW|pU3%_$d)sO={-cfUdpGvx!(J^qAeqU!Y{HTJk%iMxgny##&O-Z$Ci6!2jT2Z;4= zEpQc0FFM)UoTu4FbtOa%H3r7{BjS&l`t%*$!}NtWH<@YTS_v_bFQe-%%&+)(eP%EE zWnmG2?W*p{1`~!JFBcsT_3IvOZ-p>zA}2$0&DB{-`b-vH=V(`5H*SxSJ9~5_7QZ{r zze(oGONP zr)Nc)!cR+~vz`Aw?0}URm|mf^&5wQ4CdVSB#MM;2&c~&OSBF*;w;HLwpHh-1p-=(+ zpTTg$(P?I1V!dfa!84>2Q^Eevy_aEUQB9Ddtd?l{5%&g2J?cF}Ksg&WK$3UIUB?VA z(mDKTP8R6x?d0I=80j;e9JR3D{A z%`gndOtMc8Lg3iwWf_OE3Xh!_%pMDaQMn+R*`Ul~`RCzJ{|2t~2?P4toz-z**^EQ} z4_ZYNFucN&*0IVCQRS@Mz{dAOnKS8d;ai!1|wJ zGJ_l&kykvvvb=YtBA=R*Md1DbkHQ?&KS~kph=t}<9Nia@0NMUYf$*g0(D!dOQ9QIpWQBH=s`t8FJ>CA zL2MR3SUT2O0Qp$}iIw7C73E=*n%8)J5cP_0V4RWqecx}06CU9mf%Ff541?1teQ9gj zkHb#pg)b{iKtxwTzAjH1$d~wJ`gNNh-TCE9<}|um75_uC-m8)*^|wa4Yx8Yeb@M% z?H<>D_Q)NQ?y>I4(xPs{@p#bpTv)>4X8G*#=4^mq!qFc8!NHC2n8W?U{h6cwq6D3b z|6FxC;h>?>`G?25>zfS}CUT_D4U4&Ub=`Y1x+Hfx?s$*JEXqO|aF4ns>?~5(Tu-*t zb;lP=wt%W^G`LVYp=gp-rXotPM 
z9k)v#S4$=pJuCOEl8_;5XRxlW@)=01Ei^ia^{SG8JZuNTo+^H*a|L@=4$0%pXFtq` z1rmw=(jJn0=__^0PXnFLH#vlh*!`!3zO(T(YzgJ!r{|W6N2Z}mnkeBjYP94CqXhsZ zZF0NKXCy42@SSKER&2xi`h8Wfh^Hy+ID8b+dxOOzp4aIqI&j5ZaUVxBFemd|v^;a` z&H$9ZoQ_e)n@#@)!fhCAKSc>N02;~#Vh`hnalv{1)eKEu0q$g1TLO zTQDVz=$p0@sgS5EH@KL>2EMMkwb!fng*VH}#|KiejE1w}fH*h>To}zS?IxImJ2Wud zw+2nh%RH(COSoqvT1|Oa`$`=Al^QD&acp-MDU(^gy9VM^r}VIL#2!`dy|aoNPk57XmOmmMwNSe;sw z4RAw`>AwV@;Q$0@U!KWMy3`%U%td^Jd(cr;j8Op-4hHOd67h*F5a%J0(yY8>eZ1%N z!s|EbKwd|Re4B-=q`Ncgn7pa5cw1F5nnQ&hi1sYz0M3)oJ~y+lgy4RM3)mVcj>|mi z&i++)L4b=B;#7(2Hnel(NqOfHe***#7lvfucwc-#sauK*an%QW>sJIoeJElvy}@_t zW{)gY=Pb0P`_3x@lOW@Al|_h;Ns0

GHy#3lU&pg=J==b5uxkP||a%T^P zxOjOD~XGu5=722d-hoggqA@{Wx-4)lO#&!=Q@+g*mxUy=ba^l zY?68*2yA7Xt145~neaXd!ys%-@`zxl1rE6>N1N!g6%e)xkM4<5qms-BshH<>32Ha8CXN9Dm` z^<=aE;KBaECvo74G9QoIGtJ@dAj?d5wqtW&n5s1!zxL+YJ2#F$`D8N;<;tn%{^o8d z{r{!}||gb!|Reg99Myn7v1 z7NFS>qu%)Pb6pVeU~DF%(X9u2gP~npoZUYd^^Qy!K@ey+m;|VfCzic$MXJ+`_mA?q zxuhA+kKnWs{MNtl`LJ8q2 z4k8MqB!Pr3)FN&LjfOvSD*nnZJr{b)dqbi1qx<^{>+^AQ=Fk4<$J@Kvm%nu7yFa`; z7*;>~i}cbL-uQ3+&(|)UPv&NmP*sybesp-eyc+00$g2AAom)W|AM9kk@u;_N-z^`% z^x_3)A1$3(a!ScY#57u8pZoOo_JvDxJ6pMwl>mOQxgR%!w_m>`lkiXe`28dfbf~?z z#`soCtCCCWB8}EUAl0C6+D%!cR?mcz1bN2@;9P|$OjTJTME0&!fwIojy?2P17p1jM zC?TbA%ys513S!`*s2Yt%R+dmFGCHq}Q$jlLob}FeQ5Z>7dZ(1gvMdNx7zGGimX;bc zJJXrurBB&|MZ~hq!LyK}C^BojF{U-sCSWZk6A)mWM9#U|+7vhAJj<_!w8&J&MB?Db=f2rLX!jM5fF{FSy?*fM$(w? zwwtZi-p)>Ct0a!oEU)rR5PEORY@(GWpd<*AD9VZyh-W$tQ>B$P-Z{`xNvQ;dT9=g_ zrnyq8F2D3(rIPIJ6aq#RNX~hl#{K|!u2X`ijpJNH;sUTn>+3^gdJ~v#=u_kdp62zO z?CA*1wSnp+Fk(vbPQ%bXi+-yI3FFiIL)}Pr;w-2?0D-1Y8c+!7Q8IX%K7UUD-UBcS z670b{mP9oUWxTi6F(4s`8e&JRO;YviLk4!!yd3L{Gu9f5BnX8Lw0Gbg1d2>$g_O=z zDv0M-*FZ?{sDd~O0F<>LP#NznH>`Xq7;(Q zGm`*fNr1I{msy}7K@7~qEKE3)1P#Gig|NrwK@iMH+>aXI4d_Ng7z8NG<1Iuhq0|_v z`31k1?`%x6g4^A>?|u7GUb%E)_V#v`=90rBJlfwkrPVUn-!041Q3&90xHkfON{aZo z%PFc>k>TU*(Uq4j?SAmMsO-^k@!dbW)d`!=Up_sz6z?4z9UZ_2Kf04NlPfP>xO08y z_T&Dg=UNZ{?EUr2>-Rsre!NpjE&7A>^)Ids_Iq1*_ZwObkFtOw>u7O(<#=;v`_bg= zOV0?IPX^=7MsfdHRoHV$AoOgC=tzV!0)jrTV1-5VY3 z!o7Qw*WW(Byf%OT3APGCJ-5_e?1rWBgkm^xozmXEdz@!wGYW0)PoL>-9*#@v zDzHd83HAMry_JPV)JQhAj$O%s^xCtlAKl#riSsk9)R^MnR}m^443Y${dc5Du=MMu^%?+nM*5kYn@0&qe;2>WVewdZ-43S z<;Cvt!QkAbbHDVzxcuEe{`mj-pMS8h03h+~snhrGAK!g=d~d5a9Ata@#jQv0wYtrf zrS>E{?zA~=jN7HS8m^VxPEbE`^mxMC+Xsfs7gno0*}T4qHM%^wXJuwH8}%8 z5C+ma51zbdsbu{F2n^nNLNcZC&P&Nk2=CcCtAupc1A*3oKn5akUI@`_>8dhPP^iPK z$O+I{i|i}ofISd;>m70w2gcaSl*|z!N=2RStaH{n>x~J*pfnYG?=2u=o|OO~m58Fy zl*SmFWw{OmV4P%WoJ5GENN3qZ2eA+$OY>}!$4NxY#`~lZ0yt}Ykx%SVK|)kzJ~KOm zLT>H#!11a1`B;f&(nLaF&p?riK>--fi_DRb3d=lCBsG$R8C9SGYR!wa)<#Llh0#h# zfrJ<;MdZ9Gqh@4^LJ|_V;pk9{Ws?`#ICqu>5lV#~N?V1I5=fP)fCL~hOn1!ba__yX zPmq&ioT2s;;YkuuJtmM(7V`SYO1{Z<#~cB z;WTstxQ@-_no9--kKmp2-Z6ndu-;nZyt9A^-XI|%GBJa2{%f*YYDtWB&BVge+U(+Lpo1`of*|nhNgy(!ASo3P zNv)++LJ$!mlhTrih}a_{v$d`sb6Q9pghEL6;CUL)bTUOfoLG>-vmiv2%pintNB~Gg z9$08_-q)N12~oElIO7o!!J{BVuIoQZAR)1Ho}KedvWNp)F^4VVik_>Un8`N6PSKkj zJ~>*xbjC~$We8QN(nkl7A!Fbk+s%jh;l4FUangOfv6nOw&(ZiWJ=h(Qhej-95Ptdf zXRqJgJ3c(>E=8}sekKI_qi^3GPTYRqi-x&&b?4rl!L2(Vg``nR>w|+Qc{U?PS(&s0 zA(KD)y{ib^o{z_Q)*GjU@XbHDedW2MGiO!;mHgn_A76Q6;pl@MljT~7=igWx>>pov z{rth>!_7wr>tGpaesx8vB#QCcwOgYJeE#j#^UtkRd9}MgYIVX#+mk9YOUq4myt3Rq zyS|dOwF7^AFu8TZy#7k_;pSv-XBc)o34ZqSLehw{EWh;n+@JjZ&2i~kjn1$CgKvXs z1BOzI)wOwBR{j2Hel9-W5kgUz;Jx>5>V~@d!w+UwPpzJxx%}3p58i#evz_(FS(!tT z!(adFU)n!7{^+Cow{C3pj$dmnEMj_otg&VBOf=Gy5-QJ{*0wbP5g@jrZjaUouqi_=kY zSkkZYhy=I^zQfXf9Y#4G&;?H^KX8TSpf1t4ZryMS6}V*2FXmcd=}2EE>0@` z^LIY!&dlH2zL!plD2`C+M4>gfwLO%fjAAt!P6qvAJQ`*;{lRyy&!3sI-go8&NO^j` zIhuH3eLpLmvz;UwmJn*`oRi4Cej)4gqRz>!H_)0B^v>%jVDABl9h9arW$D8}OCN|SrK5|h(~tJo{l4Ptz|gVRzi+O<94SJ zX|=X|>ac&1ra_+OMOnp3cQ`7#omSFTg~$Fl6|KfhD|FU4$69Gb0uSshGMUooNVXb@ zcdn{Rkj7S}kg}OHtHNQ6GUKo^);S_jx?bg-CJd0IK=4i}t%Jl`HyDgOJ79L!X)Ot$ z^5DQ5V~i=Fme$sKN9UP&T0!L)JUDBF76c58wvJIf3G$z+aO!2h9$_c?{#lYn-!! 
z6e0>CjjcR`Km;Imj%jK<^Xvf08B0PMwo@Sul5>`b9Ds992OvK%r4$kn2+=r?LI8sh66=>Xm3Ts(6fFp`&NUT+5xle0IY)$aLUx}N zd9eo)pCu(R0|E&lJu*$r5w_NfPK8~-D5ia3$e!y0)D!BR1fmp$DFQXoF=&_w<;hHt z#$c4ba+`1+K2m zKYG$Lh5qgj?*7WJytT2hKR1({T0M2^#{J18yYzhb{h##vqx8q$`pNZ=A13n{?v?}m~lF3b?1Bg?6JY z=8MlfdayqlmF%O%rFL4DgI<*vRXKpQ1^jFO^DjJqWhtU; zmT!Iig=XNd-@G%^g}1+a>HFWgx_5kZ=Imnk)XJyt-@p37vLEzPLd7IaVSBw)GJ})| zj3fbky|7r4RPRB|$l%kgB7s^50?AtIt%V4@w@@-M)!+I$2#l$SNFaLWNfTEd0fQ*4 ze^AP*3L+K9Q5b3|rIVs6OZ47?ugZ$O&(bsu0_(i@*8l&h`mbQgvg=F}Tx;#Uk8pXl z7NH{}A|oYIK!YGa5(FrUMY6ioOjTER&Dhvi33m93@GT#xapHk@n(=urut|<7yN|gj(q$O41~) z24tfRi}Xg>!t|6q$8uMl}bP|I!R-vCg+MQ4}l{c1zp&}+9ITtB#z=Dvsspn&-h#hWl4k; zkrxP#Noin(pa4jKJ#z_HC5^_n=C9|U@V9LDh%GUcO?{>eXvppY6;AW6zQ?fMZ`e_ zNoi7%Vg^#$L?(n_3yXqED^kknL=h<@CSdV^5JCvflcIWalE#q;L_{6yG0aXN1g|t{ zK{Oi6Y_OEjW<2>GeG)_{FTTPeN-GhxzKFDjlOs`?YX$&@P}ccsZ2(AERI1srDmirG zl|~j}VI&Y{L?J{$Rscw%kWr9G2$Z0(w>|{N9UYET7;wHT320^F%9KfR+dk+W_L6Fy z3(hl}^ahA=2yALqBK7dr=4b>5hZ&iy)deIo(Tt_)3O5R3FP@&OrS%6JTVkr2>o*(m z`s(c4Z{ELqf0PwR2S-Cwt*l>I`QY81!$TQ%AvR`br;m}wxLBI5A9nh+x;Z-PMirH$ z>fyc3Zg24PbE}ouKlS3`^vuGox2{diKy5NgQaiUYdHTY#%ktyBR{Ov=CW_^Ai*ATp zk9M!VeAN)>O#SS=+sDVF^OqO;$M%2z*FSpV((2?)_0F9~SFWyn@ZsTTWJh_Zr|Rfn z^x*F1`DbQo&E(7zQ=418FMVZcc5cG=+}&HdJ3HN5*WEXM?_BG_=!vUmE49wU2iYL! z&FvlucRGVKmMqWCo}Hatt+$T%iQ>HQr_ZhC!(8&B$cAT5tu>R%pxaKGden7k3@^TL zs(ZM%ufm0MlY1MFn+Z-YRi1h7{IC6r{Ka3qgC;sUZbw@Cf{Pq%!NComyLxeJb8k56 zCkCe{;o_yWY6Ks=cjM zPMxVX>eVETCgvKapO{}&cA|HHrfz1Hz?(1Y*%r*GGF zk~Wi3VzUwVx_NgvN+UI0Pq+7v^Gs%%dT2uco|&%d(za)!h*^LcM6^;RFNK3JFb9u< zz`!Kn10t}3ey^vD)?~T@|NCD|s`;dnaj5gIIF|m#l zofm}y86v7ytA+Q>kRl6wb#{?Adze;czfjjew+9O>^-32Yb!g zw9`4RR;q%m6*zAdk_9Hxqv6Q=;Ju5Y+GsSe)-j7t(l|n7-R}+nl%ytb=nqF}b0VEB{D=nP*&xX#loRfy2!+;{FVqaX=Guo0g#mdphn_yrh>v!@~nXc1B3v;7@RPB zW?&2~9CFq&Djv?xM+Tx`y9BwZH^j%?o$Zi~jG<(#H%uem?Z}03Lw2n9w?zv|xw?Epp4tDlN z1E+%cJ9iG+9p^HYRO5QRw%;#?nT5j$H0BzBx!^{rJwt=_i&L-U?hDJg%V+Q2 z>Rh{ZKOy|$ug~>+-QW$A+uAywn5en1%W3bVF_4-}OuNx8J^3czx^Ekv2)x zz>lwQl9%83jWcJ@He8VD$wuLEW=eIAvdfnnm1OqZg~dvI>;Ao+sk!>f>2w5=RO=sI z+dqGP_uydo@Xj_X^ZFaNX6L4E-06^I5c15eek+R1?D8bAd;f!v(<;66!quPrVs1bG!I@8S>`4J*7&!zkA5s7G03WZ=i zy_FC|E2WT_1BXx+BryvLvUQ{tA!s0tkw)Uu<$w?y9hulVTRI6DfI|S0G>(DA+9I$| z(^wl#nu@~eSQ(>qq^)(R$dD0)My)m+=8Z-pA6XMoWReiVXqY9HN))LyNx`vLK_Ns` zpiE>$Ac>M9x1dQWWZ`bNSFKlA8~_jc1yng1j*QaXy5FnIgkjx&4gxQ54f^igU6PK3-MF5D##Hf%tl((Oac(xttgIe6!+DyQWjWzR4PX{CgKnF9oZvXlm) z9P=tvYP`J?VtJPp;*#$s0f;1oK;^t12sMyG2|*x02n^0U=R+P`A;J0Jf^*D)*$G`*`wOXB@T%PlbbInud zX6}9bV191l+V$4shkZqGu#=Cxx_oy2=A%}t)5{Ax(NG#VO5wSu7S3LnefVHE-h=Mo zXl;F-6+XOwe|oCYIyf5kY@|h)=N4x_^Z8mfbXl+0?KBnA#phQ(czw&5YT8gsrxscp zy^r2~aO>vLs0$0L((V?w@A=s}{OE75edgse_iyi`iBByz9z5tx&Llgw`|_uk*3Qn^ z(MTnF(CR+%+}T?nJ)E3KCnlON{>o_+Yr*i~=3bVAFg||ND+*4dWM}h0MYz0DTV1Ly z%*Wf0dPx#pzP!4z_4wU)ZcauxEc$CJ6Ysx$y-}O}@cMpIg`?f>xzEo&`Q+TA-9A># z+}cboMHn8>uUDTsb!LAj|JI+p+v??a?ra>kZwHua@ZX-C-mKiTfzBL=jST5 zWOZ@+&9`raJkJaB=+SU)KJH|r(Y?X2FB20m%m(Wh&ri=xZr!@`>QCN(`kD34aj~>K z|K=;#rtUp{^l)=;XIP(XI1Q^S(}fhPr>dLxcVGI_)uWx=z2k1$#D#_G?Yo^K55r;K zdahKlGErZgi1!aWn)vI#_UYbeRH>Sw4O^RsufO@;Pk;Qesidu=QM(AeLbO(kv&}Rz zt={o-&z{-YYIVE02cBH44AI#`s{w$SH7EcG!jaOkg`!xCWcI`o%3}~fjx-#-c>3QLt+bSB2(mfV6uUc zWH1`VNmQw%S(XP5jfpw{GD~n$No!eN6uGr7qY6e*{E5tw=S7r8z(GKck9(2Pje4z7 zPqVzpUBS#zVm!EP#A%waigZOZ`0aKl(llSIq)8?3_iMF8g2V|5Kw?tQ6vc61t|k#B zi4)NXB$6aCC9(4^bF2)_Og2LZ{cI>qqOh<|8M4-8qtWc#q_v4_A3~)n0wz)wOah2! 
z@eF{J7Y<1y5|^u}V0p$B2}&EV*_iNvI5voqhH-A| z!V~i6xZ~yTj013-0^m=SBl+a8KmMb9;tz!}VKgv1SBy(>1%br_qi{*n|ogbm$A3(3PB%i=DaNm8*J{qbs`Lkfy#CO6;+LBP_Yndt&pO$(eyANJW8v| zR4W-^T%GcRPEv_9;>cSma$Ttud9nTQaOGTM;lhfJ5DkM;{o})$nKp^Law$IZ%%!8f z?DZf0V*UK--~H!b-h8s|y~xlr&z`C_qlfplZ+v*WQjhyRKh;QjBY~jWd2!{^ z;z4VW4f0g$xdvRlGUcS0Xhh9Ps#Lf!Nh0v}>z(!W`25AWrNzrd#=`;M`shG=Sv)Fl_j_h9Q@_U@1FuUuHV`poI?ef#142WVXzd>jJDpP!nRnm1m2;SzpSy8;zYwWJ z2O}1++wJMOnZEO#z0R$>M*;N{Ppt19Jlxpo>ooFCYSn71-D))Jw;vp}wvU?Awc#j# z^st2@!@koJSXg-{fPny%0t9rW2ZKR?y$AxZh&H5@QAEt**;i|ccmeWA3PAe(K@umy zdj!y^M3*NyM8qHgkrA-h>qSPF-f-`12tr7aR8bUZQY1lquq>=l zTbBpNMeega=McOOQ3MjW$XwORABB8MkZ3m=%TRNC;<+I^*~uZQW_%z zF8IBpmX=U63tA+un2>ws5NT9On<#cx5K$Rp8Vw&fFC2!TG#OIHkX9J1HAayF>t&eb zX;ht>n9QxW926pv3W1F=_2%SQ5D0;Y0%Bnuq8??VyvPZa2nJ@YwUnmW5CDl3SVuG>6J_Ze!HuLca0LS!x_&Ypk`8%UtK8XjMz(dPzVx0ID78E#{{^H+R>;*pA zcE&m56Ei6wp)w}6>_u2Bje7hDqjm5;1YbI2kj9h|1QG!7!2^+z079q(2QI88Wub_Z z#26EhQISUDe2LLStqc)GD&|na_e(j5kO+uE;PPCrKmh@ZBx!=BRo4c907zIy4VcRh zmy%dcEW#X|DrXh!MVL7_=e={G$e2C*!aM7oWe5@+ig@pdfPLYM%!OPexUz3R#0APN zgn(Mr$RsMNkk-z!Hfa=9j52AZ$>3C!AQ>=>z7uJOr2VK@ovKMOF+4mX5SxvHcM}Vz zz%;}0R<)J_Q`Q=I$4R5=F(i%J^y17!BOGjvY?jT=)X~HAbP}i7-9H8a_FzG6?R8g^ zMy(OAF3p}k)p+H*A3XoL3!kah|LU7>jtaVaXE%ay^+IKCp}Kx%`ux@T=45>LqX&1c zKX~fd#pN^S@?IyYm?xgRB$993-DUd$z9CqAuFE;v}EUi{;&U>5t z7cVdEZgwUn;z~8TaCvECw|DXC*(*=a>}-!VHbzff)_?z>ym0Q^bTMpiZr=XlSJrOa zI>>vS`v-eZJ~Mmq((>e#IyyX_nywl%r8#WC*TkM z;S)dm+dB`px~sF%#3Y_s!kZtp8Z~(GnVE%^%J;wf;hme?4ECkZuAW+7-h8~9R-?~+ zX63n;X>q;zqrbTI<=^nkqN20wjl;uE5=Vfe^QRVw z)PASe>GwNFM-!9vK|i~G`@!Vo8^tO&W*=B3lh#NlG<;CR$?XbI@<=(`wyFXf6 zox}>h_?fHU-uP*Z^#1#IW*fDo#i_@S4~DHHjimnbS3Y=fd+XfU z#Rs=~_1ku2i3W@=WXa`2CL`^fOLUTdU~30}CTi5#7JF zRf*zSO)s5UC3V>9jB0W1*S_|ffAJ^Z&Vu@{|K6|v>0f@=IiQFKWHkaPO(8HT0l-o! zE$dJN3V2aOrT!0qN-1G>>`N6}7?ZeSt&L425C)@x(K;VlQivWT0BCg9lR{x|-f2(( zK8m6&D_F#P_RgCq)>^!?fRMy#Y+?u;8KV^yg^yJ#0NHR5#}(_dPPZcfk%>Z(JS$9W zY+h)kSU?;OvJ4QCC<+`%p$G+G=RBg=VP37(SopAYScwePlc{F&*md^XoylrcnVU^a z;zl`8s6+_>#<9^Ty}Pd6Yq;kR0#q^0D&!7>zwz_)CeU~#-nvdotAap zg^)y1Y>ar%1WJ?5Ef_;c5JGSkVN6mJpc9}o3X1a;3CN-eIRFL+0vs$03j~DVy<@iC<-*?Pg)a*4 zeehOVblrWVyv83pJCv{E`! 
zw(v=%W}Wlanp)Kbn_FK^V-~C9G#lp5n*8Wdr4doJS_#vJ`$ic*N|Yf8d8MANuFoIr z4YnV&j$6a^v(wY-v$|Q|zPmZKFM8Qvuhyh$H6mmIWQ5K8 z+l!}{ul-`1RCMb6e6UB=DkOCX;-{z0?K{0F-u>w9gEMDO&91Dio|_YK z)^-ox+)10=rM1aB_l{HyN=J8Y?$6Cmbz0d}Et+4CmrkAOc1DkG?w!9ltI3c0!+xhf zH9z^_#O}4sOU_gow+Fk2}5Q-7N z$akWi6L`3}H|Q1QAx`6dQGD~8zxadSdUkdpa)aIT7nZFvMFx)_AO7LL_|^aPN8c9b zUi$3P^xV|)*_GMZ=E3IHu&N*39!k)nlUg<>(hbZjrZ>NyCd|8(Xg~Ov2&Oej+ppg{rCU!Z~p3afp%of(&@!dUApkb z@?tfuzVXqWC!f9e;oG-D#w4Uk6NrpcIWqRA8Ks$pxV#ZCu~G(^h0%W^9QzOyf(XZv z%0?M81Bfjg2jKv56d?lpptUOdyAhB*fF_qYrKre!U`7H2$#Yw{qF$@UQOqGY>vCIY zqm|Mu!4|5xI(-D?yzOSrYpv2m*#d%#Vl6?CgxN(}4+bNp2!c1p#3n|7Y&7Zz z51>l13s{1;&3cV+T=$wLrkbWnDiDMWRnnL(i7>bjDH`S$QIMcosTmb}$3(g;FHwpJ zkV6RGciSz^E-K}m(*8*r%aCfBeEqwDtpCjMgX6Vc0Dl?$f|C`zj0DIDe6E!G$NK>S z35+>FpTq%970M1UZo zl#YyH2)@WA02QF32$d;cY)WfgdiV)SBr}!Y0B=P|p(aEW;J^YdcrW4sd>FJ3|BPoJ}AaS)am#BLmZMTkc>$dwNf9B$H zVEv)gQLUIbW_hxP5L5eL=6g^L`F4dqL*G?f94C9*G^A<{Q6Hf zx3(o++uhtc|I}i?SKNI6g9i_LI)*2od}4WN`ul(KYWs+XgM4%2<~M%p>Cb(Boh9!M zi~DzWZ{Ksn9^ZZM?%qykVPQ%|lU1(lZDdM?)wIa`16(^r1*x7XkK$!=&% zbBg-;;jjMs)w$_rJ&j-f@>9oqouj@Chl9&cEp2We%&)>rzxwR*OHXFE_8#56bNAul z#ztrD?EJ!HeD~3g3!G@Pyfj^(spO<)AllsQO-z_h$4yRB#zG9U3zPZC9Ul*(I4OL= z;-5G@#UApam}oY?`K{|`&du-a9i6*y`U_vZglh2h-+zWstcBijUaN(;uJ(8L??34L z%CBG8*lgdwe{kG6{{4Ua`9}{Q{O|`KPfkrOEzE9gAH4hF!|tIhuc&YR zC~TpW8RZ#qKxH^9kPui%0|1VO1relalI7MFd74&Yt(~=6B}7nIyR@<*9Ei~6d7M@u zN*sU_s5hE^G^kc8`6w5`*pwZHi6A8r1OQTGeaLN=J6Ee#l_D1O&LwdKNLg0&Mp?by z^a7EN960YB8bgSn2v{Ie#-ImLp1cKA5`=V;YGXVHF3C6uM1%lIT0uKt2PYw}aeFOG zc>};t*5UC4S5Der%C(#D6PG@mxLn|u`QCD$7+(d(yw5UBfF#1>8AFMf_@xR|zDV$w z#vml6w9yf}+!saQ5PWH*!Lfn0EI6QXwu?EGw~UgoF9<+LsLQJiGlXEBbwsFKVUj9o z5h1Ma_4LamO}v?2*n2<9E~3K%prcqx%p%4_CQ0MCBBV(fQW}Izk}{wVsFWe4%cg5>j1S&9XX3;aRuDrdL_`}+ zWTI&N^(&?0IEi)K&-!!5R2%i-aAcy`+l-wDP}b7Lr@kclfhOxiM{M7YhROT{_}m`% z*cS@@=`$0BQ=@^KKebSqZnhsjN>VCDcHvAF0^a$+R+HB3!tD8H)*PnyZ|~%J@tK$E zherePwl-N6#9MbBRw`;^qcy)c?*yi1DoeQUe5&js!b9r$6`1yKjAeq5+e0OAj6%ef;|M=U-S-O*ywrD-rF1G)Os}Z-+t$% zr_MB&($-FU!Yvv1| zec}9tXTI~@zwNdAk2iX8RV^OT9m zPjy{Cw+JH6Yx`z+zOlYM;iPPYfuI;<>B9Bg-{r)q#^ z=P3J?PoJ5Ynf~7QKWbLtbDuf8_4uIO5BIkoO;6VX!cpJWOl^9$`Qi1CJXYR&d*e5M z_aD9c{zo^iKkSeENB3@b2l=Hd=MVNCzq_|<$XLfN&omO2^+|z*2(&0~eFy=V0ZD6O z5mLkO0C+{Hm5#NtP9#VfT=m`q0V0-U7;!eR7Zm4wwUVZl zq(2y?X-r5Qc$j6Q(a<}ON`=6rFp8oO?D28I%t@RWqXdGEh{u(a0unG%D9jv~DM^j; znh1qGD$>e8;De(Ui%d;6F^-D`4UFtr0utz0 z`2Y$v2UkXxqy+VHKoKED#*k8lvq$Y#9LM4vkP_j+Xc$LPHBClEh5(f~A|~r?oJKsz zgJ56=f;iSb|Ok2Doaj8`xC5Bsm^?| zRh;CK7zCx!JB{PHewasy#uMDJXdXW|1YMF13$Qc4jP zp>W=d1QVO_d|JxLbpR0%4h*1(hy+;>ya)m-r9pgfIWQTcl~KY0f&c_mN)vdO7g-j( z^{!wE%+BXo2rQnNrA(j+m(yASU{nf#ggJO*P)g}2icvw}07yiXm;^{6Xwn8WMR5eE zP*Dg%+K`HZfQhs(Un@iySb#t&X3%j2ib_u^D9~{%BH1Vp!Nt`S6oC*b6eP!-07tRO z^CAR?m=J+N4$gX4_&ju5eOpXcOdX`3bq}!^WZjlhg9zxjoiRh6;nJyfmZ<1i#lql{ zW(Cn#CZgulypuh!3An!kDT&MQA|U%WCc zHoW=vwzp8N8PD!;=g^4-ow8Otk9Mmg_uBXGe(|?nY);6jbF0Rvt?Qe~?o_>6Sy-HU zaD9`6;z_f$^WgH6r;C8|>uZ{lRzLsVcV64vEZD>3+QUXo_ebD|YHRy=dmj#3kAP)< zc4n$s@pY(H&42j!uQn2Wck^ys3*Y$0)sSr+w~sGeTGfhBW7aFAfXTV3H{N_Ow=$VD zlZ}n`FMfVJKLe{Psn(V0)%nLC+^^f(@lktk>)^sO=a$Y+Pc2ODK5SjTy|;F%IkPZb zTUhBl+VlO~RORf2ncMgJhljnYP9N@dpPWi|4>oK+-+wgXM8Eyc_2<8E>GXwV?F-6# zhdVo`ECR55Kk1`?X&7`CmJ?wz4=gx$>8P_U2c=T5R0jYJNVKlR?Y1hVIY5{Z6f}UVeG$vtPJq zTYEP?>W>D{HA64K=e9eNsf$yQPNQn0UY&h-&~i8LPRz}&pIaNYTU#6V&z+j-b`J8v z5EVJk^NSO00?jPQLYg>OtrQV>=Z#k03x~i8@nrER^D{`=1z&cU0|RH>6+HN`-ipEfT(Jc3J9VXmV#}`EDN5AG?OYUGcsZz1oVzQPc@ry zoCr`R5I7Xp`Vd?Q-i2BvB}F6ayb}Pho>gq1a9KVa4tmoZ$V8D=ipcxI=B_+bJLiK0 z4+s#rTzAG7t`nDMnf{d%RR+g!&hj6BIS_y`!so=~dGa8E@+UGT|I7F;04zXo68t@h 
z0^pe0#v*i*AQ-Dfh=GG56eyc!NQjUWNFdP6pa_Kn2q;pdNFfCl?@x-qgcukFNo%bn zK(OKnRS1?GSr;TPdy*wM>s{{hq4$oR7bs&^5?BZ{qE=cH8BiL68|66&la7QUqfA`a zfLf74RKgM^5hA7(tOh03rcIP1Y4L86YKrS&)%{!84X^Hw4NBhd@e^(i(+C z#(d`_OP^QWShKYvjf<8%10{+mmkR?eaFcyDulq2BN0fBen2e)TJt*NWkp z3k#-F{mGyI>HJy$rQi7C55D!r#^d6@{8yj%SRooU>Zjer!SV6%)ajYUON+D9ghB7^ z>}Pq=>yPf;+}mk&cODlP&P?dE(%K#V{Kn?h%L|8{Y z{@ZJRxxO}U9Zb*Ee)z)=_xD?)0@5^DKQ+TY{<*k)gWR)5sl@5)4cJ{&A=fR4X+VRDbAK zm!`h*n_pgy@1Wj5I2KIy~5(vsAzr@c7B7j0fQiww0omW~*5LJ5agg693LLp&c8v6|e z5C};V3p?i{LrPKb-e?m>KN)oKERw_widV@h~Qbb9bQ`I!> z@-Va3x`MsvNHK%fm(E-$#ROCeK`R3$ibh2y41@|n zm3Did47h_#`S^KA_Z(prq5_2_6(rg2X3jVgXPz){ulh zVHb>5!tq2w2$9Et10euow15Z;`=Ase7-8?6&sGXRptS0@59@Oax)!BA@Rr@#?=|MrxS{fXp`v;mnUCLo9J~t1+gqdE zqdNl&81svtTYvoM#{ORC&;IzQFTQYb_39I$aP6Z)S4~ZWwP9tt*&hu*{P6Mi_TJ&q z?A7O=sx?K%slr5RkCI64^o^yjyayWQXZ$1l$;&urcJVDa?y z?f0%-dhTLctKPmgQlcg%%-Wgx-~Rf`-~Fo__rmUjN27yw{?-TGN(_SmEHA|UfnQpl zxZUjV&B z@Amq`pZ)M{)b+WQ>hmw0|G^JG_{opnb*up+qrJ7ZUXM_uT8VA$Mw!d3jTIWB7*ocuclxVO z)?fLnw{x$DISvNlXRm+sJOAJ}QPktD-FtU?XU|Uk@BX(>H>>K6H*Tyg%}mrQ7cR}b z{r2AEbo}@J;JLex2Y>yi?|1uQW`6!Z{>N`Qm;By8`h$R|>&o?Zcl&Wln1`9kx<&*YAp%GMW{$Kff;CFBh%gEZpa2j6E7GNN#5<1& znn;jYy zTmTdRPHQ!0c7=0M3@m7r>t$)Bnp|V&qDTXvFbc2>EM%gj3Sa;=2ooTa zfanlEXd} z=4{9M=)t49kqr} zfARU7uioBy+`4-|d+u|SLCjzO_3d+KruMhmZ`U8Jt~Xd0$GzTcRDX1*Rhy|Tt~BS? zItP1)Q?pa`$;RS(YJ2K$U)#R(AlDIwNA6$$kAHmbY-6gSVJm;?GiQ^!y7A7IHpW@= zi=W?Nn>1$EhlLNRY9I5?e*3@wzyGuEfA6iWz0olDQ`6Hc3o`>-^ag`bQH(}~U^K{_ zwGtTWvsFUaKRj;MqN!$ebHCl~h2Q?|3lDBRUTRLBUR}C-JpAHk&KMJ|E;eqze|Y-R z6CbwPMM&qG)2nM2KX~=V%{vFrzI5rMJ3EDQ`;Xn;#`fmj$JgH4Rvw_B|J(odXTyHh z?#gfd;WIaGZTyG-CfC`LO44}0}LQM!xn3;@X7Ntz7 z88drr6bN}ABBPjrIS>H=nn;5PGb^o}^{AE7ic2k)K^ubtgk%f~0}&O4l^}wS6u2N- zD-#(&axRDx=UfzNZ-Yt=DeQFn%u-~dtgtK`Skff2MTm@H79>DM(t;YTbu{)AVqr6_ z428fUL{Z||H7A-ymKoB8=TTu36Isns6y;fQG|Fe4Ph*~}ri0v)GR_u(*+~}ZBudhN zsy}p@b7Z}BQWUN%TsO){DUMke7%G(%5e;hAC=3d8wOUPM?JNW5Zd7>h3FCnq%_U}b zX#o{-2zhR`D(_9mfe}f7fQ*P8b14PNV1=Au3{GeUCzMVYyE5Sf{4iGT8GwkHK?G2h zI?*^J%nakGU-^uD@=PGVG=B<|Ma(DpvJ(g?B7n2b*1<)vngaB;f7!9#=Wkwl50U>CE#t=p! z2nPrpl+q>aL2G5=7;G7=FaT^_s7<7#i8Jd8R{;nCyk{06Bqrm9o$~|+hu~^uaSTl$69EfyH@mP))8Vn+dc1w_{aaU`szDEvN|LvZ(&eSBJ3z_fN)1&7MmADp3zv_yrqQS~9Oj2n zEm^v-@Z>XV-~Q9v*1>cIYD@b07tj6Z2d~Uc)fd;m4Mrcley0fCg{e71VWJUc7n_f^ z@>}c--o}}Wr=PfT`K?#(9QOgR`O5e26dvDu?_&f70B^kh z@#nw(v=(2q!>J2r4;~$U^w!opZ*GNZ^5HdTAG9AnzJVg^vo(#ZP^ziKF@N^cPc1Ap z9zWVTWGZrd5JrZ`n`4Qk5=EW%VCw051>?!{^Se9kBA^6%`}?=nSLcdOQLR>w2maX? 
z7Z#=`?mcW(Yti<8zdl*Nd1qH};)%dj=jS-){-(+N*BtHYr~q3*~gQDn4KD+!<&MMu4% z6Tj6u4h4@g`Sb6*b!usPbbssBUwmAxMAbCv^@q=1ng7aHou0)0op*n6w|g{PIX%l_Mm>A9-|zNZ z8b{rJ*xT*q7Nax~Qp1evGgIBs;PH)XOP~8foLYiG*6*yWu8sPgerxFSFq77;%~E9^ z+}c>2sgRCz2;L zwtPIu2Za+-KCpKZxFo{pG>s8ee>Cik`u%J$a-+gp=h>K;P{&cFtZ*S@B0!O%DB?6S z+OTk72Pj4>L~zzYkR(aiJEIJtisBea5g>bJF8~-A1QftH{H1h+Kx6%<1ROv~J!2Rn zIm)a6OBpSI6AKRkfZz$vtOU0KAv1Fb-Z}5RbHO?1eDJ}0A6)Rxd+&o|?*j*6VF}}& zvXb8&taE{xwANauNvgFbBBM_vc5hPJW$}{^Q;}&MXQuJW~V8OD*LK`Bb zqX>keC}9Q=Vkr?62uPp|DorXTqB2s<07^$8lmR&`3v5_KYlWJEa|qz9*IJdiIz<4& zMky3QVPQuYBsfAKfXHZoVBKiYYj+M0vTVR1gd$X;8lYNUIluR?vvp@Xs?;!^HH{@Z za7T9^O`d(RzH~{YlN2S^a&SB~-OO^?-*uGK`t4A!)$>Dcweq3>0+1LkJu_XY+TNa9 zU7!8*7oWaxW$E;}%GssG!=3ir>g2Ee{-;izoA}1BKk=2%EfP!aqm$( zip?uOxWR;1Uu;azOziI$^NS1CMbA8aPRCkDH5U8IU;M}ohb*%9aR27D&6$OGqMm%} zX;@pY1Ilq{`1qjnjo*8IW?Dw0?)pk?{p^JIMOvYiGtHUBI8C(^YPYk0{cnFVHO;01 ztLNv5!z(|zKj?5{ZgKJaT3*QRcJb4{esAZXU8%=QbB%0Z&!1lW?caU*`IpWIaP1vw z?+s=bSFgPIEXV2e{8V+aar1f`Ri!d9YpK@S8+8x!x}oV>{KVzy)%9uwxc#8Fz1bP& z0|VHoPIex)=9bnv$M|?V^m>4t%*?LcyLbHk?_C=f(3q-y@ZRQ~o1HY)^@+-@`&%~m zkutqr9vqM+U?>XjJ@kip`*_gLasfOz8g#qXIv>Yyu%A7A)Y{n|ZSJ}4cGx&5_K)1% zM}xoq_O<`ve|%-DM@y%c2YF@T?1jbCr=5?|%53oJ^Pj)+>CdjWjz;@?hcA8c!sJra zx5q1MlT-ETfA`=0?>D!4o@H@qLd8iyjoM7l*T3?mr_P;U?{wRzFRVTB)Y+Y#J2!5= z@z>w{j}LEsq*cK%EZksfvN}CcNt5u;{;QYIUzoc5#6mreHGn80U|<(iL?!3N2jLL3 z*32v{OrZo66C-mVMP)2WNZ2!y7^BMbW)ww1f(GTRLBKN_wQQBG;MJ9^O7(}Fq*$WGlY~k{43JL^; zL!2b8a7q~;959<$6Ou7h4l^6oDhRmTdSK6CByPIe*g9s^{gsM+PkpeSn4%Uk?I*OyxQA?yrmqu0gBH&37to2HpGWmrJ z1Q3LkB7vZl1`uG+!ivBcE#eh2fmdQ)oqgSb43brCGPTznCNS0)k`?{f~I0>qjV z#e-o#iexnGy9&7s6Tk!oL;u1PrzWRW=Rl2y)p)k9&8oIXkUS~bTdj?|jYi!Sq0=9Z zT=d~b`E1jF@z>97KiF?9Er0y#b?b|-{lT+8{MI$qq{XvS@4fNi`6nkD zUp;?#*tz||_8HXcmsX6Sxs?Xq+Mb<)bE}QL-QJIW^wG{n_S|QuKYIV+U;gp_6Hg~s zu3pT7uS_;RxVwM;^4bqxxmT^yqm6^rNAbadT&*`qRj-4nKQhed*4ft?j*DugmxD9v&SY9~}9sPgcJ5AHRL>nKP9Jy>soc z5AfaBK1u)^GxdDrxlmgV+Z+OdzxD7QV3Kt^AN=CA>B&iwBKSeQ&MbazzOiwC>$ugb z#n5cT3zJEHxG7d;d2j+cxS$j%M3Hf)hjU(QqqQ!z+EN2HMCCf8QF{(X#{`H3j8x)r zB?P6F4=%8fR>r7OZMM!UMP!t4aNa8-@4OiBA(ZG{LK@{kRj_Q^C zNdh2|vyL$&v2o4=k~RtiSVC@dt+fHgg5m^7p-7%NtqBN_C{_*gjG#Z`!{)8Dcc3(+}sumRH5JEs0XPX(&I|ooM|5}qFPyk}tmxE`)GH(S4 zTo&*G3yb&=h%ky{q_T6y35ZsT6c*MIqB0RF0tP~0W*x^wIt1~yU>-q9s4)mUPznJ#6ADMC&d*QP<4?a_ucY+k z)%AW4zxkIBw%ZS^3$qjS8^8A4{Ra<%3`I~p@-gk)KS)#a%*(aS`<=Icc=P_%f0jF#$NlO1NGUjR2EO4zI}IF zo5pB_fqgx(y+MBA{6a6!v&;h0qs{$-!{*M>!9kB1W@qci1MfV8fb{_th7goiGgCDY z$cDv1ySK73n+PeqO7f!D{-gw;Y`$kM@wQ_E4wOtwP9E=Y4jyVKA&gwoirqiXRhP8#y zjwh!Z82Y`|F?0Cp*Pm=XxU)K^zVNBjk9UtZAKY7>UVrwvE8Cso7k}{+O&TEpC@Aw^ zWQ@{Ch(53)Fhl|p0(%aG=tD4C$B_vFNo1^dhy+5xGDd_Pm?3!YoFf1=4uayzyBlK@ zg%B!f8khrnr4+LuQZ0!kkabQe1;R=Ji3BfYqR8d0EZ9J;kSH)~MS!3Q03a}=QS1Y= zXGAJXRznc>UI2qHx$&%&D)kdWbcM5pMG2k_qBe1&GmpCW=5I0w!^iR8k3$*&=XAk~oB*%j2t*l_OBA)tG}1TuqWJA7!H~&+?#j zwN@KtMHIyxLZ0Qq;GD~CK_H$P5SbuIKoHNuf=Uz%mh>K_)EEdsnofB3f5-57GVeWM zJP4pL!%2#u{MSTl=UfmdH~z5)ls_>50U`!w_@w%4y!1>iln^v!r%j2=bUvSqKqaa5%CZt2y+01gk^>t0a1W`QLtl>Ak3&x0)P*Q zB;`n>G!8kRq!DS2j0!*$X=Ny4YGbfgD`SB@7XXMvkx4j!F$R?|2vO;!Wl|Ud*J>5^ zo`}oVG$bJiA|j5xFUn}K^$ryW?+QCmh8cZcjN&9NhE^<~F0Wp>A2|%Uco(z^gbuWL z=vofeIfzI7BV%lBVyY5Xhy6VTT&sr5PhG54`Zjkn%d1H}2F!ZRbOw+p6j+h z3PoQL1Q+^!SN@!?zrTknPp6CZOUt#X#{Iv&d2ym~_Tp-L>+sYQxOiqI>-0@hYaI<# zYF_@0i}$W?uAN(&o|`*;srBTiF4vR#-+b@d!;RgAnVF&Wm#&`a_6O_dmv=Y&AG~*0 zLKGZyt(#g*Tb&{`rYPv~R_Eg7sZ&?ayJ4^2@6N5(2FKaPtxezd^B1FE`^sWk(JL$I z#=S@Ns)yj63$=;n!A@s-W&RKT#p?I|>Vv=itGl&2e)ef9Klnb}OWd$-D2|O5;4% zi@w*3W7vJTvAf+~Iz8K%sBs3X>vMN*w{!N>%jx>1#;|2y|Nj0r{_wQHiRuK*PEA39 zGYgHIAANk-50yB2aA3dt-J7OT&xZ2g!S3{|URhZ>JZO<2QbGg+-kU>g^P@Kl2B4-irJwnXm_hRkxtA{ 
z4DIN_!!0|4S6+L3>+bQ@mo9$g_dlbF&={p7;=m@-O6gIayAXf{l@byOV?Tp{AdQVW z9D)-r4_c^51Exuo7exp`DU6iK3rh?xu+gPbAc_>TXf(hg0rT7|Eu@G9Ntxh-h>$`e zg(y*~oO4MMyTTizMcBEZ6nPs^0TN0WhggvS0~oDJr%YrN2jC!xG}bNz5Wp5z5Q#u3 zA^;zPa~??Eyxz0Kqe(2#Sh~2(tG?MuCVih!ohlP^3u( zwQ;s50h}wi(lA;D2#5i|g9IT!0v5}mfRF#Kc~M2$2UaR46z&V1pt=5eq5Q-WH;{k{FkbTv~P6D2-`uE-h2uPj_2>|8aZJb={#6Pn~@9i#JD)H|}jd9KP^{mu|d$ z|L)CKxAw#8+Qj4g?Ovb%Hr z^YqolxwCV{XmtJEEd?-tdj8X2e>PDcnl#*cb$51Mow+b`>f-X|!@bi>%h|w<2DUjf z2T65rt93lIwQ6g>iLDmsg1kak8W*#{da4h``VS(qvQY2|M1#VPb?KIla-0y{wQvw`7q2aH*VkR zc_?OQC+C-yaM(L+pI%+L{owJPdu`9Ovf8Lmrh~3K?hW@l-R5M&`OxkStCecKQaS8& zP#{W7vzqQ5bqOP_HG=edd72spdN?YS39dJ2)N9R&7^SBT7;PTyb&{Ai54&fUC$mvS zcy!#cJ>xs0y#Fw(*3?c2LX-dI-~Q-eyYY$a@3$s(pE0c4R zjkuy7Z0?FbjCB0^FK%4F@qi3flBj!JboTc~BbTIM)OUhnD*D{1*`eo;-+QpVbEI`d z-j}E@49t0+6RF@t0XWA*=ve{>5YdW|5H$v2Aap?xF!zp#j8?;8#vs5Ptat@Pni+}8 zPNl%i7>(}(A&6%-QG|em$eulVQ~+M$NFj=4KN=NUqYojDm31Bfb)-d@03)Lnfel<_ zR%v1pBw`LS&c$k_6*Dt~4;~rgBvOu@3rN5M9J~(!m@8=#thn6O>Qw=+BXYqaO1m?V z!d6r?GgECQRjF1ql4(S~JbQ{bW&lVN69U+x(5PZ#1jYM6SR&Q9H|QmmDgcpEg)1}} z02LgY$Y^aGGofguqLLNBp{yMzETQuV0uq7*)>;`82U~7$s+^jhcsvP8dNh3E_8FIg zl`-Kkrpt}p{RkxGHXy<*!G{ygXS{8UT|ncfKmdkuv-~&)IQAHo5wh}>PE_u@K%_{C zVA8e$0}>#UGQdOdeK0`@07gb9EJ9jyKvEi%cFuud@a!xCp(sUh z!c2|kWMMOBb6@0*iP}`%pLudk(8hMf=u|AXA79IyWAXhy9Ji0}+<$w1x|Sago70su zXXjQ{Yd1c=J~8V)_nYeg;tI~Z*JNmR%WL=qv6qR zHxE#&nyuZz{8Sl!7t55*-(D+y*J)|ChCjz6@B}`VX*M|FI`xinf&ovk9Ur`GjomBC`^%SJ384R-Lubl1lhY0MYR;xq~ ze77fRN=6;`Ve4SBshbmtbx4Sl6OA}B?c>g+wfSq;w~jl!uol?KYqWR3PlV z_oIF%i7CsEI-UOMGqXZ=YO;RZ&bBv4Q3Vf=25ZYOGhMCK;HfaDk0T3OlB!FY5kV3{2wW`fs zW^EcJK2^D34>UwaJSi zI!IAxg{{V>)~JK{+}R>{(Xjw(qk<3GkPZqN*#{yt##rYNIf^3$V&M>M@S#?%1<|#N zV#Hv*Rw^z<())<4pIvCECRuMg&t7Y7UByC1^QegWXFg7ZkwIW3}wS%ZZp?U zZ646)S&um%nKUKTV4^6B07wuJlz77{gP}Kx`u6u8&NXFnv014#lR7nORXZF# z`TW_SDz#><@L^(N{%^nYM%pkYiVINR{mxG7_(o%Pc55>`^~C({{_yZ8AA^nqxnU6| zo3&wwH}>+i8B&JKt%wtCjfL2Y0g;)F#cj zE9bJ_vG=M{sZ3U*;qk~hm(=3(PpnK#PIk{9kJ`O+Pp@Xq`K*X)wWXEh=Rdgp>6ezT z-|61Ef22($FgWhmx%p~;6ryJI=0}gcY|qZl{@!oB_%}cP`QcHka9pd@C;(H)at}cm z;kcg#0bnr3_~85fVI|RV6h)e{(TGK|JSz$la=&*xs#jI3n@vyE8WW8>dq@3lZ=zW_ zwX)RhckR&5EhNoGW!P)i8cC&=_6H;DQE4%RL8KB#_qLCcMs0P1_70BMS8GX<*pN?D zpfQm`AZwvgO}#JDIM$QM%nQ@gpZm%s9o@e9;RCJk!i%%R;h>D>-@3Ec?K@P)M2UA~ zoojWnP~#PqYn?TYu(n>iOI7p+}aLzkxgBL|43@iYOMA&FjrIiFwf<#)OMq4-{QW2u4 zTBH?V-~b?66CnhKD2fQtTc3fq)|r=$vKMA?5WEXfk_y167+G)U(y6Slh4X{apcyrYfP)PB87m+n6DJ}imZpqhD-Ytu z2MiD_2b*hT@0qAJH7giF=>!4M@*vC2d|&Kr-lA_hSs2<${8xDp&)it@$*uNa{qc=RGrR{pxY5C~fAu+z$W z$HDfq!DyHb;xy5KMK+R9G-@^FgAvaeYceW|mC|vgo>ZF3L|)LORS-2!Q!ubFArcZZ zm+|;md##ZAJ*BeXI=}!B0h&0D$S7;AMaKbRU14Dxq-Nq^BzOsk!oegegCZ7Mo=-1d zo^LcdWH%KJJY^>JL2!J&KVbIplHVkRnZ z)NdUfFE*38^JiyPi=!L&=FXh$9CUh}0gy3KEz5!}XwZeF$rLffk&)rxonLHScq%?~ zeo2u+tX{jevwxV?Dz)LDXr!@8DpQkF4>oo=A(w|nwchDv2_XTX#1=;dRbn&%Ai9BH7*VflpuPh~v>g{*7zyE{lS;qg(|M~a!?{2;G=H2yErZG2JpHnyPAEwnfuGcs2 zv*H)$C9IB(Vlc(~Utvcg&#*-ST^y%Bf& z95}ZUPA|T0SV+2uaBLFNRj#Ln_&X*pT!Z{;Cu*lihB&UatlL z5y*2}t)^*G%|=-g8{jy%z6inlptP2N-m!I_m9JJSl?omocZb#qlV>nd;ygIE>;(ZL zt-Z5(kp~eGU_m17*?AwVbv^`S)=II6V;~jBNmG!d3?u-L?K)#DsK75v!N!x`Qmh{X z;t<9qT2lIjc%0`dEuSa0A0!Mx%Cwn?aPWlV3U#GQ1)ia-B48f7;!&4nkn9DuDzz(- zAfBile%~H-AhZe#Med-Is66*s-vOF9#Y&V!iJq)f z@}ltK*H>qY!N_y8x~^BxG#kx{pMUq`SmS)Xe*WSzplsghwfBw&$AkW$&mqh%%?dAYzubK0`TcMF{&Us!X8WM~{yUEz?HunM^Kby0dqclJvLhFn zbY^MvCK%&FLBCT+>5~Gr% z<73Azcex?xw2mhlH3B`t;^^2m>L~yws0;5SZL(qZ!&l$;MNlYr?rfbtvwCX%)aJ%k z`!Ms~1Ch4^3L~RurzSc|_q#(RvV~8QczLaP+#BVY>u2G(+e@RkI?@L-(_ zh=}rM-+Y6BhM6xILI{DOuWjo%&x_EgSC01jdr4QJ@yr;!b~c3Id80)yWl$8yJ?!51fnE~j1h%_84-Xm 
zsV1@3+8CF+BuR=Q55NeX*?aKL<+XZRuT?}~n2lWU!Mn!fgkwvH975r2Ubx7pYMKy; zj&!|VJMQ!Yva3Y;w%TyQLHKI4>JK&N)hS)U@#huDoN^mu)$mBePAXGf<_zf zL*M|;D`QL)dC$yLuUD0hI-Ry4jz$F_CTVJIVO=O!e*^>;6=^r}h(Juj%nE>r zjL~JGG@;Vv@!z=;Z%2rNQc|ke2LJ@6*fWw7FoC@U0Ya^H2r7<}O0`mNHXDs9DdPhI z1Qrt|QIw=1vqX+aiGYd-$e>mfMNyoJpz~m&1VRuIrN}#9ZbM`A#dyXINv#oRel&DS zQ>>$iD0m*Y!XRs7(pXh0)ez*swyV_?WjHluo_z9iAK!X)VYwc7YI<>Ib|Eo(uYJG` z08j#y5BtZbo|;Z4=9kxeb>YI53r@+Kew;U7cT= z5gYydryt+DzIoUgUHf=&oJThwbQ@K?`b=-*#~=RU+CvkW`T05lKHNQQbqk!Vkntbg z-!le#o$#ZdJifRzZTaE!LOQk9+}v(=JHxz}Kio?n- zfgj~CH&<>~^i0w|*dc(!J$KwU8h8# zl~gt2%HyL}VWlV_O#zW6Yl)qkDn?llZM4dr%|=DFlC--0nThJoUN2U0P%u4FyZ`v0 z%*4CG)v9Tnr1_}W>>gL*$ZMTtwzq$b8fIpi1|Y43(I_*mR9N{1xp}QX&mQyj(~aL6)8mEz|J{gX5rv05k*N1%tV;m zLTj3vp9uoaxgb)o(}G?&J1CeuHGmHuwr-}2yIUB{s`(Pqd!N^*#6tQ^ktb*9upd#gc zh$3BB=UhPH$V31jKygGPo27|Sh6Gr|N0CO327_TeHSJ+`*zT{)%vPgxI2?AmeQ#a0 z(r})w&4UZpguo0)4466RLa^R5nK-QuvRn%1T#@HRAgCs_G^rHchajGkNrPL2tuDA-O5&B1`%c?lJapU?J*~3YJk$m zd6E)iL?tLC+gU)I8&5b11%&kYQo$jFAS6^qyAh!zc?v*jv?WCdRMvQDrAaH0(##}? z5Ih?}*$TZ2I^?Ho*`Yz{7xpTBkokbQv57f*lT zv!D6*fBfy;R-XX-y*zmBeYkved1qsPb2I;c{+o9U7GHW{;_H9^i>qs=8)>U?wU#s< z-+X)LN8f*^m(#6V`>SWxJDUTQY8hpfn(OC2t?1nO-BvcpvRb3sXx6rO4~C z*P`}5Cpt|;Pc6+n_l1j%iRi(dJNHC??rUHAKVCVrb^mbb{=r}V^?l`lIfNkA+Sfk3 z?tO@2Rd1%J&w`-ZK4@p{fzhf`sh(P0xPAX&UijRCA}DgMq?I_0Nz~M2{kS{y)~zfz zP($!8w>CJKp6bufH%9q@J+p}SetKq-I9O-nTJ5maCsKW7u%aVt)!Kt(d>M) z)jrN_mRoVeJFNi;XickYlXvcI)T@2TN{@P@I5s9$t4kAuqIhoc^rJ^RKv7x&H*%V^ilPVzBc%v~1I|ZT zTB+tP#E}^cMwLp+AS@*lLOEtzIHe*IATm)z%4idrYPF(_>a@FQz0z$DOl(joqm3cO z;ETdC0TRXas<(yr-nxuXYo*u+t!ZU->ENK{y_=q%N#oGzw3Q-nrPJxfanziwPEAc5 zw>nyDZ~S015NIaf?+wygMJtNaIJ1SxRe$6PYf+%n@8@~poKq%CC{bD=OQcO8bR4q5 zXp|Q^j)qy`0OOFxQS5?*5aJ{P7DGye%b9@-Ix*2F#Dy0-DQ$m3RQOde5f)aDdW3igXR^5{>fD$1saN_O+9Q)o&3qOvFfLNp) z`~#sNTO0D>u&|jXm85aCUL{Q;#GVx>Ww_qs*wosfER;+SC+r8IB-iE_rzZ zLVz4c8U;9bGLed!;QCqbpm2rK$%XUt&eoMu)5~*v`uC*&wn3$?uxDch)st>$-*xo;Y z-Mxc<_sws2x`R%~%}%5qB!n>8tV~Z;Y~F0`9pAWJ%+J9vtL*F?8|_*L?FaWZf9(%m zdj8o9fBi>qzWru))B`q0mFnc)Mk~uL8IxHk2Dy$f2pqJ*hT;0!3}F0dV;_~O#7WMq zl-_TRJWHC#TCOQK-&iKOBvq_@=^3OlWy6~khFMs2od^zNUt()6vMOT`QcBl31 z7oO?e?Y;Hx!}qWK*{8nv%-&Y_Q=fgh+qzq?Rsa7){pXXdS$3a^t+n>vuRAtR`s;0Z z*WRGJ(Eu8TBnVI(QsPJ)F~k`uLYk0`&|jdCz9>Q?7YdC?49Ov9I3#EQ!gT}f&|O{L z*R5Ok)^&Mr`po0cdEGW^>BF0aiMn4aZ$@U-*?X_``#n!*#TUor;o->}Z#}qnxqt8B zLTUOR{@pKqe0%c2?S}zy>1FR^*6AdIDmI!aAka!X%ZoxIcqe%-tql6zR=3?eyi1rg12#OL@QRr7Rn+oi&`r=I`kp2R91THacd zrdoq_-YAVo&O2jpwpjSOwv{zXrI}_=LM%jiZR1w5nymmqyPbkw8I4E1wO-oFAP9@Z zagubpqtjux(@9Y`wpQWLpzDJ3q;(MAYIm%4E;#QoSxL*?T0XGzytI{w zwA!rzB$Vf66@a{F5h0)?idtDGiKAkO#^9dc8Pbd5R&YDK_ z-Zv-+W{yl8Y!#(#9mPlljEE$k&tcdH1jHtv0!+>MK*Ws9OaTEKe7}e^GMVO-XvXr` z7|2At1Q8_f!*DdZ^~r}HzyDTI%#4ZGH#T2<;S1NU-t4cgYDF$sjnUbT9YB#n!dg)X zK6n-m99Zi!`=ioID>^%7ST)Unz+QlBXX*1QS<_`eAox7GM5_7r7|&MF9-HA4DU{~N zB7(>Q!8=8&DW@oMSIs4X7goCiF>fl^m zRnwz`w3qi+H&Dl3fJ7-$2;6j-HK}H?P6VPvDIs048&g`O9dqI0)2KJA*)Zva)A{(x z&e5gK?TvF!#hY0HlyoCUdtTbHC?%uoH&)*H(Op~Y*AD!6Yum<++p8BQlKBram@y~9ZyR^Q3^ZMKW;hpdQ`#WF!l`HSOb=QWF z=kDR{nYEDD*)M+f`0@T+AVCU`4i?V2ix=Bp*K57-)XK?da&$bq|KMm@ z`Lc$zg{LRE3$Su73U%nTlWt2?LWifzvV!MtKE(?Myu5s4qncHdv765{srZfWzxB)) zu4R4ITWJ|ZQ&+wA@y=odwtjbZ;>Ww=i&w8DS$i>?|K%Tl{D=Q$nw^K|zp^$Q+sewD zw|7tW^Q4^?wdcByqO&x4r!rBS@7O41a?#@Dqb#lfYq zu8Kuol_rjoC^DL26P3-Pk448CBJdQ9ia;pE2$4`!nzd@SN@-G2nq*4p#d1CzPrL_I zR9i~`T47|8AZ(p8am++@RY!>dO&Dl(ZJiKm@2lFHIFS&lvW^nX>`g*gf`=dwb-Ha- zzyX`a*kUnPs8J+MjPpS;0%2wADEL;E#IY_)R7xjF1fqQH^2#n2rAHyH>#9y#oh)qu zkkXnt*s5x^+MR9(1p|j9Ng@+$U6npCK`V*FqE54Ra}j3(rF6Horcrb71f)?BDr8Zp 
z1e8)Sk_wEBA|W7>3!VT-6C#&oH9R>w9Ug}|B#9XeHdj_x;#ON!Bj#+9|W&Xhj@BhUQ{=?2^pZbcubwYqje)hw^c;SWTzVq$heE#JxwtHP5BtT{X zX3!*F5Rs6awMZOk4TMc}=WLFHAPh*NlqNuC3c;Oe?NJEnEX2xAng;}cAh2geO0emm zFcK&v4h%reY9Emp5Cn-7Gcp847?~)D^Um9%m@lVQzBDExO544GiQ*{E07*v%kj~a5 zpTjYLAlLxFwk%y$)a7DyvfuBonY1Se8U^I@&V@83JC2NIrJ~5RJDH8uXmPk)?BrF@ z>G^1|qO;KN7kPiWG3bzc0CG7^Av2a2kE+rq=dWg2z1ZJho{Azeq;0-Lu(49Dehlp- z%4Peid3ZQdhPT$^q}K*Aks4pQcHyP3T!paw|NTGym}~gW--^Ef?|%QrjqBh2&1Yv~ zK6`L?ItxWfFFp56d(D3ThqqVHZw@DJQQEw;mPSVAFTDc)}OiM>Q---7ZPOWV0gxBQYwy;m0o8wE|w)*2ijny zqI5N0TkVN+lX)3LDPmiB4SKD=^69Ppqy4>W*VmWUPEP0lvw!f#Kl?YYErv%gf9dMQ z7oVCud2sY-JUg|2^2fKYTr4BU6ymB)x*Q1;Jr~1 z3or6Iige**Q7nJ(~{tuCJ5IDhW^bY6|}#nVq+UJ3Q?5+Yj#@ zEDNuc+1;N8fle0t8V1{)ZrcZitNl*ePFj%}Id$00Gb0NF4+cF#a?T!& z@@tns11YUnd%aeyt*_3lc8Yl(o2b=ljYlJ`^ucI4U0P-U4=Odc9*j2Ex^b$%{EZu$ zG&#?RO-uw6lPWZ6s!<~VIqyRVNJ>S(oUzJV((nygoI~&+rTUqA}>Z+`bFGU_VnoZ-FM%5|DD&44)&|MjuUg?{FRqq`r<2J{>Ik! zdC-kC2AlcJUy@$M1V8|R)**Pjb@+V9EQm$r7_efROLH=p{}x7wSseR=!%-ra7m zLvE6QbrD^DHa&iH(BJOqWMwwnF(#_3I?U$zxRQ)kw=$0K@GyM#WSnF=OQIKEy!pb5 z7ytD?`$IB@3-{iekDffK`&*p0>9yB(@7}8s6sYJ=etKtZrJp-}bmzbY=yl@R+?GBF zMT-&;!9bK`6hx*o8>jSmIJdR0o!d}w?Rpm#ITyy`h4EoJo33_S#?bm|Z&sBjvpPw@ z?@d{7Jgx(%;b_*5qv@==@~sO${_{`TokS=4>N8hA{$zhvq3O`=y9brU+Dl}Nj?6~8 zr=sM+lcORpS%ewdtz@ytyX~ac>n;}gcs#G1L@sbG4tQgu`)F@|^K$&;*x!6Ey?MF+ z+QZqu{^xIY`lg=Fzx%7_PY?I5eev4-FK>matV;OOmG1Q$gL9YG7PI*e|Mb(pmcBTh zEZZ%0;p*Un_dhznk$(EY@ZH;^IEseTs;ptT)p-O_Hwc8x7x+L{N}e`c=OHMeZpl?v=Y5oR3Zux`e|fh zii{E9<-GLXj~9G#GtDbb6TK)4Z{1plh9kayxz+3UE^Ke_A0F@R9v%;ijn#f(>)oS6 zo6GiwUhQ?d9l3LVoMcFZtwfi#?X=C|@lde@NW$mG4uW^>^{q(vl)jh7H7W-(oOr9i}J zWbfi6>Gs;AqglPojZ!+!YUlHOX)7Nki2@}=o;fy=1MZ|9Lqs}ir7Z)bl>(r}Vy;-) z?IcPfGSnBD&F0Q}b{2$@2vL;M0-^{=0eU7xtrP*GRxF}Stc)QfKw?C0W+R3Oc=yTC zAN{+3`QDqay2`O|a8L*j_lKW+_`xS1z4P0@^Y^Yl`=W}PgpY0(zeLTXo|=~g0WiCd zK6v*}|HJPW({VR$i8w%jfC0`J9)xtTxA))v%m2Ep%U}DA-|4M%Sfr6m5)lXlFM&aj z@ys9BXkkEWMTkNmKAi2InHd{iA2M?TLp>u@3;U+nA;Lk* zcGORcYQCILgR6V%7Xmdd7hn+%0nco70jQqOc1zeVi%Fu*#YK zbs4B&DIK7uDsUBq5Q^Hb_ga&@hTyDoNWcK6vnolow|2OF-fuPfa-E>3Lim!&fiworG{I5TFg-&!sU>+PVIA(B%fE-Xq1OmE*l>bE*? 
zz5V3*Ypd;^>GjOP;o`S`<-(xbn&-uMuimOEz{B-w` z|J}d$%;9jFTlp{kueVk=qIuXB*zL4Xt9Gwrc^PLJA+4;efAGoO-fF+H^=z3h%9@ni zytXo1R7o1Odg)}5x7ywPgJ~4Yl?$EWq~yYB04SDqQz2p>>e}fj3KD{4rIq(wd#58U z%qB9ydto0(F&VPXGBYa}jwe}`X{ACF%Bd=2lo!s&lOW7NqSy?l6GZ})DuqdsGz7G~ zEF`cG-jHGracru(=2AMXOaM`Z;cy&$AWfYtjiM-DED@pIPFWK6LfR;0LI~OzBGqcA z0$f%lv-e)e7(}IGD65)T(ljwfwL95#I;#r1$cuijvz#xKM$#Ywi>hK46KkEu`C=g< zM4k5D;qmj=E@oMzH`Z5L{j&5TF$QN;+*$7{F+|!1UY5Dhu@?cY%Cgq6BvF(`v8}xe z9GPf%dOTR|r*Y!h86A5M8dxDkMmHr~BE=jK02sUrEP#Zc_q#voI@MnKm%%<%mL-e+`h@>69wjCxmYZw5d2`UmZYsXO@Sexc=6Uc_L3w?@U?3GGC~2=S~DQ9kckX& z6Z#eqS6dS&SrifJ+37)GyK?R-60snIXoVue(8MB4+S^h#zdYzX`rxC(N2e-XEf>?Z z)oZ4A&iD+t+CJaedU|dDle=*%yZXY5^XaTNt(G%CJB8WM21}p(;DhzcYl9mX*v+#3 zYQ0>5xV7~QHlMF==DP{PxSOp83UFw@ydZ-8-}O z!IS5%UAlJT`k(ySJIALb8kkOak&jXh_xGmV?#Y!)*VZnGg>diF`@iwmzp-2_{`}8A zn4Q?}x_RpQ<#u~;|Fiq6gVt~UwO{+-qmS<2-+Ax7>CI~3CLs)n`?Sj_$W@Mg(QrAQR!M43$CU=8 z#tdr=Re7zn3c+y*h>DPmLQyLCfP~&VVQHnQGODafK~PFr=M`$BqTrn^9Yw_6a{#3@ zY2)geKtkn+Fla$yRP1uw6o4B#TRYRWcL?B|4W6uXfde%QWh5ac#M(I)$-3>TDAOno z!ld+cGPOdUK-y>{!P}BqkW^FeZ4hVz4W8~S!O4K6wbDia zATX)M+r}r8$zS~GAH4t8Uq(QrF$jC_1=v-L28~IacOQQE0qTGH_y4E=i;GupfC3H- z!omSaB(PFM!sF@e^;ds<{A4H7iB^pr5(K4TjvAwElf8i!W`Rd-n2sKlx<$@oAGXJ42;!E_qIA8hqYP#z{QBE_!NX`=zVynam!7_H>-~>~I$wI}nWA`m zI))Zf00>?Yz+=;fw^0Q3xRfo=^Gu zl|fOJ0eqUl*7?@y;o(zHuZ*Vo&ZGQ|*YB@x#M#0>xV8V+{zt!7S|O7axf*P){@KSL zt)4%(aW0NKovO%x^2Z;&`rhGif;XRAosDa!^mJy+g;2~ybUG>GB<^PE$?0jJMjcN6}2glX>Z`|J6>b&sWYPHO7-n^i-xqI(nRh9kj zdQr?yP7kdMn_J*(Us+dFe6pya0w=Ws^4624>1<{_-?(`-A5Kz@gi#sQSrnfRix-}{ znsu|uo%U9(A26^J0ip&(s%>p+ zYmLz&GFa)Uik8blE2Cmw)QB6etEQX0ds1Qk!CPD-u1RzC9H(FhhfPt5J@!s2SfAZnmN`RDj z@gayPlz^Z>fE0lhw3U17-FF9rfA@F)2P22>1SRAl1FGcRYOq9|Lf zR+eNsjsu70D|+w5a~!9rb>J`?4JX6X#d6NUDFt!XYPUPR-k{s+r0teLZ5=~kWfW2* zj#-eRIKvg$;^Dz#AndJO1ckr|V2C^^KxUxDGCw-qf4I`ihSTa~|LCcwt^;u~I_zFJ zcj4(v#eBYZXXpOAaPj=;dU9oTt@Df5Ca-+CwSA#KpYuoW?wp(i3~=FUM`w{o*BZ1= z?jF18-D}T%?MAuOo&Meb@&OQ>zmPuOD{tM}eX=`_A^P~ENAJG>s9??p(cFf1H&#ab z0HJi9P8?j@-EQ{P)pO;P}1`dj} z^IX>+0vL^}?Y365q@_;GBCk+E9BEQmdW)qf9ksi0Wy9r*+js8oU%1>Y@>+tteC5LZ zqxpQkoGtB2KUwB3Ng`4h0N4R*b{vc{8-un|=zM5rE{TnImOXdkxb|FEuHA_vkOxAgxvlj0 zbUqy}rl-q|eyi6>0ih9uCTCzP5nqyd?x5JR*AMILKm=8$(K=4OKf!6`*rgX<1##=HntZNeE(t_dYPY z5SX+g)QDoe3)UkNAqCIF(KL-?qYR4#=ap6hpp;Rl;v}w$S_qu5t!kxomgvD?WtkWA z`C_)r)5u%rKxDC405`x(pQffPik3<(v(kDtUjWf$K8|&g#!;em5}Vq4inJo65HzB& z_blCZT0$Y1A{@ABF*jRs0a4hDVziC` zm2)A_=T&Vx?QXZ%MW6tpG=<=7T{24)r>OY&X#d`wTem*>@ZjKZJe~$07#KmaEZJBa zJayyd%P)NC+Vy8!{hrb!o;^CFBVyqoMn|po3T*S~{{6@#Nq3D^VDGg8t(d{(rw7MJ zADcAFw0hzB8y|gqu>W8$A0C_7bS`A=a~ImHx$&@l{=w>Mmz}$CbN$M}WMi`*O8o4@ z;r_h}Bze43eD=xmsm)8KMM>YWIyP|(NgA*o9*v%R76O|rHp`W;w`aYd$4YN(t#zX4 zU}8l>wQzvpnJfK|KN)3OLcpT1Pj<#pO1*A-xwM{b8YQ*0-F94iTNYKbOj${FX_v09 zYa0-t*mPu4-FWmDXegI;}J>EAKdqb?JSisFg-Zs*1|(>>dYaH{$Lr zcf0$OGKe^=tQY2XYWi6`%_~E&)l0HgT2xhSZ5*53-EpstSt~ie(Wz>Gd^{4sUcXBY zYUg9Gr$rTO0+y4L^75v>_QLsk|H4<#fB$Dss^tS)hi)3B9W~zfGtqKd^j12TE^L1A z;k~2N6E3S4UOD&s|MmarZ-4uLe{MbbhyU}x|4;t;A0ACAEyCWtaD98{bip7t;A))q zThWuphyTa_-FK&l!?NhU{`#kz8>=Ui{G(fkD_!%=wU@v4m1q9ZKl-=14O%P5aA9+0 zJU!VMK%B&tb>rRB^XCR>6n9$5%1Rb1=6Z2uyZwu|MlU|SF)r#y4`$1XZ9OiQrFFKl z*?~x`6?^ckO3Nk&%^}n!P(#yXXw0$*1j53eh*GUgY#gaZ8fa@P$l7RE*Ct7XCHO#E zdFw^1NMXJ#gk^oTqY>uQC4o|;6jJE|OLH$IXMG5QN;q&sqz{%Ez|~HJLZaXq7=v@l zm^h8fqH@-WUq_}Ev}xAqXdSWh zS(1f1D1|y!mVI4WZyhKC5Fm6eWJxm5mmofPe==FVd+)(&nlv+|R=X1=X|LBWi)uEV z`@lNZ5#Vf@kLUBcvT2$L;36+4(@|wBWwe*zoYP7N6z>C&u(MtS6nSiHmd)4&kVqgP zomssQkvT9lF?V7FO%y~%qtiFuc=gHShpor}LTv*O1|N`7Sb_)wGBGNN&@j_tk$?XO ze|q)CvsbQPM-KdXD3fKlh6#zMK2rMjw;5~yh$x+c!bTSW^Eb@DIZ@=;S 
zFFttp!@6`0wG#=P00<0+#om+RqmTFg@@MaS?aQxx?>GL&g-e&fM2eeD3m9#Jt&K^# z{k33=Cl7C3f9?yW)uM<9ye5gGRMU2O^l6f%s;&nsol9F=zj*c2r=HT+UpUXjV)SH| z^?GRHZ+@?@V-gE#w|n!ct<}~3a^#X^{>oRkr^m2!GP?c2(F=d~MfA0#ol9Rhm!B-_ z^4|7^?D((t(pirC_M931Ym9`v&(K;^?lF)_i)zm}>$ERh@-H0k5 zzW(ghPo5l2=Xvcpj&;8kAD&KosY()kw7=|k@w3kk`>U<>wXSnu%L+nR?I*M8qTh;B z1M6$;stPx+UAXmd*H5b9biB3QVrToE)|a1u?zMM6u6@W_F$-0$?6%_Cy4u!7O`ydn zhz(61kLyjdy4t$`{-`S4*2V5hr}bp#qvHd=e|Y-cUw!`d*B_>Fe(v)6bXe{@aewqj z`(OXc`fxD@C;OA~Z~o3Riz=M#PtR@jV`F}J|75=K-HnWalSG%LHH{Q6@O0u<`%!FY zQ5IPZ`^V7k#FK@6yg#~nalJ0>>~z`cne*$td|CbF8=o4jLgl(KYEA$6pZ@Fq<@U?Z zJpFk0;kE0l{?YMpj2476c+XzgizKlb&gQ)%_?^G~%RhSZgVsuV`=f^lex;vXy4c&< zoBg-{{4aj;m+uzKdNf(oIrloP;kZZ@cu@!xJf@M+Q4**A;Aos?X)8@nj*njb(Pxps z-}&ut9-WjwjXrUrZh!V<|DfF5>dfb?P23t}j~<^SQD%MBX%hh;2M~c^eefJOh!1sD zJL|j)!H3|Tt!xM^0UFY8@PPs1BnFY-f-oS1CX?ux$OMN9OkGo)$O9jvrktt@Lru@T~; zs;a71T8V_Ru23M3lTN3brCFAyfkmTMWF&~Sem#UjsxNacC&Wc1`@vU51vAI==04eWvhfbnE10_+-UjbZ1DvKS3dPlu@< z_Q5kW^=vO7K)@hG6d7ZTj*L;Hfm8^>O>#&u2mmS6nv_AUNNGT15J40|FJQeFG^Ar- ztn0eAb+6OUvbG{4f-LNOT^0-H9V$9H-1&n)_}Bm9pa0WW|MH!(2ntO^+9*v*feU{j!2e$#s{(w#n_M*0b;k(b8 z$j=v}FaGkiZ+`D$5enX1b}Q|#bRt6%xTwnDU8?l?tw9`NKxY!2wNw`&S70SWsK#J z#ipv5(3FL<7T$gTXgn{HD0%;*2eTQAr$M`Q^~!~pUU{bG>gZHDnW-$Im{!`ow=qnw<9{ zV4|q3$~aag(w=?ptWru86?MbWrof>gPl^|8IQX84v{Fid>e_=4DgqHjQB_sWdll)T ztdut0R&P1afgzS;nde00ovW->N{RT|Ng73IlGSB7nJwZh79TiB^Pce`vaXN^ug9Pj*KnJ5CWpEYgY%S6b5#}mf4+2#1Ihy8|ZGMwns!k z5dcywf*`K0?%)6H&DVck6;L>N&8;pP=5B2GLCDjkKuufO`Um%sSU z=U;kJIFJI7u)nu=aPSy{D2+aVQpAlc#S@1h49u*IZiJvf6j&4hB4$ZC8%;m{@ZB$e z^=sXB+p~y>grJmYM5Prl1VmCqgu#0bo`?{T6rnJ41HWJ*1&CxcQp4qDU{<6EbyErU z5Ksh21pxp8QX)XgAdt6pQ7p4oFG({%BZ{1_gR}G5LQq;sy8rmekACncAKm&SpI5O~ z+Gw%tg9ohzNc5TMvJR|}M5uQD&d%|_`2GLxAGSI#zw#AO;GK8Dn%H>97?^ateg67K z@BaCn+pjyxOnsT-+uWUH@H&y(b73r zSE%Bo+f6%bieb{}^fiyBGy3MYH+~Tv&T}X%uB>&Ew0N>t^jj3XT)et<>B^NP&7SNY zL?)@~Fr4P?j;XEl4kDw+^D2$iWGbDG_qYs`pa}cz_SS0e&Vz%>x}t2VgGb9Re(6S)SL12nYCykO>8D@&%Cqyy+$gwkapkRd@3t~U3Cbt#o&G14kxqO`R&c0{rLTz$K&_jc=PMueDTVSs}FZhx7V}HsJ+Pj&g0XX zq1R4kGu!FVSHJe1(Rl7Ct4o-SrmYT)7Z5bGf@ehU^p{_F>1V%qKQ{4a4<_pyh6zZ) zXtwOGBw3OM2Bpl#n(6m@AAkC|EZp;#H}mQ2AODwsa^uCPa@34ENgyvnxm+rRhm+DM zvNiYnX?rCp7P)rTQNw+dptTCj0O0EoMOsjZKxyTjQzlY`&O52u#F2G13s*%=M#m}+ z$lh8XfRr+^(I!$lLE=0Qr<3tYuV2@ql?gsr7qnKjvkK3K6-r?mCoEuHP(*2(1kVsK zijAwS4^AvZXZ(98W`$CuV1Rb=C}p?=Z^73= zE0shoLc_s%2Q^z3(WIhO6p(NrLefeHgDlEu8#s{4+C7EXX?5aAGfGw3e!E3U>b;-m zg^A)+MMDl?Jv?2r&5CwF4eERNN zuV20Slu-&9sv>{9^KdqvDk1>!!80JEVi!VS>wWXP2|+wVQwTsrAA}GTaR_|p?x#n4 zyWMA=4uOO7uC7UmM-&K+coG{91X1up2uMJHO(~6qLqHG`k*1dgNC>D6lF|T8KZ^m8 z&s+usKuQ@<5wjqP^IoBDw_A~k95=5hDe{~YX%jtpa_=wx?E4Syd={z@p%xTE2nr}t zp2Y(Rf)#0kNCD4SM3Kgw-Q$1pul_|Gx1M_LMd2DzGoXs2I$vr9Do!q6d*${AKWlf) zGtXSSbg7;8Vxz~tK3!QeH=iBMr_(Qf^M;f)7^i_hyFKc3qNrWBwmZ*%wIJfni~Yy9 zj#kd~U-yxSoL^nrQaYZDio?C}w}1V) zN1qLU`8S>$jmnSSx%1TXYHQ>C?c1Z%$+F*-)tA~wC(Es^)i1nsV_e}+U;SYJV9{R* zWgSFdICA2kzZP$;b@OtxtU{{?!mwNhtw6Mpg8kE}j+9%9(GW)_(lDA7EWtNfuewHo zuYGZIHeK#M$yWxrb*>kY)gYJ7_xE;>MyHFF6?5&z=HbD~WW4O8u(=rzx*cR!*UM1n z+goX6i^(FlbFdB;6S#QZZ*R2HG+rMh8ymgeO1i%{IlsL=UR3$Q_d9a!>RM&PWVuvq zTLwDuvOL+%UOq=GN+m8|%uclV#b~;Gyg!=MX7{bV$B&EO`R!*Oo{SF; zPPfkwx@-M+-+pvD_D11mWVF@!+<)-lqv?G1tuNm={l$~<)UFMBNTw)?ciz4|SWWky zl-q0YrI)uC|5V_@!tKtpTF~QSwmhyJEkN_TWzf<%F#xe;y4~I@+HgW<;{ZO z_18cC`OiNzk-ByFhl4?9Jge(E7_TR@$_Hq*sJ2yOoO|Pg59(rhbTVBZtg!QGF9qja zkdu?Lw$N{Pe*XH0^Snl;&F!9`k5L2}0F0(QpHJs?La@5hnT_*h83w)XqT<*@kN0N> zi`%=W%b*AaYbQn#04?+S-0ENkRkxQqRL%!2KCH~^?C>p74nr8FXH zO^7N4_ST0GwAP9gX)O#s1ZRDuj8Ud2D`u(d+B(NUj5aLntrL*4v|5u&vcyD)Xlu7D ziY!V9$tWE~@pv{*(*y-p27LgWjA!%tQV|nT6G8DH))gkwu`$BveNbr7Kwg&4Is)N1 
zLIIP+k=9y8T8IUbBq@v1ItN5Xo2srzkq@q{%i30GR1_HiuBuumDv6`XY+-FpL`rFc zrqxP5&{cRQ?0^A;7&+Lf&_$`v{FRFlj8^X@6<(+XoE_E6<~3mnHdO0&WZ#8 zf8F%J52Cd4zG6TTAyf#UlnRcUegh#XL}4Kyz~&30h_DE| zAf!+OATk3WGY1LMgv1aKRpf$@GClwzL~&YIRaF*Il0+sER3f#tE?+D~xYgi{=fUlpr5r9wHz0(eBt_+Ub?ZIj`OL`l6EWUsc4?)#nlUguYKuyZ0Hwn zetPS{aT-T|4KZ~w!4&f&7)4?cQ~9HmKXIxYsi&gH8Y z-g)Pf3+H-^$?V)_vXaHERK|~w!nLG6**Al1>&BIQHbouv2dhUX!zAtg`~T)gAAibG z3Katoy!P6oBKVa~S~*zk4J+rP2n6JGY=8RWTbkmB_lIO4DCnlq$#Kzcr|aw4`*-&0 zy!^r|SAXTpEv;OZ`G z*wf2krbpGH1VtRFC^KPY&|xB&&iBWYjg>xNYdAh>DXoP;7>E#1h!CKWc%asbG_l9C z1TO+07_FI^NStR5B0d1KF`AtirJ0n71m^&0u~;I2Hi|&i)(J=e2^;{(m?#o(!E3|@ zD#y&i;+zKnAuvXH_8=h4gi34etczk(SG87JE3{SZ!FnHzj=c9spp|ycv6i~5jn>_E zCo%>_PR5hZ6n0I}>hoS(vxj1aGcQ;O z%&dt7n&V3l0s%G2e;ubN%uf_Ud$l$hb^@vA@n$%DHO0=tHY7o~Ao zJNClJL=c+);PFg%CLAPyDAZa33J|$Ee0=-kd!OC<>bJh_a!;f)*LnE{lhEikH9j?8>uka5gddPyXanV)b|zuHI<3`sb^hBP|Lf zKeFBPm!|vs6H(y$5cLvk3pO^Li-tyo4SMU7B?>>Dlti=8P+SP4W=ChsM;nBR)G8eC`eRywYFAIzl;>w%iE zFll3u!59-nLSR57MLuwpsH&X0g1Xs_Djh;t zE(=m;w_u~S3k<@FG@)|dhGtCM^n-#wBOSAFI6Fc>gn)eDBF~X9iDE^Xy+rNk^AC3{ ziadC*&TG`dNMMlASw>;Kq7~w@s6}im%XGWP0PH*=ASwn@pqQfhavbR<6V4Lc+MrX_ zt}IIDz1GC+B8*8?9K~8u94F3&qNuboO6$sd?doY+M}rlS#>5`z3~UUIZ{$qH-rP$S z5yEIV`t1Iv-c?#@76w9gfmuRe5G9NN0UBHGUsZ@vnoX-apc2iFJ%kXRZ5b<shjr-gsv7wIA-^yAO383F9oc$G7)P93Or*x&8`Ye(}=&?cKu%m5Q_3NN`>~d9+~D ze(H-YTPy|_y0gh}ZRP5<=btI7>0kcg`>%ZK;_B95|IXg!^WEj?2%}oZ{P6B0qqE7V zynIQOb$I`SLSh-OpZm;o8C8;h?V(eVWpEK79Y<4-xg~ zq?k`0WxWImw1l1ek1t-_u=67B$DJ09XXT^MPQ1nc_5bES`zOEm-wig*y`5Viw3yo1 z7`2l8;;m0RS-iE~X}9UqyN5=@AOHBx&8~X!x%Dr7^~%OZ_O)+cu4j8mOw-Bi=+*rKb+Rf3LUFJ{T_R60To6kHoWOap}VP>-ncH zxbJ=E`RP2@@gaMB`IVcdoqX_nuk9VfwX0(D`spj1moBb9*m-z-Fg)Bl(pmclfA;=s zuYK|xf8!g2ekVX^b?L)TPWruQu-e}}7%vJJrNUL6C2-+l|D{*1FGsUCe(}j-0h5{i z;!7J6<{)bScxD4%JU`f6?Hr9pi`+(rI&u2l?|t#ZTX%qD_wiwG)wH{&w(Nt9i#$uH zD199uceQo4uT%1hIm9|Ea;t@)<_Hqee*b7(6|lYCW}%3jDAI`!Vr`YAS$im$S6%jz@UO>6C39(5ebVjN>IwG z0HnY{Yt?-8ltqP#dT+ zm(ztJ2p9r$oTQ4#L|PFAAC%E7a(uMkU0F>{f{3jsixkz1vQXv7Od|vzcs`%?d)?#x zqs@)2;dBy3=5%~of0Hzq>C!zWLkJo?O87ip-fAlPg`_)O3K-(S(2=UOw*5kRD7lFp!jc6WCk z-TQ2{_l5E4iLDByFnGsJR0oI=dL<40wO9rYpE$jk|wKz;(eISX3NELu+nd? 
z?aZu|LQ;)CL;VRM=_jzn=Al;iVzhcAcH7qa13WV1K&)En3+HS&=w5N zM{!Jq!HR%*YXLAz(@#JC=+;Lcxw=A9Wl^C*ASD1uD)d=OD^W=#WR_?)dRh#)~g zgf%H=;qlJSVxDV*M4Cm|3zBG)c<~g+y^ZbXzw+hz-Vw#i!$;#qI=}MLxs{FegO3i1 z$(%G!r{yw-^XK8g-C17w>tDQ?uXPvmdi~nQhi^U{O^We&+UjR z7rxrP`1HWEu+v@dUpxH0^XJXu>yp1!ix?Pl#ZEobiT zy=AwX9GsNbF0^kxwK>|Gc4Nq{tsgxa9-qt{69(07r@!})|H<*m?2Y%QNvxHkpdiw^ zs+m-rmmy!)wyfK2GdXe9f`9wFmx|+^GyO|4?46&F zZ@#j5^V#()>*qgxXEz|+e7=*cChxp=@aC=Yjcey#x|#j?FK%}(w0Dk%j~*Vab)#o* zY(I1Te0p-qB#uL?)jGGmdgbEUKlD`m<&F<-V zxtQ_gcKpYG{_fWLYTNQ+IlFnWUo&>oG>%NWWj%*hW;|PLvuRmWf=r323cu1e?Nm{y zu3zd^E)<0YgkSmE&7ZvX(V&y~pym@!R`fT&`Q;DZ`Q*_XcTrKh)tcq`rLBHlmF@K& z3RP9D0H3{iwz;&<%_d6_kRS?$6h&mTFe9p_9oo=cfH*cPZD&R)6)Ep*;9z|ah7f#h ztrCpl#3b6)C7~wNXb2IN(cU^F)kK2`0hq1#1j4>?#}cpv9{_|I8a;@$)PE&S}9|U08~|Ff+iwF%JU+MqTs!= z*61~Mm7pk6aT1rMB~7+=q)1U>v{8zRqO1s5q}N-Y&*svCCZYNCJsmn zg|TsViLf-~U}Rw;0FvY5y<)LYgh(XlgdhYCA^|~~X;Je5$tVrA7?_$5xHCrpKAZjK^Ld`p);Zd-7_I$Q~4n9b%6A4gM zm9L+@PZqOTBd!2&klc3s)Of`9^m1Qf3q^Ss9B@-w}gFRxsAD*N!~b61J0XAGg5*Nch0 zaB1zv_35nO+S*oU^P`{LP1<@fTTVtffO+HR2a^e`b>Ql=+v5{{^tccW-}5~upH=pZoZjJW#^WpgZN5dDs`}O6ck3U4OO*tqRf@cgYv;F0-opfavpj;7>@FD>zzDb6ieUfX$63!I%&EnttM01 z&~EGL)I-Msa}pUsDHf0nD2wBbm2Q@ry~C5)H0&RaPsdeR!K?4w-(1bU^X;d9{FC>8 z_1jxNdF#m6071I3X$sn4~NSdA`^CY#=CT+B0X6w9`01u!D<`d`f%r$Uc5TWXM2Z}2hrAy`zXm5CM1LVY@3 z#F|zI?bFjakaW7q=xDA9i9r-<#|{x$LXt!ZF*1fo+O2ptcST;9C{~&TBu!FHIs`YJ z%;G3&cjHid&rGC51RKaABQOAi_nuu4YUqy~f)Ef02$R-ISd`W(#wdyjB`S-KMBBlOu?* zfNG-h4ekK}SjdookTME&sQr94sq31Vl}03CfgtSIH=U)1n#LkQHg+5klu34y5576ap%Nk-5#n4{(~K#iUY5rr852^D|tDn65624(<7S^+j- zlEfJ8y=yX092ksxc<=u1!$%-w4BDD001}}vFiQYNJS(#`Lr(&r<^T`@0Op2G6j-Q9 z+6~92!;|ex+e(?hf!UE#Kp@E0Ix_Lm=y2om+Rd-5ZEgkya_;i>>}XfD=-lNCu}`FX+9oZd+vNy+G2J*J~+Aj%*6-q?eu&4Xt?b3 zGv{n80@K!@bf*=i?YOS$G}8U^W;(810m6QJHO^AAtV0&ZRc)=c8qsS&fkD5uxlWT& ze)-(md{IP7(hJ55E{ej1I*Z6#-%H`-WVyDUoE%pV?$361rq`d^Ug@?-hf5pDC!>l; zO~+1%-h2CgtFHlRy#-jqZ~wKIp1F4MjkoXr`KxztJazr;_a71g3ss&qQX1xYxz!Zd{%hv(cd|EPrxmKRp7uL@k+Z(j?Senc^#cGf z8l$bXI?@t=1$Xurd#a2i6S^7jMj*lrLE;+{^dv*J%=dO2GtY197 z{QPy(iBAshG1Qc>3q=*`?N@G&caEo{e6lQ`JU-E&*4EPPwf=n0q>I&VS9ivL^!+Dk zjL$sN>aApx;dDK2A3u2186+w~o$Av^d-c+H+VOH$U48yqIU0WY=Dlmro$If581U*d z8S||D7%KASJWG8*}+QHPR*@*LsInc$?QM*2fuyecP@p$<=%VmF%!PNyTI-mo1KTdC+$wF zH%LblcQRc@N}}~fPpUZWz4DE0 zc<3mO_Kx$z5wFJqg*@W3&s~^LN~}ULnqR-Z7LnAxe(CA+KFF>I&dX$gn`$>%R<>KO2zoXRA@pN*!w0Zq2zx48*+n*VtX;E~u_}OQlKRG@9r~m8^ zgz)nDbg{H)qOFy(bXsGS81g}p?zA!=LSEFhmq9ye87PH^*5NRY( zNfH@jSORLyi(E&hF0HNVtksIsEVxix8(e6%g}}Hhoz|)ybrh+z@WO%kxlaZoeRMGeR%j-7Q)!c{2)!XRKGjR2GJC@^>*@_d;jN#Mu@@4N?qs;&)d z0BI#zRaxhP(keJ7g6($O=*Zb3xAy#CV7-qt*Um*zoMed)9uT#Tmw8@0uax!<2+itX zZ8#i~5=QCvy3^T|Eiyn=J5VY}z~GHm!az##8UCIHK`9bM@FGF9GA547Vm29_IO|jr zF$fW|2MLS-1VDH;AqP;6xE9aWsf`7pQJzs?5s|#eXS1nu78%$F2u!3zBiaxc0F*RQ zq-GieXN4&u6a)z(ERF?`K%uQ|k(Ui!qA^4V?+}py1p=I@E}Qe08zC7`bLc@78Dq z4mNP@6)S4!Nf`=m_Em3tBZ&q_2l)^G@RP|9 zo_lHi@xgSmaFZEax`dd<#mre}r_05~jg7ZIeB5azb!Dg3JTmYbzy7<{^RoWo&)+zW zGj%fa!?$+MZ>If0|73Te7@m7^!!dpKU^E%|`EuC+`9KE0N+MDkNJS~t^Lnzh^Z8O~ zjN({DE{Zj}5Jif;oJ{k|^ZH81xzO#0BuQ#74wP8YWj!nE#nR6A^1a8Wo15vNl{#;G z?P#^zc7@yBon1NC)1(fEnZ0jZeMneURm8?<9h>+piQ%1&jMTy? 
z-g;5U)UeKg0i1ULM1-C_kq#UXA&NB#un#N^{aEU%R{AUiL|P$GPz(W3fdpADiXdS+ zosJ2y6_%u<$iFIBp z6D5Y(1>xX*05(x} zGLeFDoK$tq0pcuP>8+Gy?u16usWQg; zdLIBmA#iA5_`+wVb^ZCsgb@e{MFbg|A1aC{LI$CxU=>1Hl#ON!Su|0AGXQjxKSiY= zu;3Y?2+o$K2qGxWDGMMF0s}}<6yAG4V(&>c^F3i<5uxT?AcE8^_E{QYDF{knW&#Yt z0G~TvSUgH2FD7WDk^m$E;zJhM;D8tay?_@H z2u<}snlIj*1xrqAY6>rcmU;`Ryy+?yBpoZf04l)lG|K#XRqCuFdDQxBmPE z^E-dfBUz-`ul(Q z>h?x@a$2?&Ja=}QrUR}R*e)F|gKe+t##+P5b{No?J9c%jf z7cN}6dhKL3+CP4v#NUaYN| zilgDs{_}tFi=r0Y)`uhS1MHt1XuxhOQbx|5TOSUmm37y)2i96+bQWRZip^E1#&GoT zU;tU~@1>u<{$`Z+(Un{0FU?L)C&S^hudL_uLcDzISD(+`J*usGvNL(>!~DvXWNT~S z5k9)VlY8&25Zj9vSGHGsCP}NZK7VfW%2mr=?VrxlSlxW?#()0z|N3W-9v&V3(Zwqp ztya2slqXAH1u~H-Ewg~2shw9!g#c-$lPsoXetbFsFqUCacw-@Wu;t|Hg>!WkZhy9K zqNLsHPiFbS;lcp4GPBb6Z5_3uxRsh>>0$#Ly{t0Tas@75=v~&ypZw&rh#+VqBHjmE zxh6|v1v&%?!7qQ~N}8D`k9OiDIy!byME4Kp^Vx~Dp;&sQ+)kLTuXo22J6qOy<&Y?j z3atN(7ekg)v$y6D5%Vcq^SY_zipty;<#Kbdnr59K9+U!wd6gsKGB48QvYn)Y z7{!S&a{wDGqET^6xRFu?K_!6AZ$W}6pfqcdh_Ii{XAgEB)-E(gauGok4g$@;0!*Zv zMX&%7A~zl4vwu(!f}EiyjM0Qp+fsm=0TC(+jsu}%?0YLK15n}MJDod>tg1E{C4A$FryXT!7o(?PPnxg_x7^z~hES}G-H%%rM1cd+) z1PFjhkef6pGC`BXYOI;WL6&(T02mplEAd>ol86B$PLe!dAhsSoUO;`ib+vuzV$Vd~ zqlX`E-8@emm-DLCi^FvJ+53k#zIt(eMJH+Y@b=->h3<0U&)*z)52q($kCzFtQVydy z+S=^tdivzy@Ms7XZQeL{a_iytQ&)7qV|Pz>KE7|IbMe{rE7#76qUCgc?Q56kdqusp zD{Gx>E&lk<;>IieXP>=MRG&TETRe5WKb}{y(HAxcMVaT;om*?)`SgB-Y{Xx^&{|*X z?LFDkq&pd8Z5mC7lhY~y%x5RJW`{4mxc=2|J^%8v+eiBk^SoNyY`yxkpFaJ}RmPjh`1Y;im!ID#7xP!Y|L$-9?XT98sWMK*Tu<#UfA^Jxlkkhz?+>SC zWN2A{GU#iHG94?kac-pyJkMPa8I9-NRyv%{+fkGm`qCH94Ik|Nt-trx=byb&jZYtZ z^7u=?@^m}hoD2_6_71i-H@7ceo{j2H-hRBb)}N0bJlI{N?exoE?yn4br{m&iSfxpn zSX$_cHsyH3b^3n-vNVKjv#5%GbSny=*R{Hwrbm0V- z6?9^_w%xJLJ$|?_RJTmaV7=P!BquR)R9Rn??De z*jP=rdaVT9Q#aNG!~TIG#3ZJBkNLU+WW&tX!&X0=EecS0Ff0MVGS6q+NqrKj?RCw7 zM>FWQq*5Y?i=5jXm=24;;F%T+(Atn9(8|VM z7n~stQOkMH5?WC!(h)?exkVDGy0$9ON}&(ntZkY|3b8E8EY0F1s-$KPRb2sqQJR3f zm%YQ&G&bYba(i{P-Rdxr_jM=hFP24K+jf)~9ksHSQc+Rm!G&(SQ`c2l*J+vr7DXyc zlJR1hW-YB1vu6lInj7je1B>{;q%?D=N>^3oY&yF8@m=eDlqA8`pc>c%0w@CD&!d{c z0*EMthz$H!W4JRqKatXky%=Mh^U~lhn>>{w=B7<1AP7OuJY>zF91sOjP@28z=L-QM zVrXcAoF)-_A>t@WoU59PH4y>?BtRu01P+dy$Wib_ARL;_G9m&p2VfQiY?`_fSPLiFBC@EAo%iZm?qg$NQ-UDc_gLxW@)w%ikl__o}Gq`*cU{gAyA+I zFf>8eMp!I_)I2vA96Yl4VCT!ZbuMj1QIst5T)ZgIr>Eu7@yR^@(SQDb_~lF2uc@%y zd*df7*I&#wH{CqXPiD`3=Std6#=E0rE9S7c|NcpTqX$G!f8%Ks^<-!A)!(?VeNh!h zkF%5|_)D+6Oo+v77G_7qbkZ5DqT?sGA7|a}V6dvZIX*h-U)ZdRI!=>zFAR^C(`kAB zN?#(qxw*0b_-MYErxA2Ad9qtQeRIHKCa1*6NReN3)gHt*?Lm<=sDc!#b07R1_)aBrhwY z)b-Wg?&&OwjN>pX?9Gc^)GTbfG56Z()8Dv!DvV7D9+b~gx^ih_XMeo2dsKQ{ESv?pyq@iWENgq~&OxgaZEe_l_m2AgR$lqV zysiRdnt$-;Zypav>np8!S-<`Er(gS}XIHw}a9BzZW3+XAbTkbjiommU+9@e4ic%@H z%zaUd_dpSVGQwb@2o)HU5G{{ROGKbZ2XT?6`P`j5*S-9$m3%tuC7M#z>m?}F`XGsQ zouK>5i%*ZHixqH#IG#ZVbxqj_uuYNE&^)^7ipRtptZmiB{ zi?xk@3$v;^WyJmmS_P*DeSmJMUcWj5Y$|Di0{VZfltrz<`Q0QV&O0C=Mi5BS$W_)>MU*7odxa>>ftiq+;X`0A!j-M5 z$!`Qj3bi&>Sw(RSf@N6*4q9m)Wz13*wRcv>QB~H1eosZw8E_!rY_Tv}x3jJbA^1>M zH3uhg!hxGJpeRZt&_qfbAH1)897VnsQmQD*UZ-1D_5Yu${|dHj%g!^gF~*#8x$XDy zF1@?At}l11Py`AfKsXZY7TILeg_>$fEp;eD57e7pr3i((Wk_{MEs+$OBH2xfWRL_v z5F|Xx*Y!Sc=1q6J-F~@g#?Zq$Sq$YdNG;$ z+9{FXr+~P3dUh?2EJxPYq?Gl3HW{z1thCd1UX^hiJMVQAmz8a|Td2&!d0#JFwaD5_ zWOQ9u!8_(~w0{u9RptD%4?dpGM$Gl};tZXQv~une9V(s)AvE3=0U`pr)HDzQ0a-53 zY-~2UcnRY~q!8FU3m^?Jpi#k@#dDC$-2n>l<*C@5(EykAWB@E+v}OcxvdHI)`Lvxb zh2S|z@Q#CL4q7V|7B4JpLjVx(y@b%Llo3gxlEy5?0su-I2p*7-sM!`9V*-c-2`t(e zKnacoK}V6Z4v7>J#ZhdMv-wdz8Hu1GaS{+wy!Sr%W{p6N5XC46bopVP%RM5E^|tW> zL8Ed32>KAtMrZkA7R5?QryH-2fmE*JT%o)^cP>+QPC z*_L*e7lk{2a=3o?)_C{KT9?m?#cbBU(l@R6^yBl**YDkYNg;-EG5`FF^OF<%;>)YA 
[GIT binary patch data omitted — base85-encoded literal blob from this patch; not human-readable]
zeK9Z)5h64NTzS~903~quD{uYgsRP66h=8c7@gQ_$( z=Az~(W=Pnz&9K@yI6CN3YpUliZXF*_509tgS#xxFJQ%H=zp#0HFn)P=f8N@74rk8< zP->RV(TSFWN;^F|$W7DUKEBFgZL7ck+mEItEQOwZ;vjYQ*y2W6SVM^r zx-6(H^`v13)Cz89a_!pTw|)1w9$mdT8f`6S-QKlp1RsX15k(iJuFw?S`O(wA@t2G0y0W*YHk^+T5rizpe0J3j1i4@(D6b%9rI+q$% zML>m3QCTU876B7jW)>77Hw^(XEJ7bx&tRzN#|esrg1`E`Zwo*6&wl7{{onUqeyXlw zoAmZ%@@pUZ;MMyd`sS~C--R<@_~;X#oQ_Wt;b3!gc5`cXyj<(ruIr9Z#(|(HLI}YL zmudoxzH#hQ3J^@P(?qQANdchu7BK)*EdoDq@4XiJ zV9sa+hr2Q$pk~nwNR&l^Fr&B^E-1n(n-|4*UaQ{`AgfZZF6)s`LNX_#d8?r-yJ=Nt zUNW2_5)lSYT_)>uTFr@Ogv69Z0ulLNz!l4&01N>LK^@#O6ZJX{6gV(B+A1m` z1_HBHGA6jmJ)k?lQAAKM1TqwF0FV_Ckd2YpL{%aX7ZD8FE~EQRlr;{k+|Daw&OjDK zU@fdY#3AoLD>tqVn)YlJcBz2=4d%0#_bX{ErGYmy> zJnlr6k;7!XJa_hdSypYErn9`gj$*NGG+`!$&Fx|2u=FHmWaMT%=RS_Fq z=1$e7(>V>xX_J_F?;vM8L4(Qh*p!fQv=+CvMrXD*j!#aS+4Prx_7|r|<8wO~9)A79 z4AqU7ckh1iO-K+ZNbc%kefHe>(P&Mukv28+dDvJ3O9s|07W0$Y`qrkq&#v7 zVc&iS>XWCUA*#6h4@%GEgPx|Nn3EqGno>@Z9mns{1K-n{0r=Y2ss4SJ4z@Z2oU7yQ ziULZ%;}!+WZjhcoqP5W=Y?WWRUIT_y~wjR8KK~)Wi$n=fkQ}La#%@{gZoZPMPSHXQqdagoHGE5 z84=Wjs%z6~;(#n3DoVK%5NT5)f`o|xQg)7&e{XMF572u+IJ?0iAQg9EN7r_FHp!{O zXiuHCQ&0y20wOY@vt@|TH=Bh0UjPvMYKmacyWJDI1b+p?l0T;}T~@JRgo`%yBRKR* z8$>nNUA|K9`#GhbaaYUX9>X;NVh)iJQ`3PlGcw0ekffZm5|St)h7cLigI0vQ#eIV*6;xf^Y)&AKKv85^moh|I;AvuEzOrpc-mw7m> z0BJg#$08Ov8;}}yDAh2k*zNsk)*M68ahDxN+}50m!EcYd3!Le6`sj(h(4 zO{}Vj-~waG63CN}$viHZ08C8DdnHEUz{6oNotWf=9{fQDN-CXzbzlIu#@wMX{q2S@Lt_vPFi^raPetToBX&W@;B8DV@*k#FC1DpDs zgy`1gv?@J@K!m1NRTUv<=%npJ45wzDt#;1S>Dq*y`;9vjKt#j@%mj(RT?f=e6bTI= ziF0sjdBa%+3DrZ~sh6FTAqNi3L?J{%BvT{8qAdLK z00buXg*J0QLys#6%w|d)*eAZgtJQ}Bk@ro$5b@_?WLHr4%VaembWJgmGm_MeC48LX z5CEQk@x>R=(RD-v!4<9puYBMsvh*tS=X4sdvMhJbo@Ij6N?*r;FtEQWFaz?uIz|LW zN;xr7FTPtnov0F#)9C!q>y%klC1%Zv#6+xK*x5&(A_fg@*Rso37zlw80!N4#Jne-I z3^_6}3#c)8>W8lc&>RXh&0>;O-8-M!E&yWPmB&ZZQ9(5q7uVPCyRiQB2mfoadwnPJ z-DTh|NtQZtgZZx4J@C+%zVpTBuK)N?{v<%;U|fcila4}|H?UMlZDyu6YbnS@)J{Bx zcD`&8!B5Y&=pv79nzG$b%=x^A$&&RWcl zr;F6yzJ5rV#bKxL%(>bLQ#`Pc_qJV<#JOK8bZ* z%T&!~Q(am3`xU65S;RyY$$9+9xPp{bvKfE?XjUZZLmvGA00RXPm6RQ*VPHd0GG;>b zC79+eYfetTG*ly15hmz^`^@|n?R$1LGq-XR_pWrdkL$Y>9F*15BpEdU@af-!X&k$; zQ)a5q1YrF{zalQ50#mAjDdiAY1=Nf!AoOX!?(|}&M3_<=7>A=lRTcAg8Hi1?ns3-h zk|9EFn*~@1O!ayU0TGvr#e8~Flw}D6;>vl32GS!;jy9PA1N0#xiUva25UJq6ibmiu zSYU*d2{dQ*TNgorN=7x)Ok!w?MN~#8z(u!4ga(#!3PfM^?sw9^{ulrB@BZKmkNq(V zo9a|sq?3R3t3UtWf5ZFV^%Y-vUggs6jgOAIJyD3X;2aQqGAv6h(|oNChB5j8a(O;N{a)V-a#n%&b{rS+rf^z@Cg<6or{_ zWJyxQQdNkesd9_}W@51jhUUN&afl&wNvgV1)gnYy1%((mixN@ScA*HO35lC#NrWN9 z?2WU50p(r;;=h(--6EodNCq6kYN3Dz78ohUBE(Ka0ZkGhg9vl*>mMnQ3ugKu)%t7c zm4m@nND%l|+}zww11`W3$XebTXc+ zT9>UVX>%P#pli~(oed7SK7ym^ICXaZ{Dy!on}vwP$R)#IG%N}njjD$px_7v>mN_NH zq9|47C-YRs__jB{?)Ye$bK+?SMo89n2@@hR3p0|50EQxRRh66DYq1QwySw9~Sv4#W zU{IH>;qARVncjNin-(W~N9Qh|t?Ftrol(S4l{@FpSFu{orc0BqU1TUzv#jgTEM{HX z#u!x%093L&&!~CAvjKvMT5tch%&Ge9XoKil9F0_6ab~&R+?I;T07Ctk^#esrEWAF!SUv6X;DnR{~(Ktvx9j zPw`UU%k{U61D{Hgg$R*AbM_4CK(rFHnTQb}ki|fCUC(E;+z9|0F)|W?3FNNx^pGYr zfl6eGM2l`Qn~%%E5D9?;Gl}M)WGCKx~puj9(%mzN- zB#$iwh(XoR5IQkMLn;}Q2r5JX_wpbF6|2hdu6KM%_|bpzkN@ruJoor#2UW;js}yI_ z{nQ8kuZRAp|JPf;>g(1%{2QS-IHb$#_q_4>+mAo~(&ug+Ot0P8g$%>>(QsI05s@5_ zFmPEEP1i|Iu8Z~7je!9fFqzu==K7tN@3{Mp%c)zoO{c1)whEp%17Iekz|6p;BFO9m z88N5St;FbVUT;0EJOopt5LB|LmPLhNL>xj0u>b%j21H^<4pl`JB*uaQGm~#Yy%~4< zlxF9VAu*|nxK?RpSmb}(ex5~g!kTZmI068KA|iokRFz>I`lwM5ie>^pB91{EsO;Y` zK;jUBPsO5=RaK1{6`*Ssy2QZCc|(ALgf8N8UMwgXyS)$g5=}eZ55I!{tOPr;DAv}t zfVeCQMjnlVx{v~m(4$!T5!@NE{qfNc-fGV0G8apm_-zM$3@n!c-6>Kbi`$5j(?Jw~ zfWv1#{JSw!KwS*A5tUV4glK4GEb5{}LXM$7$V23!43T45grW$c3}sOkRZ&-URS&AV zs;d|Z1VT|X1Oc@yc`@C+{`e;zx%#P(-j(EX9nKcWDvg$HO4BTFc*mc)^1y?)_iz9H zBOh5d%X4SW&9@e2(qt3m6K 
z|Mf+H-~FvW_{bxVrq)g-$4C47&Fs#4SdJ%)ESbw<{p`+}we73(lLhE}GL0inO#^vo zscRNXUJppvb(xuyyI-l{YA{?0gHhcs2FyUSYR=lF1ZElu1nHM&c4;^9fXR4 zO_dC~PE1WC=dRN%Nps5Cz&cM2P(=h+i$U*4<=}t+fdC#a+jjvXrxc^!eb^j6Qa5-& zM30XII0Z)i!~H>fWU60HdqkQMhz4a;VL(7MF=A5l04N1RG^0Q{b+IUhgTXOTCWWkk z0LB0Wppp?f1xQ`n<|>u34k0jcvs^3}^B7BoMXVxtv;ssxCI-zp1^{=!QDCM3F=;l$ z7?_PRW@aM@3I@munt>dLO#&{{vk<_t0h21^`LAlv6J)H%v)b zIz(iYe!Bz;VrbB$*4%RefVh`Ucz=NIQ~{{!2Az^}HqfMfUa8ME>UKFki<+9bYQex| zV&ps*G!R7W6)0eI8Z!e#K%@+(WF7>-J~lN=cDanIs*0cQ$J2=;x?lu-Y^VAr!%?EX zVd^=1f9k`)K#{;wm#^Kp(KIbnL5PtG0}~K6K{J5f(%I1bNI)bIBV!;>0Pm-0&BBq@ zqz|)HBSb)mA&LmckWvOh%?hH-j4`ODFj(72%f(_bfjr55>a^yjL*N)A5rVoHhByQc z0mvu>Bt|A62t^qqQ&|m$!*V#NtBtZ6m4qcnSv2kOQCyr%UwT$Ucr;ne^UbztYl3mB1!o3Av8GL`#HD2=wQf)k$`Dj_ zSi;uEPz|#fF`2=1-c=Ps2M0&Ti{&Dam4n!xEGM((wGZEQIO`To-dw8>j^-CHtu0bR zNr3G5;P}CZADrwjU@7CYTrQI28~~qr^yRaI;g!vE_uYH{*7{BnV%Unyv@j3|<>Yv; z(|qgj*3HA)MyAm=wl-e(hSyv=w{!9G#kwqBy!K+O3RUU4_F(_kVlkN&S;f}_rsl<+ z2u;Hi{+wSHjqWsvWOyzCIEB+u)hM20P1(V zuJ%$z1nYfcR?;#R>?uX=?PiDw>Qrst^4jVfR^9(8Wy04BU~a9w($uISNY<_cj#U=l zVgecU*El#*YLbm9sN}kgbyb}#Pa>E8jN0?FRU|{krdh^VF~<<7DB^UnoKH^5x+bDV z5{G~s&9tD1XaJOxkrF^crYwjBm|>f<#$W=(EZIUtGhjD!RBg=&B!q}&rU3wwAOK`l zMbdzZP{p3jVQAEk(kQKPpf7se!$17@|NcMx{vX)A`mCrllg0u9P5%DR{Nw-c_x;El zzw&+U;~)OWhkotz*FSgvTi^0s?|RpJ-}+^redgM){?YG#?)vkK#R9nqfx#>_Eu!a4 z1rPuPj!{jSsR%5pCx=Jvvc0+cawG<@l1mIElFWdSl|apy0uY!uU&YiQ=MDf;O1{I- zSwK~x7lZo})Wi&=r=@vXkehRg5Rhti+gT>gDFLAocDZxX4yY*@JL`rJ6cAYiFaY|4 zlI5Ixj0FG;RDjrBGkaEocP!v~<~~2m0mgoB?6z?}!t@2!2mz4M`aBpQBt$d<&4x^b zlyfo#LdvQsX8AZBcl-@L6Bs~%*4Lh?lsh=S+ewk3}hlRBg54%j6eY_2T*{3F)SC;@LhlT`xaTc zW=@hdC&{U8m&b?4?XuBj(>06vY%!ah45(loyX;u*ddKVTdCM0M2Ju55`ry&=6o?VIEJK#On85HX zumJ;0BnIn}CQX{d7$(d#sADL&>vC2Y49cDgrK#i~SuEMbdv|W`?ze5Xu~i-%wg^GA zEttD3Im4hX^Ri8lXSe40{&o%Gt zySL`ew98#PPgPl_49n2gWw91%yDIkQ3!ODh)7Ay2wvimB<72b)WGqNP#!O;X#E{KQ z3<%ql{fpj}NR}jGYFQ*n$|(a#ixxQ(wS=-INj3oUhj>TKD~xpAG%av1d;B zO>{Z{_+;UIkmW2Pl9~~K6GEJy=L`-5Xi`(e5{!mx^>}=uS$!5qLyW=9&}kH@%eifu zc~Mreh(*^m&9q&v#So1N2nZ~L5COTd8-nz<1pr_vGjl+b+!?5bP%v>u0Az*id=O9# z7&HNzAOeyFCg`AwWJr=xz#WnaA_OKjBf=0_M5Rqdp_-zx+dQ`0DpoxqalbfAX0}KK6$n`{Vb&_q|{7;2U0d;hTTs6CZ!% z*=L^LKg@u@K!lP-RWWER7yz?mHABE;W&nprM|gY~hzTjR9r(NA;#dSj5)tnORfT*p z3aBc?oVw2K|NHf?d7rKbK#ZzN9v$V=4fa=yr>UI2*N$bsnmKdYFS~u2!zlgd9k~$1 zy$ldn-|RDIUM-M%=Cr>eJl$RqB6x-_K+jIw?yY9UrNtt7Rwjd#B(xrCbMPAUC!-nvFN(l zY%y=9idnvo+HdY0A8n_6Zh)jIF90HTqE(0;0w}!ctKU;^ zZ$I{#Kl$K?Ki&w=rri*7QBY*KaE?j_0X#_w0YeCFlgonEM#JH#tT>hhwOM8-9WbS& z+nd$eC>8}2k&ce%Imz=c?Y-fR7yjf^S9dn5JMSE*rsaILY$Rk&31%nl`JK_Zi)XHX z{-_?%$3FGR^>gcA{qzOVhp%jYg_o!c%VV>O5c07a1wx}+%~sF9*J6tLPD@f|x4 zzU8%FuJ8GC-~Rr8`Gf!94|Z?;=CAxly(`G7Ls3>u*G&%ho0Dm* z3j)!uOF5-3iR6@X?wX~lrj%53_V8a7Njdu-Sk3ME&D6z)sq6GqfNO?kNp#hvgPD=X zRcJ=H=M@L$6hOfg-7mt#9CHHp_^qBU4r&0}8;aSAy$Y|;|83>v;;@mSN9!1P_8EZp zk_;jm5!?;9pFIHC3409SM*`BaX{tCViWnj%$pNB?ATtOclg|Ta*tAWos^xM~3;TRH&jzs3y79K$LP;Q4xqmL14);hk%M922fBGLS#f@ z#oXHtbUsmqU`bW|WXstw5(YPbLNL&rAQb#f@BiB8p8o7F{D+^^r6OrTWdnSBcKi>2 z@*n>A|NKL5{N_IwtmVfZ`|#e$zyG;kdg#)}|K|66)BpT6Uvc;P#;DtarXUm3}0XZNkT6X;oqvuUY5+frdV#+BYA`!vLEx^Rk(6VAd(k3@K%QP8-;7yM8j6gqvdQ)r3hxpIj5ARYr3}SFr_n_<>m9Eiv!Eo zZvOP&{DIBuhnJ$AEpa`73TEaOAtx;WCz{vpICIAvzX*r%4<7l{%eTg5QLS&4!>s_R zDT;u}+BQ#SowLbP?$$TL+4Gy0x-#bdqm$*bRSZqa5sG0|A52bSu`w8id+w^Ae_4)h z&4cOZK6`a%vpk+I?!D*Y_U7iz8{-gYeQS8{y&EU{Qw()6+Ss{Lk2cRe_|O;qogeA09165M#4US*_EY#fZYm@nU>1MTMNY@e!Q6JRpu?P*uaxNM>uJwHvcV zH6Kep(>7p5Y?{SvJl>4kX3*tqNKHyftZQ}Ib~#DPId!RPI}uHiQ`e;~<&-pwYW72; zUj;ce({vT`s2==oy(>dMaQBV4nW=eI!_(Wu#AP%hW@ywelYo>#!N4Fh zQq#;sQH_RcM~kt!QGoPz${5H@!B}!?n}%Zuk~HUnL)R?3K~r#@yVgwIggT|xz=V)Y 
zDFi@3#Y{@TSp^Z*0CGlz04#=1$OARwfQ*RAwaXH?h`}^lWMwoE0k9%KD`?CL60D5G zW@PBR4oOni{I`GTZ{56p`@_HXi;(5uXbga7&khc!?LYjF|K{)g_uu#Md*6TK+U}R$ zdF2D=aQHa!vBJhwfYEe&j~?z)`RASDTrC7ZLy0WF3Y z0;5V9i&%2EYz;xO`Pt>AvuPT_8W6R}2xvs|ilwX(_a)KHjCyx`W;TOR6vQOfS8>T% z%*kcy_C(%w``qaT)AyQm`aS(bN(d+hUNB7z46^q3J?%^d*g2KVg6=SXq@K2{MYhi%>rUxxQf4{n)FXV1*L(71{_* zwEuIhtXKWjBK9$__R8wm{8U<<0GvQ$zfY&X-YYd808Xh!$h&+JHidfES)yUE{Q~1t6ZD`uY3R3(V26H$0yI;c##7PH-{K-xJHXb*I915Bmx`T zk&B>NV~FcJ8+p;pME0&vmJ2Y0qRz2oO){M?fMxggv56dy@%*K=x(?b|S(b}MYXDa+ zUue7BE>x86z4Ok4@d+VK_9w$wefu|k$9I4CUs@ZSqoplB|I*&A+s#Y6yI*)|arI`E z8eXQZOFAAmCJka>=5heJOJzBSkXr<{C+iPfdiR&!^~JAw*Bid%laD<%U9{U4CX0Dz zI+-rTa5$`H%jWRp7&X;{YWLRk+~u9oaIKB&gL-?jo19x;ySaO?Jf3!E##|1{VbyiZ zqr=;0&TXX(S**=@zG&Mli+R&Esq4Db<&;v_rPOsft7*y(&G%9fK>!2IJt0VXln@LI zM8tY;PB4Wmg6Nl_8K^IKZRG{*om=nsVBX_{#nHI~gVC29W7_k;nFzixi0Md7YC?a{jh*hCoH<`6e1ni|5O;YMI zQIMRgveYb0prE;Ji8%rqL6-%L0MwNnV5T1MOhLs%Tc8iFvr`=jSRWATy|#i85ZKD2 z6jT+_Rpw}5>P}_8P&)1SdtAvOw^IPoEhJ!tc)&^&=0%izuj*R*el2XhW4|duFDF}F zggxVO1+Mg`i5F1mI>&!_b@k$k^4|kN{o?~6BRPl9p`YH?*Ue*BKY_2D5Vpdv08aVP zt4D=bA9bE{wmKgAqpt7Da8>I2bpyU~EUr$2V91^%nIWkvG838zksCb)M(917(9n-F z5%FkBFmvXA>K5B|7?x@L>{FllrC)gIcYk}de{%OmtlN~1+M*(3EC~&v%R;DJ#t`c5 zH@#Cw7j|!*jLzQ?>d#N-(=2*8&4{qE&4aColDCSYUE-{<^_{_}2sdxfuia>3aAXn6 z2xKJ6O_Pf%QXr#5#@Z5xINqOcpC5EBT)aHm*{)xB>2^`ZV%hBN@2wYWjZQYJ;oI(e z_e{rLu8z29}lg~b<`v)*SNwfL zwEV);S0B6U(_fM?ER-9DK=fAtzOrYuQ?F+#Rv?csPYEf<@Yw-;A;n`KuPWe0UE zhjl0}Z*D)mceH4x^TnhBNO>LF_IP~UG;Oxfbm@59EZTOlXxgrkq+Od#E#<7a7rz_& za@Q1{rQnu6nyshjgSlG+imIYF{sso(`drOm8QdPuR8&lXPIp&I4r-bsK7igU`HkZ$ zvL3)8C(>Dx20=l=gN0pd@0YkM{Cnb(-!McD4|{3oyFVw6_~lDxs*y>$7TNVG=+F~I zchT(`O3a*dq7clK19PCZ>4+*s0tJ8!L?K2r0#yQPmrG_Gjn+6)!PL&iu?R>(m62$< z=u+DnjEbtvoe?7k3NfUVMJ0y7MU?CZA>?dEs48VF01%8YAesU>9IUOX1vW6lXiQ=r zM+_=th;@XNg#w`>nPvne$|jS^Vo+EA>Zg931r#}b%|4ypivNhD_x z(}FOT!=`Nwp)7-!4JrZ$ZrfB4qZ%2ereQ`EY;zYvh=2krop5jly14y3qD8<&h}QGR zC@}P*A-69ABxFRl0(DD22PB;x%!CTP&{x5SD!-cSw3+V#Cr43w9|teby)`=6DdzD? 
z4fo=^Du&pX#}&=Sf2yyf2&=IHS3W?rS|1=Fu#u^dnXJlo4{Y}RKdw%dzMTg*SO8^m zEyd~g^bfl_J6<{9`flC;Tyl-8^}^{Y+vrvB&}#Ysf>$2xbQT|uJ}VhxOQ9t*?)d<`(sQz4@2F>w6wJfA431=9lUD7uJs!GFzsm$(?j!DQhNJsCHQ* zVqi|zW>h9wc1s`)MQC$7Z<|;Rcxy15HJ|(Rr$-Onw=t|?rrmN`3~JF_1OneISw0-^ zF6NVqbrEBjkHIFvpgBxA}FmEgNC+}AffMny=5~5LWGrQp~p^PwSiojUn7u; z3KJDorI17nK^YjpRAn}w)Wup(5ST*}L0!hh*zUXveKo_;+YRKpdy~l#TGQZgAa@bD6*M3Uw zGYnRf!|IVvkHyu`LBCh31-~Wug^wOPZn-L4>WTI`Cz;HRYVF3}q2(|_ot6bHOCJ6?1D>o?ag>CN%v+L1-jqEjhFwVf}TB(t`eG|M8!L0JT{oXvtou*j?#vsp%f zK*#grcBW^}p4&Z{Ucd27S(d?~um%c3nId*Mn?cv5Kq9#*1e z%QoJbK|Ke z(;~HT*%?1~eE2W^%YXPg@B5Z7e(=7JeR?vD43eW5g#c)pBov`b-R4?7S+u>~i)vsb zLP;74V^Ot{+mtzSQ3O#F6*WjXM`p>1nbZ(N98`7NbO0a_I?0h4xL15DfF!q(An`dA z0XZNdg&?U@&*^n*Wl;iN^{YrsE8LrORkK0NO@7o}_`RkwnF_BeT@RP{Q9Rzjds_`X zIAmt#%OocntZJjLK~Dh@_5ZD2=838AO?ojFiWwkK=ywCq8*P(k0Q8l&7r*qi8mSLR z@R5Mf0<)r;h$ROVpE!~K z{SANq{TJT!&U$<6EpL9)z4zUJ?dsbPj`ofYcNf#i_+a<=XVlvy@7`Cac2i1M| zz4kl4=Py0wph5#AV28dQM*Sqj8)n~GYBs!U1H6p)mPqgiw7);@?1>pG_Xv~9bbQW1;Z{2h^k1tSqF3NDzzikv!8;6NcnX3D8osuB^fAEks@LMXB4 zr}XkYW+p-;0wHI^8v$ycW-h9IFmOLX0+>g5X&}ldV5ne>JpqT1RF%1RNEYjTd(0ev zG)5=jV~<_-e$ws)LV)Bp`T%C4NWlyQ6_6ARObG}P%tX!ef&G_3L=0*ef{0KrqXlME zQ6~1^!oYef9>CQK3xIupj=h?S5JMznNf{_+U$YSbpeb-*$w}3a`}W<1?yu50^xPr@ z5SIp1KNL7x%YFPNTJQMY4*?1*aI2qb{F?(KkXp#nMJ7my0TGF^LGR#&3JO6(9Ldq$VO{0;RNzoESIo?L8Bj;6C) z2kq`|ck^m@^_Cp%P3Fz!T7g1)pa0~if8ynxw|?T>>))_(*F%GIcf9uQ`|R%fRbV!r zw)5HH!QQR?lY`ga9xo14KAKN&@9p2398XiDi{%^@rn9LZ%*j9n=Cd?dAB-o9wP9Uy z(58FfwdX6Ql1Z9&G^h@bW}Bm}Z~NA79ni)Tzwy!0`c9;(DC&6rY;ot=HF%A+lRRHE zhlk^%6Y1K;{&;-j8ZNs1>xa$JVRJI?+9oZRZBAY45YyCh129k6ShH9xE?&7Z91M=f zbMw6qB<>e#Hl0py?d>O3Vk|?MrCS@;YiCBsFN1VxYq)mT_NCh|E|%k_i1V_Z*JWIs z98Ky0*PBFCgs{lVlvP!P4Bh|D#MDF;(A=}C7s!*k!rs(!&fq)^69Z535=6@>i>Qfm zKt?ju9;{bGMGAyHy}v(5nAv@$5nM6@fY7r)M5*5>su?hlp)j$XWh|o1DrwnE}XHh}tZ}i5!(6y=S zaxCh4SOOXv6;;83rA_mb#Z1N=W4ml1AgXE-H3i{%eMkuPaEM0rXszDbtk>45D7h#@ zIT#HGF)|U0Lf7WBT&TeyMr%3jO5v9-S5b{S2xY2bjSLP`PxHX2s*C8$7AMo};` zc4v4Lr-UPOyA)6KTFe$0n3&WoMs~GFAWk_0^e91JK}~CpfCz*tT804Z^&H(cGj%!k z78)p7O~m|~iKQW!q=JGOsN@hs>T=-VV=Vy!yU%@eP8pJ?#;chz!>KgTCj=x2Oim{w z3{E>zC3IIULt_FW7a0?Px*Qn{tdCDw@kRjyI}g;qZiOT9&VA??vMZ*&A+Uo#gbtRX z`t)Z|#DM6;+88)QqMnWy0{GO9;L)LAz8wL#DCz}`y{thvgg*1#4+wYB17}aKz%E4h z&hweaaid;p2gH!Vuqs1{j0A2Mo(&CDSFo3YI^C8aCm})wB;u}XOxiP}qW#=QZ~w~A z4{zMsQrM_yw8rb(#f2}rfB1&il&^h#8@9yC%?Dn4_KoZ9bT-?&DMts3-RINpwRCX% zGT~jpZcOI8&wqaJ<}-P?GdzFc{JjsKxo~NH^ISb#Uq2f!oxAgO`ECMOG)DOI&=Bb`G@WdA>1?P zWYaAd+i)!7!W`}h5#8n zfYt(s%-AivfH6x3aJ9Mtf@JA!Y|WTM>RK}p6*0+O)|3!UB?l%8CMl`+=VqYBM5~}} zGf1MDfc&BW026V0a8&~&Qzb+IHF1>E-QSFfa#lt}Llj^PF_KvZQvy`$*LJ1`ZBj7+ zBt}VDEfJcE8lhtgnnlGj0HV3a9TS;p;65?QM|wuo+?8d~wn;$=aahGF5->?>{H1p7 zBE}*HuIrFf4iOk5Fc}CzQPl+~lx0-SWDPT?W^wfLEsPa~0;s4*8(5CEwl|n#J*bA& zAO!*tGB(04X)IfMwAA(>*%p@?-EV=zTwTQ0j%$$PgB>N6WV z8v~9d1vc$v2C=Mio5xMgvsRhk^IhNdxyPS8c<~8nmRmc6cCmQo_U`9zJoB~hd-q45 z|H7k_2}j=87@= zMTZc|z=8JWS^BLa#@2n67MWEcoj5{@yaoF$(^F9HT;Wf=hapi>KghB=Ef zbPa&Gbkj;n9hio-frthUW}rzz2w9Ynljb6Fk_=!*prV8phzzi46EcWO?|l!LayAeI zLSjI7cs0!yh}8g$7=Q@@0|1y|5u%D9DieuVN|}kt?P0S3GZBhpQHe!QNsugnB$Al| zGWSLfW=JHcV#*u{P$UHk3Y5f3LIf)Ub)5zd+ON@#0;>Qb_n2#6u(@|$Q4M9CB^9CY z;3XzjmEKDfu$SlqsG5-xf#bReZno;yUPuh#{JEXfNzZLk(X4)ksgd)51t1^-0mmpx zKmn9k?817nIezMi-~9QF{XJfG=ZAy!HQqQg*tlb-zI#qA_*GcZWuG8G=Y|%{Tv&r~i|7dsr@aEC{VE^_@5DLJ&a~Uq3 zJ+n9IIxep5O|dANCarI7k476G_`okd|HM-p8^e1x9=`d~H4KvSLZu}V9>j_0eF%Lb0~9zJ14)o%IJIrwA<^A0OQ~x%%9-!{hPO`?qhLoHR)y zI?m>rcm*abI7v5|oiy{NUR&>`%PgiqYv<3#>Rcmo?%bVMcJ#sH$>Q-phQ&ex9dymc zj!>*Z926sLwG}E1jno2j?nIJALJZWkZ6qY406;MyK~9*uPDDXKQf4Ew3|&V6tXdYK 
zo6kU$O9u16RTZ_Ik|luvWQ>G4CnmD4iDluLqG}e(NI<<$ETSuZi4gjbFH=*=g6MO( z0;uy0RkDa?5mw1+Xdp=p0G*m-8Vn&&n}rDkgo%(54Fya=3;+!>XjZkHbIRHKB~foy zoRB9);VNh%>zKuW*pNuFWDzsuP)L%%!Bxa6FdU9Rwawk!mIjEdA)u;|N!N9TA;gFV zfT1i0S$R?qh(lRdML8g@hwGchWhjeK1}><}?ZOg=5X%A)yEajXh8$x>vEU+x7-K!) zP!@v$b7U$ggh7RKOZzvkPKV`-PnJ2S*{>|wQqj$lOG$mGJ-&j4(h5JR1}B+l?>HLv>et1 zSZ#6YG9tD~LQy8sA_Nmll0zA~ltGjT%c|;3m>J#u5wH(|&K6_A6uR7oP^R3KA@+XD z9vcN8+fkH;7{}n0c zB1UGYLLrvHv%mnsjEL(HH7OS%OJ<^ElC%1vmD6XIb|UL(d?4~Atg}&ND4qGW?e!}pehLY+v`^P6Ii|KrFJYP(^+3X1FNx@gE(OOYdT*r;UFqAPCRV?cw#(JaJIDfG$ z?=B*VT9OnrHf^c`9ZjZBKlAwpmpkikZPD6gvlyRTym00IJMaCS5B%E8S9gmFn~Mv8 z;>N9;W@@OUuRKh*eP#K*?Cg5-4JbYs29ngnC$%yYD`m z`Ap#5tQqZgM(aOq=C$ZTk#D({*j`s?nu0cYew14f)bTuNfa7 z-+S@)?WZU=|q) z=1@iyH7hut}&&Y zq(y?X?An~#CeNB>w@f*im}%FfMQyp3gp@^xG;b7DZ@c86VX1DWZ`P7Zw>o4vu zn(pxE03alT^5UiM{2%|^w|@QCTv$J2Bu9&+N`K{B-}Ybr#lQWpKmDICj5dnp?0jMK z+tVt#t4icz*>+C8^wyWqh)gX7M$HWxre=w;^dza}Vo4Z8q^zTvdWxbN1ne&}W)94$ z6Gz=el{p}W$bG7+bokpWc_OHQc_rAU^P10bTCaUg&)8ffZwv;cG(rJ{x)98e_@1!gSEn7X`5 zW5N&^0J@yZSTIppg)G^aEQ``ALK#4EE<+?FMbHefh@coagcxE<$ufY&7{O2^Gc$3J z-ltTNsZUA(NVzFO-5V#lGnV%DcFw^I!KuH!b$cuVpu0MO+Z%-f0my_5092qXf;00d zP*em!0YgM(TFj*?Fn8&~T2W7)fBIMcL;JaB?08uZ%ANBOtFGECp-f$Sl6NO2WW*G! z?%5}%XD-sk2jYFNi|6kNo7;xWRT;|yi-2{E8yqnp!5Y#9#D^3RG-iT03;%!k%&6&+KH%! zz$F5TfLWjrLZC>QFd7W1(b{0Nb?)58+QxEmBw{F#wPOfI*f!m4K55!H0Wx83rES|# zh1Y+>+o8kZ_SXKh_)<&{-*;sg@x_;)Uo?wp+W_q?XD88`!Y-_@4H4-7N7bJP>zZX} zec-d!+Izq6H=Nswt}~t12s}JD%yA-uK;mueJJ*{hcg1qyDIS zBlE_+=bmrxwV(Aozb6DHNG+<-GfSzZP9s?*bMqzwshCNzR^3%gnQB$lQgbO&&f~b* z<#7-omq7?M=V}$j-Cb4T#cDGG7Oh3A)>>(Aq2NFgLdPQR&U<`6OB9h50tG=5jxlzr zi}S;DC4?v(V~jC$2O*{u`xIvq=wRL@n#JB?BxWKZzHFNS3Ah7wZS>-hjDk5KWWbp{ z24aB88O#y`B@RKNi7yNY)WpQ-kJfzlaDDv3 z^*{BW{Cj`=5B)0-zxl@3f9dC^FMn=&<8zk%ho64zx4!uJ!{VpV*H7+iU|w_w$4BP$ z@WCU6k|QKmts$V59X1Sdj1rh(YDL{3QCPyX%M5_MPtnP!IZZfdt!&Qi8M(^@4Evk1tVsYeZtAUq7)Zq|zghr6~8nPRqQAeX7YJ;h*Z)0jm##Ngs7 zq%svDUM%`NWw{#R$f*9Z5nAaoFXQB4DaDL}mdqZ#DxnS*am)DTW5-?OJ&?VJO*D>tr#CwEA|Z zD9qfIq97Wl4BmfR?q&SIFj1{QbB4vs=o}GK5Fv&eSQYMz?tlp-tX19HCfQnJSqNh4 ztw`SV(r`flgNXqZ7AqFO*7s5s9xi2G^TOH;p~6!3-Vl$-)$Q8S`;%{d{%`!3<+ZQ$ zr~24pcKgZsva2z9f5`Ajr_!4`$U+F1$*y{-oAYhCIK!Psi)WteZrrZ5LtRMM$39Bn zcGA)w7(*b0j`~@|@s-<;Uonu|hs{vUC)IJB*6Z!|d^@h!!)7(?*4y1~vm3_UdN+;R z^D}bsiS6y2nU02)y$z}fu&p` zW!pZ#u?;yyh=8bCG55sc)u5VP8A{_2F3u0Po4%XPQukjJ$1TWW zkm|xweHOcbR}6lSehg}l0RtG*Fv)@tV9;!floh`RuH6LF|I)O7!sVd|h#3L&_= zFbq{#L_DUDVn|&Kfn!XwS>N|5_7dZ_t#`yu?#(+Pf!jY4g+pvyx%P1-Zp}){MDCgP zAERdC1`$D;E0Nnh@Lv4#(!YJ_=x!Y=+{}$cA<--y9`x6)9=+vn&tv|%fADh;-}vfN zPe1m7S6-P^$E=_F^sk-1bN+Ari9h=4M_#myvxn2yU;k>}o;>>M+h6~}*Is$ z9$Vm8<@n9J>^6S&E6(#7c1oC--XAO-r68fadc#GmD7B zv{Z*pRn1Kj3uwI>XMNvyEJ4h{#Hs~gt;T?as9Lld8ZkaL#h)2L#2Lf^iDRuYjiWnM zz3WnQIi{3$yJ4|hFjKASOfkiUgmIdtDaRBXp86i}z&wsQ&btspZR-Ibdgwxs5ay{H zrs;6euZPJ>Ddqrs^{f*NAxhO!t925gTBcgOxN7W@u*~~@(anZ3S!Wysz*H3E?Gkjg zKE)uSRWT$o|CVv5r+Q4AoerRr2DN(ka+ zwW`)yRGFD2NDR#;<>s{%7IESrQcY7#jlCZGpk~BkaD{eVs<}8Qh_DdM)y%85KC`Zy z<(h@OIjbtZ9YSYNTiA13i|>a%fZ-xxpTVz?AOTDyu@e@qrt>Zx&k@hQ{1bon#||Hz z^r!i$`Rv)}=7%>s8*p$OLS)oRn#C;iGg%(Y7xT#7HgomKE>6$ZpB~Ro`X`>~uHOt- zu7}w{PV>;uyM7kCM1cWnCX@Ga+|FtZCNIxYKf{dh@Ff_zW+P<5@gUhmcy=Z{Vv ztu7vwA#0h=AKklq@2z1w3^hx@EV9V_^G5G`^@HcD^J{l*?cRK62JB1!TIhn2RT&aR zTAZmBQ@A-CMG=WJyud450y*vdiS9()<(kKu%l3S3b#XZB^Ku*~7IsoVh{G`HG)`oQ zsVh>6rMmSYsgmY;x*A^o-j83uekETF@85i2eSWq+Is5wS-~7UFy!NFpz4ndIedXR~ zeq%juD~u63q*)*PzV8Ch`&l=e^?jNx4u~KW2*EXkPK2rJdZEy#$Si#d!W1Qv%$PaE z01z`bg(SofBFDrbgdics5MukkQDO`fBt#BiX_h(cz1IW;5Hpya-gDuQxskEMjCgPS 
z2SCn%dS!A&>!bE+s#;sqv4oI73YkYrkz?f8F-sEEX|XSW}_mfef1~pGfMlW=0;&NQY%@v=bO_Kotq%KeZ2mQQ@_V=tb5_4Qx+>3{qW|LTt)oSZDT=ZEUE<-z=eFMQ*l ze0sC2$H!M$wO`t(wPYGM>ub1vaq-~EqvJdC#mni*bC*o<#D;e;yMcs5 zl2VEl2U{ui^kUttxlsMgt{)l0CH7l=8&SP zHnwglMYZx}q(Fp7$jkr%14IT)S!W@J6w_2nRqbMnTaW~15f-jaYF2ag;EQ?JQ;6lk z7ysH{?Y{Z;(+BhJN_g(+baB`qip_E`FXKYab@iM-oP zU-|sz;$m}lIt;sXej&4i?&xZFaF``JN$ltItA~a2{8;o<4r+)n{N77jv%5#l_)cw{P8f^2rap??b1LE;iG&*_}<} z*b2x}j9n)Zg0MJ*1Gjjf0ffMuh*C^@7A1q?J|gL+g`K@U1%o&lkr07cnkl(KQCdLq zGM3ov8q|`(*ki9824YfI2+S1>b7vt_XBLBrv=k^qoJ1NrO@xeNU_xLgrpQjrA&@{M zP)dO#!c3A7i-@8%wn7-rLRw7$09s1Uxn`@QkGYh()|&UE=W$y~u4Ac_P9wp1nAT3Z zowA@|P=(0gEE3f$SWvHIshf3{qJ+-aj;?kGR}Kyqi+d4p1O1?W%BRYH1^QgjXx$v>SH!@7;ZH_w9S%dTUr+ba5&&kxlRZ##>jf zEFMP)M+a9-zx+>s<|qF2e}3_;cW#Qja(wvYl|;3?v%Wukb^Y9Jqg?96J0Zo@BlX1$ z0mrwO+wFS1SmpZQpCy zCe>^<45N-!&9`~&)u){2^BFlw%LfrP>^~DKnMhmSatI-hr3);9OD?f3jh%9?F$4ld zkr1V7HLU@xfVh);V1~Pv5<_SZp}tS;dT0TZ`C?W~%T#75HtoA+H&sOmG1Od(R}He- zZ6&sLXm^JMp7qfX-E0~&2dPCHX|9^@bga3uTQ1d@(ws>UoZwZ96*GyEMCP-or7XHm zBu%9XAxeb0Kw^ruYWp97a#bPSV{=s9xvDn8r;fF1l^|gBv&6(DPqpo9g&VpH5?WHL z?>kiwy-c+Vhgu7OuIng}nb+dv=tCqzts0m?Bzt4EC{g>#qV4Po`+ORK+!fST$w7jH zVuuhAidHm4a8?8l5?lD3fe=tfi(TjfbxvRZm7iaI?w>5a@y_#C=Fcr<9ETK(<$$Om zMy@AK2 zs;c6|;95%snHd&5mI7@)2AYUi#EX`^!?c4cQLt)7s{{f^uGpSk9NjtwiL)!I5ojj0 z(80~)gX>2xbaMs6t+tK{Cy;^Mp{?@Qs#dKIHBjrKbBB-_oJrtB)?U!wYP;Hm-m;RH z#glt|+AqJ zA}P2NVv4h5W{{wn^Q!20QOZW^c37S6wlcn zOxe|}N~pv$M;|C2*x1eNs^$5tlg?ttoLr|OabYH=qm%g*=^{Jplmbid_wu8Mo3}QV zK!xDSshh!R9xwy?#XNR_J&Ht`FTowU!`ZFt)ODVEN=atX0~MW&)SXRrH;rDV&1!RY zx;i^Kd+^R9RMpK|EN>q?N?eXR+_`#veYSr3&f`Duqo4StpZb;0{Mx7f`~TfPd}7hR zdVT)v)vMF#`Fxkpcf-8srkr10b_A%}`S~~!@>JqX7DisR=U6tAB;Gw*={ASh@i^Jp z!w()EeCv(P_Vur=15b0t!TiJD{T;Me&Trh{K8&Gm$LZo?v)yf`oW0gzJ?w^|4AV61 zrXgExdVNu7DW%k^*-Q=2L71G~Fq_R_E&-(!LaTNptyLt{TGY@SWf4Tg!d%=UQ<|s7 z)>k#YY>rDQ5>Qpyy^AphZicSqVlFJDOsS9C-NeGC9zqO(Yqe6e4?%>z)G6!H(IT?s zu@38w}{%G8|OZol#XwwOqZ_(sywdrQ}+Rr`Xl1L*AvnCyKQwfl>%H z*O&yvX`YCXvKbJWoGB24s7^V?6h!h=YOO@hQ8@54mIgmmRa55>QWsgotA!K*(vsC} zF`u=B92}eNrtkZdQkq3oi$S!i5P5B>5)T|f6j(SA2#fh_8YkgapQbDvLn@|j22cpB z>L4aq;LvFu0d@CUyc82*G#4Xr$+;7WefPOf{^Zww^6$Q2!wW~@$y>M2?!M&-Y2MYb zl+7+PcOi9M51L{}w2|qZbH~A4gL&Zo@F>m~i(60E-NkTnKdXQ922X#Y&7_rM(c;Pn0$*t1>%jAvUTn;$HJ%TzQw z89O`2&e(4~`^0bi-9Iu)`h~~-=P!QcGk5R4y`2h-gB!zzBt&7;AS@jlb9JrWk``2J zt+_BL1W|V_N+QI}EIF4z(E+fpH@jTb*_?P+E5NW^#%a5n##|jF#9iOP)F}`}tHX4D z@#J&2iGrhWA9P!!Pe}x0VS;KQ5~l`0H)paILIJzEq#!0@p*i%(-CSL*J^c1xti31Q z%#E@43&Fu~*%L+fp>ua@L1Kru&1FOWx$X1y2#p_o8GZq`TB!EKAXo?lqE`C~fQ=lC z5*=pjmbsQw#6`XDQrSGxi;FIl>HLwc z^l*0T{<{y$gA0msaqdFV*LVcbrd zA|E_tQ>Xqo@tT0D_2Ar84cGUbRfM)+vv5%+oY&H=8mJ z+x3u#aoi2HDi1LAQq7?i#oUT&$(fi-wHTxp4ZPyNdO@HdwCPo9|UW4Gsz-HCi9KK0QjhX=#yHy_2+v+(q2H?4LZK$2sfy2V_s zT*VX59RI8T7yi)qdYBm$Bn}623kA9n z=sSr579w);*`zdTD6d(1|evzXlbL+DufOX4kQ4? 
zMXji++Tp>K8+RUm>(RqphH<-|hHWkNshd~-^q=|Dzw?KFw2zB%yAvh}UEd!~4^H2G z>nqQF@5jtpy_gw_LR|na)y4wVI!xwXb0%kw!Yqga!oEOc&rV?oG?#tL9lMr7Xve+- z*Ei)_^^(u0^eAYx{Foh3;F#-LbEMG=Rt& zwi8~1TK1Z?DoyTrESrnOL)?Djv8x{DyW^V+?z`IGwth~06oT;lKu;$9C;zZNz1ZcP zDUU+cteh4$x_SM|S=~tAS0-6><90LFadC8f{`xyuO=)grcQK50+|sztR=E~O2E4#g zT^WTS126#rDDVmnAchPm1_;nY&N0s62huI(F{f~JeDL+vddTrn*0aoEarD98^Mj9n z=mUq(K7HlZt;5H!0U~qBn=#E|K$5dfuU$O|k?-%uVbnnhY|F$E=x5R|y3{4=Ws$nKUw=^5 z(k#^}vl9nVxEsb@H3I^+L!O0h*`vsx#VGFVjW90vr>tuOhrUoEri%oAGOw)g6|W*>_ELy z$g~o;RE;74c~yzA=>FBbn5Cg;&BfuJa1f|^sn*yJs^GvBgo(APtBD8`b$xQBDHms| zRf)Lu0>>cCTvbzw)pVK)2kK)qHJA~L=329g#BrFUlUbi)7iy`c)K){^K6ESsvml{P zWhx57!EzxfPQx(eEF>H_#;9r~X8}UuJeGbjn|Ga=J1CcIN;+SkeQ}z; zan{|qb-3HzcVfc9!SSu-^($9?%hO&g-+goY;NF8Tef4U(Hn+Kr@$B_?ZhSXh+J-La{tP7s{`0};eFTZk={1c{T>Ql*^p>D@p% z)(=m2n~U`@tcL9_Pusk!IS-{yH4kGMH@nStwb60A-3{B#YQ4F*Sg$s_-6jtJa!NO5 z{rCOg?>@UYKRrFIbvVEK=3|Q`bq8u}&F=(bitcuL|6Pk97FTahUg@HiYUU)7_D#P` zrd6w!I<7V-7Q4tHguZI2??>Pwr)j8+upW{M{9Xr z0bC7gR@JH^yQ*r|TB?p}YPGo4s?|)@P~A+`yjC_;)z;qvH8l#{XjVcnLS=HXk_?u& zR(AcBYw1ct|8UG(#mEPbxlOyXH|ojh=5#pV!}|GetUmMQN_9>3Oij(XV9-j=C^%R& z2BbY=V6gx*0|bF7+UyCU1{EuqS(3$c^QG_o5C3C+?1mD(jG+ML{z8p)i1vG^z+a3vsoi=P$T5*$sXS7rS@Je zg72m5OV<(YLE3wXsp05Vn}*^J)qNhXRLiAow%c)>%5KQD44d8A`Ng;1c>T4nfBnsO z-`t%(+U@GqqiZjG@B;@opC}~#m4lcEI(zinf8Y}zdG$M@ggTWdoWha_pZ%wweBxuz z-FWI*)mk+(i!hm*w>C&P8OUL|K)sH+YJM?i^O&1k3;c6&L^p1hRt?IKU2(5*T2H2A~2UGPjusxmGg->H@J90&|e!v>D2A z9|px??6vAJ`Zn7pmj~zh?YnyR?t{;M>h5p+#_HZZE_t4eSx?q= zA_r4#{?WFWF&cmZ5IBJ` z&U)DQ=etRSONyI-6@1-WJ({K4$JgHf{P7DfUjV^CKEHY8gYz4=BrdHYnOm+@l^Kzm zH<3VlW~Hh#i@LTCHHmP;bdX38YK9ky5HYYjwZ1R9XRd1tuFFfnIpZM?ozwdnQ zt53w#b6Aye9+-jzx1s8;GHWr>>2?J{=$EZYZ0L<-tsgR&H+;fdi&xT9x;IGa@wvkD1*NmIRSdw{{4B*zWix<}`|S0C5tPXuNMzgMHy z;M;tOob7I1UzFOka5R8Yt)C#*O(|dgjj#UH-~XS!@cGYw>YsmdEY(bi$u!@&ee>BD zp7`LaABzmD#;J30+l`xHb9nVyJLY2$H{bIWD7e-3;okGXqlAv73u)Hn#iDLE(`Geo z*W*gv5c>s7jPr%`k^0CYE(A9TLM&#!Cm%Ly0tgK&XzpfUb2xNwzp$d3t5>tEuIgE# zCf@K5EWyp#00*hV5J2oqWMqy0K<&b3?8&3*#ArQ30ejA^Ll)$!2e;opPUpu9oY7_5SzUQkAmR77DGF76j&mLR#>u-TG?}R1%2Ul#){@n>C67~?rg2yEq$Qhs$y2VT zxlChj<$7ARp+nUa@RS5Vh#`b#ANRiNi`H3+aJI1q3DrZ_1v3l8>XWNEKpf2KI8_3r z`K({e2{aDlyqitANTkK#a&JQc<_C+Ks}`pe+_kC_DNCrPB8}gr4in*CI&~9~s!H$_ zMI_Wx+R>I`ELDZY-NJKEKR%3g9Cj>Zsw{y)dCZbvyY1IL_o-R7xl=+uDo9{1ZZ*dg zx}znSx7(rAT!;d}hRIF0CoAvz!+MksNBtastvFxNHa9{y=I6|9(wyX+LeX4|}}!dzEkPMhVQ;2;M?LB+v|! 
zY&-q=zwqyV>r0i<&zHB&s z5R`^M`S> zh0Ti94m;+WVq6MMlbN!G*~nc#ATVpKV#J>MS<>yUn31r-2_74i3(#8UNs3icBIl~D zkH#Y?SA-xs>bg#Q93 zcqNbLTCA+Lb#;-?M?0VDs_JIbGfM@AtYfb0i@YA;H6`LuRm=U03vwIb!=zvYh7lPz zZIjz>i`5Cx7G{PSj6t1qKoFoaMq|@kEpUY?l#B$NKmva68Da3fWKmI?A55{)!p$m9 zBtnrxHB9U6qVIa4k)jtq`NBJ!*Y2Jk3SB$0XYSPFR7iGgkva%$bib?ZAbCcut+r&#cesdsMt?Jb@*IH|ZL$%bR zCD)S2-F7JBG>+r2-i^Cq*zJbhb{fZ0$K7_D%j8~myIsxMK$i1*QmvKLLI^SNE@xL| zBCCa@Im>Oz;skS+7)7Y6R<(w5aR-!$ zi2_rG!T6dVVI_?AwF~C;98{R zY#H3|-#saLzPNI@cH!=S2Fi)Ry$ zfgxRrT`;9-7|pnwb*b;Cp=8xPmD|udjX_{yXW173?fs0G$kWTjL=%UY$*^A^(6^`6 zmV3gtN!vu^dxSaQrDOt6y89ph+rRVX+wbg(mZ@C7b?cd@pMCnd=WkxS7KPt<^|&?+sIV z^bdayFo~S{xps-1&#R|-SF}(qwi`<^SEzX;FYIopr66!FlQPX`@nV}-Q!Z{wlmcgm zM5;xFh~d)eZ%OvE5nC2P~kyjPN=(=B;c`Bo%@^}D3#prhF2vZ!lu{Q`KZ_|O>C&w0x)ghswv=q8S?1)fx`)LQ z6A3vfI}tmPHDD|ef-tN#oTxP+SL{;KHTCwhzUSw+?vy6~I@5dgFmP%gFJWz&p&Fan2pLCpGN_r8WG5oE8bfd=EwYISaX3OQf~u!q2c6hahJ-3MQTz*PkOJYHX>czk%H4;Tku z*1B`88P!ftPS1v25cGYxa`m`N-P6y%bo1Gd{Q5upsW%^-J$3tK=DXvAx1-Eof9p|i zc=XNJ|LNcSOK*Mo3$J|Qw?6&;k6yX`WH+CCZ01UBO>aVQ*M{G7wT2!vcM_pl*U!7I zEWFk-4t2`4s^NLCoGiBoTx4x$eQ>C^eA`)h1B4IZN6A}Bj z?*Ov*%5eV|OM-Su0hiT3?J}sfHt4=PD9nE5@BQ7s^*8Gxx(b+p?yHT+igaQ}OLoOe9<>j`8r55MfPyKCWvaeNXM-?$iB62cg zcjXw(h?ttV7&wHkn_W517gwjt&WG z5J=ogwc6A~gh0{7Lv!?#8q~;~z^s58gIqmSt7E3()J$2fu8yiTS)C@$#dY6F0HB># z?Yw0;$hoy55nLRRPzzHA72Tb6x_jaA#9@AL{rchh+0Xpc-&e=s!JMXxwN?t~Dgyhc z)s!Mp7pVtBh`=cZnkMt2fVrx=8BMyMs2SD?lOY?RUZ4&XK%A1WRU3xtkSJv8X0Apc zk06jSITMmdnI>iu4>R5Qb|n;82s_!;Oy&$T0R>Jem8z<0rjzS>QUwO5yt6TYVt`v{ zu@iFvb<9!8#m(Ao+d!A&+rC)Gv<>_FaUKOIFapL}^fHr?h;83VOGB-t%Wu}@u0i>_PD z(xUHE>Qa;x0#l0(5D-ZiOu{h;MInOkuLf}Q{n0+Txc~IG<$;?+_SXO$#0Jw=fU&Q; zzRVo8eHqlfy*!A>-CAA1zQxqRAR}jD6A_Mqkc5a)t@);js1d5!OonC4*`h`#aPk-xYf8rmlrtQg) z@4xZ(Z~W7rd+eoGU;6MzpZVy=pLp@*!`nAP-%%7YgBckEAXo4uMx{;PZ5on*MpTL& zmF%@y;XO||&gR?gdVX*vgk-T<6Gd93b2~ws(%pXCtQiJZb@jF=CT?+kXJ(0kEwA$S_-njRr*FOC0`({Un zSFc|Q&puW1bEW9CU8&5xXbAk+(@)M9hfYS^KErS)xHczCd#w<)A|imAs=hC6;3y(& z87yQ~FbcX57aEscKOX4u)x6qH7n^C^4VzurjJt7XStx8^)%KpS~ z7y<)u(P9cA2dNdRYJHSey3AnJYLlD0l5;>Gf>A7jTnyDq$wU+!fn1)&AZiQO@Id*^a<(6?u`zzpW9qt+S6#8it0go6+POGb*2 z;eaP_fla6_p9OeAP#A#=DuY8K!J;KV)XrD4)%JPsKla??Z-v9RV|hJ3ygN=Sw$LY% zKuqIU$EjYN4J{W94&=hpA}GfDo)i}mFr^e4%NcIM;v_M2EFCeYu1hKPv%Z^k6jJJY ziqdy!K3jy?r7-K`EQT%y5fO?ZL>3W=QKZep90N;W5|FS6GlPhUIEb)gA2JdELe6k& zX?kMpW6uo{L*zuJx)0TutGTMXD-4BT?$&zko8h|sj4E+!es|*LLus>J+l11-S&hgX zEaKoQB#8xx2ec3(Vj*Gy)i1w(`tbaS6sUu#IRz&H87y5=8A_Y(fSu*x!J~uO z!4l!lak{k#ca8s(KlA4vzcF7K-d#TO0|HT(S_Q9t= z_>reye)agV$I@~^t&+lwjl8{Z0r(!NLM%k?5>xM1RcqFwj7E1YRlS>cEJP4;5^7&? 
z2Ql{NKHS|5ZVKBU@3t>uZcYntNjb}Tuky60Vgn}A{vH4tw`{L!f{0)&F(mnwPyWJ} zUi~KQOy?9Wnay`1Ii^J9S-8md0J_ zW8ZqA_u|#wH|b5Ybl3M@9W7+rG8L_Nh)H0;1Xp*4nRyDtix~R2Sat_Tx*MkTHt)81 zvz?}Kx8AJ2{>CunIPc=Ya&i55dHeeO%29uGDDxRd<}RtKs*y-bdQlLDP*k`zz#H9=+f4xCZnd+5#( zE0hsi^1Hws5byzbf=%Z4-W>JKZ}B|cTJ(=S`@-gh$A^Q1`@7-(ZmVRwY085`q~>aH zHAgWpK&Uo#T?g2dE3=Dm6ltPYow63KQ!e9Fs;YWBib=%Xi#mjI$r5Nb@4=ixA{U{) z>t@6p`jk@OfEYVM?7EIgLh54|ByvbG#1I?dN@5BrNlYPzD8fO6gM=74gocKd5L*X~ z5EHSGNP}G)8C}8)*;|OV6&wltvO<6fZnRf)TP(-T97eS{tpRTas#Yg?xg{}p2z&XI z_C@i&_ntUdgoIfn2!kaZ9vBdq)O~M9K`VvvrfSqZyaig+&8P+IgiL(`*o-bNF0z8) zym!UGOwF|m^4I>0zw{6P^Z)c2N%j2VtWF(7-~m2^_W(gEyzZB5lz|=`WHz9Nl8zw+|?Kk-{${*Lc{;>Gt}y>+Xb z&sg>~2;@flfu?;+8{COnSc$b{t;S5daeC+OTQ9!yexYD+rpv|e<%xZ%?>8@yzekg3 zzZl>xVB#$bt9JP>?dbVzt+pkP+24>N5{1boVkNna<`pJXE^7!hlHy=J6 zPA;xY_RPWT!#A%U2KDR5ANs+6>HRp@;kxt?W`~0@KWJsxq4PA>eW!xi?TaaMNv#s-Ay)(Wf<}>?pB*= zGY;F$IBbX2MJ^*n%�R9=m;X=ZXI6)&A%pEf?I03$?1>S~8JXsopqW2G?o~f)D}1 z?p{j8RCBE;YFZl%4u-(Y-2T>-4DQuTwW^twu|_WC;v}EnOXGRmta%)RIa}p%x4w7Z zt4)K~p@@NXnkP9sxw!uL)fc|+JMX`B_v8z2ro(x^xKe{g(ep=VkKI}B-g>ybcX4!l zcy@MiyzJk-d(jbgLt&Q65F!KBYRZfd&A5SloP$=s{H=0xUMZX%PNu3Lh!|kZ6oA;0 zpYUML>ZUG4?xvu6nN2Y^y95wVf-no&>F_WeE&8&q2ZwzO!LvSm>(Makb{wmF2yAZV ztnQ{Bg!);hRkPZZOA>)OiL~dey45zQKnOIUwDPC`5L!iT`^+d`&JJP!s@8-cXy$z; z<`hDOnJR&bTC4B9B(FG#)op?{hnE8tCZGvcfeK#UIeOs-zmH%2j@9*B!}8$X`TCvv zCzNIe+wO+R%62_iDcjv{y{unL|iXL?m?~ z#MpHyrI5OiID{lA#SlV_ftgy*F|mZeEHS3oc%D&GNFj0Hc5M*RUR!Fk&8EOG3k+)B z?ABI0S?D^Cg+?TYa4gm&NgB5N353;#MT-muHfinM2Ksnj4v^w4Ile zo4YhkHU?%|t=741zzA?9C%{91*ZQCS*q{34zxG$2pkU)zO06J<2Y3&3@b*|1)CkPL z1?V6ZB(X!^mHBKszWQrl`LZG}5eV+!in;*xfPg4)0gsMJON9~B{Am8v3wJ*99Up(; z!{7DvOCP@a^b^Z#*W)4)$F?m54CZiqZ!gx816s=Y`TE_5_df9c_ebuOl$ohvv-kJk z?KmNym=Ewf; z=lAb-w3tj|^zm%qi|uzG9sc0q!EDh@9i5E3!^a*!eD&4&b1yEQc=^gx&mG>niB9gl z`PR3-`TEzs@y&x-|I`alzw(g}UcYvOLa6s};pmbeeR^I_9z+@-XVOE9=#;T~C|UxEr>cHByHd4{twq_~a9ZkKgXE zUrR>^)CURzSS{MbGb z@HED+sR9P71czpj*WEV7;3dOcD7YzwPOAzBQt#%y#7HTGAS7Yho$7Wi`Sj||#p&JC zaP=z9CJB7+)34h`^J*tlZJe0+-LK!9rs73SktfZya%3{^h)FHP$Us)Tdc4?8dAFVn z%`8cim*%nKeFDJSi_w7qDwiJzf?Zw-O^av;2Y1AMjBcilUjhe^VGM3Cg90}o!wNKk zBdG3w=y4XqlXsr|ksmnx?Y}L(_*_{WZ$~-bO{eF(N2~SO>1wKZ+~)OmGffj!%fmPg zgVx+0g*s)+g{#48nVi*ChdefdnF$k-x@xg$7_x}R7~mlCQq`T9#MC)RN}+0HCT1a! 
z!L1kxnQ41Vv~p0|YrgD23sJU=9!SiU93s-@K9;~ym?^~Ad?Xy3$P|c!#4f~`Vv3Sd zObw_TQVdao2n7-jtxh0_gw&@Z)f&Ua%uM&HHCbDr?w z@(X|eC$8)?m^G1+fCOs#4}%#VQD;C04DbLp00=ChMm|y7lxczq%V^4=0CJeX`Zk_} zjb(Be(P%!PHe<+4@c1*gU;X%pUiq%?d*=NgdF;6tj&9xxvo3_@e{wI?>%Mja?pp1Q zZ+=VW>B%Ra3M6gJmmu%a4d~mA&CMFHA8fw8-9_6B?Dx9xw$*HV;5|p;QXpztQu}3^ zGQ`sL#q*!}5B}qS{!@SNJ3jd8)#D%d+&4e@`fI3QsS$+5I+b=!+*v0PR z-G>*e!sEewoFDu3C!YVlPu%&Q51*f{7SBF&_35WJ?>_qVU;m~1j~>it2OoU()#pF_ zzOG+zz@@Xnskxe5-S((XH*G@Xp2wpVlTkIDN{jNk8cbo%s#Xee)?9roWg0#2%6e1A z-FSXBZO85TTB{D5&3ZQ&BOWa0w{Km2@rC8BTZ`*gXGhC0O9;?roH8#(OP#cuYGrQW zY)L5rNL|mui2EAVwtgYNty--%PH>Qu8wqJGISP|`h@vIuaSEQL3^Hzn!Ck0h9v+;Q zdvDjXlW}vBPtR?$tJ^aWvJp?}(~hQ{M96Gjot*n{c;(K=zjJo`eVi8P7u~h19wMa# z5`-qXf}+5Mg__@sxE+V?rX+BK@0DQ&liOaT=B6pSI?QU_tGaEFAaSLAe9!mYBM>Ha zxCK@o*N=Ywr*>cX{I&1?$o>0oA0E)^o%>-_UOqA0y*EGVE$_nexNNrU*u3-L?47eR z>a>~0ZJvf42wjYM(hyh`Bv>w%(^SWCDlh@}5{3Dqj0|${1Z^xxg}0s^L;LV+0y|ur z`iaKyBZdo@fdF(6CrToddo^o%y*pKNgH@QJZ5~K~38t_Sltsdws~`9`e(1*ceSiP_ z2fHiBG@s{e=R4i*#?5ZpO_Mok@sew^-~d)zFT9!MYPC)~bDgZ_Y09NcyVY3okjto= zcf)4AUV~hiYOQ8A4%^*k&}!3eVCGWtGz=mfQm563sg~M$QNYX+ID}dX5ycRQgPF51 zoFouhf3LZh%z#x>byHV&xP?IKj;1dYu|Um5#GHbVd#k8scalh8mk2O(5D6)!F7=6G zKVK|TnlEOHF7#dMqXdu$v4jvsLW+$N!9kcg#Mp%{NQfZ{vxw{u|2D=t5nsm1c>fZ- zEO=;}d{;NM_jZ@Y)cy$j=qxA&b89D=n^Id+!&zjSruEqwl#>9~6n{5(h8su#hDSJo zD&VjQUf~KCWYR1eZukAB$Qr6v=iqU#wmUc84B#*~7m&FhAS4JO8c@{V{QAS!zy6Qy zZ~x<~cjoW^;Ir@h$j6_4@xynXedV#IpI_d%8fOa=C-s`M7Y%(^%XIzf&A{9&op91d z@(1AB&UJ2#W4!mQy%d7>2mZHJBKONhgXKED=nWpeem7SyZ~Mm3FJ1La>)+^#_-GU-22K;fBNoN zZanro{>cCJ*^hqshhO~HH=E7FZ{2_W%isL^*MI8D(Us?3dj8gvw_@x(a8rVa#Gtg- zs01MQ>S*`411@AF)LIl^v?qKZu!(EpT79ZKMxPS*T^Q$SHZPmiY&M@Z>x1R|rs`(B z-fV}HFMZ|Y^{??_wzzuz>T^#VKXGS%>sEiT42xd+6#HZ}ucc_wS|=SFX7R$R5i=o3 z3e16{NMMOEbs=k|ZWV3T7>1VU!t8#QLT)CO9Kfy;wH0wwLBpP{buRfN279C#qS5T$4 zX{pxe)+E#tOI|C88RG6_vQLeWb92@2zYUBW+J*}U(LUPZaA3b`B1+zZab3v6@cO^_ zTK(LYpZXo&_uR*S@A~Ke_JQqIZ=F5=uYDBH-udd!{*%Sc2~Q zAAaqP@$SXhTlZyBp?(;~)%n(3XCaSCM*Q{AjLrIc|P@;H{7$6>0fS}jN|!(`b_Gn^|I z7;zvolfY(9uo#3zByb!5s@1?yGZI!+;-=0znL^vDsInvgyWP&ceLM89LUT=V3-pjL~)RGDMX1Wu*AMw3a1cLj9rY(9jDmE*mp6dFzfpn$G!_O z#=tSf7-EVkC6Sht5)L9HAThw))S=pTD#FXYYLR*M9DchgWA$J$L8DkAD1>kACc_4}9#gr=K}GJVC)0qm%1DFa?s`50KfUaYWDzOdubj3 zlM%d>NAKQ0d*}YyTW@^hxi{$IoBjFS+o6B__D!6epQ+fDZavli&(}V6E5@yA35kZ` zSnD#M`X>JWJ~e#$Y*(MW&VT31AAa_a&u%_(5Du1y%jaKxVH$?BchBE??Hk|x%onab zc6{TRJI9aRlsIE)a5i-#t9nUa0J*ugsWXr{xq}U*svD6j0VEU{iMfw7Pox@3wLIBo zDBHvCVwG11Hcr!e>yG(ixhxrh*5|9$SKl~)?aOfy(~WD_pMPfY*yD${@64}W?+%x- zi(MZ<`oxP_ns-<%+~B1)2f7=1DXO(Ig%zj< zy~19hx#nS;OH5J1j1FdebdA}fNT+_(k~}1kj;Imlr8-F2s=B$jd+X*lBXuLRUPCv% zjCD6M-aY}?xKVBA2DirHXft&CmO;z4nGyC`q|4*o zYIXl#oc@c?eeuTaCx76#-ua$S96j~a;>PvWI6Qc8BHSzXjhSAxMKqeSNg#7$=Jsq4 zOzIFJ^=3y#;K;&_$O{L|!{LEjn_`Wbti6&6wJ1@8QEP=+wUX;FO}ou@bFmwSakts* zw!5he<8C)j>tQ#Vmpn~*Dy8I_3qZob-Sd<+vZ?{lX73`@DN6{dwY~9DH=oa!K;v$B zi0N`FZm+>`#)X0dKi9-kk53y6H z5IbR^5MoLprqp#YbRh z*FXF3|2zMu&6mGw4;J0+Cm%n$c_qXdgx%Cty+wZ->BpVSyePav4ca{I&2R~@ z6Jsw0_+{mbs^@Ck$u?s-Tj}|Bx86t>Zh- zE+4ydc>88|btwlu1yVJr+N9%Vja+h`P|bC(sx`(IhafSf!vpDNA;wy4A3-o>hN;48 z$&+g(RUu<_)QQRnS7YNYQUpS90SPzPl#{p-oXLresS-on4C-~tP4;y-3GaOu*lRjX z&}k1mHwC;Y=(wbQXi;WdvZb8dpz5x~?CL^POGRU>vH-%&U=Z)?(pqr@D!7waR;yq6 zz6+B)OWo2txvu7^7p+e@Gt+pUnIry%9VFtduLpih%jD^ z<7V2dvcVHrAP$XSg<+p2BL)acpwKd6L;`lN0(MZvrL_cP-~K$fVIRqL5Wy7K!rHx3 z!2lINNTioj=XGQ}cJt`FKJeuC{MN@m{Go9C*!gaEy4q9=0K;s?A)5vwcNZo%Zw*;+ zXCl?wKvLBd1~nr(=M&9g(~d^w~sn;8kaLqte8NpOcW?i*2CsspZS zRkfBfjd_~#ZZmAw+bNfEn6~RlwUnt$Q?5ScJmjhDwxfBScH5eVaU4~_BxRb^vyR#M zJ=NI>=1@4&e9_J3%cH|-9L+0{*F2#m0|-RWVjS4iN!ZPZCB;-9fn1XgKmJMex0ZQBq 
zZ5;rhIos;qy#RwJ5CLtMRrmLY6`&U!9WCa!AAjN1kAC&tdqb`t|Nig)fgk?epZ=9! z|LPY$b@!WZq!cL5L=JxN_x#S^|A+rjzeoga(9rh%yMO86a_Ya7dVUk{!GH4w;J|)h z^8JUVNrRV|38Lx`zWMO4{}+Gxm;TvLoIH3ak-q=MkN?sC{r_WS5B|zu{ISn|@hga7 zD0y<85qEt!4xxil1C;BsV53?)`#T*W{Xz1NHEt%kjXVqm%&3+7v`q>VXnz6h#Xh`OeCAE*+)w%KD!_YE zN?Q>)!`RJK_tSv6s+3?|O{q4?p>H_dfTfw?F&UQc9KC zY8b8@E;cr#C>@7+<*627yM6si2;6n);r+Mgx0a*1r+EA0&m8`??}|KxbX1GaXUn>9 z47<3H^*7(W`>S8P{^YaUlT+Pk-C8-@l&x+aTs(O5RI_HDxdF$k~NXInowKS?j@l15#)B~x~h5eVU}W5E3>rrA}zI4ixOCx zd_hgJH;07K9%|JacG8)}t-baTW0W8Og%AkD!q#j$Oto6u(C$_KQmnp*pQrZM3nF)^ zR@Dt&bG2zGc`B=up_XZPKJ7*uwq@8}^z3ZCo!|usZQ6JrSunpxgzX;1QI7rYW@=Du+iP!keF= zf(qP0J&-^K-$E$HjdSk8tKa#tfAin@KmO{ke&!$i)gNCR9lZ4Z50+vVXPY}uKk;w> z`+w}^_rJjX{!o7}4b~RDbUFNUt8H!P*5zy9J$dNcmQureI|5uP!3}pn5ZmGxfBqN# zt3UN)U-;~&Lx{isM}GGo`nUeqUz0nab)!}>8^Oqq!n(=>TVo7MmH}Ya1b*%i8;tfRlOQ1 zytqy#AZl3>b63?`p>>ZxH3K<0+NIdyCRPipwO;uB6P-OoaS9L?ax!H%GUpiFm7t{* zkf5kf*|WlELNzF}K?{-5cI4`|UhCQU_|BV)MSSDaUp~2ees7gMb%*mb`ULajoX6UQ zAVjz4hs2awSG#R3WqB}92VEZ#j?*YQ?WWmsK0CUy*{+te9(l@}v4dpTc(I*#vtd2< zp|4qKBDm!3*l{4h%%+^RXeErMS*yVVvzao;VFBX5$ve#;0D-B(qXZ)eP^+@j=2ZZN28cm+pbg_pbexCdk3Rd#5C6-zzvBn^@W}z#?S|c08xbG| zF*uV)P*vS8@=VEzgvhB@bw{fjGk2}#s^$=8bJ~|uw1b-2&6!&-DZtJo4HdWl1)R*B zna!QRwY4BQtX5DPuEE01Iun9`*<5Q?CyEj;4T1;}EPbZ_NrY?1J7e>^&D*??ZM9kkd)u;xn zR_CU5JK4PJ#`VrFiTY8vxVz~D;>!hU-K!)7m|Uiq+Vng@Gtm5J|) zW#2Eu7$ldDegC#(=?N`LVl!}tU8ht6UpqQ@V7hwn&ZmF*mwxAu{I2oQw~h|3|Diwm zN1u4?aV~MZz#DIW`Sju0;^tB8V<3lDHzN>H>&4pN2evPF$G+lf|0bZzBX|EB+Mg8I zyK&nFu?>@CH7?Wl|E?c=`p#2-?mzl}-MjlGmhp|Rede*}U;3ec^A8^$-}=HAUt4YN z-FtX;wi(7+89W2eyMFoD(feNfz<2+?A6+btu0DRNKbW=Uyqhvht2S_Vb87-K0bv#o zy5pl8SF}8{d$hi|`|#a2zeT182aBVtS7X-)A!mWrss-8HN!kSidLt)8uPwYAM;+cBM=&yL2$@v2zetuHngtBYUx^7^yi zTwFc6{?hYDFFtql%yS2Kp6rg7q3>|mGgB?q%;!@ccUv2Wx*N)FtF@Bu@|5#toEz+wo|@)R(Cn5vdAdBDFeaE zX4Nl}?RH%s&%XJV?KU)J?sReE=qRLjUw`NNl|y*#LLhfkN6jG!K##I!uSy^!iIP2W zj$<}imG#9&i)vR-{LFJ%1MEKQC9`Z++rxv|b{YsWQqH*u1#=G|6P~3Gj792I5WB9D zRd_A78%MaCYE?B@g(?hyfMQF)x=Y(QfhlqfY(*o4fi8AdY9Nj==BmMzh=ft$y;6ZX zfK{njPn!%9WG`u&EwNo2|-nf z!Qhlovu5>LsaDO?Fb!qBo7NXaZQ8C@<2Y=#tKFE#ycvg~+F0^5=0Qu9m>h1-)s$J# zMHMcJ*e?)e7*BUY?dM#|q*c55Jj~~%WZ_`Z>E!g(AOL7D_JLqs^uO_!{_5ZSfB*TE z=@7Vhcvfn5cqAfWagv=?3DfVOqbd-3yAmQKhi#9 z;<~3$?P*y0ZIcc)b;#fWP#{8Psk5W`?Ap~|{@OPcP>K zdBry#JjkP8f9k^@`PheEdilA-;|>Y#U^8_8o`XzELUuctw*~U%6ePY z+p^i#;X*Gia^8&R7e#eiU1X-wOLufMyZ!i$S3j_P{<)*4o=8VaT0|mfb0iehaVVQ@ z%@cCYyKHU}LhNVV;$U`sz$v)24J|d-rPs!$&R!wR4MX)-z(B(0rlan+!PIteN@no=tCh&WoY$M-!9~sE^4460 z*WY@$d-J@T_o?qjs?{fqlT&3f+ikm^y=WfC&FLBS77q@q(p%qnaCO!xCUdI_8x4wC zNamCsLp5##c^(hu3ljHgTuU%&*Ez9^JDHLr5|O!h%bt>J^RcitYJt0(_8}J4TH5nB;4qTqfe!27m7$U*ltTw++11u7=I9;;PAuK|~6^iOp&Tf?YzI9#* zvsZ0Ov4I54?h4zhQ||5NDQexK)7(P5oK0)dz~pM|$VFYEDdzJCb9ZtWL^!gVha?0* zXf~Ut5K|0HlA;8W#Vk~HHyv}$wQ`iI4)$rtM%Zm9H_Fp^u^Ef!_4(kiJnH&Sw$oJU(dmi0v_=7Sch|ragZ;%n{}=wj-}vj(qa9n3 zY|zR8lUEo!L$yisb7H&HOd7E^08JANmp|!11=W3>gsRNbqoZZ7{s?mep2q*8x4@4>dbe*3AH z-}iyv@`>;HuBTpo-_57499)ed!o9_~_ay~;FYNwJMQAwab_(qWalH4>B_OD+pUC#} zV>?C-24;3OX0N%?x&X5>W+o7Ej0`XZMzd|wUXnIz50QpIy!?7GSu5XdO8%a00p`ot zbF-i*rrqxJt$P>u@8#85AJf6X;bML$F)1supvCM)6&NSoO*CrRtn2!so}HKT(|UR~?8e>6MXojun_7LS6Z*MdnO}eZD_356 z;o#XjN4IaLqXUGv-$kg^vDkJ~c2gdAR*F><4yo_cybp)Vut=bA8T`A{=3RZNuwqk2 zRm(*+n--Xg7&A+}#0qYA)7jRx0|-IHrwSr(RxWeZ3OJdt2*bQ+vEl};>c#f97dXLK z8=$!Y>Tc3W*^ME^G7Nbv^P9^gi}AsBS`7#DZnIm}VN3_?Hs*3E>;r}Z!uRSgs(g=%nwJSim+k(wt=Qw;27Vg+DgQmZow3D~V?b~Vo$ znG=brxp`vF)fr|Wj#NrvI18(pgD0y3X@aW31x!E^a`nIh7BHz=+M>(aG@FrU}Ws$@O5dTTg)Fm19bY|r(~cF6$G@DVz}Dp&!44K@K8Jfe=Uc(}ar 
zTR;BPkNzk>|B>7uj9PNeUX2?JO9<6eO$hr&S2K63MMXNzN1IYZ&ctdUqH4yV?Xa75y@WJXt))s3ahO(9P>h`f7nToA20A+}%r+z*8*|9ElDgqt&?gxdMCZPFsw@ z-#a0(cO|!tQ=6EXzzzYmSXlxRfgAz~@UmYiBY?3FACm4}Xk_pVD0oCUrsQ6(FAsK6 z1};DJ2mj!2`_UhL@ug?p`sO=7{^LL1Q(QfI=j)$-{d{vVFZ$MY-AONLez;s7-@Nnk zN5AU>-~I6yKJwu^&px%hwv@;qptUr-w;OEN!rlk5e{=pXtwHT!dg)=>CuSD5S`)zMaZ$tD9|JZ&9*Hiix!-2?OTDXv**Y-WF8XT4C<(0&o+B*J7r5+(?wb zT%5vePSbAM6$@A#Etn2Lec&BnkG}EsevaMA*|a*3R@d*`J-C+2_U!cG!>3<*l0@!) z>Z{9x>$8I-K^+wWRP)I}vFysYDgB~za?K<1NJUYL8@YPT1SL080B(`My8l42HFpvP z;@WBs-Kp&_U28Q38L^pFhXPEjjY(DQhjc z)GE%^DohDx>g`+6%!WMmv1f!fNC*pjpXG2SGi@v(77om2K8+K|ghkEa%^jq0s5zL} z#R1hClWezxh=@ovvP<@i)=ESnM256_M{fdxX^_B7?o`ywodN|8)*kDyk`-o7B&lOC z5ke3xZeGV+IFhQ1h!eSz1kO2&P}^iHkqA_sT5YQlF{!zPP|aJ7nyPZUf_M)ljFLj? zBoMcZ2tkO9C~*j>U(9XF$%4^K~3*;4&5!6W# z+fyeE5Dvwv@1q0cErt&>&;S=8f@Y8ae?Wl0J2#HrT(7o~4<3K&$NsCo@Wiu^|Brv? z?|$~vpZQhHaM!cXKi~qJ0HrD-vjl`}Y7V zm*UdCn*u~`ibkF_Yb&hw#Av_B-iOuxjrRm8dhc#(_lbK8(J;Zm&Z?N!yNkQ`FWz}~ zxH$7V&EtG|a5N7+wJHNsuS(jmCHtKD-VfM1yd)4u2$QfeC;$Nn1OYKMsT9@V&So@L z+Ya{VVtTYHXBYMCbbJ5q_VoPp^kH|{S5Ha>M~aNuAV)f9w_e{`31o|=svZ`<6Zib;l}I39)N5vqzR zA&{CDTXrH60Hxfjx;evA(;;WGOk^NqVJ;kc+qPS~?bb%Sx#3N&9rb8L1d0Mcb7$I> z_t3K&fgQ>qcQdD~L}ZqlcCOa&_N-GQSITX(o3p2t$v7;nh2=pVZ9dxyP|w}1m(OwA z1?G_EMA=iDr;XI4JM*0@TLgj&CDkqiBuWw)q`?j3A(FT=Q;Z&jSpbJAG=q#wgIn4n z))J)U;*>L32nJ*qA!H3gtPVk%cf^v+L}HUVB32Lq+1-%K!Z3*4J@k6VE;;K9Ic2HFe;eJAAb7$gELa52xoU^I~5#pqlQ|IQ) z{mf3pX4;c&#O(crPk}Z!ck3#MNbYw#yi3YN>MC6FeJQ6@+B}qNhV@-_Qc^m{%m}@GKPR`6?&VfUyLf3XqCPB=|+_NT*!AynO!3ENjAlX5} zrly`nddr8lOF^iF-53Dk7`g2u3K-d|YCs0vYpQx8RS4c6Z62biDzOBji{~!C_R6_C zoA++t+`w>faP5E)IaBVsW;)&3+?b^VCc#q=PA_|gfWR6I$w&RGuqs}xBc~d8!x=_(y#t%H5eX! z`1o7j^{%&m@EgxO^}fTWj#f)!u8El(#lmdSU>7L8ypU4k_&-01%i(M&-hpLB)OyJ% znHw+IuU+4|a{2o8>tn=0y;RqA9f|rxHgYvb>3R0|{K3u?0>B7?CAbho5!uC= zU@;(yyF0TBV-$H@$=Y~)`gq>n^PR5R*qq$m99_BAZQklOwmdb19EMG5FI=46xwZ4` zSL&n32WK8#IsN$f*s*$jIgBI5LW3|G*Ll{`=5Ctq<-MKUOv6qsOXIk_M1zWi7ef-% z)E%Cci71d!;2`A68So5E=H7W`j$x^;#*^WZW8KzvdwX+s^JaT@i<8BQtEEv0WQ?SW zY!n2529SdzkwXOr*SQT3tmK*8c=fefy%cU#L!;TspYc+I0KI?UjR* zZf_PEovf~e+~lqqATp5&`MeVu9erdaH7%>NrJTArj6O}`mb=tU=Q)GSgQ82C%%<6` zbQ(kicFhFpQl`M>&z;Uv%6v6_{)uou;1Y@W?_aWV+v}f zS)KaizCzeCP|{M|yd({jtTGVBqpEE?As{>0V-?EmuGwP<#MB?LYB^`P7F|fF*^GPd z`IK2Ah@6X57(u8Su!NwMl5q6Ub`48#f`F6hpsJaf1XUHRwBX`|D`6oa5sZabIhiX98=|>-0U0xo=iX$WE?ai$_w{D+({sp*M(~b=zU<7XA z$t|HDnY9B=^n?K!IF!iOLS z<{*huQJ5#qknE-;)zT;~t(j80$MET3|VE z0(J`7O|oZJdI z;;!yS3$upeUJ!y1gi_Mow!3w2cIkTFxo5lE5+w{7C5E<&M(*q-W*`JMbx6?ZOjDbt z4T;r<4s|zfM$7eE9!}S8z5Z)|aN_aPvJ_@FuhD#OFsN;JDh6%4c5^$;n%ao6ix(@L zY!GI+i@Ot?oGJ-KPV6aXlTeC%HaAd1VjjSxrVxQt5=4cm4ptF`z)?aStKqO-Uk<}1 z9t@DdM5O8&)^y&@C?%WE$%{o`wyrfx+LhEuGg8fqTwgU+^_)_tmNaX!E?G9$ytu(l zK(a7stQH**_bX%6K{LDs1f(!QbQDg)i-SOhGpJh-0}a?Adq`H%Q{VSZYj1zNxp!+? 
z9UQ&m1NqR&xg(i*MsejUEPj{GGzShPC&sda5ktj9#elHYPB@hC(=y+}Jp?ZH%w-wU z7H3tjWH-xj?Yg#0qhX!1xseEEHL$CD43)c!1l442sZGVCt*Rq2bzOFcYbN5rVs2nk zcP+xx5STb=iTdEKYl<3yh`Q7b2jjA6Wae%v5{l1HcC&0wEJ8VD62T%%kph{!tC5og zNhuL?le#JfkeF$#BD1(#)1^UGH?1u%kD$IcZPpL2?(MX6{pKxqAwd*2vk>^z7hnF{ zf9rp`^y;~#(dgmRXO`DjkUQ&|fZ|zU^Lf+kv{$ZO-n?@sbUGr05}J{7Lh(!N0UzoO z$`%JB03a+hKLJ#MB2WV(P)!nvBTE%{RMkz}vMDn&OTnGZDXS&bB2byPDOR#IZz#%6 zO1sD_%WHSKnTGh^{`LR%(Wl=1$)En2*PefN>*|e=6hJ^8g3HobnM+zC79j#94PL*+8O_mQGKmG7~-}~76pE>>RcOH7^_+Sz^f(UMa>-`$gUsLy$LWO2TFL+v% zfkG_Uz8&d85k-AceL4LXmBf0scfmd1pECA27VHnMi+?AWTgf~kg7(fg1*-xPGm#s4 zkmFGLTnKH5EWKgORVjwYc2V6phrkn5X+;FsRDjg1934C9P&EPv zgvvw3b@ahN>VXU@8V^u0hngcJFa;qg)tA76$PjoCmlCmFl-ks<1j1pCtYk*&$jLO5 zDRL%LQ!Np>uI`#Gt9G4sozCXkw%u$`n|a=w<=I^4Go8MW{>>k)j!v$8`d82X_Gj1L z{>12+59{$K_F|o~5C(Hgl%rLN*$RO&HI-C@-io|T@YPsy6hZr?0 z31xT9$=pMX62u%17pM8Wfk3rn*=t=5Obtxt=2;zJ31xLvRS67-lQTIb0^qPn?x)1?rj8`XmfM)R&+SzT%NW_0!DEiYt{dyJ9TKJl}^_)mZ0 z$0r<4o;o?G>dmbkOS$c4*37%q4MG?WtI2rT+x*;1FR*$I9(D+x+%-X)o=57LVP8PX zi$4^5#2*n9fdC(ZBQStOPynJxCDL`N(8k3?XtBdf%wUNSq-BykKB<#a zj`%_em!h)&D2ri(dWBM7Y)V{kLGGQb>^1eO-r^C+MNH&<&qG9DFi0S|({^KbYtObf9H*z=&6*_;h2vI;iCkC?&DhPvtNQgpJKnPUDf(SAQ z>PUuJNl_GX^Wz|>~rN>o7O$cGLMj~uB_og5rH8J3r^Gz3R?Lo-i1 zdp4Ug0Aj9&Tvs%xWqFCJh``_?V0H6i*Q0P>I3^jD0x3ccgNG1>h_g|XY-fu3EN$)5 ztg*eBwv9{HuGMa4Z3j0Ff|SU;+n#GqgIF;+S!d+pRlPF7JTYLY$U>YlLKG4)sYn9H z$|a(%AZ$@6MuLDtpYsq19KZ}>1Yr>h>|A`?7(~S6fRS6V{w9Eoif2xdMiCqLBOu88 z^%)Uj1{Y-)_i&;2-wrRK%}!+tyIdS4IJd+FJFz=7!wdh=?@zc{5fgittnD;)Ztm(* zuEQ!Q6UR!sj?+BvY)>z|vHj&|FMj^H-JLrfJODK+0Rb{aQs_e;c=yFSv$dz+`pCC@ z;Ltmtl%ZVt&0qS$PyNfPs&VYZ@FU+ediyi9eyl^(gb?Dq>ENo_dkDzf%?Th80<3np+hs zF4TI;=2jFHF~m+2GnIt_Vv2Po9L!C#iO_5|6A@>In}b4(#q_7lU+7^nv7&o5j}q0K zm|TmyAOoac00g9*ojB*DJ^0U>wHj2aW@c^EK?DE=VdA!JC(EnbTRU|1)-CJ6fmwYp zuG{(E-~HQv|1-b-t8Y2|P+fCMy1TcxGn;x&HOtZ<3h{VY?e6S8c>0lRmo8nta=FSh zR2q|7$}MsO*Pi*=_ZjccFI5Mkwfi)GL;2peMEFToR#Nuy~0V6?RH-ZqHGEp*g zE)b%@VCl%=N8b95Gw*)yTb_RBp+_EEIeajTDh^~tJlX%x`wqQ7t9yNEA2tFkxLtSi zJ_C3k(Lv8V-=Cfq1;7^{FA76%`i+HsY57;`1$2JD@SqG%{ARR>%hE%pG;eoqY;0V- zoNnF-dph6{i6b*S6ETb&=9b*cN#ZbvmSQUX*k2Y&0#k{V7MKtbQ4kCwg#ZwVfXRz3 zySilZronE?+gth8UEkSEckhU1Pg99d0t;g>kd?Ktez-dGFpeF;%EXo`mI@A?&24wC zP2+8L;ZO(EmBd=c!??V}!>D~}b~go684Jt}3$~kyHD@ySWXL7}G@L88{ZntmT-hS!D zjpv@f`SOdqJGT;Mg(Y5FIdtg#?~jkZc-YCUA=M3vOC;TURj;ox_<3% z{@?$fH_pFt=Aj37_x5hwxP9~9T}95h(Y7VBVW^i!qqWs#OZxDG55N5N7rNr=?nOcS1%QEKHD2sT0uVs~UV#`M7dr&#Pk#W&z4~*%jL?4fD&>iAopdf(eV@WBV4e0u5dfqJEufy6 zoc7nTfc=|(p(a`k556d_h4{K(Aj^EPm`AvemoIb0{@74@hXq7e03;@db~?LrXY;j7 z>Dq=&yP-r_BH|KB=D7v>CK}nG#e0tV%@QQ;vydP}F%(4Z1W`yT3IZai znX0te_h!DchwW|b?P0#v?(PIrYZDO}3yp_)bwbCF#xp0v(POl_#)FYVEUV3$w6~jQ zGo(bS%*<5`gPMnts!FP$%oHPpyqJc$gUPz&=H$fg)MlS&%8HaJJ0%r$^|6pl}Gp&g{l;DvksoK@4TTs_acsaL;8~!~wVv z+#ysf`dOHT7l{~vnQl0F7)0w*o;J?cdeCn11>nvwe;L}w~Ts=+iHh=i!z7a~H0lyf_qRfEd2 zQy?;rL7KMdRjB67+;v$(NUEwj9t{DGl@xrzdoYT#8h~t?vr^zHRxwunQi7vt=T1_V z=E$8zdet96-OP+jkDs$yPyf4l3{h3pjJSkUdyEl|?PhJm z97aoH5@z@9ot?>K<=)Peu3fvCoh+w{!(bG?^5w7o*gyJln|I?noPGJ+h3nU>Kv*a% zbpTKc9bB1=>cP+P=y4p`6r(65z$2uq?_zT&M~K5R|C0 z!W;K^x-Cw(#dUYTkPsl?3@;*@A|xe1;A2PyPDm9nDz0-yP6LW%`%Frl85nHl;wJY_woC0f%oXpnQ96Lf(%~vl6;g zypat`62Q)(SAG!(Dq=?Sv4ZMXw@| zk{8SF1eRV!CIy*cD<1v@#AWX5hxh9U=t=!Pb#`BFv0vtm%V7p0FV@8bIGD(s#X~6T ze*Lar@vZJJ|NitTSDJ(SO>-vi*RhL#Hn}Sn+qC3_5>twzTIQbkD;FYzZ2t~s2YIpU zBw;b{j4XSbuy?Q7yk*nP`Gwct_`T2Zh&iPUCn5%14OLyWUCJrPSerVLkH({< znz}Td&8k=}O~xEVbKczASzAA_Go1x@L$Tfp*qhI;T)Q3uE5}z}J$L^4C50|p%5q#0 zh+4B-J9|rml#-u0_277QZN9r%WvnnqlBQg#_Yd*Kp}xQ}xbNb76rTE~8;eb`qW4Gn zg5p3Vc=I13fD7gW(#3qh3j;?16%*t#H_zb!iad$I0W2_9lL7)jBftss 
zFG4C(Cg#3FM?c;B{ohxEjCZcsxAWciLWS~7kY z$__6sLnX->p54id?ixdqken?Bnv6p=z<9~qxy@#r zJML(ZJhkfc?R;}fUp=3eSLwt9@xfDaB$cf5ZE(U-E z#$1^ytHBrXH2}n*DrG}jC<+35Njs6nkxv9JrpjrS^N7Ort-a}&UYcLNSby_IeDu9d80@ybG@KAo6{9;0&ccA8Ecq5UQ7+!m;;=bK z$0BXJ)TY3dL{g@}fkV!T%$dAxx>&)*i&Lqp8v4BF;ya#GHp_72l#&{2nRy(HAe_xo zaaU4v(>7(1P@a*6b2e2oG6pAA?kxqVL{P8;ByKJw04f%R=bQ@C$_t){iPg1ib^=CD z!t5+jLh+Ib!e(v-#WiR$;lO?9n@4!qSC?eXw{LIUx^tI^E?>WV{>r)hrq!kUCy&E{ zF57I@qRUrq4VOogcDmaYksR6*-(FyPyFW3OYnB#!CHJABa#3@SzZZOezXo720o-qK z%EFcOuSYsK_vRB;IEHd$^P({_AfT8f^aVrV2CKj&*g60NkB}LNyl@g8$>CI7>MBs> z9^ht~fFY3KDYL;1+`<7$P6?>o)w-Qs-QC@L;nuBJ>)#9G<*KgNP8~gP>cK;2-typ6 zPal5#vB}}}Fz$oz;SMjay<7}E09)wQ{wDPOB;$ou(Y{V}Q9MfI1p4N$Px&d>Y}q|F zb1&c^6P4g|E?!u@*5=NK6bG8^$ zOH7ry?`7ZJXm8%jFTFCGEQj^OeBxLn?)i0FAMXkb8)cYi)PPD&vLJFdiN&xCDn+oqQ4qF5!F8nZ2;Wg*IRzCNKJt-d}_PK-j78--%1*=+F0F&sJj4h}_8;C}F&` z=vex-#NxnH_6=AfyE9-V+nQp9$UcOO)>e)^67k;kcYS1X<`ZxH;wR(Yb_~s4cCOe0KF1q_afUkMBxQ*AfvaLCCxSI}D_h>KL@k9r}dqAkrl>Vu$CHiP&IZ z?=-_RtZda$mpfvrs+_VGE+vXv?rS`OiYUm*g##=X6ez*7nv|_TQ#a3~U?wKDti=_p z4{f!qW~vU)X5wa!ViQat%h@yq0Nz_g`OVvRHRY|Xo!8F4p5gnQCxH7~Po;ZwPmU(r z-R-TlV<#yNx@P7fmQ)@`ab@q1D7$}l7x|52VfZ8WJNjOkvpCq?d*7&jJ|OJB0xTYr zu&@a#bYue1vkN330i}LH5drQ%#A2Bio_d7J6QC%oj|{jJiGl;*K@WQx0d;S5QGSjx zzBtG;P#`)Z7PsBj+^m5O9Nlc*ZEkw3uV21!mA^Dz9-TjVVsiB4;kQ1q{+7p&JpRPW ziKBzHm1MgGBZM4FP0K;^rh|K#;PDMDQ{hF|4faF?#bu z51|yj9(m;8<((;#mD0);CnIt&_t#y~4zN1Fz32=5hy}0<3qjS2iq5Rdvz*liF${*i z;gy3p5Zt+8h=)ilB;+W*BL)e@4xX4OvJ;`q!+qwlpk$&85ZI~MMlB?*L|`IeccSe3 zQnaF6>J7k1%D@CCD*-$dIFe^8rfo*A&(cFsDzr>g|vr`BWj#yP- zWb?q1DOp(-5oVSUc(ym6ERR{ptXHOpu%@o-+A3CEPHq&KQkx=+svB%N?8sDb^!)Qb{Zs$!=B+!W zO%9MmE^7_fSQ1$m!Yh6rXDfdwAm5gdWCy%K>8t5BE#BXI0( zsvK~J1^~UeX{jHGK#5GMK|~&lORYng%*jF70bm9wQD%x>Tq}6fGM9j)g;Hiv*Y(P#OPnDVJ`F%QVavJU-VbTT;F3b1L1QDH*uf4w1u&*xAjokAXYEiHTT*D~MPK2n1m- zouoQhMmFSJ7R{W=6$q>#b3tThvQfm4X?e(#u?)vF8e&*eteA;9r7nBZ(5&^WlpR^U z?a0g(mJ+Fx7wIICnzb!0ge|>gbnyr(Z*Wm?F_j+M(X&!S{kZPs0Cg*2WiIU08%#Sn zK@1FDtoW9Tki`IECng6&q@U6HewLX4DIE+kGaJEOfV>ikdE<>McXu{ZieRfNUVq@| z^2##216YlUUoC}-%JOy(<-0RE3z$e8fx+y=J+V;^cMc#XAr~sW1Bk!^VTe!^7jc%p ze=RtCxvrQ|5Futr`4ks5jWe+q>1FX}HMpl~<3u8Hlyv&J7hn1Z|8Vc6XT6$F+a|%7 zXwPX1yA9eK)Zs(#oIn1a*RNflEUj?GK?Z6m#wyk|5hYbFJ}pHV#U)~?SV1#0dx?Mn z2?Zi2k)qov6OAxL3T%sF(@k!5jDxCbQZi>DVnPUF3KnWoy9i!#3Bep$GZAOYAkXc-PbECMWCA;Ztyr|1v9oXpR2wN6nscaxSyUBXptOGf-yLqI=9L7J z053Bqdp099(|L?>I36+yxu>oTA*7V(;>C-a(5CtBeD=aCuYda2e)X3>@$&!#Y=xWZ z*(Y9{zP++;u~MXyYIuBYz1!IynJ$67OFN$DJ<+%yTKjP;JZ-R?bqRWDp`FI=Uji{&6souHj2m%v`DMDBvWIdTd3}iDiA*R42(KVCxLGLhNu_I09MQpR!`Rg%x^loyc*IhKd zy^_))?2I1DBX%N}B7|iW5Q!ret|`>FNie2<|Bnbkh)O?BD4>3^d@lPI4gf(SvU(Mp zwc+IpXJ5H?{qc`}<9O-#Xb@)?&fk6U*<(jf9X@`H)Y-EeGL%S|QDAW=E>}U{QwTu_ z!X)KzXRsiMlOQlLLAYNhS$qm22myuRCx!^3P$>^UKOzW&8Ii$^K)rsy?>vg@7Wa7! zIk$T=2>~p_6zREFp8ebZ?~Rwgm~C%vO$DCAcYxc_ZTMZTpZY)lXny4F|Nh_qT0I<( z2P29y9*@+_gke@vd>}A~lZ%AHQgaNdN)p^lsghprB;-uy#Vg(2Emo0A5Ti&40=ST{ zaCQ@7FA1aWOyVVT%}ipj@=-|;c9Re!ghe(%aMzTwxk(jWjf$`~FnY|;UA;%{%!Q?Z z*dYo80Glg=9c~#RMDqfr!3}|kNQ(KDGX;^h>r$6vh#`tF1%%>bVXk!U{5kD>x1Dcn z-2M7Xul&@%{K=P|dx41-&VW6{fcp()QK9r*56mLyCA9N33b|<;sWf(2dfs#2N~qmpk7^Fd+6xuTOL_^{E35)zisXG>B*^6VQFA$niDz7`Vt8kWd5ch z0JuL5cu)M%zT9m8H;9F7uO~!$&ZWRlVCGPARMk<;sffubkn>tjXN@{TRq)ck6R7ibSM9u!u}N^#TGJVap!`h76TDD2*Tb&n{Lb`a=%QMMJ)zc;7&|N zP9%;3&2ym)985wM`$r~26z*JnRP9*v`>#Cv$N%U58wR84jF%@PZ|Ae~7hd@3pLyuu z(F4bh3#c|EH7632OH0Sewwxc?LY?J zfN#U@lI$Ki_`{$0#chOt{?Go!K&ojobz>RC&Mb9J6oC^9WgrwktE^S5NCeayod|&_ zurQbOc@cFh3RrTZ;`d)nyBN$=Wd7#l&{CB_!T=E$!m%jy&D7M0rAsMPkwbJR0!5LW zvlGf52swmBH3J}KFP5~{pS{#y-a^zb-cyjWsz)fQ?7%G9v;=FjK|&M}VplkfI7sd? 
z6G99H!W4!`%$n)!t7kRi-rnxs&i23m)NlTqpZXWmt=)2mc)w`!W(pRI9uxiE2VB@! z21`eV<950|G#td=AfVs0asXgMug6`8x`;S914KRMUR)`;p9uO37VHHm?Y|5z3@Nz0s~QWr z#HQxguU|X$r@raTH+*P&Z$6#3<2nq6Ly6dX?dBi+{lE9=LjYA>-+42S_Dr9OG+@e?pphzJo(efu8+LLe3wfP;%Li350=6T4~nZrlA>9#c8 zn5k*IIdl%_0)s~1O7EjL`%7Wvc*SA}=x?1~$M~ih0(~vnZ)Tkl1T;f7FvG&40F=+l zLMLkA#kAr3549KwApPU>n@WQ1L;teyEGitpn_jBZV*j8lUsVYKl|*t^Kaba;bgeJytJ}13^G_7GYbLJot@3QxAtz|YqxIBHuiLPo2R*I zyOlw`R?EQy~xVKx*a0n6g2?gbp zb|Hc*Q!zFVjtnVmMjHi-8s*s(#Qdf5_nle=8n2 zK*5Ulk7E{=avj+sK90EP2HXL2W``1&wGRTr7fJ&6P?%9>DRZ0?83$q%To2^lkGfJm z4Px)nD0T)38Hj`(W`IfDN$nS3I2#^0{?Y&X`?n19ZqChYZF#bC{N(zH!+Lq^Gk@(b zzwN1KCNjicC!|33l&D1dmPVn-I^iIul6f0KnRQsC_X}KRhX7`XzzHEz zV6Zq7NkkPu42eBME(~G{ftV2jvlXQ*6Oq8p+=yJwiJV!HH)q}{f>zs`cYgD?uKf0I z-MxF>fGso(iBxSkF4y7fjDzob```JwpIKQxdggq(QjshGp8Hi(aXEleK5SzJKW)Xy9*FgZ64F>c=VZ3R)?LTI~2nzR-XS)zJ zx%E0-%Q=9Vgrr!CI=FBhC^Mz-BEqXU96$Hmm*Z>diU0a8@Df!3!3)T~&-O#_cF=d{04>&5e9^rS7jTh?AwnXh){+Pgu z2|D!NR6KY&XZtX2bApqANqdI{TRZ~8*`52Pu<}z2F_-tnj7TkKqdS}1ub#hoWp5`O z8StUQzAmflE34z-f#b)I9yu|pMqm9$|LUX+-umccp4IbQ81AVD37sIlLy!|-RG!Wx zlw=5J=Rm|#){Oh@fwCjU(TiG1#8G;D0S5?3#3h1c-_*#tp#Qyl9p7*Ayco$r%N_#5 zB!-%e^6r!+>wL#9U)y>13uk}lGufM6-_hR4I^14f{=KHVI^<`*`#b-KfAII0hD+Ui zI@{Xp+IgEBqM+92dSrDgJ|@XJ*PPR=(`GNhZ9bct`)uAc^CqQPGjH0IQrAtJslnB~ zOR1T)L}ZF;P?1nd>V>)Ok6Gcu=T{-CtD)bIbai5IBo6N6SqU!8=2leIPDDXe%@rYp za#o4NoO7X3h&iV$A|jMjDToVa?R8^t=Y^O?b1o%SVsM7Kf?(<)NC_H|AZ{QbBs2HW z<<6AY{qEg+H?LnuLAi=zp|m01AC!x#z;Gs_;&f_m44C^or$b9C)^wH|rJmI)SGx7Q zNr*JFTz4t@Bv<|9|WF_gRlV{>Otk|uYT#}-}>~YUw!^uL(7kU;C&x> z@7oWbJifF%3?okFUfV{Kmt(9Wb8m^olQ>*n86Q1<%K;LW{VXOVP(#pLN{V{c=HamU!~gNiX2J(n0k%R1xmGmMgC}DlG#X znG;M(CtgYvI~BUQ$XB@EHgH#VgrI~B&t^^Pq>{Hj@z{f@n|XFJA=$g$H1pf%{_wSH zb(oBXXHGwI>hJ;Iom(>_qO3`~2Ck;%MS~?!U?&bCSZ7WIOWBAca}e`Po(n_J-&no% zRYN3Tv2s2Xo>L`cTH++>cs`i7E^>;|KLv9RY8CB`QO(Z=I#8`pZ@KC`p^FN zW^WcaSXqGWePU3&0!l;@`Ve5CKi8;=mFF}J;rQyoVM>_Ky0%Tw23~UD7CiEOCSd{k z_o5)VPYjgHiWbDBFHY_^pZNVDUg+=!x*&neT*7#hqO`~+7u`KAfEN(FwD}9!PhWuq z-UF)(&|lv4>_w+kUSt4;g%+qhUF;d%ERbc2*>eRg-=&Kh!wdGf0tJ}7sVH(k1h|5# z{=MQEaz(MHmN>eQQbaHHjp~J^M{Jvu!{=nq)!;?dYs_}q{HDx9ap+u#c!zpn8 zOK`wpZbA?iS2J*oVSaD#_dfjxpZkM9+L_JY`u_KR!+-JZC(j%kjiiu;dpYl_C(x`j zH=m4$ld)6)+HCXAh@|%&@!}I8AUL^7%G71-?9OlB+Pd_{jf<~ey>a={#;wbD@4m4! 
zz3R<&&d8=qRefl*{Fe2@r&re>UOsRvt{sr23DI&6gVuODXYCqo2nw4=W{^54MC7b+ zb9N4ryTlTVh&Xt*0+IkAb0;-|Lyfes-@ZU?u-|f$_ZW~6`2rE*1*Akp_zw^xMk;H$ zAR+Nc{R(HOct*pbkfJ)A`JIowHMh+rlCgLIrq98Euy$j&#mRy|+HN)z?M-_Ve zPhyC{h%6U_g1|1sF@{ksp=OQ@NenS^_lhfMx3}BKWLOV4AfTO7lhHQjY}SI@O~pyg z$vvA1vs4vJj8)|^fZ-ezMnRm63sqadI#qLXQ*+l`W^J+3`xWAk35AL`m^(2g%}y{b zk{>Ar+^wjq3Io8b&Y`%W*5f#eLBu_bEAhSAwdZV#)k zCkZDW{^7s%f0-iBclQ4BU;RtqD$QoO-C z(~=WOoK0PbLU2GrRD%eqDiIqfC3m1d#6czD0)V;`rtE1vCe zVgfUlzyK?xk(xWx@BjYq5Do8cZ2a6W{NgYD{LjKoz@8*%G$cb(6#g3X@}e`V!ZB?2MfGqAM;t9s`p!L01mPRP`<#w zXuspy9|Z!~dl_h1H1vI%LHeVAA2sT0z18!ics=B^= z^X9+(v48&6uYP^$=!4(=1K;<~4?VTKHsat~T0({}JB-|fI01L|^!b~+0z7hj^1!HO zGE3HZ30T+C%5g!!YF`_EPMsL8u2rFqs9@Y|$!s3nvpHM^22<6{#02V6<`9d@ z&KwlQ+~5X+3%RS4xn)pM)WAWc2I3yw(E=0o&N!tHAto2roI`Zx-cCD1>ZtBa zTs&{GD%a6r0`!y?vzbwVD~+Bq0zv2QCh%`x8796-AY+tGhcitC^;hn%QhVPi@!bY+aYTrfb?}I&D(Y zSvyaunYV2-Z<;Q3ZSE|0T{19;Rgp~&cFIR!C+R3XPZ+dsU;8zxq2TD`KD=kF$8$uk&S=`O1K>E*4LgziH+ zh&^v%83j>J1e=2wn-Kkv$?zFG0#jH8s(}cq0f6Izt`KmBaZj8Hmz86%!;>2eCsWU< zX0@hS9>s36X}-Am%8RdFIDPWKdq1-Fp7$Sl`y=C}QP-6$DIbhtRu4hU+;Wx@P*;t@ zAl`TY3PJV0_2NJIU;oJ~FFyaslkfh4zxmfrojw^VBsWT#gG6E`gJ$94*Ws1n^oSmK zWMw=1jXOJ6wx=uWqv1emgh0d;$XywnJ*$$zER_*3uq_QykE_E+>PP9I=_8%dywhgd zZfxvbzkK`J`5U(`T)Ohcg=-hzxN!T%%dfuJ+AdeTI$An49zAyW(4*rcZ$EV8`08*; zd8E~-BtE7jZaF70kx;?S(mv^04qPiHngy#S2-G3$>cnEkW&Mg^r4aXMynE3|7IBrM zSnd!DwCOm6Y!o73(8k=LMn*(|C5p95Rmf_^^^2G&)Er<|7H8l_N}gMwgF=wWC4$98 zK`>H^QJAaYAgg(zwqws~=EUxdz*Kf{oFE*mk{~eza;MVr2{^e$H&=GI*2=0}3Sht# z!L68EyMxIb2#vY9NfDQXfryB?M>b2ADQ71Zc2;-Kno?2%N{))N7j%sTEZ$2w6Bt75 zJtEGei^eWxak0j@XAzm;(BjZa2FqF16htX!+HO76c~rrzi=?nYAeyJha^TRRLrcf+ z?Oodg{J}GS#^n2rL&s zspy6YWF_X%dea;&>jJ9XJhg4t%$r%$H1jUCZPPYQ+ca(0b=|y4ZEELDGw<3iVWupE+X+XBfe-({U;g3M_4T_OyPdTLURxUs zYqaxrIIIcf{xk+9wZ>5o!ywR^QwLr=fAcduN!{KmysUqAcW zy-Vj`zH$5Y+ZWJXoOnJkj1R1=zUAPdM-Co;eEsD5>wXs%z(Mr>16^oskU) zPJlAAxaLJK!C+E1ru|I=aZ@!b0!#_Pk&9Z71*XnbG&7Py-GPWZRm#jPswnE5oXuNx zXO|94#A-}M$7b-@We{Q+5Jxa`2o;4IL%74bu1RW6nww^?ZS%aFHBCp%vu0;FKsRk+2vNG}d@!z3 z0x1}rQx37VWDO|97-NV)WFB~!c5`R4tcX#VSSmN+7zQH3#(@b`#Q0N; zw?61q&kGkAElt0>WwR1Gqo&!j=G1j5C)3<@xox|Y+ga1j=kum*r}K6;YjT%%r_*Vd zXKg#}x=dumZO*wv27vUt1{|}v{&_Z=X*oS9(Va$FRcr)pSz7kl?k)8HD`GQE;hw}HgmiK%7{{smTdN7Ck zHy=R%Ahb_QVDYLJFS{Qacu|4qKF~x08-NGkJ){U8g8aTJX&+zW(CaOF$tmE43al-uCuy`jJ0# z_>mKng9qzDEZdt|v$c6x4+yyO?DN0!Q$PFCOV6Kq$5Y?)L;ub8oVPR$R_ba}H(k5F zwlW#l5)jOm#-k9>GjndUc^e1ATeq6EwK^x;vvlt2)~9~wkDh=2h1qm|;(?P7oO$5r zv4aQKR*xT8TR*h6JdBeFsksgknWC#GRFy%+14hl-tZFcZ5XCAmgq+AE8@6}Sg*WcK z_VU&9FI~QM>GhkJ&+Xj1q|IIOS@CZ zE0Mc7pl%9X?%vh!ZKfuo#>vI1@COA}gu6mDP7I zFpdeHDZxW^{@inqJ#_M8-~Mq+?An01 zU)r)_zGvnRFHXos6lQ%dSaeZlv`D+@Wneuw1h^B!REy}Vta-S*YM%m&zPmLhl*h4x zy{NHpFAs7ja$y0e?5Qx3xw%?7GMFgDP=wB4^w^BM6}-pIix&VRr9#7~6S;bBTUDR$ z&EP)o+MF~wrn6bo@wYtQHqEu`SGwE|2azQxnSn>cm50u}ZR_5>voAkKNP$UQk1nkqTVCllclKsG z4%c3g2nBR8axDtLKJwllsoa+eS`-Wis5p|oNy2~s&yb=M%`Gh$RP1-_`%kmL^!r1+ zXfbI)0;Auwu|@gWM}c}anEJo<|7VL&El&TweD8z%1IQk_2pNDJ+V5BOWut`Ecn^sH z15oVYBNAW++ktN&&jF@USX!#;VO<4uIqlB0ZGdJ3EF%)ECPbkO7a%h;Q-F^N!GSQ` z^MM>Xaqx+EPfnkC^!@J|A6jnPJ(l3^qhYnSx^(yI8yB8^cDA=&t*lo^j_)Xc;Y-)Q z^#}jh!NUgvR3&U}ZAysihu22YM?oe_1GoidgU@G42Gy&tZSHQTvzIQs=lzeIeDGki z)4g%w_Rsy&uRr^hSN7&xGN{&;*Q=#f8jP2gS0`)B2am5EKY8%f;njx@t*i{=sFEO| z7LM$J*iF4el4sMbst$!n;1DH<#6ZT)yxZBHZEQ_9?(M$%#<^>kFJ3->;m*}_w=P`J zjp>rcWeguXeE4mvhu(Se*vWdlT8(PfDtFo20(cmy06D2R+%#Dx5x5ISw~P{*Kyb|< zGqqxzlgZ#d*R08B-Mmq3H(f(Co2Pl}t){7Zle!$x=C*U!%$=3KEoHaNXjwa@E?L%K z?kQA=p{hp1K~4402Se1tbsUU&FsiGSVH}NXi4wSy5S+Lk4nicAL1Zl~g|IUXs*sU+ zAgMD3=eAXaceyDW4;_L?qsp2+IcI~Ds!{+8LXap`4v@(1HX4mA#I==Wi!2-=yfPjP 
zM#ITu5-X4(l6GCw%$n(LgY5IJb4QoZDneb=#z8_h1w7|u*5%fA=G*h+&2H0nd1H6x zZD%2~*u3T1tjXP^>#Wnc&!_DiyD9B--S)iO+t}XSN@+Vl$EYU2F)SYkojG{;iN_yY zUp;(ib%lBNVw&2Ao?7|%x3&Yn`1O}R|AntNHh=m(&j?YoH`8i394?fwwVmDRP~-Mgj1&Z>&C6y3u-`)_j(!!A}-={_;gpVUQ3 z=`aG@0=kj=902WqC4mg8Ui|$jy~!}{%^=dETk8PO~z~puT7Uim}gQ;%(PU=+)Y!SRAIno zn-_0?{?5JUSFe8N6U1q}wqovqWH=ceT3KE{dGhJU9!$+dsk%PB_vzpI%Bc^1lKsFG*c;&{eyZ0V@=orPi5~(V2cV@P%+T_*Ah%kNm<*T3i?Js}J$KP}6=<DHBBxqfy+dVFB3)x*`{a7mU%!m+9( z4+ddaS9KkR^?OxU zSV)RZsyRqlLI}j9*_zJPEg4KT0iCK_nPd$vf%?2&4pk8AI+(k)+0 zJ9poA@}06W7(M=EeD@Rcyz|B@cYp6QpZ75M&^LecpbDF_T^ZF)Hk<9*ycWdSQ2J+&nRva zR!DBk)|px^WTl`mT$-~Nu`Ly!35bz49SC7}XPR4W=iPkXplpMnH<^(T*Ow1mym(1H zk4N>rjSY3K!?3D{sk33kjmsBSpIBdCU7zmWbr4BxQ)+Wp3s)TGmVKcbCU$3FIb zfBLN-e1;}<$u}jow(TG^+iOog_1Ig#`6IKf?bkm4)j#}|U)j2LHM_2x1@{VodF!sk z9iyGM6UHD`qsnTt(D2Z0zI~G;HLdP3=9H(aOG{@Tj>9^IFgSkV!S#_hzFbo`pGgc^ z)6#e}8q}}8`nu2OZ-4wTy}fz(=s}AFGjnWiY{zMM0D#SW~ zX)vCw9X)j7*wNMHrGsme)yZUaX?ZdnPR670s2;{x2Ld4g;$|>5)PdtlJz5?eIdSNn zPrl9Dc6ZwDO`Get?!5HI*&7!xU48Y^D_5`GzVza-X~%&FZbz#6(e(qzhHFR1E5{EW z91Yed%i~c99HX^UZLQte3;; zWOX!L8x2RJ$#^(kT|e>ou{anHKK3WRLuSc0H?Mx~+2=p=+ba4^-~O@DTOQfk-M(@0 z(yOmrSvs`(UElLvqj9`(>5jDZUb7d&phlXk4${ThzyIWKKK#gIPkh5SbZy?7?^>Qi zoQQ*jz$8quFGjkN1su0Xgi=iR8n-L2~VS?ri zGh)lCfrA7=Z&;F5H7AGXzO^lkxRfoGva?f+qY1@&v-mShZz&}rjLKD26=np>T?YpT zH&u?5yN*NEI|Q)iwk3jrb24MZYACGX&fQJT*qxW?!!i*$IT^6NcHq{X+o_$6hD&$1 z?u>^^AAIJc4?cS4?#9Mv|HE%}t{lU;*Is!06HnZ^b!|4^;aCw%+jc_#7mL&FmXXWcZiO%ALKh67N}ZH|G=vA4H-=hnvR(WO837yiWi zzxjQ?_p879xnKYEz3sgbr8N<#w}x3(Vo7EJv{XwJpWeF@l3B%dg;`Q{M&Nnsu5WDa zoWFYN*xJc6r#ouj@%RU>2W{uAcOYS?Qo#D^@;vdg&!1mgUOjzceP?HPd3nUl4B6Re zCTp>6+Q9kf+RBSBpMCP_$L?+BXWsYrV@vs|({K6CZ~ege%a_&d?Ag~YUV44w*5&zZ zyJO3&haWgRJTN)))cZCsoz~&(b{-r zZSCmMV@ufe*0pQr?_Ilg^|`9ujcpDMAE2d!!?p2vzyXbR=I(=PK-A1< zyJ>HV?(TGUrM|?#YrfDa>3A z3|Cb(AuyD{AkPkR%bMXJ^_1bR*0)E@A(Bf75p6df#(J;3&a0ErMCi%;9F@QY_TKE@WJ*08Bv$RK+O7u^LY%OGiKc?cerizwb}J_0dzu z9y;>O_kT1u^TBAfgI#|8>aDGAd~oueAOA0Pw*BQleD=o8dtMJGgF&dm-J9oLeD=#P zeEmG<^t-?FJMa01kBrt1498*zq(sEUsn^Za ziL|(oqVH4tylqKiECiBD0F$aNUsBY@vCBotio!)tmRw=#Qb#kSrghaYoi}^C zd(-Jud%;^@jRrg*z%12E_2$NA;K9j<9(&@Q?|skv-(w`Fj-2|HPyWK@#`TbCYwPa8 zLx(S2-e!(jt#gwYVih=dxrbBwbzJiOt{ngQnr=B{i<4eysA(YecsLx@!>WprB}icA zAk0*3u30S;*g(vbbIw^Eu4YMd)@)gmnwhmJWz8kKITv$`#g^H^$+^q{Zw8utzc#!X zsw(9~@7D}>^Z}`iV&6xF_Q=43Np^?l#ZRq^Xo2DoH$|R7-t&#$_R&B87iLDASFRs_ z=upnht=DesO}A(5j=7V_a6DdFU0q*0(9HKP-JGu+UH{&{{+HkW!T0~-KlrED&!2aI zybG<3g-j7adx`_Z)70ji8`Y*2YqNTAHt(`HSb{+>U)$Q5zrOm`2W{oj`1H!_FT8x{ z$k8FK8C=L&TqJGWyz$a=FR!jVO*~$njNu58Up)Kz!Gi}jH+L>wc>RyQ_|mZx>s7@+ z`>+4QsnaL!?xr97!EZlveEI#)JaXsu?e{$S(1i;(cJ5riv$uQx@-=a_`EI*`*T49? zX!EA==k)BgGUanJ#ggk!PTWv41+ojLa4Z7P$1&KE9>LcBja~JdSW{7_U6rO+O*I+ zcQ$6bTW7DI|H^;-!sSpNR zLJqe-o-S;rSo@30_rHPO)R@}+mIXpVdffvkrR>=gJF-o=P4j8HDZ-W$GrJmsEZ6mD zWwNqVA2_&v^Ujvdr)h82q@B@tVqMpENrXaGr*4mc+^GmTv%rYqT?t1v24}A%u%&En zRQjI`mSFA-W#TGUjdtk6boyyqw3ko=#tK4WIQ7$f8Nqo5+rlgV;`z_N^88o+;$QvC zPd)t5%`4XqO~RSCtzI~H`SO*^cQ-fR_T<~)2ue|W( zFTD7LSGQ*2PC#jV znW&_(n!ESQQTq>05>TR+s9&kG{vXLmh*^XHbu}jhat2g^tj!Vwc_5sxS;t>UJeUC94u_z%3MIQI8p^Pkm}&_YF!UU!#YF}pf9`TRB}Pw zi42}&tg@;|U{EE@EMU^&fm(vkQ0(H#%vH5XDW%l3O`Ezdr<5~XwSO`F3oJL@K7~Mw z8+Gy5{|uX23=OXE0M?R?yU=cyFVg;kChz5*aG*k*Bh7%v-t(UK{jrb6iCw>XLk9S* z-}s|9UU}8KW^4OiqO5A8@vy4L%d5-B9(eeHQ>Ts|dvNpKJ%CR?e&)aaJO9^z^JD+` z+zT%UU_`teqINmCIqc@izx%2JrwzumKJaY0o zzw={1@y~wt+#45H*2B)$&U@eYT556@Eza%Ehi2g|CK8jAAj_*`NsD1&pkgL;62~+CvIH$(xvkkjw}y1wzj01 z?@ogaMjJc3yL)