diff --git a/README.md b/README.md
new file mode 100644
index 0000000..f0c9e64
--- /dev/null
+++ b/README.md
@@ -0,0 +1,67 @@
+## relu_main.py
+The relu_main script runs and logs sparsity collection and the optimiser for different ReLU tuning policies.
+
+### Notes
+Assumes the following directories exist:
+- ./runlog
+- ./runlog/<arch-name> : Directory to store sparsity information
+- ./relu_thresholds
+- ./relu_thresholds/<arch-name> : Directory to store json files containing ReLU threshold information. Annotated onto the ONNX model by onnx_sparsity_attribute.py
+- ./onnx_models
+- ./onnx_models/<arch-name> : Directory to store annotated ONNX models
+- ../../fpgaconvnet-optimiser/fpgaconvnet/optimiser/<arch-name> : Directory to store optimiser outputs
+
+Uses the krish-skipping branches of fpgaconvnet-optimiser and fpgaconvnet-model.
+
+
+### Usage
+```
+python relu_main.py
+```
+
+### Flags
+- **arch**: Model architecture name (e.g. resnet18)
+- **relu-policy**: ReLU tuning policy, either slowest_node or uniform
+- **fixed-hardware**: Uses fixed hardware and does not run the optimiser. Must provide the "platform_path" and "optimised_config_path" flags to load the fixed hardware
+- **normalise-hardware**: Runs the optimiser with the same number of DSPs for the dense and non-skipping cases
+- **accuracy_path**: Path to the accuracy .csv file holding previously collected (old) accuracy results
+- **model_path**: Path to sparse .onnx model
+- **platform_path**: Path to platform specs (.toml). For fixed hardware
+- **gpu**: GPU id to use
+- **enable-wandb**: Enable logging to wandb
+
+### Parameters you may want to vary
+- **THRESHOLD_INC in relu_main.py**: Amount to increase the ReLU threshold by on each iteration
+- **--gain flag in fpgaconvnet-optimiser cli**: Minimum gain to push fine
+
+### Example Usage:
+#### **Uniform Increase with changing hardware for resnet18**:
+```
+python relu_main.py -a resnet18 --relu-policy uniform
+```
+
+#### **Uniform Increase with fixed hardware for resnet50**:
+```
+python relu_main.py -a resnet50 --fixed-hardware --relu-policy uniform
+```
+
+#### **Slowest node Increase with changing hardware for vgg11**:
+```
+python relu_main.py -a vgg11 --relu-policy slowest_node
+```
+
+#### **Slowest node Increase with changing hardware compared to normalised sparse and dense for vgg11**:
+```
+python relu_main.py -a vgg11 --normalise-hardware --relu-policy slowest_node
+```
+
+#### **Slowest node Increase with fixed hardware for resnet18**:
+```
+python relu_main.py -a resnet18 --fixed-hardware --relu-policy slowest_node
+```
+
+
+
+
+
+
diff --git a/imagenet_activation_sensitivity.py b/imagenet_activation_sensitivity.py
new file mode 100644
index 0000000..27306b9
--- /dev/null
+++ b/imagenet_activation_sensitivity.py
@@ -0,0 +1,190 @@
+import argparse
+import os
+import random
+
+import torch
+import torch.nn as nn
+import torch.utils.data
+import torchvision.transforms as transforms
+import torchvision.datasets as datasets
+
+from utils import *
+from sparsity_utils import *
+from quan_utils import *
+from relu_utils import *
+
+from fpgaconvnet.parser.Parser import Parser
+
+
+parser = argparse.ArgumentParser(description='PyTorch ImageNet')
+parser.add_argument('--data', metavar='DIR', default="~/dataset/ILSVRC2012_img",
+                    help='path to dataset')
+parser.add_argument('-a', '--arch', metavar='ARCH', default='resnet18',
+                    help='model architecture: ' +
+                        ' | '.join(model_names))
+
+parser.add_argument('-j', '--workers', default=4, type=int, metavar='N',
+                    help='number of data loading workers (default: 4)')
+parser.add_argument('-b', '--batch-size', default=64, type=int,
+                    metavar='N',
+                    help='mini-batch 
size') +parser.add_argument('-p', '--print-freq', default=10, type=int, + metavar='N', help='print frequency (default: 10)') +parser.add_argument('--gpu', default=None, type=int, + help='GPU id to use.') + + +parser.add_argument('--ma_window_size', default=None, type=int, + help='') +parser.add_argument('--calibration-size', default=4, type=int, + help='') + +parser.add_argument("--accuracy_output", default=None, type=str, + help='Path to csv file to write accuracy to') + + + +def imagenet_main(): + args = parser.parse_args() + + # if args.output_path == None: + # output_dir = str(args.arch) + "_output_relu_" + str(args.relu_threshold) + # if not os.path.isdir(output_dir): + # os.makedirs(output_dir) + # args.output_path = os.path.join(os.getcwd(), output_dir) + + print(args) + + random.seed(0) + torch.manual_seed(0) + + # create model + print("=> using pre-trained model '{}'".format(args.arch)) + model = load_model(args.arch) + random_input = torch.randn(1, 3, 224, 224) + + if args.gpu is not None: + print("Use GPU: {}".format(args.gpu)) + torch.cuda.set_device(args.gpu) + model = model.cuda(args.gpu) + random_input = random_input.cuda() + valdir = os.path.join(args.data, 'val') + traindir = os.path.join(args.data, 'train') + else: + print('using CPU, this will be slow') + valdir = os.path.join(args.data, 'val') + traindir = os.path.join(args.data, 'val') + + print("Calculating MACs and Params") + calculate_macs_params(model, random_input, False, inference_mode=True) + # define loss function (criterion) + criterion = nn.CrossEntropyLoss().cuda(args.gpu) + + # Data loading code + normalize = transforms.Normalize(mean=[0.485, 0.456, 0.406], + std=[0.229, 0.224, 0.225]) + + val_loader = torch.utils.data.DataLoader( + datasets.ImageFolder(valdir, transforms.Compose([ + transforms.Resize(256), + transforms.CenterCrop(224), + transforms.ToTensor(), + normalize, + ])), + batch_size=args.batch_size, shuffle=False, + num_workers=args.workers, pin_memory=True) + + + train_dataset = datasets.ImageFolder(traindir, transforms.Compose([ + transforms.RandomResizedCrop(224), + transforms.RandomHorizontalFlip(), + transforms.ToTensor(), + normalize, + ])) + # calibrate_size = 50000 + calibrate_size = args.calibration_size + # per class few sampling, different from random_split + # https://github.com/mit-han-lab/proxylessnas/blob/6e7a96b7190963e404d1cf9b37a320501e62b0a0/search/data_providers/imagenet.py#L21 + # assert calibrate_size % 1000 == 0 + """ + rand_indexes = torch.randperm(len(train_dataset)).tolist() + train_labels = [sample[1] for sample in train_dataset.samples] + per_class_remain = [calibrate_size // 1000] * 1000 + train_indexes, calibrate_indexes = [], [] + for idx in rand_indexes: + label = train_labels[idx] + if per_class_remain[label] > 0: + calibrate_indexes.append(idx) + per_class_remain[label] -= 1 + else: + train_indexes.append(idx) + """ + #Randomness handled by seeds + rand_indexes = torch.randperm(len(train_dataset)).tolist() + calibrate_indexes = random.choices(rand_indexes, k=calibrate_size) + + #train_sampler = torch.utils.data.sampler.SubsetRandomSampler(train_indexes) + calibrate_sampler = torch.utils.data.sampler.SubsetRandomSampler(calibrate_indexes) + + #train_loader = torch.utils.data.DataLoader( + # train_dataset, + # batch_size=args.batch_size, + # num_workers=args.workers, pin_memory=True, sampler=train_sampler) + + calibrate_dataset = datasets.ImageFolder(traindir, transforms.Compose([ + transforms.Resize(256), + transforms.CenterCrop(224), + transforms.ToTensor(), + 
normalize, + ])) + + calibrate_loader = torch.utils.data.DataLoader( + calibrate_dataset, + batch_size=args.batch_size, + num_workers=args.workers, pin_memory=True, sampler=calibrate_sampler) + + + #-----------------Model Quantisation---------------- + # todo: measure post-quantisation results??? + print("Quantising model") + model_quantisation(model, calibrate_loader, quantization_method=QuanMode.NETWORK_FP, weight_width=16, data_width=16) + print("Model quantised") + original_top1, original_top5 = validate(val_loader, model, criterion) + print("Accuracy above is for quantised model") + original_top1 = float(str(original_top1).split("( ")[1][:-1]) + original_top5 = float(str(original_top5).split("( ")[1][:-1]) + # use vanilla convolution to measure + # post-activation (post-sliding-window, to be more precise) sparsity + + #-----------------Variable ReLU Sensitivity--------------------- + relu_list = [] + for name, module in model.named_modules(): + if isinstance(module, nn.ReLU):#or isinstance(module, nn.Linear): + relu_list.append(name) + + model_copy = copy.deepcopy(model) + + for relu_layer in relu_list: + min_thresh = 0 + max_thresh = 20 + while (max_thresh - min_thresh) > 0.01: + recorded = False + for threshold in np.linspace(min_thresh, max_thresh, 21): + model = copy.deepcopy(model_copy) + replace_layer_with_variable_relu(model, relu_layer, threshold=threshold) + print("Variable ReLU added") + top1, top5 = validate(val_loader, model, criterion) + print("Accuracy above is for " + str(relu_layer) + " with ReLU threshold:" + str(threshold)) + top1 = str(top1).split("( ")[1][:-1] + top5 = str(top5).split("( ")[1][:-1] + output_dir = args.accuracy_output + "/" + str(args.arch) + output_accuracy_to_csv(args.arch, threshold, relu_layer, top1, top5, output_dir) + if float(top5) < 0.99*original_top5 and not recorded: + min_thresh, max_thresh = threshold - (max_thresh - min_thresh)/20, threshold + recorded = True + + +if __name__ == '__main__': + imagenet_main() + +# \ No newline at end of file diff --git a/imagenet_main.py b/imagenet_main.py index 8a2621c..46afb15 100644 --- a/imagenet_main.py +++ b/imagenet_main.py @@ -12,6 +12,11 @@ from utils import * from sparsity_utils import * from quan_utils import * +from relu_utils import * + +from fpgaconvnet.parser.Parser import Parser +import json + parser = argparse.ArgumentParser(description='PyTorch ImageNet') parser.add_argument('--data', metavar='DIR', default="~/dataset/ILSVRC2012_img", @@ -35,9 +40,24 @@ parser.add_argument('--ma_window_size', default=None, type=int, help='') -parser.add_argument('--calibration-size', default=4, type=int, +parser.add_argument('--calibration-size', default=2500, type=int, help='') +parser.add_argument('--relu_threshold', default=None, type=str, + help='path to json containing relu thresholds') + +parser.add_argument("--accuracy_output", default=None, type=str, + help='Path to csv file to write accuracy to') + +# parser.add_argument("--model_path", default=None, type=str, +# help='Path to sparse .onnx model') + +# parser.add_argument("--platform_path", default=None, type=str, +# help='Path to platform specs (.toml)') + +# parser.add_argument("--optimised_config_path", default=None, type=str, +# help='Path to optimised configuration (.json)') + def imagenet_main(): args = parser.parse_args() @@ -60,13 +80,14 @@ def imagenet_main(): torch.cuda.set_device(args.gpu) model = model.cuda(args.gpu) random_input = random_input.cuda() - valdir = os.path.join(args.data, 'validation') + valdir = 
os.path.join(args.data, 'val') traindir = os.path.join(args.data, 'train') else: print('using CPU, this will be slow') valdir = os.path.join(args.data, 'val') - traindir = os.path.join(args.data, 'val') + traindir = os.path.join(args.data, 'train') + print("Calculating MACs and Params") calculate_macs_params(model, random_input, False, inference_mode=True) # define loss function (criterion) criterion = nn.CrossEntropyLoss().cuda(args.gpu) @@ -85,6 +106,7 @@ def imagenet_main(): batch_size=args.batch_size, shuffle=False, num_workers=args.workers, pin_memory=True) + train_dataset = datasets.ImageFolder(traindir, transforms.Compose([ transforms.RandomResizedCrop(224), transforms.RandomHorizontalFlip(), @@ -109,6 +131,7 @@ def imagenet_main(): else: train_indexes.append(idx) """ + #Randomness handled by seeds rand_indexes = torch.randperm(len(train_dataset)).tolist() calibrate_indexes = random.choices(rand_indexes, k=calibrate_size) @@ -132,14 +155,39 @@ def imagenet_main(): batch_size=args.batch_size, num_workers=args.workers, pin_memory=True, sampler=calibrate_sampler) + + #-----------------Model Quantisation---------------- # todo: measure post-quantisation results??? + print("Quantising model") model_quantisation(model, calibrate_loader, quantization_method=QuanMode.NETWORK_FP, weight_width=16, data_width=16) + print("Model quantised") validate(val_loader, model, criterion) + print("Accuracy above is for quantised model") # use vanilla convolution to measure # post-activation (post-sliding-window, to be more precise) sparsity + + #-----------------Variable ReLU--------------------- + if args.relu_threshold is not None: + f = open(args.relu_threshold) + args.relu_threshold = json.load(f) + replace_with_variable_relu(model, threshold=args.relu_threshold) + print("Variable ReLU added") + top1, top5 = validate(val_loader, model, criterion) + print("Accuracy above is for ReLU threshold:" + str(args.relu_threshold)) + top1 = str(top1).split("( ")[1][:-1] + top5 = str(top5).split("( ")[1][:-1] + + + #---------------Sparsity Data Collection---------- replace_with_vanilla_convolution(model, window_size=args.ma_window_size) + print("Vanilla Convolution added") validate(calibrate_loader, model, criterion, args.print_freq) + print("Sparsity data collected") output_sparsity_to_csv(args.arch, model, args.output_path) + total_sparsity = total_network_sparsity(model) + output_accuracy_to_csv(args.arch, args.relu_threshold, top1, top5, total_sparsity, args.accuracy_output) + + if __name__ == '__main__': imagenet_main() diff --git a/imagenet_training_main.py b/imagenet_training_main.py new file mode 100644 index 0000000..8b138ff --- /dev/null +++ b/imagenet_training_main.py @@ -0,0 +1,544 @@ +import argparse +import os +import random +import shutil +import time +import warnings +from enum import Enum + +import torch +import torch.backends.cudnn as cudnn +import torch.distributed as dist +import torch.multiprocessing as mp +import torch.nn as nn +import torch.nn.parallel +import torch.optim +import torch.utils.data +import torch.utils.data.distributed +import torchvision.datasets as datasets +import torchvision.models as models +import torchvision.transforms as transforms +from torch.optim.lr_scheduler import StepLR +from torch.utils.data import Subset + +model_names = sorted(name for name in models.__dict__ + if name.islower() and not name.startswith("__") + and callable(models.__dict__[name])) + +parser = argparse.ArgumentParser(description='PyTorch ImageNet Training') +parser.add_argument('data', 
metavar='DIR', nargs='?', default="~/dataset/ILSVRC2012_img", + help='path to dataset (default: imagenet)') +parser.add_argument('-a', '--arch', metavar='ARCH', default='resnet18', + choices=model_names, + help='model architecture: ' + + ' | '.join(model_names) + + ' (default: resnet18)') +parser.add_argument('-j', '--workers', default=4, type=int, metavar='N', + help='number of data loading workers (default: 4)') +parser.add_argument('--epochs', default=90, type=int, metavar='N', + help='number of total epochs to run') +parser.add_argument('--start-epoch', default=0, type=int, metavar='N', + help='manual epoch number (useful on restarts)') +parser.add_argument('-b', '--batch-size', default=256, type=int, + metavar='N', + help='mini-batch size (default: 256), this is the total ' + 'batch size of all GPUs on the current node when ' + 'using Data Parallel or Distributed Data Parallel') +parser.add_argument('--lr', '--learning-rate', default=0.001, type=float, + metavar='LR', help='initial learning rate', dest='lr') +parser.add_argument('--momentum', default=0.9, type=float, metavar='M', + help='momentum') +parser.add_argument('--wd', '--weight-decay', default=1e-4, type=float, + metavar='W', help='weight decay (default: 1e-4)', + dest='weight_decay') +parser.add_argument('-p', '--print-freq', default=10, type=int, + metavar='N', help='print frequency (default: 10)') +parser.add_argument('--resume', default='', type=str, metavar='PATH', + help='path to latest checkpoint (default: none)') +parser.add_argument('-e', '--evaluate', dest='evaluate', action='store_true', + help='evaluate model on validation set') +parser.add_argument('--pretrained', dest='pretrained', action='store_true', + help='use pre-trained model') +parser.add_argument('--world-size', default=-1, type=int, + help='number of nodes for distributed training') +parser.add_argument('--rank', default=-1, type=int, + help='node rank for distributed training') +parser.add_argument('--dist-url', default='tcp://224.66.41.62:23456', type=str, + help='url used to set up distributed training') +parser.add_argument('--dist-backend', default='nccl', type=str, + help='distributed backend') +parser.add_argument('--seed', default=None, type=int, + help='seed for initializing training. ') +parser.add_argument('--gpu', default=None, type=int, + help='GPU id to use.') +parser.add_argument('--multiprocessing-distributed', action='store_true', + help='Use multi-processing distributed training to launch ' + 'N processes per node, which has N GPUs. This is the ' + 'fastest way to use PyTorch for either single node or ' + 'multi node data parallel training') +parser.add_argument('--dummy', action='store_true', help="use fake data to benchmark") +parser.add_argument('--model_dir', default = None, type = str) + + +best_acc1 = 0 + + +def main(): + args = parser.parse_args() + + if args.seed is not None: + random.seed(args.seed) + torch.manual_seed(args.seed) + cudnn.deterministic = True + cudnn.benchmark = False + warnings.warn('You have chosen to seed training. ' + 'This will turn on the CUDNN deterministic setting, ' + 'which can slow down your training considerably! ' + 'You may see unexpected behavior when restarting ' + 'from checkpoints.') + + if args.gpu is not None: + warnings.warn('You have chosen a specific GPU. 
This will completely ' + 'disable data parallelism.') + + if args.dist_url == "env://" and args.world_size == -1: + args.world_size = int(os.environ["WORLD_SIZE"]) + + args.distributed = args.world_size > 1 or args.multiprocessing_distributed + + if torch.cuda.is_available(): + ngpus_per_node = torch.cuda.device_count() + else: + ngpus_per_node = 1 + if args.multiprocessing_distributed: + # Since we have ngpus_per_node processes per node, the total world_size + # needs to be adjusted accordingly + args.world_size = ngpus_per_node * args.world_size + # Use torch.multiprocessing.spawn to launch distributed processes: the + # main_worker process function + mp.spawn(main_worker, nprocs=ngpus_per_node, args=(ngpus_per_node, args)) + else: + # Simply call main_worker function + main_worker(args.gpu, ngpus_per_node, args) + + +def main_worker(gpu, ngpus_per_node, args): + global best_acc1 + args.gpu = gpu + + if args.gpu is not None: + print("Use GPU: {} for training".format(args.gpu)) + + if args.distributed: + if args.dist_url == "env://" and args.rank == -1: + args.rank = int(os.environ["RANK"]) + if args.multiprocessing_distributed: + # For multiprocessing distributed training, rank needs to be the + # global rank among all the processes + args.rank = args.rank * ngpus_per_node + gpu + dist.init_process_group(backend=args.dist_backend, init_method=args.dist_url, + world_size=args.world_size, rank=args.rank) + # create model + if args.pretrained: + print("=> using pre-trained model '{}'".format(args.arch)) + model = models.__dict__[args.arch](pretrained=True) + else: + print("=> creating model '{}'".format(args.arch)) + model = models.__dict__[args.arch]() + + if not torch.cuda.is_available() and not torch.backends.mps.is_available(): + print('using CPU, this will be slow') + elif args.distributed: + # For multiprocessing distributed, DistributedDataParallel constructor + # should always set the single device scope, otherwise, + # DistributedDataParallel will use all available devices. + if torch.cuda.is_available(): + if args.gpu is not None: + torch.cuda.set_device(args.gpu) + model.cuda(args.gpu) + # When using a single GPU per process and per + # DistributedDataParallel, we need to divide the batch size + # ourselves based on the total number of GPUs of the current node. 
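+                # e.g. a per-node batch size of 256 spread across 4 GPUs gives each DistributedDataParallel process a local batch size of 256 / 4 = 64, and the data-loader workers are divided across the processes as well (illustrative numbers).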
+ args.batch_size = int(args.batch_size / ngpus_per_node) + args.workers = int((args.workers + ngpus_per_node - 1) / ngpus_per_node) + model = torch.nn.parallel.DistributedDataParallel(model, device_ids=[args.gpu]) + else: + model.cuda() + # DistributedDataParallel will divide and allocate batch_size to all + # available GPUs if device_ids are not set + model = torch.nn.parallel.DistributedDataParallel(model) + elif args.gpu is not None and torch.cuda.is_available(): + torch.cuda.set_device(args.gpu) + model = model.cuda(args.gpu) + elif torch.backends.mps.is_available(): + device = torch.device("mps") + model = model.to(device) + else: + # DataParallel will divide and allocate batch_size to all available GPUs + if args.arch.startswith('alexnet') or args.arch.startswith('vgg'): + model.features = torch.nn.DataParallel(model.features) + model.cuda() + else: + model = torch.nn.DataParallel(model).cuda() + + if torch.cuda.is_available(): + if args.gpu: + device = torch.device('cuda:{}'.format(args.gpu)) + else: + device = torch.device("cuda") + elif torch.backends.mps.is_available(): + device = torch.device("mps") + else: + device = torch.device("cpu") + # define loss function (criterion), optimizer, and learning rate scheduler + criterion = nn.CrossEntropyLoss().to(device) + + optimizer = torch.optim.SGD(model.parameters(), args.lr, + momentum=args.momentum, + weight_decay=args.weight_decay) + + """Sets the learning rate to the initial LR decayed by 10 every 30 epochs""" + scheduler = StepLR(optimizer, step_size=30, gamma=0.1) + + # optionally resume from a checkpoint + if args.resume: + if os.path.isfile(args.resume): + print("=> loading checkpoint '{}'".format(args.resume)) + if args.gpu is None: + checkpoint = torch.load(args.resume) + elif torch.cuda.is_available(): + # Map model to be loaded to specified single gpu. 
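+                # e.g. with --gpu 1, map_location='cuda:1' remaps every tensor stored in the checkpoint onto GPU 1 at load time (illustrative device id).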
+ loc = 'cuda:{}'.format(args.gpu) + checkpoint = torch.load(args.resume, map_location=loc) + args.start_epoch = checkpoint['epoch'] + best_acc1 = checkpoint['best_acc1'] + if args.gpu is not None: + # best_acc1 may be from a checkpoint from a different GPU + best_acc1 = best_acc1.to(args.gpu) + model.load_state_dict(checkpoint['state_dict']) + optimizer.load_state_dict(checkpoint['optimizer']) + scheduler.load_state_dict(checkpoint['scheduler']) + print("=> loaded checkpoint '{}' (epoch {})" + .format(args.resume, checkpoint['epoch'])) + else: + print("=> no checkpoint found at '{}'".format(args.resume)) + + + # Data loading code + if args.dummy: + print("=> Dummy data is used!") + train_dataset = datasets.FakeData(1281167, (3, 224, 224), 1000, transforms.ToTensor()) + val_dataset = datasets.FakeData(50000, (3, 224, 224), 1000, transforms.ToTensor()) + else: + traindir = os.path.join(args.data, 'train') + valdir = os.path.join(args.data, 'val') + normalize = transforms.Normalize(mean=[0.485, 0.456, 0.406], + std=[0.229, 0.224, 0.225]) + + train_dataset = datasets.ImageFolder( + traindir, + transforms.Compose([ + transforms.RandomResizedCrop(224), + transforms.RandomHorizontalFlip(), + transforms.ToTensor(), + normalize, + ])) + + val_dataset = datasets.ImageFolder( + valdir, + transforms.Compose([ + transforms.Resize(256), + transforms.CenterCrop(224), + transforms.ToTensor(), + normalize, + ])) + + if args.distributed: + train_sampler = torch.utils.data.distributed.DistributedSampler(train_dataset) + val_sampler = torch.utils.data.distributed.DistributedSampler(val_dataset, shuffle=False, drop_last=True) + else: + train_sampler = None + val_sampler = None + + train_loader = torch.utils.data.DataLoader( + train_dataset, batch_size=args.batch_size, shuffle=(train_sampler is None), + num_workers=args.workers, pin_memory=True, sampler=train_sampler) + + val_loader = torch.utils.data.DataLoader( + val_dataset, batch_size=args.batch_size, shuffle=False, + num_workers=args.workers, pin_memory=True, sampler=val_sampler) + + if args.evaluate: + validate(val_loader, model, criterion, args) + return + + for epoch in range(args.start_epoch, args.epochs): + if args.distributed: + train_sampler.set_epoch(epoch) + + # train for one epoch + train(train_loader, model, criterion, optimizer, epoch, device, args) + + # evaluate on validation set + acc1 = validate(val_loader, model, criterion, args) + + scheduler.step() + + # remember best acc@1 and save checkpoint + is_best = acc1 > best_acc1 + best_acc1 = max(acc1, best_acc1) + + if not args.multiprocessing_distributed or (args.multiprocessing_distributed + and args.rank % ngpus_per_node == 0): + save_checkpoint({ + 'epoch': epoch + 1, + 'arch': args.arch, + 'state_dict': model.state_dict(), + 'best_acc1': best_acc1, + 'optimizer' : optimizer.state_dict(), + 'scheduler' : scheduler.state_dict() + }, is_best, dirname=args.model_dir) + + +def train(train_loader, model, criterion, optimizer, epoch, device, args): + batch_time = AverageMeter('Time', ':6.3f') + data_time = AverageMeter('Data', ':6.3f') + losses = AverageMeter('Loss', ':.4e') + reg_losses = AverageMeter('Reg Loss', ':.4e') + total_losses = AverageMeter('Total Loss', ':.4e') + top1 = AverageMeter('Acc@1', ':6.2f') + top5 = AverageMeter('Acc@5', ':6.2f') + progress = ProgressMeter( + len(train_loader), + [batch_time, data_time, losses, reg_losses, total_losses, top1, top5], + prefix="Epoch: [{}]".format(epoch)) + + # switch to train mode + model.train() + + activation = {} + def 
getActivation(name): + # the hook signature + def hook(model, input, output): + activation[name] = output.detach() + return hook + + activation_handles = {} + for name, module in model.named_modules(): + if isinstance(module, nn.ReLU): + activation_handles[name] = module.register_forward_hook(getActivation(name)) + + end = time.time() + for i, (images, target) in enumerate(train_loader): + # measure data loading time + data_time.update(time.time() - end) + + # move data to the same device as model + images = images.to(device, non_blocking=True) + target = target.to(device, non_blocking=True) + + # compute output + output = model(images) + loss = criterion(output, target) + + reg = 0.0 + for name, values in activation.items(): + reg += ( (torch.sum(torch.sqrt(torch.sum(values**2,0)))**2) + (torch.sum(torch.sqrt(torch.sum(values**2,1)))**2) + (torch.sum(torch.sqrt(torch.sum(values**2,2)))**2) )/torch.sum(values**2) + decay = 1e-6 + reg_loss = decay*reg + total_loss = loss+reg_loss + # measure accuracy and record loss + acc1, acc5 = accuracy(output, target, topk=(1, 5)) + losses.update(loss.item(), images.size(0)) + reg_losses.update(reg_loss.item(), images.size(0)) + total_losses.update(total_loss.item(), images.size(0)) + top1.update(acc1[0], images.size(0)) + top5.update(acc5[0], images.size(0)) + + # compute gradient and do SGD step + optimizer.zero_grad() + total_loss.backward() + optimizer.step() + + # measure elapsed time + batch_time.update(time.time() - end) + end = time.time() + + if i % args.print_freq == 0: + progress.display(i + 1) + + +def validate(val_loader, model, criterion, args): + + def run_validate(loader, base_progress=0): + with torch.no_grad(): + end = time.time() + for i, (images, target) in enumerate(loader): + i = base_progress + i + if args.gpu is not None and torch.cuda.is_available(): + images = images.cuda(args.gpu, non_blocking=True) + if torch.backends.mps.is_available(): + images = images.to('mps') + target = target.to('mps') + if torch.cuda.is_available(): + target = target.cuda(args.gpu, non_blocking=True) + + # compute output + output = model(images) + loss = criterion(output, target) + + # measure accuracy and record loss + acc1, acc5 = accuracy(output, target, topk=(1, 5)) + losses.update(loss.item(), images.size(0)) + top1.update(acc1[0], images.size(0)) + top5.update(acc5[0], images.size(0)) + + # measure elapsed time + batch_time.update(time.time() - end) + end = time.time() + + if i % args.print_freq == 0: + progress.display(i + 1) + + batch_time = AverageMeter('Time', ':6.3f', Summary.NONE) + losses = AverageMeter('Loss', ':.4e', Summary.NONE) + top1 = AverageMeter('Acc@1', ':6.2f', Summary.AVERAGE) + top5 = AverageMeter('Acc@5', ':6.2f', Summary.AVERAGE) + progress = ProgressMeter( + len(val_loader) + (args.distributed and (len(val_loader.sampler) * args.world_size < len(val_loader.dataset))), + [batch_time, losses, top1, top5], + prefix='Test: ') + + # switch to evaluate mode + model.eval() + + run_validate(val_loader) + if args.distributed: + top1.all_reduce() + top5.all_reduce() + + if args.distributed and (len(val_loader.sampler) * args.world_size < len(val_loader.dataset)): + aux_val_dataset = Subset(val_loader.dataset, + range(len(val_loader.sampler) * args.world_size, len(val_loader.dataset))) + aux_val_loader = torch.utils.data.DataLoader( + aux_val_dataset, batch_size=args.batch_size, shuffle=False, + num_workers=args.workers, pin_memory=True) + run_validate(aux_val_loader, len(val_loader)) + + progress.display_summary() + + return 
top1.avg + + +def save_checkpoint(state, is_best, dirname=None): + if dirname == None: + filename = 'checkpoint.pth.tar' + torch.save(state, filename) + if is_best: + shutil.copyfile(filename, 'model_best.pth.tar') + else: + if not os.path.isdir(dirname): + os.mkdir(dirname) + filename = dirname + 'checkpoint.pth.tar' + torch.save(state, dirname) + if is_best: + shutil.copyfile(dirname, 'model_best.pth.tar') + +class Summary(Enum): + NONE = 0 + AVERAGE = 1 + SUM = 2 + COUNT = 3 + +class AverageMeter(object): + """Computes and stores the average and current value""" + def __init__(self, name, fmt=':f', summary_type=Summary.AVERAGE): + self.name = name + self.fmt = fmt + self.summary_type = summary_type + self.reset() + + def reset(self): + self.val = 0 + self.avg = 0 + self.sum = 0 + self.count = 0 + + def update(self, val, n=1): + self.val = val + self.sum += val * n + self.count += n + self.avg = self.sum / self.count + + def all_reduce(self): + if torch.cuda.is_available(): + device = torch.device("cuda") + elif torch.backends.mps.is_available(): + device = torch.device("mps") + else: + device = torch.device("cpu") + total = torch.tensor([self.sum, self.count], dtype=torch.float32, device=device) + dist.all_reduce(total, dist.ReduceOp.SUM, async_op=False) + self.sum, self.count = total.tolist() + self.avg = self.sum / self.count + + def __str__(self): + fmtstr = '{name} {val' + self.fmt + '} ({avg' + self.fmt + '})' + return fmtstr.format(**self.__dict__) + + def summary(self): + fmtstr = '' + if self.summary_type is Summary.NONE: + fmtstr = '' + elif self.summary_type is Summary.AVERAGE: + fmtstr = '{name} {avg:.3f}' + elif self.summary_type is Summary.SUM: + fmtstr = '{name} {sum:.3f}' + elif self.summary_type is Summary.COUNT: + fmtstr = '{name} {count:.3f}' + else: + raise ValueError('invalid summary type %r' % self.summary_type) + + return fmtstr.format(**self.__dict__) + + +class ProgressMeter(object): + def __init__(self, num_batches, meters, prefix=""): + self.batch_fmtstr = self._get_batch_fmtstr(num_batches) + self.meters = meters + self.prefix = prefix + + def display(self, batch): + entries = [self.prefix + self.batch_fmtstr.format(batch)] + entries += [str(meter) for meter in self.meters] + print('\t'.join(entries)) + + def display_summary(self): + entries = [" *"] + entries += [meter.summary() for meter in self.meters] + print(' '.join(entries)) + + def _get_batch_fmtstr(self, num_batches): + num_digits = len(str(num_batches // 1)) + fmt = '{:' + str(num_digits) + 'd}' + return '[' + fmt + '/' + fmt.format(num_batches) + ']' + +def accuracy(output, target, topk=(1,)): + """Computes the accuracy over the k top predictions for the specified values of k""" + with torch.no_grad(): + maxk = max(topk) + batch_size = target.size(0) + + _, pred = output.topk(maxk, 1, True, True) + pred = pred.t() + correct = pred.eq(target.view(1, -1).expand_as(pred)) + + res = [] + for k in topk: + correct_k = correct[:k].reshape(-1).float().sum(0, keepdim=True) + res.append(correct_k.mul_(100.0 / batch_size)) + return res + + +if __name__ == '__main__': + main() \ No newline at end of file diff --git a/onnx_sparsity_attribute.py b/onnx_sparsity_attribute.py index d2d0555..88f9b6c 100644 --- a/onnx_sparsity_attribute.py +++ b/onnx_sparsity_attribute.py @@ -87,13 +87,22 @@ def _layer_name_translation(model_name, onnx_name): sparsity_data = toml_data[layer_name]["avg"] set_nodeattr(node, "input sparsity", sparsity_data) +def annotate_histograms(model_name, onnx_model, data_path): + for node in 
onnx_model.graph.node: + if node.op_type == 'Conv': + layer_name = layer_name_translation(model_name, node.name) + np_path = os.path.join(data_path, model_name + "_" + layer_name + "_histograms.npy") + channel_wise_sprasity = np.load(np_path) + windows_data = channel_wise_sprasity[:, -1]/channel_wise_sprasity.sum(axis = 1) + set_nodeattr(node, "window sparsity", windows_data) + + if __name__ == "__main__": parser = argparse.ArgumentParser(description='Export ONNX model with sparsity attribute') parser.add_argument('-a', '--arch', metavar='ARCH', default='resnet18',choices=model_names) parser.add_argument('--state_dict', metavar='DIR', default="/home/zy18/Downloads/Pruning Results-20230815T143526Z-001/Pruning Results/weight_sparse_50/resnet18_classification_imagenet_2023-08-12/software/transform/transformed_ckpt/state_dict.pt") parser.add_argument('--data_path', metavar='DIR', default="/home/zy18/Downloads/Pruning Results-20230815T143526Z-001/Pruning Results/weight_sparse_50/resnet18_classification_imagenet_2023-08-12/software/transform/prune/activation_report.toml") parser.add_argument('--export_path', metavar='DIR', default="models") - args = parser.parse_args() torch_model = load_model(args.arch) @@ -108,4 +117,5 @@ def _layer_name_translation(model_name, onnx_name): annotate_sparsity_from_toml(args.arch, onnx_model, args.data_path) else: annotate_sparsity_from_numpy(args.arch, onnx_model, args.data_path) - onnx.save(onnx_model, sparse_onnx_path) \ No newline at end of file + # annotate_histograms(args.arch, onnx_model, args.data_path) + onnx.save(onnx_model, sparse_onnx_path) diff --git a/onnx_sparsity_attribute_full.py b/onnx_sparsity_attribute_full.py new file mode 100644 index 0000000..c68825a --- /dev/null +++ b/onnx_sparsity_attribute_full.py @@ -0,0 +1,131 @@ +import os +import numpy as np +import torch +import torch.nn as nn +import onnx +import argparse +import csv +import json +from utils import load_model, model_names, replace_modules + +def torch_onnx_exporter(model, model_name, random_input, output_path): + if model_name == "mobilenet_v2": + replace_dict = {} + for name, module in model.named_modules(): + # todo: relu6 creates clip node + if isinstance(module, nn.ReLU6): + replace_dict[module] = nn.ReLU() + replace_modules(model, replace_dict) + torch.onnx.export(model, random_input, output_path, verbose=False, keep_initializers_as_inputs=True) + +# https://github.com/Xilinx/finn-base/blob/7c2603a95e90e4de2575020e575c24eab6a15889/src/finn/custom_op/base.py +def set_nodeattr(node, attr_name, attr_value): + new_attr = onnx.helper.make_attribute(attr_name, attr_value) + node.attribute.append(new_attr) + +def annotate_quantisation(model, weight_width, data_width, acc_width, block_floating_point): + for node in model.graph.node: + if node.op_type in ["Conv", "Gemm"]: + set_nodeattr(node, "weight_width", weight_width) + set_nodeattr(node, "data_width", data_width) + set_nodeattr(node, "acc_width", acc_width) + set_nodeattr(node, "block_floating_point", block_floating_point) + else: + set_nodeattr(node, "data_width", data_width) + +def layer_name_translation(model_name, onnx_name): + onnx_name = onnx_name.split("/") + if model_name in ["resnet18", "resnet50"]: + if len(onnx_name) == 3: # first conv + torch_name = onnx_name[1]+ ".1" + else: + assert len(onnx_name) in [5,6] + torch_name = onnx_name[2] + "." 
+onnx_name[-2]+ ".1" + elif model_name == "mobilenet_v2": + if len(onnx_name) == 5: # first and last conv + torch_name = onnx_name[-2] + ".1" + else: + assert len(onnx_name) in [6,7] + torch_name = onnx_name[2] + "." + onnx_name[-2] + ".1" + elif model_name in ["alexnet", "vgg11", "vgg16"]: + torch_name = onnx_name[-2] + ".1" + elif model_name == "repvgg-a0": + torch_name = ".".join(onnx_name[1:-1]) + ".1" + return torch_name + +def annotate_sparsity(model_name, onnx_model, data_path): + for node in onnx_model.graph.node: + if node.op_type == 'Conv': + layer_name = layer_name_translation(model_name, node.name) + np_path = os.path.join(data_path, model_name + "_" + layer_name + "_histograms.npy") + histograms_data = np.load(np_path) + histograms = histograms_data/histograms_data.sum(axis = 1)[:, np.newaxis] + set_nodeattr(node, "input sparsity", histograms.flatten()) + +def replace_relu_nodes(model_name, onnx_model, relu_thresholds): + + for index, node in enumerate(onnx_model.graph.node): + + # find a greater node + if node.op_type != "Relu": + continue + + + # remove greater and where node + onnx_model.graph.node.remove(node) + torch_name = layer_name_translation(model_name, node.name) + + # create a Gemm node with the matmul weights and add bias + new_node_name = "/".join(node.name.split("/")[:-1] + ["ThresholdedReLU"]) + new_node = onnx.helper.make_node( + "ThresholdedRelu", + name= new_node_name, + inputs=[*node.input], + outputs=node.output, + alpha = relu_thresholds[torch_name] + ) + + + # add new one + onnx_model.graph.node.insert(index, new_node) + + # connect node and ThresholdedReLU node together + next_node = next(filter(lambda x: node.output[0] in x.input, onnx_model.graph.node)) + next_node.input[0] = new_node.output[0] + + +parser = argparse.ArgumentParser(description='Export ONNX model with sparsity attribute') +parser.add_argument('-a', '--arch', metavar='ARCH', default='vgg16', + choices=model_names, + help='model architecture: ' + + ' | '.join(model_names)) +parser.add_argument('--data', metavar='DIR', default="runlog/resnet18/uniform_relu_0.085", + help='path to onnx model') +parser.add_argument('--dense_onnx_path', metavar='DIR', default="models/vgg16.onnx", + help='path to onnx model') +parser.add_argument('--sparse_onnx_path', metavar='DIR', default="models/vgg16_sparse.onnx", + help='path to onnx model') +parser.add_argument('--temp_onnx_path', metavar='DIR', default="models/vgg16_sparse.onnx", + help='path to onnx model') +parser.add_argument("-r", "--relu_thresholds_path", metavar='DIR', default=None, + help='path to relu thresholds json model') + +args = parser.parse_args() + +torch_model = load_model(args.arch) +torch_onnx_exporter(torch_model, args.arch, torch.randn(1, 3, 224, 224), args.dense_onnx_path) +onnx_model = onnx.load(args.dense_onnx_path) + +if args.relu_thresholds_path is not None: + + f = open(args.relu_thresholds_path) + relu_thresholds = json.load(f) + + onnx_model = onnx.load(args.dense_onnx_path) + replace_relu_nodes(args.arch, onnx_model, relu_thresholds) + + +annotate_quantisation(onnx_model, 16, 16, 32, False) +annotate_sparsity(args.arch, onnx_model, args.data) +# annotate_histograms(args.arch, onnx_model, args.data) +onnx.save(onnx_model, args.sparse_onnx_path) \ No newline at end of file diff --git a/quan_utils.py b/quan_utils.py index ef5c6c4..234b0ea 100644 --- a/quan_utils.py +++ b/quan_utils.py @@ -2,6 +2,7 @@ import torch.nn as nn import copy from enum import Enum +import os from utils import * @@ -41,18 +42,18 @@ def 
linear_dequantize(x_quan, scaling_factor, zero_point): return x - +#Asymmetric Quantisation: x_q = round((x_f - min_xf) * (2^n - 1) / (max_xf - min_xf)) def asymmetric_linear_no_clipping(wordlength, x_min, x_max): - scaling_factor = (2**wordlength - 1) / torch.clamp((x_max - x_min), min=1e-8) - zero_point = scaling_factor * x_min + scaling_factor = (2**wordlength - 1) / torch.clamp((x_max - x_min), min=1e-8) # Calculates scaling factor as shown in equation for function above + zero_point = scaling_factor * x_min #Corresponds to most negative value represented by wlen-bit if isinstance(zero_point, torch.Tensor): zero_point = zero_point.round() else: zero_point = float(round(zero_point)) - zero_point += 2**(wordlength - 1) + zero_point += 2**(wordlength - 1) #Corresponds to zero by adding 2^(wlen - 1) return scaling_factor, zero_point @@ -62,6 +63,7 @@ def saturate(w_quan, wordlength): return w_quan +#Takes a model as input and can call a function with wordlength and quantisation method to quantise base don quantisation method class WeightQuantizer(): def __init__(self, model): bFirst = True @@ -70,10 +72,10 @@ def __init__(self, model): if isinstance(module, nn.Conv2d) or isinstance(module, nn.Linear): if bFirst: bFirst = False - self.w_min = torch.min(module.weight) - self.w_max = torch.max(module.weight) + self.w_min = torch.min(module.weight) #Single value + self.w_max = torch.max(module.weight) #QUESTION: Why don't we use torch.minimum givng us tensors for the first module as well? else: - self.w_min = torch.minimum(self.w_min, torch.min(module.weight)) + self.w_min = torch.minimum(self.w_min, torch.min(module.weight)) self.w_max = torch.maximum(self.w_max, torch.max(module.weight)) print("weight min:", self.w_min) @@ -123,7 +125,7 @@ def get_scale_shift(self): def forward(self, x): - if self.gather_data: + if self.gather_data: #Collects data about the x_min and x_max to quantise the input features if self.quantization_method == QuanMode.CHANNEL_BFP: channel_num = x.size()[1] x_block = x.data.transpose(0, 1) @@ -160,6 +162,7 @@ def forward(self, x): return x_quan +#Function that performs quantisation on the feature maps post-activation def activation_quantization(model, wordlength, quantization_method, calibrate_loader): # add activation quantisation module replace_dict ={} @@ -167,7 +170,7 @@ def activation_quantization(model, wordlength, quantization_method, calibrate_lo if type(module) in QUAN_TARGET_MODULES: module_quan = nn.Sequential(*[QuanAct(wordlength, quantization_method), copy.deepcopy(module), QuanAct(wordlength, quantization_method)]) replace_dict[module] = module_quan - + replace_modules(model, replace_dict) model.eval() @@ -219,4 +222,16 @@ def model_quantisation(model, calibrate_loader, quantization_method=QuanMode.NET quantized_weight = weight_quantizer.AsymmetricQuantHandler(module.weight, weight_width, quantization_method) module.weight.data.copy_(quantized_weight) - activation_quantization(model, data_width, quantization_method, calibrate_loader) \ No newline at end of file + activation_quantization(model, data_width, quantization_method, calibrate_loader) + + +def output_quan_accuracy_to_csv(model_name, relu_threshold, top1, top5): + file_path = os.path.join(os.getcwd(), "runlog", str(model_name) + "_accuracy_var_quantisation.csv") + + if not (os.path.isfile(file_path)): + with open(file_path, "w") as f: + f.write("Wordlength,Top1 Accuracy,Top5 Accuracy\n") + + with open(file_path, "a") as f: + row = ",".join([str(relu_threshold), str(top1), str(top5)]) + "\n" + 
f.write(row)
diff --git a/relu_main.py b/relu_main.py
new file mode 100644
index 0000000..eb99934
--- /dev/null
+++ b/relu_main.py
@@ -0,0 +1,308 @@
+# Imports
+import argparse
+import datetime
+import json
+import toml
+import numpy as np
+import wandb
+from torch import nn
+import os
+
+import fpgaconvnet.tools.graphs as graphs
+from fpgaconvnet.tools.layer_enum import LAYER_TYPE
+from fpgaconvnet.parser.Parser import Parser
+
+from utils import *
+
+# Get new throughput and latency after annotating the measured sparsity
+def get_new_throughput(model_name, net, sparsity_path):
+
+    for partition_index in range(len(net.partitions)):
+        # print("Partition:", partition_index)
+        partition = net.partitions[partition_index]
+        for layer in graphs.ordered_node_list(partition.graph):
+
+            # Check if the layer is a convolution layer that can benefit from sparsity
+            if (partition.graph.nodes[layer]['type'] == LAYER_TYPE.Convolution):
+
+                if len(partition.graph.nodes[layer]['hw'].sparsity):
+                    layer_name = layer_name_translation(model_name, layer)
+                    np_path = os.path.join(sparsity_path, model_name + "_" + layer_name + "_histograms.npy")
+                    histograms_data = np.load(np_path)
+                    histograms = histograms_data/histograms_data.sum(axis = 1)[:, np.newaxis]
+                    partition.graph.nodes[layer]['hw'].sparsity = histograms
+
+    net.update_partitions()
+
+    return net.get_throughput(), net.get_latency()
+
+# Translate an ONNX node name into the corresponding PyTorch layer name
+def layer_name_translation(model_name, onnx_name):
+    onnx_name = onnx_name.split("_")
+    if model_name in ["resnet18", "resnet50"]:
+        if len(onnx_name) == 3: # first conv
+            torch_name = onnx_name[1]+ ".1"
+        else:
+            assert len(onnx_name) in [5,6]
+            torch_name = onnx_name[2] + "." +onnx_name[-2]+ ".1"
+    elif model_name == "mobilenet_v2":
+        if len(onnx_name) == 5: # first and last conv
+            torch_name = onnx_name[-2] + ".1"
+        else:
+            assert len(onnx_name) in [6,7]
+            torch_name = onnx_name[2] + "." 
+ onnx_name[-2] + ".1" + elif model_name in ["alexnet", "vgg11", "vgg16"]: + torch_name = onnx_name[-2] + ".1" + elif model_name == "repvgg-a0": + torch_name = ".".join(onnx_name[1:-1]) + ".1" + return torch_name + +THRESHOLD_INC = 0.005 +#Main +if __name__ == "__main__": + + #Command line parser + parser = argparse.ArgumentParser() + + parser.add_argument('-a', '--arch', metavar='ARCH', default='resnet18', + help='model architecture: ' + + ' | '.join(model_names)) + parser.add_argument('--relu-policy', choices=['slowest_node', 'uniform'], default="uniform", type=str, + help='') + parser.add_argument('--fixed-hardware', action="store_true", + help='') + parser.add_argument('--normalise-hardware', action="store_true", + help='') + parser.add_argument('--use-old-sparsity', action="store_true", + help='') + parser.add_argument('--runs', default=100, type=int, + help='how many runs') + + + parser.add_argument("--sparsity_path", default="runlog/resnet18/", type=str, + help='Path to sparsity log dir for old sparsity') + + parser.add_argument("--accuracy_path", default="runlog/resnet18/uniform_accuracy.csv", type=str, + help='Path to accuracy .csv file for old accuracy') + + parser.add_argument("--model_path", default="onnx_models/resnet18/resnet18_uniform_relu_0.0.onnx", type=str, + help='Path to sparse .onnx model') + + parser.add_argument("--platform_path", default="../fpgaconvnet-optimiser/examples/platforms/u250.toml" , type=str, + help='Path to platform specs (.toml)') + + parser.add_argument("--optimised_config_path", default="../fpgaconvnet-optimiser/fpgaconvnet/optimiser/resnet18/resnet18_uniform_relu_0.0/config.json", type=str, + help='Path to optimised configuration (.json)') + + parser.add_argument('--gpu', default=None, type=str, + help='GPU id to use.') + + parser.add_argument('--enable-wandb', action="store_true", help='enable wandb') + + args = parser.parse_args() + + #Initialise wandb + if args.enable_wandb: + wandb.login() + start_time = datetime.datetime.now() + name = args.relu_policy + "_" + str(start_time).replace(" ","_").replace(".","_").replace(":","_").replace("-", "_") + if (args.fixed_hardware): + name = args.relu_policy + "_fixed_hardware_" + str(start_time).replace(" ","_").replace(".","_").replace(":","_").replace("-", "_") + + wandb.init( + # Set the project where this run will be logged + project= "-".join([args.arch, "relu"]), + name = name, + # Track hyperparameters and run metadata + config={ + "platform": "u250" + }) + + #Initialise relu_thresholds + print("=> using pre-trained model '{}'".format(args.arch)) + model = load_model(args.arch) + relu_thresholds = {} + for name, module in model.named_modules(): + if isinstance(module, nn.ReLU) or isinstance(module, nn.ReLU6): + relu_thresholds[name + ".1"] = 0.0 + + + #For run in runs + threshold = 0.0 + acc_file = "runlog/" + args.arch + "/" + args.relu_policy + "_accuracy.csv" + if args.relu_policy == "slowest_node" and not args.fixed_hardware: + acc_file = "runlog/" + args.arch + "/" + args.relu_policy + "_changing_accuracy.csv" + for run in range(args.runs): + + #If old sparsity, note metrics + if args.use_old_sparsity: + assert args.relu_policy == "uniform" + # Note accuracy + with open(args.accuracy_path, 'r') as f: + lines = f.read().splitlines() + line = lines[run + 1] + line_vals = line.split(",") + top1 = float(line_vals[-3]) + top5 = float(line_vals[-2]) + sparsity = float(line_vals[-1]) + + sparsity_dir = args.sparsity_path + "/uniform_relu_" + str(threshold) + + #Else collect sparsity + else: + if 
args.relu_policy == "uniform": + log_dir = args.arch + "/uniform_relu_" + str(threshold) + threshold_path = "relu_thresholds/" + args.arch + "/" + args.arch + "_uniform_relu_" + str(threshold) + ".json" + elif args.relu_policy == "slowest_node": + if (args.fixed_hardware): + log_dir = args.arch + "/slowest_node_" + str(run) + threshold_path = "relu_thresholds/" + args.arch + "/" + args.arch + "_slowest_node_" + str(run) + ".json" + else: + log_dir = args.arch + "/slowest_node_changing_" + str(run) + threshold_path = "relu_thresholds/" + args.arch + "/" + args.arch + "_slowest_node_changing_" + str(run) + ".json" + + #Create log_dir + if not os.path.isdir("runlog/" + log_dir): + os.makedirs("runlog/" + log_dir) + log_file="runlog/" + log_dir + "/log.txt" + + #Store relu_thresholds + with open(threshold_path, 'w') as fp: + json.dump(relu_thresholds, fp) + + os.system("python imagenet_main.py -a " + args.arch + " --gpu " + args.gpu + " --output_path runlog/" + log_dir + " --accuracy_output " + acc_file + " --relu_threshold " + threshold_path) + + sparsity_dir = "runlog/" + log_dir + + # Note accuracy + with open(acc_file, 'r') as f: + lines = f.read().splitlines() + last_line = lines[-1] + top1 = float(last_line.split(",")[-3]) + top5 = float(last_line.split(",")[-2]) + sparsity = float(last_line.split(",")[-1]) + + + + #If fixed hardware, parse network and get throughput and latency from fixed hardwareusing collected sparsity + if (args.fixed_hardware): + config_parser = Parser(backend="chisel", quant_mode="auto", custom_onnx = True) # use the HLS backend with 16-bit fixed-point quantisation + net = config_parser.onnx_to_fpgaconvnet(args.model_path, args.platform_path) # parse the onnx model + + net = config_parser.prototxt_to_fpgaconvnet(net, args.optimised_config_path) + + net.update_partitions() + + throughput, latency = get_new_throughput(args.arch, net, sparsity_dir) + + log_info = relu_thresholds | {"top1_accuracy": top1, "top5_accuracy": top5, "throughput": throughput, "latency": latency, "network_sparsity": sparsity} + print("Logging:", log_info) + + #Else annotate sparsity, run optimiser, note resources, throughput, and latency + else: + #Annotate sparsity + if args.relu_policy == "uniform": + dense_onnx_path = "onnx_models/" + args.arch + "/" + args.arch + ".onnx" + sparse_onnx_path = "onnx_models/" + args.arch + "/" + args.arch + "_uniform_relu_" + str(threshold) + ".onnx" + elif args.relu_policy == "slowest_node": + if (args.fixed_hardware): + dense_onnx_path = "onnx_models/" + args.arch + "/" + args.arch + ".onnx" + sparse_onnx_path = "onnx_models/" + args.arch + "/" + args.arch + "_slowest_node_" + str(run) + ".onnx" + else: + dense_onnx_path = "onnx_models/" + args.arch + "/" + args.arch + ".onnx" + sparse_onnx_path = "onnx_models/" + args.arch + "/" + args.arch + "_slowest_node_changing_" + str(run) + ".onnx" + + os.system("python onnx_sparsity_attribute_full.py -a " + args.arch + " --data " + sparsity_dir + " --dense_onnx_path " + dense_onnx_path + " --sparse_onnx_path " + sparse_onnx_path + " -r " + threshold_path) + + + # Run optimiser + if args.relu_policy == "uniform": + output_path = "../fpgaconvnet-optimiser/fpgaconvnet/optimiser/" + args.arch + "/" + args.arch + "_uniform_relu_" + str(threshold) + elif args.relu_policy == "slowest_node": + output_path = "../fpgaconvnet-optimiser/fpgaconvnet/optimiser/" + args.arch + "/" + args.arch + "_slowest_node_" + str(run) + + os.system("python -u ../fpgaconvnet-optimiser/fpgaconvnet/optimiser/cli.py --rerun-optim -n "+ args.arch 
+ " -m " + sparse_onnx_path + " -o " + output_path + " -p " + args.platform_path + " -b 256 --objective throughput --optimiser greedy_partition --optimiser_config_path ../fpgaconvnet-optimiser/examples/greedy_partition_throughput_residual.toml") + + # Note throughput + f = open(output_path + "/report.json") + report = json.load(f) + throughput = report["network"]["performance"]["throughput"] + latency = report["network"]["performance"]["latency"] + resources = report["network"]["max_resource_usage"] + f.close() + + + # # Create resource toml file + # f = open(args.platform_path, 'r') + # new_toml = toml.load(f) + # for key, value in resources.items(): + # if key == "DSP": + # new_toml["resources"][key] = round(value/0.9) + # f.close() + + # # Write resource toml file + # if not os.path.isdir("../fpgaconvnet-optimiser/examples/platforms/" + args.arch + "_cifar10_uniform_relu_norm/"): + # os.mkdir("../fpgaconvnet-optimiser/examples/platforms/" + args.arch + "_cifar10_uniform_relu_norm/") + # platform_path = "../fpgaconvnet-optimiser/examples/platforms/" + args.arch + "_cifar10_uniform_relu_norm/u250_" + str(relu_threshold) + ".toml" + # f = open(platform_path, 'w') + # toml.dump(new_toml, f) + # f.close() + + + #If normalise, run dense and sparse normalised + if (args.normalise_hardware): + pass + + else: + log_info = relu_thresholds | resources | {"top1_accuracy": top1, "top5_accuracy": top5, "throughput": throughput, "latency": latency, "network_sparsity": sparsity} + + + #Log into wandb + if (args.enable_wandb): + wandb.log(log_info) + + + #Update based on relu-policy + threshold = round(threshold + THRESHOLD_INC, 4) + + if args.relu_policy == "uniform": + for name, module in model.named_modules(): + if isinstance(module, nn.ReLU): + relu_thresholds[name + ".1"] = round(threshold, 4) + elif args.relu_policy == "slowest_node": + if not (args.fixed_hardware): + config_ptmuarser = Parser(backend="chisel", quant_mode="auto", custom_onnx = True) # use the HLS backend with 16-bit fixed-point quantisation + net = config_parser.onnx_to_fpgaconvnet(sparse_onnx_path, args.platform_path) # parse the onnx model + + net = config_parser.prototxt_to_fpgaconvnet(net, output_path + "/config.json") + + net.update_partitions() + + # Update ReLU thresholds for slowest node + replaced_layers = set() + previous_relu = None + #Change slowest node in each partition + for partition_index in range(len(net.partitions)): + replace_layer = None + max_latency = 0 + partition = net.partitions[partition_index] + for layer in graphs.ordered_node_list(partition.graph): + #Keep track of preceding relu layer + if isinstance(partition.graph.nodes[layer]['type'], list): + if LAYER_TYPE.ReLU in partition.graph.nodes[layer]['type']: + previous_relu = layer + elif (partition.graph.nodes[layer]['type'] == LAYER_TYPE.ReLU): + previous_relu = layer + + #Check if layer is a Convolution layer tha can benefit from sparsit + if (partition.graph.nodes[layer]['type'] == LAYER_TYPE.Convolution): + layer_latency = partition.graph.nodes[layer]['hw'].latency() + if previous_relu != None: + previous_layer = layer_name_translation(args.arch, previous_relu) + if layer_latency > max_latency and len(partition.graph.nodes[layer]['hw'].sparsity): + max_latency = layer_latency + replace_layer = previous_layer + + if replace_layer != None and replace_layer not in replaced_layers: + relu_thresholds[replace_layer] += THRESHOLD_INC + replaced_layers.add(replace_layer) \ No newline at end of file diff --git a/relu_run.py b/relu_run.py new file mode 
100644 index 0000000..1f352e2 --- /dev/null +++ b/relu_run.py @@ -0,0 +1,93 @@ +import datetime +import os +import subprocess +import argparse + + + + +parser = argparse.ArgumentParser(description='Low rank approximation experiment') +parser.add_argument('--gpu', default=None, type=int, + help='GPU id to use.') +parser.add_argument("--model_path", default=None, type=str, + help='Path to sparse .onnx model') + +parser.add_argument("--platform_path", default=None, type=str, + help='Path to platform specs (.toml)') + +"../../examples/platforms/zc706.toml" +parser.add_argument("--optimised_config_path", default=None, type=str, + help='Path to optimised configuration (.json)') + +parser.add_argument("--accuracy_output", default=None, type=str, + help='Path to csv file to write accuracy to') + + +args = parser.parse_args() + +#python relu_run.py --gpu 1 --model_path ../fpgaconvnet-optimiser/fpgaconvnet/optimiser/onnx_models/resnet18_sparse.onnx --platform_path ../fpgaconvnet-optimiser/examples/platforms/u250.toml --optimised_config_path ../fpgaconvnet-optimiser/fpgaconvnet/optimiser/outputs/sparse/resnet18_sparse_hetero/config.json + +''' +sweep_range = [1, 2, 4, 8, 16, 32, 64, 128, 256, 512, 1024, 2048, 4096, 8192, 16384, 32768, 65536]#[131072, 262144, 524288] +for window_size in sweep_range: + test_name = "sparsity_run_ma_window_size" + str(window_size) + + start_time = datetime.datetime.now() + log_dir= test_name + "_" + str(start_time).replace(" ","_").replace(".","_").replace(":","_").replace("-", "_") + + os.makedirs("runlog/" + log_dir) + log_file="runlog/" + log_dir + "/log.txt" + + regsys_cmd="python3 -u imagenet_main.py --output_path " + "runlog/" + log_dir + " --ma_window_size " + str(window_size) + " --gpu " + str(args.gpu) + + with open(log_file, "w") as log_fp: + log_fp.write(regsys_cmd + '\n') + + os.system(regsys_cmd + " 2>&1 | tee -a " + log_file) +''' +''' +for model_name in ["resnet18"]: + test_name = "{}_sparsity_run_50k".format(model_name) + + start_time = datetime.datetime.now() + log_dir= test_name + "_" + str(start_time).replace(" ","_").replace(".","_").replace(":","_").replace("-", "_") + + os.makedirs("runlog/" + log_dir) + log_file="runlog/" + log_dir + "/log.txt" + + regsys_cmd="python3 -u imagenet_main.py --output_path " + "runlog/" + log_dir + " --gpu " + str(args.gpu) + " -a " + model_name + f" --data /data/imagenet -b 4" + + with open(log_file, "w") as log_fp: + log_fp.write(regsys_cmd + '\n') + + os.system(regsys_cmd + " 2>&1 | tee -a " + log_file) +''' + +def relu_run(args): + + sweep_range = [0.1, 0.15, 0.2] + for model_name in ["resnet18"]: + for relu_threshold in sweep_range: + test_name = model_name + "_sparsity_run_50K_relu_" + str(relu_threshold) + + start_time = datetime.datetime.now() + log_dir= test_name + "_" + str(start_time).replace(" ","_").replace(".","_").replace(":","_").replace("-", "_") + + os.makedirs("runlog/" + log_dir) + log_file="runlog/" + log_dir + "/log.txt" + + regsys_cmd="python imagenet_main.py --calibration-size 50000 --output_path " + "runlog/" + log_dir + " --relu_threshold " + str(relu_threshold) + " --gpu " + str(args.gpu) + \ + " --optimised_config_path " + args.optimised_config_path + " --platform_path " + args.platform_path + " --model_path " + args.model_path + " --accuracy_output " + args.accuracy_output + + with open(log_file, "w") as log_fp: + log_fp.write(regsys_cmd + '\n') + os.system(regsys_cmd + " 2>&1 | tee -a " + log_file) + + +if __name__ == "__main__": + args = parser.parse_args() + relu_run(args) + # 
models_run(args) + + + diff --git a/relu_utils.py b/relu_utils.py new file mode 100644 index 0000000..a355773 --- /dev/null +++ b/relu_utils.py @@ -0,0 +1,74 @@ +from torch import nn +import utils +from utils import * +import fpgaconvnet.tools.graphs as graphs +from fpgaconvnet.tools.layer_enum import LAYER_TYPE +import os +import torch + +class VariableReLUWrapper(nn.Module): + def __init__(self, relu_threshold, relu6=False): + super(VariableReLUWrapper, self).__init__() + + self.threshold = relu_threshold + self.relu6 = relu6 + + def forward(self, x): + if self.relu6: + x = torch.clip(x, max = 6) + return torch.where(x > self.threshold, x, 0.0) + else: + return torch.where(x > self.threshold, x, 0.0) + +def replace_layer_with_variable_relu(model, layer_name, threshold=0): + + replace_dict = {} + for name, module in model.named_modules(): + if (isinstance(module, nn.ReLU) or isinstance(module, nn.ReLU6)) and name == layer_name:#or isinstance(module, nn.Linear): + relu6 = isinstance(module, nn.ReLU6) + new_module = VariableReLUWrapper(threshold, relu6=relu6) + replace_dict[module] = new_module + + utils.replace_modules(model, replace_dict) + +def replace_with_variable_relu(model, threshold=0): + + replace_dict = {} + relu_thresholds = {} + + if isinstance(threshold, dict): + for name, module in model.named_modules(): + if isinstance(module, nn.ReLU) or isinstance(module, nn.ReLU6): + new_module = VariableReLUWrapper(threshold[name]) + replace_dict[module] = new_module + + else: + for name, module in model.named_modules(): + if isinstance(module, nn.ReLU):#or isinstance(module, nn.Linear): + new_module = VariableReLUWrapper(threshold) + replace_dict[module] = new_module + relu_thresholds[name] = threshold + + + for name, module in model.named_modules(): + if name in relu_thresholds: + new_module = VariableReLUWrapper(threshold) + replace_dict[module] = new_module + elif isinstance(module, VariableReLUWrapper): + new_module = VariableReLUWrapper(threshold) + replace_dict[module] = new_module + + utils.replace_modules(model, replace_dict) + return relu_thresholds + # for name, module in model.named_modules(): + # print(type(module)) + +def output_accuracy_to_csv(arch, relu_threshold, top1, top5, sparsity, output_path): + if not os.path.isfile(output_path): + with open(output_path, mode='w') as f: + row = "Network,ReLU_Threshold,Top1_Accuracy,Top5_Accuracy,Network_Sparsity\n" + f.write(row) + with open(output_path, mode='a') as f: + row = ','.join([arch, str(relu_threshold), top1, top5, str(sparsity)]) + "\n" + print("Writing to csv") + f.write(row) \ No newline at end of file diff --git a/run.py b/run.py index 628f05e..be5bc5b 100644 --- a/run.py +++ b/run.py @@ -26,7 +26,7 @@ os.system(regsys_cmd + " 2>&1 | tee -a " + log_file) ''' - +''' for model_name in ["resnet18"]: test_name = "{}_sparsity_run_50k".format(model_name) @@ -42,3 +42,49 @@ log_fp.write(regsys_cmd + '\n') os.system(regsys_cmd + " 2>&1 | tee -a " + log_file) +''' + +def relu_run(args): + sweep_range = [0.001, 0.002, 0.005, 0.01, 0.02, 0.05, 0.1, 0.2, 0.5, 1, 2, 5, 10] + for model_name in ["resnet18"]: + for relu_threshold in sweep_range: + test_name = model_name + "_sparsity_run_50K_relu_" + str(relu_threshold) + + start_time = datetime.datetime.now() + log_dir= test_name + "_" + str(start_time).replace(" ","_").replace(".","_").replace(":","_").replace("-", "_") + + os.makedirs("runlog/" + log_dir) + log_file="runlog/" + log_dir + "/log.txt" + + regsys_cmd="python imagenet_main.py --calibration-size 50000 --output_path " + 
"runlog/" + log_dir + " --relu_threshold " + str(relu_threshold) + " --gpu " + str(args.gpu) + + with open(log_file, "w") as log_fp: + log_fp.write(regsys_cmd + '\n') + os.system(regsys_cmd + " 2>&1 | tee -a " + log_file) + +def models_run(args): + sweep_range = [0] #, 0.001, 0.002, 0.005, 0.01, 0.02, 0.05, 0.1, 0.2, 0.5, 1, 2, 5, 10] + for model_name in ["mobilenet_v2"]: + for relu_threshold in sweep_range: + test_name = model_name + "_sparsity_run_50K_relu_" + str(relu_threshold) + + start_time = datetime.datetime.now() + log_dir= test_name + "_" + str(start_time).replace(" ","_").replace(".","_").replace(":","_").replace("-", "_") + + os.makedirs("runlog/" + log_dir) + log_file="runlog/" + log_dir + "/log.txt" + + regsys_cmd="python imagenet_main.py --calibration-size 50000 --output_path " + "runlog/" + log_dir + " --relu_threshold " + str(relu_threshold) + " --gpu " + str(args.gpu) + " --arch " + str(model_name) + if (model_name != "resnet18" and model_name != "mobilenet_v2"): + regsys_cmd += " --batch-size 16" + with open(log_file, "w") as log_fp: + log_fp.write(regsys_cmd + '\n') + os.system(regsys_cmd + " 2>&1 | tee -a " + log_file) + +if __name__ == "__main__": + args = parser.parse_args() + # relu_run(args) + models_run(args) + + + diff --git a/run_annotations.py b/run_annotations.py new file mode 100644 index 0000000..d2b0552 --- /dev/null +++ b/run_annotations.py @@ -0,0 +1,10 @@ +import os +import glob + +if __name__ == "__main__": + for model in ["resnet18", "resnet50", "vgg11", "vgg16", "alexnet", "mobilenet_v2"]: + data = "runlog/" + model + "_sparsity_run_50K_relu_0_*" + dir = glob.glob(data)[0] + dense = "../fpgaconvnet-optimiser/fpgaconvnet/optimiser/onnx_models/" + model + ".onnx" + sparse = "../fpgaconvnet-optimiser/fpgaconvnet/optimiser/onnx_models/" + model + "_full.onnx" + os.system("python onnx_sparsity_attribute_full.py --arch " + model + " --data " + dir + " --dense_onnx_path " + dense + " --sparse_onnx_path " + sparse) \ No newline at end of file diff --git a/sparsity_utils.py b/sparsity_utils.py index 696492e..dfe57ec 100644 --- a/sparsity_utils.py +++ b/sparsity_utils.py @@ -19,24 +19,25 @@ def output_sparsity_to_csv(model_name, model, output_dir): if isinstance(module, VanillaConvolutionWrapper): if bFirst: bFirst = False - with open(file_path, mode='a') as f: + with open(file_path, mode='w') as f: csv_writer = csv.writer(f) csv_header = ["Layer Name", "Layer Type"] - csv_header += ["KERNEL*KERNEL", "Avg Zeros", "Avg Sparsity"] + csv_header += ["KERNEL*KERNEL", "Avg Zeros", "Avg Sparsity", "Avg Window Sparsity"] csv_writer.writerow(csv_header) with open(file_path, mode='a') as f: csv_writer = csv.writer(f) new_row = [name, type(module)] - new_row += [module.kk, module.statistics.mean.mean().item(), module.statistics.mean.mean().item()/module.kk] + new_row += [module.kk, module.statistics.mean.mean().item(), module.statistics.mean.mean().item()/module.kk, module.statistics.histograms.sum(axis = 0)[-1]/module.statistics.histograms.sum()] csv_writer.writerow(new_row) np.save(os.path.join(output_dir,"{}_{}_mean.npy".format(model_name, name)), module.statistics.mean.cpu().numpy()) np.save(os.path.join(output_dir,"{}_{}_var.npy".format(model_name, name)), module.statistics.var.cpu().numpy()) np.save(os.path.join(output_dir,"{}_{}_correlation.npy".format(model_name, name)), module.statistics.cor.cpu().numpy()) - #np.save(os.path.join(output_dir,"{}_{}_sparsity.npy".format(model_name, name)), module.statistics.sparsity) + # 
np.save(os.path.join(output_dir,"{}_{}_sparsity.npy".format(model_name, name)), module.statistics.sparsity) + np.save(os.path.join(output_dir,"{}_{}_histograms.npy".format(model_name, name)), module.statistics.histograms.cpu().numpy()) # np.savetxt(os.path.join(output_dir,"{}_{}_mean.csv".format(model_name, name)), module.statistics.mean.cpu().numpy(), delimiter=",") # np.savetxt(os.path.join(output_dir,"{}_{}_var.csv".format(model_name, name)), module.statistics.var.cpu().numpy(), delimiter=",") # np.savetxt(os.path.join(output_dir,"{}_{}_correlation.csv".format(model_name, name)), module.statistics.cor.cpu().numpy(), delimiter=",") @@ -50,8 +51,9 @@ def output_sparsity_to_csv(model_name, model, output_dir): np.savetxt(os.path.join(output_dir,"{}_{}_ma_var.csv".format(model_name, name)), module.ma_statistics.var.cpu().numpy(), delimiter=",") np.savetxt(os.path.join(output_dir,"{}_{}_ma_correaltion.csv".format(model_name, name)), module.ma_statistics.cor.cpu().numpy(), delimiter=",") + class StreamDataAnalyser(): - def __init__(self, stream_num): + def __init__(self, in_channels): + self.in_channels = in_channels + stream_num = in_channels # local alias so the existing stream_num-based initialisation below keeps working self.count = 0 self.stream_num = stream_num self.mean = torch.zeros(stream_num) @@ -71,9 +73,9 @@ def update(self, newValues): self.var = self.var * self.count self.cov = self.cov * (self.count - 1) - #self.sparsity = np.vstack((self.sparsity, newValues.clone().cpu().numpy())) + # self.sparsity = np.vstack((self.sparsity, newValues.clone().cpu().numpy())) - assert newValues.size()[1] == self.stream_num + assert newValues.size()[1] == self.in_channels self.count += newValues.size()[0] # newvalues - oldMean @@ -90,11 +92,26 @@ def update(self, newValues): self.cov = self.cov / (self.count - 1) self.cor = self.cov / torch.sqrt(torch.matmul(self.var.unsqueeze(1), self.var.unsqueeze(0))) * (self.count-1) / self.count +def total_network_sparsity(model): + ops = [] + sparsity = [] + for name, module in model.named_modules(): + if isinstance(module, VanillaConvolutionWrapper): + sparsity.append(module.statistics.mean.mean().item()/module.kk) + ops.append(module.ops) + + ops = np.array(ops) + sparsity = np.array(sparsity) + ops = ops/ops.sum() + return (sparsity * ops).sum() + + def moving_average(a, n): ret = torch.cumsum(a, dim=0) ret[n:] = ret[n:] - ret[:-n] return ret[n - 1:] / n + class VanillaConvolutionWrapper(nn.Module): def __init__(self, conv_module): super(VanillaConvolutionWrapper, self).__init__() @@ -107,15 +124,21 @@ def forward(self, x): # compared with MASE implementation # differences are: 1) torch.nn.Unfold 2) random sample patches - #with open(f"input.dat", 'w') as f: - # f.write("\n".join([ str(i) for i in x.clone().cpu().numpy().reshape(-1).tolist() ])) + #Write data to a file + # with open(f"input.dat", 'w') as f: + # f.write("\n".join([ str(i) for i in x.clone().cpu().numpy().reshape(-1).tolist() ])) + # https://discuss.pytorch.org/t/make-custom-conv2d-layer-efficient-wrt-speed-and-memory/70175 assert self.conv_module.padding_mode == 'zeros' + #Zero-pad x x_padded = F.pad(input=x, pad=self.conv_module._reversed_padding_repeated_twice, mode='constant', value=0) dh, dw = self.conv_module.stride + + #Number of filters, number of channels, kernel height, kernel width out_channels, in_channels, kh, kw = self.conv_module.weight.shape + + groups = self.conv_module.groups in_channels *= groups batch_size = x.shape[0] @@ -123,11 +146,19 @@ def forward(self, x): patches = x_padded.unfold(2, kh, dh).unfold(3, kw, dw) h_windows = patches.shape[2] w_windows = patches.shape[3] - patches = 
patches.expand(out_channels//groups, *patches.shape) - patches = patches.permute(1, 3, 4, 0, 2, 5, 6) - num_of_elements = torch.numel(patches) + patches = patches.expand(out_channels//groups, *patches.shape) # dims = (out_channels//groups, batch_size, in_channels, h_windows, w_windows, kh, kw) + patches = patches.permute(1, 3, 4, 0, 2, 5, 6) # dims = ( batch_size, h_windows, w_windows, out_channels//groups, in_channels, kh, kw) + self.ops = h_windows * w_windows * out_channels * in_channels * kh * kw + + # num_of_elements = torch.numel(patches) + if (self.statistics.histograms == None): + #NOTE: Toggle the commenting for the following 2 lines for per window + # self.statistics.histograms = torch.zeros(in_channels//groups, h_windows, w_windows, self.kk + 1) + self.statistics.histograms = torch.zeros(in_channels//groups, self.kk + 1) + + if torch.cuda.is_available(): + self.statistics.histograms = self.statistics.histograms.cuda() - num_of_nonzeros = 0 y = torch.zeros((batch_size, h_windows, w_windows, out_channels)) if torch.cuda.is_available(): y = y.cuda() @@ -146,15 +177,38 @@ def forward(self, x): wend = (wi+1) * (w_windows // self.roll_factor) patch = patches[:,hstart:hend,wstart:wend].reshape((batch_size, h_windows//self.roll_factor, w_windows//self.roll_factor, out_channels//groups, groups, in_channels//groups, kh, kw)) - patch = patch.permute(0, 1, 2, 4, 3, 5, 6, 7) + patch = patch.permute(0, 1, 2, 4, 3, 5, 6, 7) #(batch_size, h_windows//self.roll_factor, w_windows//self.roll_factor, groups, out_channels//groups, in_channels//groups, kh, kw) weight = self.conv_module.weight.reshape((groups, out_channels//groups, in_channels//groups, kh, kw)) patch = patch * weight + #----------------Zero Histogram Calculation and Update----------------------- + #Patches Dims: (batch_size, h_windows//self.roll_factor, w_windows//self.roll_factor, groups, out_channels//groups, in_channels//groups, kh, kw) + #Histograms Dims: (in_channels, h_windows, w_windows, self.kk) + tmp = patch.reshape((*patch.shape[:-2], self.kk)) + + num_of_zeros = self.kk - torch.count_nonzero(tmp, dim = -1) # (batch_size, h_windows//self.roll_factor, w_windows//self.roll_factor, groups, out_channels//groups, in_channels//groups) + + + zeros_hists = F.one_hot(num_of_zeros, num_classes = self.kk + 1) # (batch_size, h_windows//self.roll_factor, w_windows//self.roll_factor, groups, out_channels//groups, in_channels//groups, bins) + + #All out_channels have the input feature map and therefore same sparsity, can squeeze those dimensions + zeros_hists = zeros_hists[:, :, :, :, 0].squeeze(4) # (batch_size, h_windows//self.roll_factor, w_windows//self.roll_factor, groups, in_channels//groups, bins) + + #NOTE: Toggle the commenting for the following 5 lines for per window + zeros_hists = zeros_hists.reshape(batch_size, h_windows//self.roll_factor, w_windows//self.roll_factor, in_channels, self.kk + 1) + zeros_hists = zeros_hists.sum(dim = (0, 1, 2)) # (in_channels, bins) + self.statistics.histograms += zeros_hists + # zeros_hists = zeros_hists.sum(dim = 0) # (h_windows//self.roll_factor, w_windows//self.roll_factor, in_channels//groups, bins) + # zeros_hists = zeros_hists.permute(2, 0, 1, 3) # (in_channels//groups, h_windows//self.roll_factor, w_windows//self.roll_factor, bins) + # self.statistics.histograms[:,hstart:hend,wstart:wend, :] += zeros_hists + + #------------------------Average sparsity calculate and update---------------------------------- tmp = patch.reshape((-1, self.kk)) num_of_zeros = self.kk - torch.count_nonzero(tmp, 
dim=1) num_of_zeros = num_of_zeros.reshape((-1, self.conv_module.in_channels)) self.statistics.update(num_of_zeros) + #-----------------------MA Statistics----------------------- if self.ma_statistics is not None: if self.ma_data_buffer is None: self.ma_data_buffer = num_of_zeros @@ -168,21 +222,15 @@ def forward(self, x): else: self.ma_data_buffer = self.ma_data_buffer[-(self.ma_window_size-1):] - patch = patch.sum(-1).sum(-1).sum(-1) - patch = patch.reshape(batch_size, h_windows//self.roll_factor, w_windows//self.roll_factor, out_channels) + # patch = patch.sum(-1).sum(-1).sum(-1) + # patch = patch.reshape(batch_size, h_windows//self.roll_factor, w_windows//self.roll_factor, out_channels) + + # y[:,hstart:hend,wstart:wend] = patch - y[:,hstart:hend,wstart:wend] = patch - if self.conv_module.bias is not None: - bias = self.conv_module.bias.expand(batch_size, h_windows, w_windows, out_channels) - y = y + bias - y = y.permute(0, 3, 1, 2) + return self.conv_module(x) - if self.run_reference: - ref_output = self.conv_module(x) - assert torch.allclose(ref_output, y, atol=1e-5) - return y def replace_with_vanilla_convolution(model, window_size=None): replace_dict = {} @@ -204,3 +252,6 @@ def replace_with_vanilla_convolution(model, window_size=None): conv_layer_index += 1 replace_modules(model, replace_dict) + + + diff --git a/visualise/visualise_acc_vs_throughput.py b/visualise/visualise_acc_vs_throughput.py new file mode 100644 index 0000000..6f63762 --- /dev/null +++ b/visualise/visualise_acc_vs_throughput.py @@ -0,0 +1,34 @@ +import os +import argparse +import pandas as pd +import matplotlib.pyplot as plt + + +def main(): + parser = argparse.ArgumentParser() + parser.add_argument("--filepath", default = None, type = str, + help = "Path to .csv file with accuracy and throughput") + parser.add_argument("--output", default = None, type = str, + help = "Path to .png file to save to") + + args = parser.parse_args() + + data = pd.read_csv(args.filepath) + + fig, ax1 = plt.subplots() + fig.set_figheight(9) + fig.set_figwidth(16) + ax2 = ax1.twinx() + + ax1.plot(data["ReLU_Threshold"], data["Top5_Accuracy"], label="Accuracy") + ax1.axhline(data["Top5_Accuracy"].max() - 1, color='k', linestyle='dashed', linewidth=1, label="Accuracy Loss = 1%") + ax2.plot(data["ReLU_Threshold"], data["Throughput"]/(data["Throughput"].min()), label="Throughput", color = 'r') + + + fig.suptitle('Overview of relu thresholding for resnet18') + ax1.set(xlabel = "Relu Threshold", ylabel = "Accuracy") + ax2.set(xlabel = "Relu Threshold", ylabel = "Normalised Throughput") + ax1.legend(loc = "best") + ax2.legend(loc = "best") + + fig.savefig(args.output) \ No newline at end of file diff --git a/visualise/visualise_all_networks.py b/visualise/visualise_all_networks.py new file mode 100644 index 0000000..a16c64a --- /dev/null +++ b/visualise/visualise_all_networks.py @@ -0,0 +1,12 @@ +import os +from visualise_network_sparsity import visualise_network +import argparse + +if __name__ == "__main__": + parser = argparse.ArgumentParser() + parser.add_argument("--data", metavar='DIR', required = True, help = "directory with sparsity stats of all networks to be visualised") + + args = parser.parse_args() + for sparsity_data in os.listdir(args.data): + print("Visualing network:", sparsity_data) + visualise_network(os.path.join(args.data, sparsity_data)) diff --git a/visualise/visualise_layer_sparsity.py b/visualise/visualise_layer_sparsity.py new file mode 100644 index 0000000..d608ef8 --- /dev/null +++ 
b/visualise/visualise_layer_sparsity.py @@ -0,0 +1,70 @@ +import numpy as np +import os +import matplotlib.pyplot as plt +import seaborn as sns + +def plot_avg_channel_sparsity_distribution(data, ax): + ax.hist(data) + ax.axvline(data.mean(), color='k', linestyle='dashed', linewidth=1, label="mean = "+str(data.mean())) + ax.set_title("Histogram of avg sparsity of channels") + ax.set(xlabel = "Sparsity", ylabel = "Number of Channels") + ax.legend(loc = "upper right") + +def plot_channel_sparsity_avg_histograms(data, ax): + avg_hist = data.mean(axis = 0) + ax.bar(list(range(len(avg_hist))), avg_hist) + + hist_sum = 0 + for i in range(len(avg_hist)): + hist_sum += avg_hist[i]*i + hist_sum /= sum(avg_hist) + + ax.axvline(hist_sum, color='k', linestyle='dashed', linewidth=1, label="mean "+str(hist_sum)) + ax.set_title("Histogram of avg number of zeros per window") + ax.set(xlabel = "Number of zeros in window", ylabel = "Number of windows") + ax.legend(loc = "upper right") + +def plot_channel_sparsity_correlation_heatmap(data, ax): + sns.heatmap(data[:32, :32], annot = True) + ax.set_title("Heatmap of correlation between channel (reduced to 32 channels)") + ax.set(xlabel = "Channels", ylabel = "Channels") + ax.legend(loc = "upper right") + +def plot_channel_sparsity_correlation_histogram(data, ax): + flattened_corr_data = data.flatten() + flattened_corr_data = flattened_corr_data[flattened_corr_data != 1]/2 + + ax.hist(flattened_corr_data, bins=20) + ax.axvline(flattened_corr_data.mean(), color='k', linestyle='dashed', linewidth=1, label="mean = "+str(flattened_corr_data.mean())) + ax.set_title("Histogram of correlation of channels") + ax.set(xlabel = "Correlation", ylabel = "Number of Channels Pairs") + ax.legend(loc = "upper right") + +def visualise_layer(corr_data, hist_data, output_path): + corr_data[np.abs(corr_data) == np.Inf] = 1 + corr_data[np.abs(corr_data) == np.NaN] = 0 + corr_data[np.abs(corr_data) == np.nan] = 0 + corr_data[np.abs(corr_data) == np.NAN] = 0 + fig, ax = plt.subplots(3) + fig.set_figheight(20) + fig.set_figwidth(20) + fig.suptitle('Sparsity statistics for ' + output_path.split("/")[-1][:-4], fontsize = 16) + try: + plot_channel_sparsity_avg_histograms(hist_data, ax[0]) + plot_channel_sparsity_correlation_histogram(corr_data, ax[1]) + plot_channel_sparsity_correlation_heatmap(corr_data, ax[2]) + print("Saving in output path", output_path) + fig.savefig(output_path) + except: + pass + +if __name__ == "__main__": + layer_dir = "/home/ka720/sparseCNN/runlog/resnet18_sparsity_run_50K_relu_0_2023_04_05_13_22_35_310476" + corr_data = np.load(os.path.join(layer_dir, "resnet18_layer2.0.conv2.1_correlation.npy")) + hist_data = np.load(os.path.join(layer_dir, "resnet18_layer2.0.conv2.1_histograms.npy")) + # print(corr_data) + # corr_data[np.abs(corr_data) == np.Inf] = 1 + + # print(corr_data) + # output = "Generic.png" + # visualise_layer(corr_data, hist_data, output) diff --git a/visualise/visualise_network_sparsity.py b/visualise/visualise_network_sparsity.py new file mode 100644 index 0000000..ddc7413 --- /dev/null +++ b/visualise/visualise_network_sparsity.py @@ -0,0 +1,87 @@ +import argparse +import glob +import os +import matplotlib.pyplot as plt +import numpy as np +from visualise_layer_sparsity import visualise_layer + + + +def visualise_network(sparsity_data = None): + + + data_spec = "_".join(sparsity_data.split("/")[2].split("_")[:-7]) + output_dir = os.path.join("figures", data_spec) + mean_list = glob.glob(os.path.join(sparsity_data, "*_mean.npy")) + corr_list = 
glob.glob(os.path.join(sparsity_data, "*_correlation.npy")) + hist_list = glob.glob(os.path.join(sparsity_data, "*_histograms.npy")) + data_list = zip(corr_list, hist_list) + + if not os.path.isdir(output_dir): + os.makedirs(output_dir) + + output_path = os.path.join(output_dir, "layer_level_overview.png") + + try: + network_overview(mean_list, corr_list, hist_list, data_spec, output_path) + except: + pass + for corr_file, hist_file in data_list: + output_path = os.path.join(output_dir, corr_file.split("/")[-1][:-16] + ".png") + corr_data = np.load(corr_file) + hist_data = np.load(hist_file) + visualise_layer(corr_data, hist_data, output_path) + + +def network_overview(mean_list, corr_list, hist_list, data_spec, output_path): + def plot_mean_per_layer(mean_data, layer_names, ax): + ax.bar(layer_names, mean_data) + ax.set_title("Average mean sparsity per layer") + ax.set(xlabel = "Layer name", ylabel = "Average Sparsity") + def plot_corr_per_layer(corr_data, layer_names, ax): + ax.bar(layer_names, corr_data) + ax.set_title("Average Sparsity correlation per layer") + ax.set(xlabel = "Layer name", ylabel = "Average Correlation") + def plot_non_zeros_per_layer(data, layer_names, ax): + data[data == np.inf] = 0 + data[data == np.nan] = 0 + ax.bar(layer_names, data) + ax.set_title("Average percentage of full zero windows per layer") + ax.set(xlabel = "Layer name", ylabel = "Zero windows proportion") + + mean_layers = np.empty(len(mean_list)) + corr_layers = np.empty(len(corr_list)) + percentage_non_zeros_per_layer = np.empty(len(hist_list)) + layer_names = [] + for index, (mean_file, corr_file, hist_file) in enumerate(zip(mean_list, corr_list, hist_list)): + + mean_data = np.load(mean_file) + corr_data = np.load(corr_file) + hist_data = np.load(hist_file) + corr_data[np.abs(corr_data) == np.Inf] = 1 + corr_data[np.abs(corr_data) == np.NaN] = 0 + mean_layers[index] = mean_data.mean()/(len(hist_data[0]) - 1) + corr_layers[index] = corr_data.mean() + hist_sum = hist_data.sum(axis = 0) + percentage_non_zeros_per_layer[index] = hist_sum[-1]/hist_sum.sum() + print(corr_file) + layer_names.append(corr_file.split("/")[-1].split("_")[1:-1][-1]) + + fig, ax = plt.subplots(3) + fig.set_figheight(20) + fig.set_figwidth(20) + fig.suptitle('Overview of sparsity statistics for ' + data_spec) + plot_mean_per_layer(mean_layers, layer_names, ax[0]) + plot_corr_per_layer(corr_layers, layer_names, ax[1]) + plot_non_zeros_per_layer(percentage_non_zeros_per_layer, layer_names, ax[2]) + print("Saving in output path", output_path) + fig.savefig(output_path) + + +if __name__ == "__main__": + parser = argparse.ArgumentParser() + parser.add_argument("--sparsity_data", metavar='DIR', required = True, help = "sparsity") + + args = parser.parse_args() + + visualise_network(args.sparsity_data) \ No newline at end of file diff --git a/visualise/visualise_notebook.ipynb b/visualise/visualise_notebook.ipynb new file mode 100644 index 0000000..16e47a1 --- /dev/null +++ b/visualise/visualise_notebook.ipynb @@ -0,0 +1,186 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": 2, + "metadata": {}, + "outputs": [ + { + "ename": "ModuleNotFoundError", + "evalue": "No module named 'seaborn'", + "output_type": "error", + "traceback": [ + "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", + "\u001b[0;31mModuleNotFoundError\u001b[0m Traceback (most recent call last)", + "\u001b[1;32m/home/ka720/sparseCNN/visualise/visualise_notebook.ipynb Cell 1\u001b[0m in 
\u001b[0;36m\u001b[0;34m()\u001b[0m\n\u001b[1;32m 4\u001b[0m \u001b[39mimport\u001b[39;00m \u001b[39mmatplotlib\u001b[39;00m\u001b[39m.\u001b[39;00m\u001b[39mpyplot\u001b[39;00m \u001b[39mas\u001b[39;00m \u001b[39mplt\u001b[39;00m\n\u001b[1;32m 5\u001b[0m \u001b[39mimport\u001b[39;00m \u001b[39mnumpy\u001b[39;00m \u001b[39mas\u001b[39;00m \u001b[39mnp\u001b[39;00m\n\u001b[0;32m----> 6\u001b[0m \u001b[39mfrom\u001b[39;00m \u001b[39mvisualise_layer_sparsity\u001b[39;00m \u001b[39mimport\u001b[39;00m visualise_layer\n\u001b[1;32m 10\u001b[0m \u001b[39mdef\u001b[39;00m \u001b[39mvisualise_network\u001b[39m(sparsity_data \u001b[39m=\u001b[39m \u001b[39mNone\u001b[39;00m):\n\u001b[1;32m 12\u001b[0m data_spec \u001b[39m=\u001b[39m \u001b[39m\"\u001b[39m\u001b[39m_\u001b[39m\u001b[39m\"\u001b[39m\u001b[39m.\u001b[39mjoin(sparsity_data\u001b[39m.\u001b[39msplit(\u001b[39m\"\u001b[39m\u001b[39m/\u001b[39m\u001b[39m\"\u001b[39m)[\u001b[39m-\u001b[39m\u001b[39m1\u001b[39m]\u001b[39m.\u001b[39msplit(\u001b[39m\"\u001b[39m\u001b[39m_\u001b[39m\u001b[39m\"\u001b[39m)[:\u001b[39m-\u001b[39m\u001b[39m7\u001b[39m])\n", + "File \u001b[0;32m~/sparseCNN/visualise/visualise_layer_sparsity.py:4\u001b[0m, in \u001b[0;36m\u001b[0;34m\u001b[0m\n\u001b[1;32m 2\u001b[0m \u001b[39mimport\u001b[39;00m \u001b[39mos\u001b[39;00m\n\u001b[1;32m 3\u001b[0m \u001b[39mimport\u001b[39;00m \u001b[39mmatplotlib\u001b[39;00m\u001b[39m.\u001b[39;00m\u001b[39mpyplot\u001b[39;00m \u001b[39mas\u001b[39;00m \u001b[39mplt\u001b[39;00m\n\u001b[0;32m----> 4\u001b[0m \u001b[39mimport\u001b[39;00m \u001b[39mseaborn\u001b[39;00m \u001b[39mas\u001b[39;00m \u001b[39msns\u001b[39;00m\n\u001b[1;32m 6\u001b[0m \u001b[39mdef\u001b[39;00m \u001b[39mplot_avg_channel_sparsity_distribution\u001b[39m(data, ax):\n\u001b[1;32m 7\u001b[0m ax\u001b[39m.\u001b[39mhist(data)\n", + "\u001b[0;31mModuleNotFoundError\u001b[0m: No module named 'seaborn'" + ] + } + ], + "source": [ + "import argparse\n", + "import glob\n", + "import os\n", + "import matplotlib.pyplot as plt\n", + "import numpy as np\n", + "from visualise_layer_sparsity import visualise_layer\n", + "\n", + "\n", + "\n", + "def visualise_network(sparsity_data = None):\n", + "\n", + " data_spec = \"_\".join(sparsity_data.split(\"/\")[-1].split(\"_\")[:-7])\n", + " output_dir = os.path.join(\"figures\", data_spec)\n", + " mean_list = glob.glob(os.path.join(sparsity_data, \"*_mean.npy\"))\n", + " corr_list = glob.glob(os.path.join(sparsity_data, \"*_correlation.npy\"))\n", + " hist_list = glob.glob(os.path.join(sparsity_data, \"*_histograms.npy\"))\n", + " data_list = zip(corr_list, hist_list)\n", + "\n", + " if not os.path.isdir(output_dir):\n", + " os.makedirs(output_dir)\n", + "\n", + " output_path = os.path.join(output_dir, \"layer_level_overview.png\")\n", + "\n", + " network_overview(mean_list, corr_list, hist_list, data_spec, output_path)\n", + "\n", + " for corr_file, hist_file in data_list:\n", + " output_path = os.path.join(output_dir, corr_file.split(\"/\")[-1][:-16] + \".png\")\n", + " corr_data = np.load(corr_file)\n", + " hist_data = np.load(hist_file)\n", + " visualise_layer(corr_data, hist_data, output_path)\n", + "\n", + "\n", + "def network_overview(mean_list, corr_list, hist_list, data_spec, output_path):\n", + " def plot_mean_per_layer(mean_data, layer_names, ax):\n", + " ax.bar(layer_names, mean_data)\n", + " ax.set_title(\"Average mean sparsity per layer\")\n", + " ax.set(xlabel = \"Layer name\", ylabel = \"Average Sparsity\")\n", + " def plot_corr_per_layer(corr_data, 
layer_names, ax):\n", + " ax.bar(layer_names, corr_data)\n", + " ax.set_title(\"Average apRSITY correlation per layer\")\n", + " ax.set(xlabel = \"Layer name\", ylabel = \"Average Correlation\")\n", + " def plot_non_zeros_per_layer(data, layer_names, ax):\n", + " data[data == np.inf] = 0\n", + " data[data == np.nan] = 0\n", + " ax.bar(layer_names, data)\n", + " ax.set_title(\"Average percentage of full zero windows per layer\")\n", + " ax.set(xlabel = \"Layer name\", ylabel = \"Zero windows (%)\")\n", + "\n", + " mean_layers = np.empty(len(mean_list))\n", + " corr_layers = np.empty(len(corr_list))\n", + " percentage_non_zeros_per_layer = np.empty(len(hist_list))\n", + " layer_names = []\n", + " for index, (mean_file, corr_file, hist_file) in enumerate(zip(mean_list, corr_list, hist_list)):\n", + "\n", + " mean_data = np.load(mean_file)\n", + " corr_data = np.load(corr_file)\n", + " hist_data = np.load(hist_file)\n", + " corr_data[np.abs(corr_data) == np.Inf] = 1\n", + " corr_data[np.abs(corr_data) == np.NaN] = 0\n", + " mean_layers[index] = mean_data.mean()/(len(hist_data[0]) - 1)\n", + " corr_layers[index] = corr_data.mean()\n", + " hist_sum = hist_data.sum(axis = 0)\n", + " percentage_non_zeros_per_layer[index] = hist_sum[-1]/hist_sum.sum()\n", + " layer_names.append(corr_file.split(\"/\")[-1].split(\"_\")[1:-1][0])\n", + "\n", + " fig, ax = plt.subplots(3)\n", + " fig.set_figheight(20)\n", + " fig.set_figwidth(20)\n", + " fig.suptitle('Overview of sparsity statistics for ' + data_spec)\n", + " plot_mean_per_layer(mean_layers, layer_names, ax[0])\n", + " plot_corr_per_layer(corr_layers, layer_names, ax[1])\n", + " plot_non_zeros_per_layer(percentage_non_zeros_per_layer, layer_names, ax[2])\n", + " print(\"Saving in output path\", output_path)\n", + " fig.savefig(output_path)\n", + "\n", + "\n", + "\n", + "if __name__ == \"__main__\":\n", + " sparsity_data = \"../runlog/resnet18_sparsity_run_50K_relu_0.01_2023_04_05_13_22_12_846615\"\n", + " data_spec = \"_\".join(sparsity_data.split(\"/\")[-1].split(\"_\")[:-7])\n", + " output_dir = os.path.join(\"figures\", data_spec)\n", + " mean_list = glob.glob(os.path.join(sparsity_data, \"*_mean.npy\"))\n", + " corr_list = glob.glob(os.path.join(sparsity_data, \"*_correlation.npy\"))\n", + " hist_list = glob.glob(os.path.join(sparsity_data, \"*_histograms.npy\"))\n", + " data_list = zip(corr_list, hist_list)\n", + "\n", + " if not os.path.isdir(output_dir):\n", + " os.makedirs(output_dir)\n", + "\n", + " output_path = os.path.join(output_dir, \"layer_level_overview.png\")\n", + "\n", + " network_overview(mean_list, corr_list, hist_list, data_spec, output_path)" + ] + }, + { + "cell_type": "code", + "execution_count": 8, + "metadata": {}, + "outputs": [ + { + "data": { + "image/png": 
"iVBORw0KGgoAAAANSUhEUgAABVcAAANOCAYAAAAVrjzgAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjcuMSwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/bCgiHAAAACXBIWXMAAA9hAAAPYQGoP6dpAAEAAElEQVR4nOzdd3gU1dvG8Xs3vYdAAiFAQu9Neg0IAqIoRUEsgICigP4UQcECKGAHUREFRBCUqoL1RRTpRWoUpUPoJaGF9Lbz/rGwsiSBEMqQ8P1c117Mzpk982zJkNx75ozFMAxDAAAAAAAAAICrYjW7AAAAAAAAAADIjwhXAQAAAAAAACAPCFcBAAAAAAAAIA8IVwEAAAAAAAAgDwhXAQAAAAAAACAPCFcBAAAAAAAAIA8IVwEAAAAAAAAgDwhXAQAAAAAAACAPCFcBAAAAAAAAIA8IVwEAwDUZOXKkLBaL2WVclZkzZ6pSpUpyc3NTYGDgDd1XixYt1KJFixu6jwssFosGDhx4U/aVG9e7nmXLlslisWjZsmVX3PbS133//v2yWCyaPn36davnauzevVtt2rRRQECALBaLFi5caEodAAAAuL4IVwEAMNm///6rRx99VGFhYfLw8FDx4sX1yCOP6N9//zW7tAJpx44d6tWrl8qWLaspU6Zo8uTJZpd0VdasWaORI0fq7NmzZpeCq9CzZ09t3bpVY8aM0cyZM1W3bl2zS7olvfnmm9kGzwkJCRoxYoTatWunoKCgKwbl8+bNU8OGDRUYGKjChQsrMjJSP//8840rHAAA3LYIVwEAMNF3332nO+64Q0uWLNHjjz+uiRMnqk+fPlq6dKnuuOMOLViwwOwSr+jVV19VcnKy2WXk2rJly2Sz2fThhx+qV69e6tq1q9klXZU1a9bo9ddfJ1y9SuHh4UpOTtZjjz120/ednJystWvXqk+fPho4cKAeffRRlShR4qbXkR/kFK6ePHlSb7zxhrZv366aNWteto+PP/5Y3bp1U5EiRfT222/rtddeU1xcnO6991599913N6hyAABwu3I1uwAAAG5Xe/fu1WOPPaYyZcpoxYoVCg4OdrT973//U7NmzfTYY4/p77//VpkyZW5aXYmJifLx8cn19q6urnJ1zT+/UsTExEhSnqcDuNrXJ78oqM/rAovFIk9PT1P2HRsbKynvn7nsXO37lZKSInd3d1mt+XNsRWhoqI4dO6ZixYpp48aNqlevXo7bfvzxx6pXr55+/PFHx5QlvXv3VlhYmL788kt17tz5ZpUNAABuA/nztysAAAqA9957T0lJSZo8ebJTsCpJRYoU0aRJk5SYmKh3331XkvTNN9/IYrFo+fLlWfqaNGmSLBaL/vnnH8e6HTt26IEHHlBQUJA8PT1Vt25d/fDDD06Pmz59uqPP/v37KyQkRCVKlLiqfeU05+pXX32lOnXqyMvLS0FBQXrooYd06NAhR/tHH30kFxcXpxGYY8eOlcVi0aBBgxzrMjMz5efnp5deeulyL6ckaeLEiapatapjeoUBAwY49R8REaERI0ZIkoKDg2WxWDRy5Mgc++vVq5d8fX21d+9etW/fXn5+fnrkkUckSTabTePHj1fVqlXl6empokWLql+/fjpz5sxla7zwmu/fv99pfW7mEx05cqSGDBkiSSpdurQsFku2fS1cuFDVqlWTh4eHqlatqkWLFmXpx2KxaNu2bXr44YdVqFAhNW3a1NF+pfdOss8h2qVLFxUrVkyenp4qUaKEHnroIcXFxWWp+0r1SNKWLVt09913y9/fX76+vmrVqpXWrVuX42txscmTJ6ts2bLy8vJS/fr1tXLlyizbZDfn6oX398iRI+rYsaN8fX0VHByswYMHKzMz0+nxp06d0mOPPSZ/f38FBgaqZ8+e+uuvv654evrIkSMVHh4uSRoyZIgsFosiIiKu6nnn9HOakwufpTlz5ujVV19VWFiYvL29de7cOUnSn3/+qXbt2ikgIEDe3t6KjIzU6tWrnfqIj4/Xc889p4iICHl4eCgkJER33XWXNm/e7NimRYsWqlatmrZt26aWLVvK29tbYWFhjmPWxVJTUzVixAiVK1dOHh4eKlmypF588UWlpqY6trFYLEpMTNSXX37p+Gz36tVLkuTh4aFixYrl+Jwvdu7cOYWEhDgdly68vl5eXrnqAwAAILfyzzATAAAKmB9//FERERFq1qxZtu3NmzdXRESEY57Ae+65R76+vpo3b54iIyOdtp07d66qVq2qatWqSbLP49qkSROFhYVp6NCh8vHx0bx589SxY0d9++236tSpk9Pj+/fvr+DgYA0fPlyJiYlXta/sjBkzRq+99pq6du2qvn37KjY2Vh9//LGaN2+uLVu2KDAwUM2aNZPNZtOqVat07733SpJWrlwpq9XqFI5t2bJFCQkJat68+WVfz5EjR+r1119X69at9fTTT2vnzp369NNPtWHDBq1evVpubm4aP368ZsyYoQULFujTTz+Vr6+vatSocdl+MzIy1LZtWzVt2lTvv/++vL29JUn9+vXT9OnT9fjjj+vZZ59VdHS0JkyYoC1btjj2d7117txZu3bt0uzZs/XBBx+oSJEikuQUzq9atUrfffed+vfvLz8/P3300Ufq0qWLDh48qMKFCzv19+CDD6p8+fJ68803ZRiGpNy9d2lpaWrbtq1SU1P1zDPPqFixYjpy5Ih++uknnT17VgEBAVdVz7///qtmzZrJ399fL774otzc3DRp0iS1aNFCy5cvV4MGDXJ8TaZOnap+/fqpcePGeu6557Rv3z7dd999CgoKUsmSJa/4mmZmZqpt27Zq0KCB3n//ff3+++8aO3asypYtq6efflqSPUjv0KGD1q9fr6efflqVKlXS999/r549e+bqPQsMDNTzzz+v7t27q3379vL19c3T87705/RKRo0aJXd3dw0ePFipqalyd3fXH3/8obvvvlt16tTRiBEjZLVaNW3aNN15551auXKl6tevL0l66qmn9M0332jgwIGqUqWKTp06pVWrVmn79u264447HPs4c+aM2rVrp86dO6tr16765ptv9NJLL6l69eq6++67Ha/ffffdp1WrVunJJ59U5cqVtXXrVn3wwQfatWuXYxqAmTNnqm/fvqpfv76efPJJSVLZsmWv+Dwv1aJFC33zzTf6+OOP1aFDB6WkpOjjjz9WXFyc/ve//111fwAAAJdlAACAm+7s2bOGJOP++++/7Hb33XefIck4d+6cYRiG0b17dyMkJMTIyMhwbHPs2DHDarUab7zxhmNdq1atjOrVqxspKSmOdTabzWjcuLFRvnx5x7pp06YZkoymTZs69Xk1+xoxYoRx8a8U+/fvN1xcXIwxY8Y49bd161bD1dXVsT4zM9Pw9/c3XnzxRUd9hQsXNh588EHDxcXFiI+PNwzDMMaNG2dYrVbjzJkzOb5OMTExhru7u9GmTRsjMzPTsX7ChAmGJOOLL77IUm9sbGyO/V3Qs2dPQ5IxdOhQp/UrV640JBlff/210/pFixZlWR8ZGWlERkY67l94zaOjo5
0eu3TpUkOSsXTp0svW9N5772X7eMMwDEmGu7u7sWfPHse6v/76y5BkfPzxx451F16D7t27Oz0+t+/dli1bDEnG/PnzL1trbuvp2LGj4e7ubuzdu9ex7ujRo4afn5/RvHlzx7pLX6O0tDQjJCTEqFWrlpGamurYbvLkyYYkp9c9OjrakGRMmzbNse7C+3vx59kwDKN27dpGnTp1HPe//fZbQ5Ixfvx4x7rMzEzjzjvvzNJndi7s+7333nNan9vnfbmf0+xceJ3KlCljJCUlOdbbbDajfPnyRtu2bQ2bzeZYn5SUZJQuXdq46667HOsCAgKMAQMGXHY/kZGRhiRjxowZjnWpqalGsWLFjC5dujjWzZw507BarcbKlSudHv/ZZ58ZkozVq1c71vn4+Bg9e/a87H43bNhw2df9xIkTRqtWrQxJjluRIkWMNWvWXLZfAACAvGBaAAAATBAfHy9J8vPzu+x2F9ovnM7brVs3xcTEOJ06/s0338hms6lbt26SpNOnT+uPP/5Q165dFR8fr5MnT+rkyZM6deqU2rZtq927d+vIkSNO+3niiSfk4uLitC43+8rOd999J5vNpq5duzr2ffLkSRUrVkzly5fX0qVLJUlWq1WNGzfWihUrJEnbt2/XqVOnNHToUBmGobVr10qyj2atVq3aZeer/P3335WWlqbnnnvOaU7JJ554Qv7+/td8lfALIxgvmD9/vgICAnTXXXc5Pcc6derI19fX8RzN0Lp1a6fRfjVq1JC/v7/27duXZdunnnrK6X5u37sLI1N//fVXJSUlXVM9mZmZWrx4sTp27Og0t3BoaKgefvhhrVq1yvH5v9TGjRsVExOjp556Su7u7o71vXr1cho9eyWXvg7NmjVzer0WLVokNzc3PfHEE451VqtVAwYMyPU+LpWX553dz+nl9OzZ0+k0+KioKO3evVsPP/ywTp065Xh/ExMT1apVK61YsUI2m02SfX7YP//8U0ePHr3sPnx9ffXoo4867ru7u6t+/fpOr9/8+fNVuXJlVapUyelzdeedd0rSdf958fb2VsWKFdWzZ0/Nnz9fX3zxhUJDQ9W5c2ft2bPnuu4LAACAaQEAADDBhdD0Qsiak0tD2AvzJM6dO1etWrWSZD9Nv1atWqpQoYIkac+ePTIMQ6+99ppee+21bPuNiYlRWFiY437p0qWzbJObfWVn9+7dMgxD5cuXz7b94tPlmzVrppEjRyo5OVkrV65UaGio7rjjDtWsWVMrV67UXXfdpVWrVqlr16457k+SDhw4IEmqWLGi03p3d3eVKVPG0Z4Xrq6uWea33L17t+Li4hQSEpLtYy5cNMsMpUqVyrKuUKFC2c4Fe+n7ntv3rnTp0ho0aJDGjRunr7/+Ws2aNdN9992nRx99NEuoeaV6YmNjlZSUlOW9k6TKlSvLZrPp0KFDqlq1apb2C+/rpfW6ubnl+iJwnp6eWeY8vvT1OnDggEJDQx1TQlxQrly5XO0jO3l53tn9nF5Odu+vpMtOZxAXF6dChQrp3XffVc+ePVWyZEnVqVNH7du3V48ePbK8riVKlMgy53KhQoX0999/O+13+/btWV7nC673z8uDDz4oV1dX/fjjj451999/v8qXL69XXnlFc+fOva77AwAAtzfCVQAATBAQEKDQ0FCnACI7f//9t8LCwuTv7y/JflGXjh07asGCBZo4caJOnDih1atX680333Q85sLIs8GDB6tt27bZ9ntpKJTdRV5ys6/s2Gw2WSwW/d///V+2o+wuzDcpSU2bNlV6errWrl2rlStXOuafbdasmVauXKkdO3YoNjY2x3lpbwYPD48sV1i32WwKCQnR119/ne1jcgqRJGV78S9JWS6glFc5jWw0zs+perFL3/eree/Gjh2rXr166fvvv9fixYv17LPP6q233tK6deucwuirqccMVzMS1GxXezGm7N5fyX4xvVq1amX7mAvvcdeuXdWsWTMtWLBAixcv1nvvvad33nlH3333nWMuVSl376/NZlP16tU1bty4bLfNzdy4ubVv3z4tWrRIkydPdlofFBSkpk2bZrlwFwAAwLUiXAUAwCT33nuvpkyZolWrVjldqf2ClStXav/+/erXr5/T+m7duunLL7/UkiVLtH37dhmG4XSa/oWRZW5ubmrduvU11XilfWWnbNmyMgxDpUuXvuwIV0mqX7++3N3dtXLlSq1cuVJDhgyRZL+Y15QpU7RkyRLH/cu5cDX2nTt3Oo2sS0tLU3R09DW/DpcqW7asfv/9dzVp0uSqA69ChQpJks6ePeu0Preja3MKZ6+Hq3nvJKl69eqqXr26Xn31Va1Zs0ZNmjTRZ599ptGjR+d6n8HBwfL29tbOnTuztO3YsUNWqzXH8O3C+757927HKeaSlJ6erujoaNWsWTPXdVxOeHi4li5dqqSkJKfRq9dyivm1PO+8ujA9g7+/f65+JkJDQ9W/f3/1799fMTExuuOOOzRmzBincDW3+/3rr7/UqlWrK35+r/XzfeLECUnZf1mRnp6ujIyMa+ofAADgUsy5CgCASYYMGSIvLy/169dPp06dcmo7ffq0nnrqKXl7ezsCxwtat26toKAgzZ07V3PnzlX9+vWdTv8NCQlRixYtNGnSJB07dizLfmNjY3Nd45X2lZ3OnTvLxcVFr7/+epbRiYZhOD1XT09P1atXT7Nnz9bBgwedRq4mJyfro48+UtmyZRUaGnrFOt3d3fXRRx857XPq1KmKi4vTPffck+vnnBtdu3ZVZmamRo0alaUtIyMjS3B6sQsB14W5ZiV7EHTpSLuc+Pj4SMoazl4PuX3vzp07lyWkql69uqxWq1JTU69qny4uLmrTpo2+//577d+/37H+xIkTmjVrlpo2beoYuX2punXrKjg4WJ999pnS0tIc66dPn35dX5+2bdsqPT1dU6ZMcayz2Wz65JNP8tzntTzvvKpTp47Kli2r999/XwkJCVnaLxwbMjMzFRcX59QWEhKi4sWLX/X7K9l/Xo4cOeL0+l2QnJysxMREx30fH59reu/KlSsnq9WquXPnOn2GDx8+rJUrV6p27dp57hsAACA7jFwFAMAk5cuX15dffqlHHnlE1atXV58+fVS6dGnt379fU6dO1cmTJzV79myniwFJ9hGpnTt31pw5c5SYmKj3338/S9+ffPKJmjZtqurVq+uJJ55QmTJldOLECa1du1aHDx/WX3/9lasac7OvS5UtW1ajR4/WsGHDtH//fnXs2FF+fn6Kjo7WggUL9OSTT2rw4MGO7Zs1a6a3335bAQEBql69uiR7kFOxYkXt3LlTvXr1uuI+g4ODNWzYML3++utq166d7rvvPu3cuVMTJ05UvXr1nC64cz1ERkaqX79+euuttxQVFaU2bdrIzc1Nu3fv1vz58/Xhhx/qgQceyPaxVatWVcOGDTVs2DCdPn1aQUFBmjNnTq5H1NWpU0eS9Morr+ihhx6Sm5ubOnTo4Ahdr0Vu37s//vhDAwcO1IMPPqgKFSooIyNDM2fOlIuLi7p06XLV+x09erR+++03NW3aVP3795erq
6smTZqk1NRUvfvuuzk+zs3NTaNHj1a/fv105513qlu3boqOjta0adNyPedqbnTs2FH169fXCy+8oD179qhSpUr64YcfdPr0aUl5H22Z1+edV1arVZ9//rnuvvtuVa1aVY8//rjCwsJ05MgRLV26VP7+/vrxxx8VHx+vEiVK6IEHHlDNmjXl6+ur33//XRs2bNDYsWOver+PPfaY5s2bp6eeekpLly5VkyZNlJmZqR07dmjevHn69ddfVbduXUn2z/fvv/+ucePGqXjx4ipdurQaNGggSZowYYLOnj3ruMjWjz/+qMOHD0uSnnnmGQUEBCg4OFi9e/fW559/rlatWqlz586Kj4/XxIkTlZycrGHDhl2nVxMAAOA8AwAAmOrvv/82unfvboSGhhpubm5GsWLFjO7duxtbt27N8TG//fabIcmwWCzGoUOHst1m7969Ro8ePYxixYoZbm5uRlhYmHHvvfca33zzjWObadOmGZKMDRs25HlfI0aMMLL7leLbb781mjZtavj4+Bg+Pj5GpUqVjAEDBhg7d+502u7nn382JBl333230/q+ffsakoypU6fmWNulJkyYYFSqVMlwc3MzihYtajz99NPGmTNnsq03Njb2iv317NnT8PHxybF98uTJRp06dQwvLy/Dz8/PqF69uvHiiy8aR48edWwTGRlpREZGOj1u7969RuvWrQ0PDw+jaNGixssvv+x4nZcuXXrFukaNGmWEhYUZVqvVkGRER0cbhmEYkowBAwZk2T48PNzo2bOn4/6VXoMrvXf79u0zevfubZQtW9bw9PQ0goKCjJYtWxq///67Uz+5rccwDGPz5s1G27ZtDV9fX8Pb29to2bKlsWbNGqdtli5dmu1rNHHiRKN06dKGh4eHUbduXWPFihVZXvfo6GhDkjFt2jTHupze3+w+07GxscbDDz9s+Pn5GQEBAUavXr2M1atXG5KMOXPmZPs6Xrrv9957L0tbbp53bn5OL3bhdZo/f3627Vu2bDE6d+5sFC5c2PDw8DDCw8ONrl27GkuWLDEMwzBSU1ONIUOGGDVr1jT8/PwMHx8fo2bNmsbEiROd+omMjDSqVq2apf+ePXsa4eHhTuvS0tKMd955x6hatarh4eFhFCpUyKhTp47x+uuvG3FxcY7tduzYYTRv3tzw8vIyJDl9TsLDww1J2d4u/AwYhmGkp6cbH3/8sVGrVi3D19fX8PX1NVq2bGn88ccfuXr9AAAArobFMG6RqwkAAAAA+cjChQvVqVMnrVq1Sk2aNDG7HAAAAJiAcBUAAAC4guTkZKeLl2VmZqpNmzbauHGjjh8/ftUXNgMAAEDBwJyrAAAAwBU888wzSk5OVqNGjZSamqrvvvtOa9as0ZtvvkmwCgAAcBtj5CoAAABwBbNmzdLYsWO1Z88epaSkqFy5cnr66ac1cOBAs0sDAACAiQhXAQAAAAAAACAPrGYXAAAAAAAAAAD5EeEqAAAAAAAAAOQB4SoAAAAAAAAA5AHhKgAAAAAAAADkAeEqAAAAAAAAAOQB4SoAAAAAAAAA5AHhKgAAAAAAAADkAeEqAAAAAAAAAOQB4SoAAAAAAAAA5AHhKgAAAAAAAADkAeEqAAAAAAAAAOQB4SoAAAAAAAAA5AHhKgAAAAAAAADkAeEqAAAAAAAAAOQB4SoAAAAAAAAA5AHhKgAAAAAAAADkAeEqAAAAAAAAAOQB4SoAAAAAAAAA5AHhKgAAAAAAAADkAeEqAAAAAAAAAOQB4SoAAAAAAAAA5AHhKgAAAAAAAADkAeEqAAAAAAAAAOQB4SoAAAAAAAAA5AHhKgAAAAAAAADkAeEqAAAAAAAAAOQB4SoAAAAAAAAA5AHhKgAAAAAAAADkAeEqAAAAAAAAAOQB4SoAAAAAAAAA5AHhKgAAAAAAAADkAeEqAAAAAAAAAOQB4SoAAAAAAAAA5AHhKgAAAAAAAADkAeEqAAAAAAAAAOQB4SoAAAAAAAAA5AHhKgAAAAAAAADkAeEqAAAAAAAAAOQB4SoAAAAAAAAA5AHhKgAAAAAAAADkAeEqAAAAAAAAAOQB4SoAAAAAAAAA5AHhKgAAAAAAAADkAeEqAAAAAAAAAOQB4SoAAAAAAAAA5AHhKgAAAAAAAADkAeEqAAAAAAAAAOQB4SoAAAAAAAAA5AHhKgAAAAAAAADkAeEqAAAAAAAAAOQB4SoAAAAAAAAA5AHhKgAAAAAAAADkAeEqAAAAAAAAAOQB4SoAAAAAAAAA5AHhKgAAAAAAAADkAeEqAAAAAAAAAOQB4SoAAAAAAAAA5AHhKgAAAAAAAADkAeEqAAAAAAAAAOQB4SoAAAAAAAAA5AHhKgAAAAAAAADkAeEqAAAAAAAAAOQB4SoAAAAAAAAA5AHhKgAAAAAAAADkAeEqAAAAAAAAAOQB4SoAAAAAAAAA5AHhKgAAAAAAAADkAeEqAAAAAAAAAOQB4SoAAAAAAAAA5AHhKgAAAAAAAADkgavZBdxoGRkZ2rJli4oWLSqrlSwZAAAAAAAAuBo2m00nTpxQ7dq15epa4OPEq1LgX40tW7aofv36ZpcBAAAAAAAA5Gvr169XvXr1zC7jllLgw9WiRYtKsr/5oaGhJlcDAAAAAAAA5C/Hjh1T/fr1HTkb/lPgw9ULUwGEhoaqRIkSJlcDAAAAAAAA5E9MuZkVrwgAAAAAAAAA5AHhKgAAAAAAAADkAeEqAAAAAAAAAORBgZ9zNTcMw1BGRoYyMzPNLgUmcHFxkaurqywWi9mlAAAAAABuIZmZmUpPTze7DOCGIxvJu9s+XE1LS9OxY8eUlJRkdikwkbe3t0JDQ+Xu7m52KQAAAACAW0BCQoIOHz4swzDMLgW4KchG8ua2DldtNpuio6Pl4uKi4sWLy93dnYT+NmMYhtLS0hQbG6vo6GiVL1+eK98BAAAAwG0uMzNThw8flre3t4KDg8kKUKCRjVyb2zpcTUtLk81mU8mSJeXt7W12OTCJl5eX3NzcdODAAaWlpcnT09PskgAAAAAAJkpPT5dhGAoODpaXl5fZ5QA3HNlI3hFDS6Tx4DMAAAAAAMiCEau4nZCN5A2vGgAAAAAAAADkAeFqAbRs2TJZLBadPXvW7FJyFBERofHjx5tdBgAAAAAAAJBnhKv5jMViuext5MiRZpd4Sxk5cqRq1apldhkAAAAAANw0a9eulYuLi+655x6zSwEKPMLVfObYsWOO2/jx4+Xv7++0bvDgwXnqNy0t7TpXCgAAAAAAzDB16lQ988wzWrFihY4ePWpaHWQNuB0QruYzxYoVc9wCAgJksVic1vn6+jq23bRpk+rWrStvb281btxYO3fudLRdGNH5+eefq3Tp0o6rwB08eFD333+/fH195e/vr65du+rEiROOx/Xq1UsdO3Z0qum5555TixYtHPfj4+P1yCOPyMfHR6Ghofrggw/UokULPffcc06PS0pKUu/eveXn56dSpUpp8uTJjrb9+/fLYrFozpw5aty4sTw9PVWtWjUtX77csc306dMVGBjo1OfChQsdE45Pnz5dr7/+uv766y/HyN7p
06dfzcsNAAAAAEC+kpCQoLlz5+rpp5/WPffck+Xv4B9//FH16tWTp6enihQpok6dOjnaUlNT9dJLL6lkyZLy8PBQuXLlNHXqVElX/htcyjlrWLRokZo2barAwEAVLlxY9957r/bu3evU1+HDh9W9e3cFBQXJx8dHdevW1Z9//qn9+/fLarVq48aNTtuPHz9e4eHhstls1/qSAdeEcPVShiElJt78m2Fc96fyyiuvaOzYsdq4caNcXV3Vu3dvp/Y9e/bo22+/1XfffaeoqCjZbDbdf//9On36tJYvX67ffvtN+/btU7du3a5qv4MGDdLq1av1ww8/6LffftPKlSu1efPmLNuNHTtWdevW1ZYtW9S/f389/fTTTgGwJA0ZMkQvvPCCtmzZokaNGqlDhw46depUruro1q2bXnjhBVWtWtUxsvdqnwsAAAAAAIZhKCktw5SbcZV5wbx581SpUiVVrFhRjz76qL744gtHHz///LM6deqk9u3ba8uWLVqyZInq16/veGyPHj00e/ZsffTRR9q+fbsmTZrkNIgrNy7NGiQpMTFRgwYN0saNG7VkyRJZrVZ16tTJEYwmJCQoMjJSR44c0Q8//KC//vpLL774omw2myIiItS6dWtNmzbNaT/Tpk1Tr169uMI9TOdqdgG3nKQk6SoPHNdFQoLk43NduxwzZowiIyMlSUOHDtU999yjlJQUxzdHaWlpmjFjhoKDgyVJv/32m7Zu3aro6GiVLFlSkjRjxgxVrVpVGzZsUL169a64z/j4eH355ZeaNWuWWrVqJcl+wCtevHiWbdu3b6/+/ftLkl566SV98MEHWrp0qSpWrOjYZuDAgerSpYsk6dNPP9WiRYs0depUvfjii1esxcvLS76+vnJ1dVWxYsWuuD0AAAAAANlJTs9UleG/mrLvbW+0lbd77uObqVOn6tFHH5UktWvXTnFxcVq+fLlatGihMWPG6KGHHtLrr7/u2L5mzZqSpF27dmnevHn67bff1Lp1a0lSmTJlrrreS7MGSY6/6y/44osvFBwcrG3btqlatWqaNWuWYmNjtWHDBgUFBUmSypUr59i+b9++euqppzRu3Dh5eHho8+bN2rp1q77//vurrg+43oj3C7AaNWo4lkNDQyVJMTExjnXh4eFOB7vt27erZMmSjmBVkqpUqaLAwEBt3749V/vct2+f0tPTnb75CggIcApMs6vvwvQGF9cnSY0aNXIsu7q6qm7durmuBQAAAACA28nOnTu1fv16de/eXZL97+hu3bo5Tu2PiopyDIS6VFRUlFxcXByDtPLq0qxBknbv3q3u3burTJky8vf3V0REhCT71IQX9l27dm1HsHqpjh07ysXFRQsWLJBkn6KgZcuWjn4AMzFy9VLe3vZRpGbs9zpzc3NzLF+YA+XiuUh88jBS1mq1ZjklIT09/Zrrk+w1Xs1cKdezFgAAAAAAcuLl5qJtb7Q1bd+5NXXqVGVkZDidPWoYhjw8PDRhwgR5eXnlvJ/LtEm5/xs8u6yhQ4cOCg8P15QpU1S8eHHZbDZVq1bNccGrK+3b3d1dPXr00LRp09S5c2fNmjVLH3744WUfA9wsjFy9lMViPz3/Zt8umgDaLJUrV9ahQ4d06NAhx7pt27bp7NmzqlKliiQpODhYx44dc3rchTlUJPspA25ubtqwYYNjXVxcnHbt2pWnmtatW+dYzsjI0KZNm1S5cmVHLfHx8UpMTMy2Fsl+AM7MzMzTvgEAAAAAkOyDgbzdXU25WXKZF2RkZGjGjBkaO3asoqKiHLe//vpLxYsX1+zZs1WjRg0tWbIk28dXr15dNpvN6ULSF8vN3+DZOXXqlHbu3KlXX31VrVq1UuXKlXXmzBmnbWrUqKGoqCidPn06x3769u2r33//XRMnTlRGRoY6d+58xX0DNwPhKhxat26t6tWr65FHHtHmzZu1fv169ejRQ5GRkapbt64k6c4779TGjRs1Y8YM7d69WyNGjNA///zj6MPPz089e/bUkCFDtHTpUv3777/q06ePrFZrrv9DuNgnn3yiBQsWaMeOHRowYIDOnDnjuDBXgwYN5O3trZdffll79+7VrFmzslwFMSIiQtHR0YqKitLJkyeVmpqa9xcIAAAAAIBb1E8//aQzZ86oT58+qlatmtOtS5cumjp1qkaMGKHZs2drxIgR2r59u7Zu3ap33nlHkv3v5549e6p3795auHChoqOjtWzZMs2bN09S7v4Gz06hQoVUuHBhTZ48WXv27NEff/yhQYMGOW3TvXt3FStWTB07dtTq1au1b98+ffvtt1q7dq1jm8qVK6thw4Z66aWX1L179yuOdgVuFsJVOFgsFn3//fcqVKiQmjdvrtatW6tMmTKaO3euY5u2bdvqtdde04svvqh69eopPj5ePXr0cOpn3LhxatSoke699161bt1aTZo0UeXKlR0X0roab7/9tt5++23VrFlTq1at0g8//KAiRYpIkoKCgvTVV1/pl19+UfXq1TV79myNHDnS6fFdunRRu3bt1LJlSwUHB2v27NlX/8IAAAAAAHCLmzp1qlq3bq2AgIAsbV26dNHGjRsVFBSk+fPn64cfflCtWrV05513av369Y7tPv30Uz3wwAPq37+/KlWqpCeeeMIxUjU3f4Nnx2q1as6cOdq0aZOqVaum559/Xu+9957TNu7u7lq8eLFCQkLUvn17Va9eXW+//bZcXJynROjTp4/S0tIcg66AW4HFuHTCjALm8OHDKlmypA4dOqQSJUo4taWkpCg6OlqlS5fOU/CH3ElMTFRYWJjGjh2rPn365Oox+/fvV+nSpbVlyxbVqlXrxhYoPgsAAAAAgP/wN+KtadSoUZo/f77+/vtvs0spkC73ub9cvna744JWuO62bNmiHTt2qH79+oqLi9Mbb7whSbr//vtNrgwAAAAAAOQ3CQkJ2r9/vyZMmKDRo0ebXQ7ghGkBcEO8//77qlmzplq3bq3ExEStXLnScTo/AAAAAABAbg0cOFB16tRRixYtmBIAtxxGruK6q127tjZt2nRNfURERKiAz1gBAAAAAAByYfr06bm6eBZgBkauAgAAAAAAAEAeEK4CAAAAAAAAQB4Qrkqcfg4+AwAAAAAAIO/OnjW7glvKihUr1KFDBxUvXlwWi0ULFy7M9WNXr14tV1dX1apVK0vbJ598ooiICHl6eqpBgwZav3799Ss6j27rOVfd3NwkSUlJSfLy8jK5mrw5Fpes+JQMWc7ft1gknb93YdFy0fYWi+WS+7po29y1XbQLx56ct72wbMnaTzY1Wi7aaZY6Lqo/pzr+2zantsvXYZGUkJAowzBkdXGVYRiyXFwUAAAAAABATnbulBo1kl5+WXrhBeeg4zaVmJiomjVrqnfv3urcuXOuH3f27Fn16NFDrVq10okTJ5za5s6dq0GDBumzzz5TgwYNNH78eLVt21Y7d+5USEjI9X4KuXZbh6suLi4KDAxUTEyMJMnb2zvfhWrJSclKTs0wu4z8yzBkZKTpzOmT+mlHnL6dES3Jfhx0sVhktVhktUpWi0UuFnuI7GK
9sN4i6/ntLBbL+fU6v/7y2zv1e77dzcUiV6tVri4WublY5Wq1yNXF6ljv5mKRq9OyfRs3l/OPOf9YVxer3M4/9uL12fV/4bHZ9elizV8/CwAAAAAA3HRJSdKDD0pnzkg//SQ995zkelvHbZKku+++W3ffffdVP+6pp57Sww8/LBcXlyyjXceNG6cnnnhCjz/+uCTps88+088//6wvvvhCQ4cOvR5l58lt/24XK1ZMkhwBa36TnmmTzeZ8SrtxyYLhvFYXnwF/8SONSx94YcnIum1222e/30v7ubQa+50sfV/UkN0J+9k/h+y3N5ybs7SmZxpasi9B321PdOo/wzDsD8rMpoDbgMWi/wLbKwSx2Ye7zkFvjuGxU3vO2+Z+/xcHyvZtXKyWfPfFCQAAAAAgHxg4UNq6VSpaVJo9u8AHq/Hx8Tp37pzjvoeHhzw8PK5L39OmTdO+ffv01VdfafTo0U5taWlp2rRpk4YNG+ZYZ7Va1bp1a61du/a67D+vCvY7ngsWi0WhoaEKCQlRenq62eXgJrDZDNkM+82QZLG6qlk9q4bbJJthKPN8m+3CfZshw9BF6w3ZDCnzon5shrK0GY6+/ttnpu2ibS/qO8NmKCPTpvTz/2ZkGkq32f+9eH16pqGM8+udl23KsJ3/9/z67Novt+2lDENKy7QprYCEy5cPbLMJb68Y7joHvUHe7ioe6KWwQl4qHuipIj4esjL6FwAAAAAKrmnT7Der1R6shoaaXdENV6VKFaf7I0aM0MiRI6+53927d2vo0KFauXKlXLMJqE+ePKnMzEwVLVrUaX3RokW1Y8eOa97/tbjtw9ULXFxc5OLiYnYZgCmM80HvxaHrf+Huf8v/hbTZh7dZwl3H+kvC4xy2dQ6XswbNV9r/hW2zuz5Zeqah9MxM6SZ9h+LuYlVooKeKB3jZQ9dATxUP9Lro5ilvdw7BAAAAAK6/tWvXqmnTpmrXrp1+/vlns8sxTYsWLVSrVi2NHz/++ne+das0YIB9+fXXpZYtr/8+LvLss89q9erV+ueff1S5cmVFRUU5te/fv189evTQpk2bVKdOHc2YMUMRERGO9nvvvVePP/64unTpck11bNu2TWFhYY7712PUamZmph5++GG9/vrrqlChwjX3d7Pxlz0AWSwXTuOXPN3y/5cMmbasQeylofH1DIrTMmw6nZimo2eTdfRsso6fS1Fapk0HTiXpwKmkHOss5O3mCFvDzgeuF98P9mX0KwAAAICrN3XqVD3zzDOaOnWqjh49quLFi5tWS1pamtzd3U3b/w0RH2+fZzU5WWrb1n4hq5ugd+/e+vPPP/X3339naXvhhRcUFhamqVOn6tVXX9XgwYP1zTffSLJfCMpqtV5zsCpJfn5+8vf3v+Z+LhYfH6+NGzdqy5YtGjhwoCTJZrPJMAy5urpq8eLFatq0qVxcXLJc5OrEiROOKT/NQrgKoMBxsVrkYjUvJM7ItOlEfKojbD1y/t+jZ1Ps988kKz41Q2eS0nUmKV3/Hj2XbT9uLhYVC7CPfg27ZNTrhfs+HhzGAQAAAPwnISFBc+fO1caNG3X8+HFNnz5dL18S/v3444964403tHXrVvn6+qpZs2ZasGCBJCk1NVXDhw/XrFmzFBMTo5IlS2rYsGHq06ePpk+frueee05nz5519LVw4UJ16tRJxvlTCEeOHKmFCxdq4MCBGjNmjA4cOCCbzaZFixZp9OjR+ueff+Ti4qJGjRrpww8/VNmyZR19HT58WEOGDNGvv/6q1NRUVa5cWZ988omKFi2qMmXKaP369apbt65j+/Hjx+uDDz5QdHS0rFbrVb9W3377rYYPH649e/YoNDRUzzzzjF544QVH+8SJE/XBBx/o0KFDCggIULNmzfTN/PnSk09KO3fqhKur6ixbpuTgYNWuXVvff/+9fHx8rrqO3Pjoo48kSbGxsdmGq9u3b9e4ceNUvnx59erVS4MHD5YknT17Vq+++qr++OOPG1LX9eDv76+tW7c6rZs4caL++OMPffPNNypdurTc3d1Vp04dLVmyRB07dpRkD2CXLFniCGTNwl/lAHCdubpYFXZ+9GlOzqWkXxS+pjiWL4Swx8+lKD3T0KHTyTp0OjnHfgK83LKdduDC/RA/T7kw+hUAAAC4bcybN0+VKlVSxYoV9eijj+q5557TsGHDHBf5/fnnn9WpUye98sormjFjhtLS0vTLL784Ht+jRw+tXbtWH330kWrWrKno6GidPHnyqmrYs2ePvv32W3333XeOKRgTExM1aNAg1ahRQwkJCRo+fLg6deqkqKgoWa1WJSQkKDIyUmFhYfrhhx9UrFgxbd68WTabTREREWrdurWmTZvmFK5OmzZNvXr1ylOwumnTJnXt2lUjR45Ut27dtGbNGvXv31+FCxdWr169tHHjRj377LOaOXOmGjdurNOnT2vlypXSZ59Jc+YoXdLSp5/WqkGDFB8fr5UrVzoC5uz4+vpetp5HH31Un3322VU/jwtq1qyp33//XW3atNHixYtVo0YNSdKQIUM0YMAAlSxZMs9950VCQoL27NnjuB8dHa2oqCgFBQWpVKlSGjZsmI4cOaIZM2bIarWqWrVqTo8PCQmRp6en0/pBgwapZ8+eqlu3rurXr6/x48crMTFRjz/++E17XtkhXAUAE/h7usm/mJsqFcv+dIqMTJtizo9+PXLRqNeLR8KeS8lQXHK64pLTtf1Y9qNfXa0WFfX3zHbagQujYP083W7kUwUAAAAKjGPHjunYsWNO6woVKqTSpUsrJSVF27Zty/KYO+64Q5K0c+dOJSYmOrVFREQoKChIsbGxOnTokFObn5+fypcvf9U1Tp06VY8++qgkqV27doqLi9Py5cvVokULSdKYMWP00EMP6fXXX3c8pmbNmpKkXbt2ad68efrtt9/UunVrSVKZMmWuuoa0tDTNmDFDwcHBjnWXnpL+xRdfKDg4WNu2bVO1atU0a9YsxcbGasOGDQoKCpIklStXzrF937599dRTT2ncuHHy8PDQ5s2btXXrVn3//fdXXZ8kjRs3Tq1atdJrr70mSapQoYK2bdum9957T7169dLBgwfl4+Oje++9V35+fgoPD1ftzEypSRNJ0lBJz77wgsLDwyVJ1atXv+z+Lp0j9VLXeqr9+++/r379+ikiIkI1atTQpEmTtGLFCkVFRemdd95R165dtXHjRrVp00YfffTRDZ+qYePGjWp50Ty0gwYNkiT17NlT06dP17Fjx3Tw4MGr6rNbt26KjY3V8OHDdfz4cdWqVUuLFi3KcpGrm41wFQBuQa4uVkcQWjeHbeJT0nUsLuWiaQfsIeyF+8fjUpRhM3TkfCCbEz9PV6ew9UL4emFdiJ+HXF2u/ptgAAAAoKCZNGmSUygpSY888oi++uorHT58WHXq1MnymAujGXv16qV169Y5tc2cOVOPPvqo5s2bl+XU5jZt2ujXX3+9qvp27typ9evXO07xd3V1Vbdu3TR16lRHuBoVFaUnnngi28dHRUXJxcVFkZGRV7XfS4WHhzsFq5L9avDDhw/Xn3/+qZ
MnT8pms0mSDh48qGrVqikqKkq1a9d2BKuX6tixowYMGKAFCxbooYce0vTp09WyZUunizZdje3bt+v+++93WtekSRONHz9emZmZuuuuuxQeHq4yZcqoXbt2uq9ZM3V5801Z09Jk3Hef/kpIUPXq1dW2bVu1adNGDzzwgAoVKpTj/i4Oim+EsLAw/fTTT477qampatu2rb788kuNHj1afn5+2rlzp9q1a6dJkybpmWeeuaH1tGjR4rIjeadPn37Zx48cOVIjR47Msn7gwIGmTwNwKcJVAMin/Dzd5OfppgpF/bJtz7QZio1PvSR8vWgagrhknU1KV3xKhnYcj9eO4/HZ9uNitaiIr/v5/bk6/vW/sOzh6rT+v/YL910JZwEAAFAg9OvXT/fdd5/TuguBWokSJbRp06YcHzt9+vRsR65KUteuXdWoUSOnNj+/7H/Pv5ypU6cqIyPD6QJWhmHIw8NDEyZMUEBAgLy8cp6+7HJtkmS1WrMEZunp6Vm2y27e0Q4dOig8PFxTpkxR8eLFZbPZVK1aNaWlpeVq3+7u7urRo4emTZumzp07a9asWfrwww8v+5hr4efnp82bN2vZsmVa/OuvCnz+eVmTkpQZHi6X6dP1W2Cg1qxZo8WLF+vjjz/WK6+8oj///FOlS5fOtr8bPS3Apd588021adNGderU0RNPPKHRo0fLzc1NnTt31h9//HHDw9XbCeEqABRQLlb7BbGKBXiqTnj236AmpmboWJzzvK8XX4DrWFyy0jMNnTiXqhPnUvNci5ebiyNozS58vTSYdW63/+tGQAsAAACThYaGKjQ0NNs2T09PxxQA2alYsWKObcHBwVlGel6tjIwMzZgxQ2PHjlWbNm2c2jp27KjZs2frqaeeUo0aNbRkyZJs56msXr26bDabli9f7pgW4NI64+PjlZiY6AhQr3S6uySdOnVKO3fu1JQpU9SsWTNJ0qpVq5y2qVGjhj7//HOdPn06x9Grffv2VbVq1TRx4kRlZGSoc+fOV9x3TipXrqzVq1c7rVu9erUqVKjgmCfW1dVVrVu3Vuu//pKSkpQqafWAAbqzUCFZZB/p2qRJEw0fPlzh4eFasGCB4/T3S93oaQEutn37ds2aNcuxz8zMTEcInp6erszMzOu2LxCuAsBtzcfDVeVC/FQuJPtvxW02QycTUhUTn6pzKfZRrvZb+iX/ZlzU/t+65HT7f9rJ6ZlKTs9UTHzeA1pPN6tTCOt/IZj1yBrM/tfuvM7dlYAWAAAABdNPP/2kM2fOqE+fPgoICHBq69Kli6ZOnaqnnnpKI0aMUKtWrVS2bFk99NBDysjI0C+//KKXXnpJERER6tmzp3r37u24oNWBAwcUExOjrl27qkGDBvL29tbLL7+sZ599Vn/++ecVT++W7KN7CxcurMmTJys0NFQHDx7U0KFDnbbp3r273nzzTXXs2FFvvfWWQkNDtWXLFhUvXtwxqrdy5cpq2LChXnrpJfXu3fuKo10lKTY2NkuwGRoaqhdeeEH16tXTqFGj1K1bN61du1YTJkzQxIkTHa/nvn37dLe/v8q99JIskgZZLOrfvr3+/PNPLVmyRG3atFFISIj+/PNPxcbGqnLlyjnWca3TAuzZs0cJCQk6fvy4kpOTHc+pSpUqTvOnGoahJ598Uh988IEjAG/SpImmTJmiChUqaMaMGerevfs11QJnhKsAgBxZrRaF+HsqxN8zT49Pz7QpIcfw9fy/qfblczkEt0lp9oA2Jd2mlPRUxV5DQOvhanUOZi8aRVsswFNhhbxUopCXSgR6q1iAJ2EsAAAA8o2pU6eqdevWWYJVyR6uvvvuu/r777/VokULzZ8/X6NGjdLbb78tf39/NW/e3LHtp59+qpdffln9+/fXqVOnVKpUKb388suSpKCgIH311VcaMmSIpkyZolatWmnkyJF68sknL1ub1WrVnDlz9Oyzz6patWqqWLGiPvroI8c8sJL9tP/FixfrhRdeUPv27ZWRkaEqVarok08+ceqrT58+WrNmjXr37p2r12XWrFmaNWuW07pRo0bp1Vdf1bx58zR8+HCNGjVKoaGheuONN9SrVy9JUmBgoJbMmaMH1q2TxTD0fwEBipw0SVWrVtX27du1YsUKjR8/XufOnVN4eLjGjh2ru+++O1c15UXfvn21fPlyx/3atWtLkqKjo53mnZ08ebKKFi2qe++917Fu5MiRevjhh9WgQQO1a9dOAwYMuGF13o4sxuVmly0ADh8+rJIlS+rQoUMqUaKE2eUAAK5SRqZNCamXBrRZQ9hzOYyojU9JV2La1Z/2YrFIxfw97Rf3KmS/wFeJQt4XLXvJ083lBjxjAAAAmC0lJUXR0dEqXbq0PD3zNtAAN8aoUaM0f/58/f333zd2Rzab1L699OuvUsWK0oYNUh7mwc1PLve5J1/LGSNXAQC3NFcXqwK93RXo7X7ljXOQaTOUkN3o2VT7v2eT0nUsLlmHz9jnnD1yJlmpGTYdi0vRsbgUbTxwJtt+i/i6Zw1fL9wv5CV/T7c81wwAAADgPwkJCdq/f78mTJig0aNH3/gdvvmmPVj18pLmzy/wwSryjnAVAFDguVgtCvB2U4B37sJOwzB0MiHNEbQePpPkWD5y1h7CJqRm6GRCmk4mpOmvw3HZ9uPv6aqw84FrifNTDlwcxgb5uMtisVzPpwoAAAAUSAMHDtTs2bPVsWPHXE8JkGd//CGNGGFfnjhRql79xu4P+RrhKgAAl7BYLAr281Cwn4dqlQzM0m4Yhs4lZ+jw2ST7aNeLRrwePpukI2eSdSbJPo/suWPntP3YuWz34+Xm4jTNwMXLJQp5K9jXQ1Yr4SsAAAAwffr0XF0865odOyY9/LB9WoDHH5fOz8EK5IRwFQCAq2SxXBgJG6CqxbNeMECSElMzdPT8KNfDTqNe7eFrTHyqktMztScmQXtiErLtw93FqtBAz//C10DnOV+LBXjKzYWLbgEAAADXRUaG1L27dOKEfbTqhAlmV4R8gHAVAIAbwMfDVeWL+ql80eznZkpJz9SxuJTzoWvS+ekH/gtij59LUVqmTQdOJenAqaRs+7Cev+jWxRfaCrto+oHigVx0CwAAAMi1ESOk5cslX1/7PKve3mZXhHyAcBUAABN4urmodBEflS7ik217RqZNx8+lOE85cGH5/P20TJuOxqXoaFyKtD/7/RTx9XBMOVDCKXy1B7K+HvwqAAAAkBPDMMwuATfLL7/YL2IlSZ9/LlWsaG49JuDznjf8RQUAwC3I1cWqEoW8VaJQ9t+W22yGTiakOka6Hr5oBOyFi24lpWXqZEKqTiakKurQ2Wz7CfByc7rQVomLLsAVFuilQG83LroFAABuOy4u9rN/0tLS5OXlZXI1uOEOHpQee8y+PGCA1K2bufWYJCnJfsacm1vuLgQMO8JVAADyIavVohB/T4X4e+qOUoWytBuGobNJ6Y6g9fCZpCwjYOOS0x23f49mf9EtH3eXS6Yc8P5vOdBLRbjoFgAAKIBcXV3l7e2t2NhYubm5yWplnvsCKy1N7g8+KOvp07LdcYfSx
oyRUlLMruqmMgxDSUlJiomJUWBgoOPLBeQO4SoAAAWQxWJRIR93FfJxV7Ww7C+6lZCacT5svSh4PR/GHjmTrJMJqUpMy9SuEwnadSKHi265Wu1ha+B/F9q6OIwt5u8pVy66BQAA8hmLxaLQ0FBFR0frwIEDZpeDGyjk7bfluX69Mv39Ff3220o/etTskkwTGBioYsWKmV1GvkO4CgDAbcrXw1UVi/mpYrGcL7p19OxFc71eEsQeP5eitAybok8mKvpkYrZ9uFgtKubv6Zjr9cK8r2GB3ipRyEuhgZ7ycOWbcQAAcOtxd3dX+fLllZaWZnYpuEGsCxfKfcYMSVLm1Kkq0ayZyRWZx83NjRGreUS4CgAAsuXp5qIywb4qE+ybbXt6pk3H41KyD1/PJuvo2WSlZxqO++ujs99PiJ9HtlMOXBgF6+3OrysAAMAcVqtVnp6eZpeBG2HvXqlfP/vy4MFyf+ABc+tBvsVfKwAAIE/cXKwqGeStkkE5X3QrJj5VR84mnZ/39b8Q1j4XbJJS0m2KiU9VTHyqthw8m20/hbzdzgeu3pfM/2pf5+/lykW3AAAAkHspKdKDD0rnzklNmkhvvml2RcjHCFcBAMANYbVaVCzAU8UCPFUnPGu7YRg6nZiW5UJbhy8KX+NTMnQmKV1nktL1z5HsL7rl6+GqiCLealK2iCIrBKtuRJDcXZnnFQAAADl47jlpyxapSBFpzhzJzc3sipCPEa4CAABTWCwWFfb1UGFfD9UoEZjtNudS0v8LXi+acuDCRbdOJaYpITVD/xw5p3+OnNOkFfvk4+6iRmWLqEXFYEVWCM5xZC0AAABuQ19/LU2aJFks9uUSJcyuCPkc4SoAALhl+Xu6yT/UTZVD/bNtT07L1JGzyfr3aJyW74rVil2xOpmQpt+3n9Dv209IksoG+yiyQohaVAxW/dJB8nRjon4AAIDb0vbt/82z+uqrUps25taDAsFiGIZhdhE30uHDh1WyZEkdOnRIJfg2AgCAAs1mM7Tt2Dkt2xmj5btitfngWWXa/vtVx9PNqkZlCqtFxRBFVghWRBEfE6sFAADATZOYKNWvL23bJt15p7R4seTCl+65Rb6WM0auAgCAAsNqtahaWICqhQVo4J3lFZecrtV7TjrC1hPnUrV0Z6yW7oyVJEUU9lZkhWC1qBiihmUKy8udX7ABAAAKHMOQnn7aHqwWKybNmkWwiuuGcBUAABRYAV5ual89VO2rh8owDO04Hq/lu2K1bGeMNu4/o/2nkrR/7QF9ufaA3F2talA6yBG2lg32kcViMfspAAAA4FpNnSrNnClZrfYLWBUtanZFKECYFgAAANyW4lPStWbvKS3fFavlO2N15GyyU3uJQl6OoLVx2cLy8eA7aQAAgHwnKkpq2FBKTZXeeksaOtTsivIl8rWcEa4CAIDbnmEY2huboGU7Y7VsZ6zWR59WWqbN0e7mYlG9iP9GtVYo6suoVgAAgFvduXNSnTrSnj1S+/bSjz/aR6/iqpGv5YxwFQAA4BJJaRlae35U67KdsTp4OsmpPTTAU5EVghVZIVhNyheRv6ebSZUCAAAgW4Yhde0qffONVKqUtHmzVLiw2VXlW+RrOeP8NgAAgEt4u7uqVeWialW5qAzD0P5TSY6LYq3de0rH4lI0Z8MhzdlwSC5Wi+qUKqTIivawtWpxf0a1AgAAmG3CBHuw6uYmzZtHsIobhpGrAAAAVyElPVN/Rp92hK37YhOd2oP9PNS8fLBaVAxWs/JFFOjtblKlAAAAt6n166WmTaX0dGn8eOl//zO7onyPfC1njFwFAAC4Cp5uLo4pASTp4KkkLd9lD1pX7zml2PhUfbv5sL7dfFhWi1SrZKBaVAxRZIVgVQ8LkNXKqFYAAIAb5vRp+3QA6elSly7Ss8+aXREKOEauAgAAXCepGZnauP+MY1TrrhMJTu1BPu5qXr6IWlQMUbPyRVTY18OkSgEAAAogm026/37pp5+ksmWlTZukgACzqyoQyNdyxshVAACA68TD1UVNyhVRk3JF9Mo90pGzyVqxK1bLdsZo9Z5TOp2YpoVRR7Uw6qgsFqlGWIB9FGzFENUqGSgXRrUCAADk3fvv24NVDw9p/nyCVdwUjFwFAAC4CdIzbdp04IyW7YzV8l2x2n7snFN7gJebmp0f1dq8QhGF+HmaVCkAAEA+tGKFdOedUmamNGmS9OSTZldUoJCv5YxwFQAAwAQnzqVo+a5YLd8Zq5W7Y3UuJcOpvWpxf0VWCFaLiiGqXSpQbi5WkyoFAAC4xcXESLVqSceOSY8+Ks2YIVk4I+h6Il/LGeEqAACAyTIybYo6dFbLd8Vq2c5YbT0S59Tu5+mqpuWKnJ9CIFihAV4mVQoAAHCLycyU2rWTfv9dqlxZWr9e8vU1u6oCh3wtZ8y5CgAAYDJXF6vqRgSpbkSQXmhTUScTUrVil336gBW7YnUmKV3/989x/d8/xyVJFYv6qUVFe9BaNzxI7q6MagUAALepUaPswaq3t/TNNwSruOkYuQoAAHALy7QZ+vvwf6Na/zp8Vhf/9ubj7qLG50e1tqgYrBKFvM0rFgAA4Gb67TepbVvJMKSZM+1TAuCGIF/LGeEqAABAPnImMU0rdv83qvVkQppTe9lgH7WoGKLICsGqXzpInm4uJlUKAABwAx05ItWuLcXGSk88IU2ebHZFBRr5Ws4IVwEAAPIpm83QtmPntGxnjJbvitXmg2eVafvvVztPN6salSnsCFsjiviYWC0AAMB1kp4u3XmntGqV/UJWa9ZIXsxJfyORr+WMOVcBAADyKavVomphAaoWFqCBd5ZXXFK6Vu896QhbT5xL1dKdsVq6M1aSFFHY+/z0ASFqWKawvNwZ1QoAAPKhV1+1B6t+ftL8+QSrMBXhKgAAQAER4O2m9tVD1b56qAzD0I7j8Vq2M1bLd8Vo4/4z2n8qSfvXHtCXaw/I3dWqBqWDHKNaywb7yGKxmP0UAAAALu/HH6V337Uvf/GFVK6cufXgtse0AAAAALeB+JR0rdl7yh627ozR0bgUp/YShbwco1obly0sHw++gwcAALeY/fulO+6QzpyRnn1W+vBDsyu6bZCv5YxwFQAA4DZjGIb2xCRo+a5YLdsZq/XRp5WWaXO0u7lYVC8iyBG2Vijqy6hWAABgrtRUqVkzacMGqX59aeVKyd3d7KpuG+RrOSNcBQAAuM0lpWVo7d5TjrD14Okkp/bQAM/zQWuwGpcrIn9PN5MqBQAAt61nn5U+/lgqVEjaskUKDze7otsK+VrOON8LAADgNuft7qpWlYuqVeWiMgxD+08ladnOGC3bGat1+07pWFyK5mw4pDkbDsnVatEd4YUcYWuVUH9GtQIAgBtr/nx7sCpJM2cSrOKWwshVAAAA5CglPVPr9tlHtS7fGat9JxOd2oP9PBRZIViRFYLVrHwRBXpzeh4AALiOdu2S6taV4uOloUOlt94yu6LbEvlazm6bkatbt25VTEyM436hQoVUunRppaSkaNu2bVm2v+OOOyRJO3fuVGKi8x8RERERCgoKUmxsrA4d
1VUlLSDR//RhCuertvvnF+AFJh8vFxD/4K+88bfa2nAWZRPOfj47w3KgAAAAAAKHMCy/lrQtfGmvDeHr294xsN+UOkfH1Kb07QrFkzt8dxcXGaMmXKDe/35MmTqlXL/bYKtWrV0rlz53Tp0iX9+OOPysvLy3fOgQMHbvj4N4Jw1dtNmiQNHVq4ASdhIQAAAAAAwO/y57Z1dTEnV/3bhZXqYFWSUlNTVadOHdfjwrhq1dsRrnq7li2trgAAAAAAAKDM8vGx6eFbI60uo1gEBgaqcuXKhb7fkJAQZWZmuo1lZmaqcuXKKl++vHx9feXr65vvnJCQkEKvpyBK5112AQAAAAAAAHiFjh07av369W5j8fHx6tixoyQpICBAbdu2dZvjcDi0fv161xyrEK4CAAAAAAAAKDQXLlxQcnKykpOTJUlHjx5VcnKyjh8/LkmaNGmSBg4c6Jo/YsQIHTlyRBMmTNCBAwf06quvauXKlRo7dqxrzrhx4/TGG29o6dKl2r9/v0aOHKmLFy/q4YcfLtZz+yVuCwAAAAAAAACg0OzYsUN33HGH6/G4ceMkSYMGDdKSJUuUkZHhClolKTIyUmvXrtXYsWP18ssvq27duvr3v/+trl27uubcd999On36tJ577jmdPHlSN910k9atW3fNh1wVN5sxxlhaQRE7ceKEwsLC9M0336hu3bpWlwMAAAAAAAB4FfK16+O2AAAAAAAAAADgAcJVAAAAAAAAAPAA4SoAAAAAAAAAeIBwFQAAAAAAAAA8QLgKAAAAAAAAAB4gXAUAAAAAAAAADxCuAgAAAAAAAIAHCFcBAAAAAAAAwAOEqwAAAAAAAADgAcJVAAAAAAAAAPAA4SoAAAAAAAAAeIBwFQAAAAAAAAA8QLgKAAAAAAAAAB4gXAUAAAAAAAAADxCuAgAAAAAAAIAHCFcBAAAAAAAAwAOEqwAAAAAAAADgAcJVAAAAAAAAAPAA4SoAAAAAAAAAeIBwFQAAAAAAAAA8QLgKAAAAAAAAAB4gXAUAAAAAAAAADxCuAgAAAAAAAIAHCFcBAAAAAAAAwAN+VhdQ1BwOhyQpIyPD4koAAAAAAAAA7/NTrvZTzob/KfXhamZmpiSpQ4cOFlcCAAAAAAAAeK/MzEyFh4dbXUaJYjPGGKuLKEq5ubnatWuXatWqJR+f0ncXhPPnz6tZs2ZKTU1VYGCg1eXAC9AzKCh6BgVFz6Cg6BkUFD2DgqJnUFD0DAqqtPeMw+FQZmamWrduLT+/Un+tZoGU+nC1tDt37pyCgoJ09uxZVa5c2epy4AXoGRQUPYOComdQUPQMCoqeQUHRMygoegYFRc+UXaXvUk4AAAAAAAAAKAaEqwAAAAAAAADgAcJVL2e32xUXFye73W51KfAS9AwKip5BQdEzKCh6BgVFz6Cg6BkUFD2DgqJnyi7uuQoAAAAAAAAAHuDKVQAAAAAAAADwAOEqAAAAAAAAAHiAcBUAAAAAAAAAPEC4CgAAAAAAAAAeIFwtAebNm6d69eqpXLlyiomJ0fbt2391/jvvvKMmTZqoXLlyatmypT766CO3540xeu6551S7dm2VL19esbGxOnTokNucM2fO6MEHH1TlypUVHBysoUOH6sKFC4V+bih8hdkvV69e1cSJE9WyZUtVrFhRoaGhGjhwoL777ju3fdSrV082m81tmzFjRpGcHwpfYa8xgwcPvqYfunXr5jaHNca7FXbP/LJfftpmzZrlmsM6490K0jP79u1Tv379XH/nc+bM8Wifly9f1ujRo1WtWjVVqlRJ/fr1U2ZmZmGeFopQYffM9OnT1b59ewUGBqpmzZrq06eP0tLS3OZ07tz5mnVmxIgRhX1qKCKF3TNTpky5ph+aNGniNod1xrsVds/k917FZrNp9OjRrjmsM96tID3zxhtv6LbbblOVKlVUpUoVxcbGXjOfbKYMMbDUihUrTEBAgFm0aJHZt2+fGTZsmAkODjaZmZn5zt+yZYvx9fU1M2fONKmpqWby5MnG39/f7N271zVnxowZJigoyKxatcrs3r3b9O7d20RGRppLly655nTr1s1ER0ebL774wmzevNlERUWZAQMGFPn54sYUdr9kZWWZ2NhY8/bbb5sDBw6YpKQk06FDB9O2bVu3/URERJhp06aZjIwM13bhwoUiP1/cuKJYYwYNGmS6devm1g9nzpxx2w9rjPcqip75ea9kZGSYRYsWGZvNZtLT011zWGe8V0F7Zvv27Wb8+PHmP//5jwkJCTGzZ8/2aJ8jRowwYWFhZv369WbHjh3m5ptvNrfccktRnSYKUVH0TNeuXc3ixYtNSkqKSU5ONj169DDh4eFu60inTp3MsGHD3NaZs2fPFtVpohAVRc/ExcWZ5s2bu/XD6dOn3eawznivouiZU6dOufVLfHy8kWQ2btzomsM6470K2jMPPPCAmTdvntm1a5fZv3+/GTx4sAkKCjInTpxwzSGbKTsIVy3WoUMHM3r0aNfjvLw8ExoaaqZPn57v/P79+5uePXu6jcXExJjhw4cbY4xxOBwmJCTEzJo1y/V8VlaWsdvt5j//+Y8xxpjU1FQjyXz55ZeuOR9//LGx2Wzm22+/LbRzQ+Er7H7Jz/bt240kc+zYMddYREREvm8wUPIVRc8MGjTI3H333dc9JmuMdyuOdebuu+82d955p9sY64z3KmjP/Nz1/t5/a59ZWVnG39/fvPPOO645+/fvN5JMUlLSDZwNikNR9MwvnTp1ykgymzZtco116tTJPPHEE56UDIsVRc/ExcWZ6Ojo676Odca7Fcc688QTT5gGDRoYh8PhGmOd8V430jPGGJObm2sCAwPN0qVLjTFkM2UNtwWwUE5Ojnbu3KnY2FjXmI+Pj2JjY5WUlJTva5KSktzmS1LXrl1d848ePaqTJ0+6zQkKClJMTIxrTlJSkoKDg9WuXTvXnNjYWPn4+Gjbtm2Fdn4oXEXRL/k5e/asbDabgoOD3cZnzJihatWqqXXr1po1a5Zyc3M9PxkUi6LsmcTERNWsWVONGzfWyJEj9cMPP7jtgzXGOxXHOpOZmam1a9dq6NCh1zzHOuN9POmZwtjnzp07dfXqVbc5TZo0UXh4uMfHRfEoip7Jz9mzZyVJVatWdRt/8803Vb16dbVo0UKTJk1SdnZ2oR0TRaMoe+bQoUMKDQ1V/fr19eCDD+r48eOu51hnvFdxrDM5OTlavny5hgwZIpvN5vYc64z3KYyeyc7O1tWrV10/d8hmyhY/qwsoy77//nvl5eWpVq1abuO1atXSgQMH8n3NyZMn851/8uRJ1/M/jf3anJo1a7o97+fnp6pVq7rmoOQpin75pcuXL2vixIkaMGCAKleu7Bp//PHH1aZNG1WtWlVbt27VpEmTlJGRoX/96183eFYoSkXVM926dVPfvn0VGRmp9PR0PfPMM+revbuSkpLk6+vLGuPFimOdWbp0qQIDA9W3b1+3cdYZ7+RJzxTGPk+ePKmAgIBr/kfgr/UeSoai6JlfcjgcevLJJ3XrrbeqRYsWrvEHHnhAERERCg0N1Z49ezRx4
kSlpaXp/fffL5TjomgUVc/ExMRoyZIlaty4sTIyMjR16lTddtttSklJUWBgIOuMFyuOdWbVqlXKysrS4MGD3cZZZ7xTYfTMxIkTFRoa6gpTyWbKFsJVAJKcH27Vv39/GWP02muvuT03btw419etWrVSQECAhg8frunTp8tutxd3qbDY/fff7/q6ZcuWatWqlRo0aKDExER16dLFwsrgDRYtWqQHH3xQ5cqVcxtnnQFQWEaPHq2UlBR9/vnnbuOPPvqo6+uWLVuqdu3a6tKli9LT09WgQYPiLhMW6969u+vrVq1aKSYmRhEREVq5cmW+v10B/NzChQvVvXt3hYaGuo2zzpRNM2bM0IoVK5SYmHjNe1yUDdwWwELVq1eXr6/vNZ84mZmZqZCQkHxfExIS8qvzf/rzt+acOnXK7fnc3FydOXPmuseF9YqiX37yU7B67NgxxcfHu121mp+YmBjl5ubq66+/LviJoNgUZc/8XP369VW9enUdPnzYtQ/WGO9U1D2zefNmpaWl6ZFHHvnNWlhnvIMnPVMY+wwJCVFOTo6ysrIK7bgoHkXRMz83ZswYrVmzRhs3blTdunV/dW5MTIwkuX5+oWQq6p75SXBwsBo1auT2foZ1xjsVdc8cO3ZMCQkJv/v9jMQ6U9LdSM+8+OKLmjFjhj799FO1atXKNU42U7YQrlooICBAbdu21fr1611jDodD69evV8eOHfN9TceOHd3mS1J8fLxrfmRkpEJCQtzmnDt3Ttu2bXPN6dixo7KysrRz507XnA0bNsjhcLgWf5Q8RdEv0v+C1UOHDikhIUHVqlX7zVqSk5Pl4+Nzza8woGQpqp75pRMnTuiHH35Q7dq1XftgjfFORd0zCxcuVNu2bRUdHf2btbDOeAdPeqYw9tm2bVv5+/u7zUlLS9Px48c9Pi6KR1H0jCQZYzRmzBh98MEH2rBhgyIjI3/zNcnJyZLk+vmFkqmoeuaXLly4oPT0dFc/sM54r6LumcWLF6tmzZrq2bPnb85lnfEOnvbMzJkz9fzzz2vdunVu902VyGbKHKs/UausW7FihbHb7WbJkiUmNTXVPProoyY4ONicPHnSGGPMQw89ZJ5++mnX/C1bthg/Pz/z4osvmv3795u4uDjj7+9v9u7d65ozY8YMExwcbFavXm327Nlj7r77bhMZGWkuXbrkmtOtWzfTunVrs23bNvP555+bhg0bmgEDBhTficMjhd0vOTk5pnfv3qZu3bomOTnZZGRkuLYrV64YY4zZunWrmT17tklOTjbp6elm+fLlpkaNGmbgwIHF/w1AgRV2z5w/f96MHz/eJCUlmaNHj5qEhATTpk0b07BhQ3P58mXXflhjvFdR/FwyxpizZ8+aChUqmNdee+2aY7LOeLeC9syVK1fMrl27zK5du0zt2rXN+PHjza5du8yhQ4d+9z6NMWbEiBEmPDzcbNiwwezYscN07NjRdOzYsfhOHB4rip4ZOXKkCQoKMomJiW7vZ7Kzs40xxhw+fNhMmzbN7Nixwxw9etSsXr3a1K9f39x+++3Fe/LwSFH0zF//+leTmJhojh49arZs2WJiY2NN9erVzalTp1xzWGe8V1H0jDHOT5APDw83EydOvOaYrDPeraA9M2PGDBMQEGDeffddt58758+fd5tDNlM2EK6WAK+88ooJDw83AQEBpkOHDuaLL75wPdepUyczaNAgt/krV640jRo1MgEBAaZ58+Zm7dq1bs87HA7z7LPPmlq1ahm73W66dOli0tLS3Ob88MMPZsCAAaZSpUqmcuXK5uGHH3ZbBFByFWa/HD161EjKd9u4caMxxpidO3eamJgYExQUZMqVK2eaNm1qXnjhBbcgDSVbYfZMdna2ueuuu0yNGjWMv7+/iYiIMMOGDXMLPIxhjfF2hf1zyRhjFixYYMqXL2+ysrKueY51xvsVpGeu97OnU6dOv3ufxhhz6dIlM2rUKFOlShVToUIFc88995iMjIyiPE0UosLumeu9n1m8eLExxpjjx4+b22+/3VStWtXY7XYTFRVlnnrqKXP27NliOmPcqMLumfvuu8/Url3bBAQEmDp16pj77rvPHD582O2YrDPerSh+Nn3yySdG0jX/vjaGdaY0KEjPRERE5NszcXFxrjlkM2WHzRhjivLKWAAAAAAAAAAojbjnKgAAAAAAAAB4gHAVAAAAAAAAADxAuAoAAAAAAAAAHiBcBQAAAAAAAAAPEK4CAAAAAAAAgAcIVwEAAAAAAADAA4SrAAAAAAAAAOABwlUAAIAyLjExUTabTVlZWUV6nM6dO+vJJ58s0mPkp169epozZ84N7WPw4MHq06fPr86x6vwAAABgHcJVAAAALzZ48GDZbDbZbDb5+/srMjJSEyZM0OXLl4uthilTprhquN4GAAAAlEaEqwAAAF6uW7duysjI0JEjRzR79mwtWLBAcXFxxXb88ePHKyMjw7XVrVtX06ZNcxvz1NWrVwuxUgAAAKBwEa4CAAB4ObvdrpCQEIWFhalPnz6KjY1VfHy863mHw6Hp06crMjJS5cuXV3R0tN59993r7m/KlCm66aab3MbmzJmjevXq5Tu/UqVKCgkJcW2+vr4KDAx0G/t5LRMmTFDVqlUVEhKiKVOmuO3LZrPptddeU+/evVWxYkX94x//kCStXr1abdq0Ubly5VS/fn1NnTpVubm5kiRjjKZMmaLw8HDZ7XaFhobq8ccfd9tvdna2hgwZosDAQIWHh+v11193e37v3r268847Vb58eVWrVk2PPvqoLly4cN3v0cWLFzVw4EBVqlRJtWvX1ksvvXTduQAAACi9CFcBAABKkZSUFG3dulUBAQGusenTp2vZsmWaP3++9u3bp7Fjx+ovf/mLNm3aVOz1LV26VBUrVtS2bds0c+ZMTZs2zS0Ilpzh7j333KO9e/dqyJAh2rx5swYOHKgnnnhCqampWrBggZYsWeIKXt977z3XFbuHDh3SqlWr1LJlS7d9vvTSS2rXrp127dqlUaNGaeTIkUpLS5PkDEq7du2qKlWq6Msvv9Q777yjhIQEjRkz5rrn8dRTT2nTpk1avXq1Pv30UyUmJuqrr74q5O8WAAAASjo/qwsAAADAjVmzZo0qVaqk3NxcXblyRT4+Ppo7d64k6cqVK3rhhReUkJCgjh07SpLq16+vzz//XAsWLFCnTp2KtdZWrVq5blnQsGFDzZ07V+vXr9cf//hH15wHHnhADz/8sOvxkCFD9PTTT2vQoEGu+p9//nlNmDBBcXFxOn78uEJCQhQbGyt/f3+Fh4erQ4cObsft0aOHRo0aJUmaOHGiZs+erY0bN6px48Z66623dPnyZS1btkwVK1aUJM2dO1e9evXSP//5T9WqVcttXxcuXNDChQu1fPlydenSRZIzNK5bt24hf7cAAABQ0hGuAgAAeLk77rhDr732mi5evKjZs2fLz89P/fr1kyQdPnxY2dnZbuGlJOXk5Kh169bFXmurVq3cHteuXVunTp1yG2vXrp3b4927d2vLli2uK1UlKS8vT5cvX1Z2drbuvfdezZkzR/Xr
+      "text/plain": [
+       "<Figure size 1600x900 with 2 Axes>"
+      ]
+     },
+     "metadata": {},
+     "output_type": "display_data"
+    }
+   ],
+   "source": [
+    "import pandas as pd\n",
+    "import matplotlib.pyplot as plt\n",
+    "\n",
+    "data = pd.read_csv(\"../runlog/resnet18_accuracy_var_relu.csv\")\n",
+    "\n",
+    "fig, ax1 = plt.subplots()\n",
+    "fig.set_figheight(9)\n",
+    "fig.set_figwidth(16)\n",
+    "ax2 = ax1.twinx()\n",
+    "\n",
+    "# Accuracy on the left axis; dashed line marks a 1% drop from the best top-5 accuracy\n",
+    "ax1.plot(data[\"ReLU_Threshold\"], data[\"Top5_Accuracy\"], label=\"Accuracy\")\n",
+    "ax1.axhline(data[\"Top5_Accuracy\"].max() - 1, color='k', linestyle='dashed', linewidth=1, label=\"Accuracy Loss = 1%\")\n",
+    "# Throughput on the right axis, normalised to the minimum measured throughput\n",
+    "ax2.plot(data[\"ReLU_Threshold\"], data[\"Throughput\"]/(data[\"Throughput\"].min()), label=\"Throughput\", color='r')\n",
+    "\n",
+    "fig.suptitle('Overview of relu thresholding for resnet18')\n",
+    "ax1.set(xlabel=\"Relu Threshold\", ylabel=\"Accuracy\")\n",
+    "ax2.set(xlabel=\"Relu Threshold\", ylabel=\"Normalised Throughput\")\n",
+    "ax1.legend(loc=\"best\")\n",
+    "ax2.legend(loc=\"best\")\n",
+    "\n",
+    "fig.savefig(\"figures/resnet18_relu_acc_vs_through.png\")\n"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": []
+  }
+ ],
+ "metadata": {
+  "kernelspec": {
+   "display_name": "sparseCNN",
+   "language": "python",
+   "name": "python3"
+  },
+  "language_info": {
+   "codemirror_mode": {
+    "name": "ipython",
+    "version": 3
+   },
+   "file_extension": ".py",
+   "mimetype": "text/x-python",
+   "name": "python",
+   "nbconvert_exporter": "python",
+   "pygments_lexer": "ipython3",
+   "version": "3.10.10"
+  },
+  "orig_nbformat": 4
+ },
+ "nbformat": 4,
+ "nbformat_minor": 2
+}