From 921537b108d8f4523ec682fdb755a4c31abd206b Mon Sep 17 00:00:00 2001 From: jamesgwen Date: Wed, 12 Jul 2023 11:17:45 -0700 Subject: [PATCH 01/50] adding platforms for build and push --- .github/workflows/publish-to-docker-hub.yml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/.github/workflows/publish-to-docker-hub.yml b/.github/workflows/publish-to-docker-hub.yml index d2ea7d23..764de922 100644 --- a/.github/workflows/publish-to-docker-hub.yml +++ b/.github/workflows/publish-to-docker-hub.yml @@ -27,9 +27,10 @@ jobs: password: ${{ secrets.DOCKERHUB_TOKEN }} - name: Build and push id: docker_build - uses: docker/build-push-action@v2.7.0 + uses: docker/build-push-action@v4 # prev 2.7.0 with: context: . + platforms: linux/amd64,linux/arm64 # new line for m1/m2 macs push: true tags: | pathml/pathml:latest From 092402f7a3b94412ccc52d6460ade522bdeb3d9b Mon Sep 17 00:00:00 2001 From: jamesgwen Date: Fri, 14 Jul 2023 09:34:35 -0700 Subject: [PATCH 02/50] updating push action version and adding platforms for mac support --- .github/workflows/publish-to-docker-hub.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/publish-to-docker-hub.yml b/.github/workflows/publish-to-docker-hub.yml index 764de922..e7424d4f 100644 --- a/.github/workflows/publish-to-docker-hub.yml +++ b/.github/workflows/publish-to-docker-hub.yml @@ -27,10 +27,10 @@ jobs: password: ${{ secrets.DOCKERHUB_TOKEN }} - name: Build and push id: docker_build - uses: docker/build-push-action@v4 # prev 2.7.0 + uses: docker/build-push-action@v4 with: context: . - platforms: linux/amd64,linux/arm64 # new line for m1/m2 macs + platforms: linux/amd64,linux/arm64 push: true tags: | pathml/pathml:latest From a6ee90339fd208e4fc666514e1f98c5fbf584005 Mon Sep 17 00:00:00 2001 From: jamesgwen Date: Mon, 17 Jul 2023 06:28:33 -0700 Subject: [PATCH 03/50] changing openslide version in environ yml --- environment.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/environment.yml b/environment.yml index 3910502f..457f49b2 100644 --- a/environment.yml +++ b/environment.yml @@ -24,7 +24,7 @@ dependencies: - protobuf==3.20.1 - deepcell==0.11.0 - opencv-contrib-python==4.5.3.56 - - openslide-python==1.1.2 + - openslide-python==1.2.0 - scanpy==1.8.2 - anndata==0.7.8 - tqdm==4.62.3 From e6bd53bc66c04858f6e43e0b3adb376fba4a145c Mon Sep 17 00:00:00 2001 From: jamesgwen Date: Mon, 17 Jul 2023 06:34:40 -0700 Subject: [PATCH 04/50] adding adjusted openslide dependency in yml --- environment.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/environment.yml b/environment.yml index 457f49b2..c140b04d 100644 --- a/environment.yml +++ b/environment.yml @@ -24,7 +24,7 @@ dependencies: - protobuf==3.20.1 - deepcell==0.11.0 - opencv-contrib-python==4.5.3.56 - - openslide-python==1.2.0 + - openslide-python==1.2.0 - scanpy==1.8.2 - anndata==0.7.8 - tqdm==4.62.3 From a57b7f66225b94a2414e585d9f5d20a2cd77cf89 Mon Sep 17 00:00:00 2001 From: jamesgwen Date: Fri, 11 Aug 2023 10:07:14 -0700 Subject: [PATCH 05/50] adding inference to init file --- pathml/__init__.py | 1 + 1 file changed, 1 insertion(+) diff --git a/pathml/__init__.py b/pathml/__init__.py index bd878e09..8c1bcc84 100644 --- a/pathml/__init__.py +++ b/pathml/__init__.py @@ -6,6 +6,7 @@ from . import datasets as ds from . import ml from . 
import preprocessing as pp +from .inference import * from ._logging import PathMLLogger from ._version import __version__ from .core import * # noqa: F403 From 6c8cf28915bcc61f015316d8e9ad4f9ae22402ea Mon Sep 17 00:00:00 2001 From: jamesgwen Date: Fri, 11 Aug 2023 10:08:23 -0700 Subject: [PATCH 06/50] add inference classes and functions --- pathml/inference/__init__.py | 6 + pathml/inference/inference.py | 314 ++++++++++++++++++++++++++++++++++ 2 files changed, 320 insertions(+) create mode 100644 pathml/inference/__init__.py create mode 100644 pathml/inference/inference.py diff --git a/pathml/inference/__init__.py b/pathml/inference/__init__.py new file mode 100644 index 00000000..84f56e04 --- /dev/null +++ b/pathml/inference/__init__.py @@ -0,0 +1,6 @@ +""" +Copyright 2023, Dana-Farber Cancer Institute and Weill Cornell Medicine +License: GNU GPL 2.0 +""" + +from .inference import * \ No newline at end of file diff --git a/pathml/inference/inference.py b/pathml/inference/inference.py new file mode 100644 index 00000000..d94915da --- /dev/null +++ b/pathml/inference/inference.py @@ -0,0 +1,314 @@ +""" +Copyright 2023, Dana-Farber Cancer Institute and Weill Cornell Medicine +License: GNU GPL 2.0 +""" + +import os +import numpy as np +from pathml.core import SlideData, Tile +from pathml.preprocessing import Pipeline +import pathml.preprocessing.transforms as Transforms +import onnx +import onnxruntime as ort +import pathml + + +def remove_initializer_from_input(model_path, new_path): + """Removes initializers from HaloAI ONNX models + Taken from https://github.com/microsoft/onnxruntime/blob/main/tools/python/remove_initializer_from_input.py + + Args: + model_path (str): path to ONNX model, + new_path (str): path to save adjusted model w/o initializers, + + Returns: + ONNX model w/o initializers to run inference using PathML + """ + + model = onnx.load(model_path) + + inputs = model.graph.input + name_to_input = {} + for onnx_input in inputs: + name_to_input[onnx_input.name] = onnx_input + + for initializer in model.graph.initializer: + if initializer.name in name_to_input: + inputs.remove(name_to_input[initializer.name]) + + onnx.save(model, new_path) + + +def check_onnx_clean(model_path): + """Checks if the model has had it's initalizers removed from input graph. + Adapted from from https://github.com/microsoft/onnxruntime/blob/main/tools/python/remove_initializer_from_input.py + + Args: + model_path (str): path to ONNX model, + + Returns: + Boolean if there are initializers in input graph. + """ + + model = onnx.load(model_path) + + inputs = model.graph.input + name_to_input = {} + for onnx_input in inputs: + name_to_input[onnx_input.name] = onnx_input + + for initializer in model.graph.initializer: + if initializer.name in name_to_input: + return True + +# Base class +# I think this should still inherit from Transforms to make the tiling easier/so we don't have to rewrite so much existing code +class InferenceBase(Transforms.Transform): + """ + Base class for all ONNX Models. + Each transform must operate on a Tile. 
+ """ + + def __init__(self): + self.model_card = { + 'name' : None, + 'num_classes' : None, + 'model_type' : None, + 'notes' : None, + 'model_input_notes': None, + 'model_output_notes' : None, + 'citation': None + } + + def __repr__(self): + return "Base class for all ONNX models" + + + def get_model_card(self): + return self.model_card + + def set_name(self, name): + self.model_card['name'] = name + + def set_num_classes(self, num): + self.model_card['num_classes'] = num + + def set_model_type(self, model_type): + self.model_card['model_type'] = model_type + + def set_notes(self, note): + self.model_card['notes'] = note + + def set_model_input_notes(self, note): + self.model_card['model_input_notes'] = note + + def set_model_output_notes(self, note): + self.model_card['model_output_notes'] = note + + def set_citation(self, citation): + self.model_card['citation'] = citation + + + def reshape(self, image): + """standard reshaping of tile image""" + # flip dimensions + # follows convention used here https://github.com/Dana-Farber-AIOS/pathml/blob/master/pathml/ml/dataset.py + + if image.ndim == 3: + # swap axes from HWC to CHW + image = image.transpose(2, 0, 1) + # add a dimesion bc onnx models usually have batch size as first dim: e.g. (1, channel, height, width) + image = np.expand_dims(image, axis = 0) + + return image + else: + # in this case, we assume that we have XYZCT channel order + # so we swap axes to TCZYX for batching + # note we are not adding a dim here for batch bc we assume that subsetting will create a batch "placeholder" dim + image = image.T + + return image + + def F(self, target): + """functional implementation""" + raise NotImplementedError + + def apply(self, tile): + """modify Tile object in-place""" + raise NotImplementedError + +# class to handle local onnx models +class Inference(InferenceBase): + """Transformation to run inferrence on ONNX model. + + Assumptions: + - The ONNX model has been cleaned by `remove_initializer_from_input` first + + Args: + model_path (str): path to ONNX model w/o initializers, + input_name (str): name of the input the ONNX model accepts + """ + def __init__(self, model_path = None, input_name = 'data', num_classes = None, model_type = None, local = True): + super().__init__() + + + self.input_name = input_name + self.num_classes = num_classes + self.model_type = model_type + self.local = local + + if self.local: + # using a local onnx model + self.model_path = model_path + else: + # if using a model from the model zoo, set the local path to a temp file + self.model_path = 'temp.onnx' + + # fill in parts of the model_card with the following info + self.model_card['num_classes'] = self.num_classes + self.model_card['model_type'] = self.model_type + + + # check if there are initializers in input graph if using a local model + if local: + if check_onnx_clean(model_path): + raise ValueError("The ONNX model still has graph initializers in the input graph. Use `remove_initializer_from_input` to remove them.") + else: + pass + + + def __repr__(self): + if self.local: + return f"Class to handle ONNX model locally stored at {self.model_path}" + else: + return f"Class to handle a {self.model_card['model_name']} from the PathML model zoo." 
+ + def inference(self, image): + + # reshape the image + image = self.reshape(image) + + # load fixed model + onnx_model = onnx.load(self.model_path) + + # check tile dimensions match ONNX input dimensions + input_node = onnx_model.graph.input + + dimensions = [] + for input in input_node: + if input.name == self.input_name: + input_shape = input.type.tensor_type.shape.dim + for dim in input_shape: + dimensions.append(dim.dim_value) + + assert image.shape[-1] == dimensions[-1] and image.shape[-2] == dimensions[-2], f'expecting tile shape of {dimensions[-2]} by {dimensions[-1]}, got {image.shape[-2]} by {image.shape[-1]}' + + # check onnx model + onnx.checker.check_model(onnx_model) + + # start an inference session + ort_sess = ort.InferenceSession(self.model_path) + + # create model output, returns a list + model_output = ort_sess.run(None, {self.input_name: image.astype('f')}) + + return model_output + + def F(self, image): + + # run inference function + prediction_map = self.inference(image) + + # single task model + if len(prediction_map) == 1: + # return first and only prediction array in the list + return prediction_map[0] + + # multi task model + else: + # concatenate prediction results + # assumes that the tasks all output prediction arrays of same dimension on H and W + # To Do: figure out solution for way different tasks such as if a model does both segmentation and classification + result_array = np.concatenate(prediction_map, axis = 1) + return result_array + + def apply(self, tile): + tile.image = self.F(tile.image) + +class HaloAIInference(Inference): + """Transformation to run inferrence on HALO AI ONNX model. + + Assumptions: + - Assumes that the ONNX model returns a tensor in which there is one prediction map for each class + - For example, if there are 5 classes, the ONNX model will output a (1, 5, Height, Weight) tensor + - If you select to argmax the classes, the class assumes a softmax or sigmoid has already been applied + - HaloAI ONNX models always have 20 class maps so you need to index into the first x maps if you have x classes + + + Args: + model_path (str): path to ONNX model w/o initializers, + num_classes (int): number of classes in the data, + input_name (str): name of the input the ONNX model accepts + """ + def __init__(self, model_path = None, input_name = 'data', num_classes = None, model_type = None, local = True): + super().__init__(model_path, input_name, num_classes, model_type, local) + + self.model_card['num_classes'] = self.num_classes + self.model_card['model_type'] = self.model_type + + + def __repr__(self): + return f"Class to handle HALO AI ONNX model locally stored at {self.model_path}" + + def F(self, image): + + prediction_map = self.inference(image) + + prediction_map = prediction_map[0][:, 0 : self.num_classes, :, :] + + return prediction_map + + def apply(self, tile): + tile.image = self.F(tile.image) + +# class to handle remote onnx models +# ToDo create function to remove model after tiling is done would be a sep line in workflow +class RemoteTestHoverNet(Inference): + """Transformation to run inferrence on ONNX model. + + Citation for model: + Pocock J, Graham S, Vu QD, Jahanifar M, Deshpande S, Hadjigeorghiou G, Shephard A, Bashir RM, Bilal M, Lu W, Epstein D. + TIAToolbox as an end-to-end library for advanced tissue image analytics. Communications medicine. 2022 Sep 24;2(1):120. 
+ + Args: + model_path (str): temp file name to download onnx from huggingface, + input_name (str): name of the input the ONNX model accepts + """ + def __init__(self, model_path = 'temp.onnx', input_name = 'data', num_classes = 5, model_type = 'Segmentation', local = False): + super().__init__(model_path, input_name, num_classes, model_type, local) + + # specify URL of the model in PathML public repository + url = 'https://huggingface.co/pathml/test/resolve/main/hovernet_fast_tiatoolbox_fixed.onnx' + + # download model, save as temp.onnx + with open(self.model_path, 'wb') as out_file: + content = requests.get(url, stream=True).content + out_file.write(content) + + + self.model_card['num_classes'] = self.num_classes + self.model_card['model_type'] = self.model_type + self.model_card['name'] = 'Tiabox HoverNet Test' + self.model_card['model_input_notes'] = 'Accepts tiles of 256 x 256' + self.model_card['citation'] = 'Pocock J, Graham S, Vu QD, Jahanifar M, Deshpande S, Hadjigeorghiou G, Shephard A, Bashir RM, Bilal M, Lu W, Epstein D. TIAToolbox as an end-to-end library for advanced tissue image analytics. Communications medicine. 2022 Sep 24;2(1):120.' + + + def __repr__(self): + return "Class to handle remote TIAToolBox HoverNet test ONNX. See model card for citation." + + def apply(self, tile): + tile.image = self.F(tile.image) + + def remove(self): + # remove the temp.onnx model + os.remove(self.model_path) \ No newline at end of file From 34237f2f1695a94305102566572a145c6adc1c37 Mon Sep 17 00:00:00 2001 From: jamesgwen Date: Fri, 11 Aug 2023 10:08:50 -0700 Subject: [PATCH 07/50] add tests for inference --- tests/inference_tests/test_inference.py | 136 ++++++++++++++++++++++++ 1 file changed, 136 insertions(+) create mode 100644 tests/inference_tests/test_inference.py diff --git a/tests/inference_tests/test_inference.py b/tests/inference_tests/test_inference.py new file mode 100644 index 00000000..950bdce4 --- /dev/null +++ b/tests/inference_tests/test_inference.py @@ -0,0 +1,136 @@ +import os +import numpy as np +import onnx +import onnxruntime as ort +import pytest + +from pathml.inference import * + + +def test_remove_initializer_from_input(): + # Create a temporary ONNX model file + model_path = "test_model.onnx" + # temp_file = tempfile.NamedTemporaryFile(delete=False) + # temp_file.close() + + # Create a sample ONNX model with initializer and graph input + model = onnx.ModelProto() + model.ir_version = 4 + + # Add inputs to the graph + input_1 = model.graph.input.add() + input_1.name = "input_1" + + input_2 = model.graph.input.add() + input_2.name = "input_2" + + # Add an initializer that matches one of the inputs + initializer = model.graph.initializer.add() + initializer.name = "input_2" + + # Save the model to a file + onnx.save(model, model_path) + + # Call the function to remove initializers + new_model_path = "new_model.onnx" + remove_initializer_from_input(model_path, new_model_path) + + # Assert that the initializer has been removed from the new model + new_model = onnx.load(new_model_path) + input_names = [input.name for input in new_model.graph.input] + assert initializer.name not in input_names + + # Clean up the temporary files + os.remove(model_path) + os.remove(new_model_path) + +def test_check_onnx_clean(): + # Create a temporary ONNX model file + model_path = "test_model.onnx" + # temp_file = tempfile.NamedTemporaryFile(delete=False) + # temp_file.close() + + # Create a sample ONNX model with initializer and graph input + model = onnx.ModelProto() + 
model.ir_version = 4 + + # Add inputs to the graph + input_1 = model.graph.input.add() + input_1.name = "input_1" + + input_2 = model.graph.input.add() + input_2.name = "input_2" + + # Add an initializer that matches one of the inputs + initializer = model.graph.initializer.add() + initializer.name = "input_2" + + # Save the model to a file + onnx.save(model, model_path) + + if check_onnx_clean(model_path): + pass + else: + raise ValueError('check_onnx_clean function is not working') + + # Clean up the temporary files + os.remove(model_path) + +def test_InferenceBase(): + + # initialize InferenceBase + test = InferenceBase() + + # test setter functions + test.set_name('name') + + test.set_num_classes('num_classes') + + test.set_model_type('model_type') + + test.set_notes('notes') + + test.set_model_input_notes('model_input_notes') + + test.set_model_output_notes('model_output_notes') + + test.set_citation('citation') + + for key in test.model_card: + assert key == test.model_card[key], f"function for {key} is not working" + + # test reshape function + random = np.random.rand(1,2,3) + assert test.reshape(random).shape == (1, 3, 1, 2), "reshape function is not working on 3d arrays" + + random = np.random.rand(1,2,3,4,5) + assert test.reshape(random).shape == (5,4,3,2,1), "reshape function is not working on 5d arrays" + +def test_Inference(tileHE): + + new_path = '../random_model.onnx' + + inference = Inference(model_path = new_path, input_name = 'data', num_classes = 1, model_type = 'segmentation') + + orig_im = tileHE.image + inference.apply(tileHE) + assert np.array_equal(tileHE.image, inference.F(orig_im)) + +def test_HaloAIInference(tileHE): + + new_path = '../random_model.onnx' + + inference = HaloAIInference(model_path = new_path, input_name = 'data', num_classes = 1, model_type = 'segmentation') + orig_im = tileHE.image + inference.apply(tileHE) + assert np.array_equal(tileHE.image, inference.F(orig_im)) + +def test_RemoteTestHoverNet(tileHE): + + inference = RemoteTestHoverNet() + + orig_im = tileHE.image + inference.apply(tileHE) + assert np.array_equal(tileHE.image, inference.F(orig_im)) + + inference.remove() \ No newline at end of file From 980c6620f87a22034017aa44104bbb9f25685abd Mon Sep 17 00:00:00 2001 From: jamesgwen Date: Fri, 11 Aug 2023 10:15:15 -0700 Subject: [PATCH 08/50] fixed import statements --- pathml/inference/__init__.py | 9 ++++++++- pathml/inference/inference.py | 2 +- 2 files changed, 9 insertions(+), 2 deletions(-) diff --git a/pathml/inference/__init__.py b/pathml/inference/__init__.py index 84f56e04..b4b9c58e 100644 --- a/pathml/inference/__init__.py +++ b/pathml/inference/__init__.py @@ -3,4 +3,11 @@ License: GNU GPL 2.0 """ -from .inference import * \ No newline at end of file +from .inference import ( + remove_initializer_from_input, + check_onnx_clean, + InferenceBase, + Inference, + HaloAIInference, + RemoteTestHoverNet +) \ No newline at end of file diff --git a/pathml/inference/inference.py b/pathml/inference/inference.py index d94915da..3020320e 100644 --- a/pathml/inference/inference.py +++ b/pathml/inference/inference.py @@ -11,7 +11,7 @@ import onnx import onnxruntime as ort import pathml - +import requests def remove_initializer_from_input(model_path, new_path): """Removes initializers from HaloAI ONNX models From 77d5d4d82c27f3dcbf5b31a1e179a0f924387bd9 Mon Sep 17 00:00:00 2001 From: jamesgwen Date: Fri, 11 Aug 2023 10:20:26 -0700 Subject: [PATCH 09/50] fixing import statements in the init and the test inference --- pathml/__init__.py | 2 +- 
tests/inference_tests/test_inference.py | 9 ++++++++- 2 files changed, 9 insertions(+), 2 deletions(-) diff --git a/pathml/__init__.py b/pathml/__init__.py index 8c1bcc84..0ae82c6b 100644 --- a/pathml/__init__.py +++ b/pathml/__init__.py @@ -6,7 +6,7 @@ from . import datasets as ds from . import ml from . import preprocessing as pp -from .inference import * +import .inference from ._logging import PathMLLogger from ._version import __version__ from .core import * # noqa: F403 diff --git a/tests/inference_tests/test_inference.py b/tests/inference_tests/test_inference.py index 950bdce4..d6b99cc8 100644 --- a/tests/inference_tests/test_inference.py +++ b/tests/inference_tests/test_inference.py @@ -4,7 +4,14 @@ import onnxruntime as ort import pytest -from pathml.inference import * +from pathml.inference import ( + remove_initializer_from_input, + check_onnx_clean, + InferenceBase, + Inference, + HaloAIInference, + RemoteTestHoverNet +) def test_remove_initializer_from_input(): From b3d295a8ae2547551e0c3d3635e13c02f7d551f4 Mon Sep 17 00:00:00 2001 From: jamesgwen Date: Fri, 11 Aug 2023 10:24:33 -0700 Subject: [PATCH 10/50] fixing init files --- pathml/__init__.py | 4 ++-- pathml/inference/__init__.py | 8 ++++---- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/pathml/__init__.py b/pathml/__init__.py index 0ae82c6b..1889e4be 100644 --- a/pathml/__init__.py +++ b/pathml/__init__.py @@ -6,7 +6,7 @@ from . import datasets as ds from . import ml from . import preprocessing as pp -import .inference +from . import inference from ._logging import PathMLLogger from ._version import __version__ -from .core import * # noqa: F403 +from .core import * # noqa: F403 \ No newline at end of file diff --git a/pathml/inference/__init__.py b/pathml/inference/__init__.py index b4b9c58e..59b2e4fe 100644 --- a/pathml/inference/__init__.py +++ b/pathml/inference/__init__.py @@ -5,9 +5,9 @@ from .inference import ( remove_initializer_from_input, - check_onnx_clean, - InferenceBase, - Inference, - HaloAIInference, + check_onnx_clean, + InferenceBase + Inference, + HaloAIInference, RemoteTestHoverNet ) \ No newline at end of file From d144d918529fee58603add10733d236abbc1b07b Mon Sep 17 00:00:00 2001 From: jamesgwen Date: Fri, 11 Aug 2023 10:28:43 -0700 Subject: [PATCH 11/50] fixing comma in inference init --- pathml/inference/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pathml/inference/__init__.py b/pathml/inference/__init__.py index 59b2e4fe..aca4d69b 100644 --- a/pathml/inference/__init__.py +++ b/pathml/inference/__init__.py @@ -6,7 +6,7 @@ from .inference import ( remove_initializer_from_input, check_onnx_clean, - InferenceBase + InferenceBase, Inference, HaloAIInference, RemoteTestHoverNet From 51841aa54893701433db2c382a11a3accd7008d8 Mon Sep 17 00:00:00 2001 From: jamesgwen Date: Fri, 11 Aug 2023 10:35:21 -0700 Subject: [PATCH 12/50] getting rid of unused packages --- pathml/inference/inference.py | 7 ++----- tests/inference_tests/test_inference.py | 2 -- 2 files changed, 2 insertions(+), 7 deletions(-) diff --git a/pathml/inference/inference.py b/pathml/inference/inference.py index 3020320e..15c094da 100644 --- a/pathml/inference/inference.py +++ b/pathml/inference/inference.py @@ -5,12 +5,9 @@ import os import numpy as np -from pathml.core import SlideData, Tile -from pathml.preprocessing import Pipeline import pathml.preprocessing.transforms as Transforms import onnx -import onnxruntime as ort -import pathml +import onnxruntime import requests def 
remove_initializer_from_input(model_path, new_path): @@ -207,7 +204,7 @@ def inference(self, image): onnx.checker.check_model(onnx_model) # start an inference session - ort_sess = ort.InferenceSession(self.model_path) + ort_sess = onnxruntime.InferenceSession(self.model_path) # create model output, returns a list model_output = ort_sess.run(None, {self.input_name: image.astype('f')}) diff --git a/tests/inference_tests/test_inference.py b/tests/inference_tests/test_inference.py index d6b99cc8..0d1b6ec1 100644 --- a/tests/inference_tests/test_inference.py +++ b/tests/inference_tests/test_inference.py @@ -1,8 +1,6 @@ import os import numpy as np import onnx -import onnxruntime as ort -import pytest from pathml.inference import ( remove_initializer_from_input, From 6044024243c555b3e74bcd16ada06796723ca247 Mon Sep 17 00:00:00 2001 From: jamesgwen Date: Fri, 11 Aug 2023 10:53:37 -0700 Subject: [PATCH 13/50] fixed files via black --- pathml/inference/__init__.py | 2 +- pathml/inference/inference.py | 312 +++++++++++++----------- tests/inference_tests/test_inference.py | 116 +++++---- 3 files changed, 236 insertions(+), 194 deletions(-) diff --git a/pathml/inference/__init__.py b/pathml/inference/__init__.py index aca4d69b..bf14de1f 100644 --- a/pathml/inference/__init__.py +++ b/pathml/inference/__init__.py @@ -9,5 +9,5 @@ InferenceBase, Inference, HaloAIInference, - RemoteTestHoverNet + RemoteTestHoverNet, ) \ No newline at end of file diff --git a/pathml/inference/inference.py b/pathml/inference/inference.py index 15c094da..880a4cbf 100644 --- a/pathml/inference/inference.py +++ b/pathml/inference/inference.py @@ -4,24 +4,25 @@ """ import os -import numpy as np +import numpy as np import pathml.preprocessing.transforms as Transforms import onnx -import onnxruntime -import requests +import onnxruntime +import requests + def remove_initializer_from_input(model_path, new_path): """Removes initializers from HaloAI ONNX models Taken from https://github.com/microsoft/onnxruntime/blob/main/tools/python/remove_initializer_from_input.py - + Args: model_path (str): path to ONNX model, new_path (str): path to save adjusted model w/o initializers, Returns: ONNX model w/o initializers to run inference using PathML - """ - + """ + model = onnx.load(model_path) inputs = model.graph.input @@ -35,18 +36,18 @@ def remove_initializer_from_input(model_path, new_path): onnx.save(model, new_path) - + def check_onnx_clean(model_path): - """Checks if the model has had it's initalizers removed from input graph. + """Checks if the model has had it's initalizers removed from input graph. Adapted from from https://github.com/microsoft/onnxruntime/blob/main/tools/python/remove_initializer_from_input.py - + Args: model_path (str): path to ONNX model, Returns: - Boolean if there are initializers in input graph. - """ - + Boolean if there are initializers in input graph. + """ + model = onnx.load(model_path) inputs = model.graph.input @@ -56,75 +57,74 @@ def check_onnx_clean(model_path): for initializer in model.graph.initializer: if initializer.name in name_to_input: - return True + return True + # Base class -# I think this should still inherit from Transforms to make the tiling easier/so we don't have to rewrite so much existing code +# I think this should still inherit from Transforms to make the tiling easier/so we don't have to rewrite so much existing code class InferenceBase(Transforms.Transform): """ Base class for all ONNX Models. Each transform must operate on a Tile. 
""" - - def __init__(self): + + def __init__(self): self.model_card = { - 'name' : None, - 'num_classes' : None, - 'model_type' : None, - 'notes' : None, - 'model_input_notes': None, - 'model_output_notes' : None, - 'citation': None - } + "name": None, + "num_classes": None, + "model_type": None, + "notes": None, + "model_input_notes": None, + "model_output_notes": None, + "citation": None, + } def __repr__(self): return "Base class for all ONNX models" - - + def get_model_card(self): return self.model_card - + def set_name(self, name): - self.model_card['name'] = name - + self.model_card["name"] = name + def set_num_classes(self, num): - self.model_card['num_classes'] = num - + self.model_card["num_classes"] = num + def set_model_type(self, model_type): - self.model_card['model_type'] = model_type - + self.model_card["model_type"] = model_type + def set_notes(self, note): - self.model_card['notes'] = note - + self.model_card["notes"] = note + def set_model_input_notes(self, note): - self.model_card['model_input_notes'] = note - + self.model_card["model_input_notes"] = note + def set_model_output_notes(self, note): - self.model_card['model_output_notes'] = note - + self.model_card["model_output_notes"] = note + def set_citation(self, citation): - self.model_card['citation'] = citation - - + self.model_card["citation"] = citation + def reshape(self, image): """standard reshaping of tile image""" - # flip dimensions + # flip dimensions # follows convention used here https://github.com/Dana-Farber-AIOS/pathml/blob/master/pathml/ml/dataset.py - + if image.ndim == 3: - # swap axes from HWC to CHW + # swap axes from HWC to CHW image = image.transpose(2, 0, 1) - # add a dimesion bc onnx models usually have batch size as first dim: e.g. (1, channel, height, width) - image = np.expand_dims(image, axis = 0) - - return image + # add a dimesion bc onnx models usually have batch size as first dim: e.g. (1, channel, height, width) + image = np.expand_dims(image, axis=0) + + return image else: # in this case, we assume that we have XYZCT channel order # so we swap axes to TCZYX for batching # note we are not adding a dim here for batch bc we assume that subsetting will create a batch "placeholder" dim image = image.T - - return image + + return image def F(self, target): """functional implementation""" @@ -133,179 +133,203 @@ def F(self, target): def apply(self, tile): """modify Tile object in-place""" raise NotImplementedError - -# class to handle local onnx models + + +# class to handle local onnx models class Inference(InferenceBase): """Transformation to run inferrence on ONNX model. 
- + Assumptions: - - The ONNX model has been cleaned by `remove_initializer_from_input` first - + - The ONNX model has been cleaned by `remove_initializer_from_input` first + Args: model_path (str): path to ONNX model w/o initializers, input_name (str): name of the input the ONNX model accepts - """ - def __init__(self, model_path = None, input_name = 'data', num_classes = None, model_type = None, local = True): - super().__init__() - - - self.input_name = input_name + """ + + def __init__( + self, + model_path=None, + input_name="data", + num_classes=None, + model_type=None, + local=True, + ): + super().__init__() + + self.input_name = input_name self.num_classes = num_classes self.model_type = model_type self.local = local - + if self.local: # using a local onnx model self.model_path = model_path - else: + else: # if using a model from the model zoo, set the local path to a temp file - self.model_path = 'temp.onnx' - + self.model_path = "temp.onnx" + # fill in parts of the model_card with the following info - self.model_card['num_classes'] = self.num_classes - self.model_card['model_type'] = self.model_type - - + self.model_card["num_classes"] = self.num_classes + self.model_card["model_type"] = self.model_type + # check if there are initializers in input graph if using a local model if local: if check_onnx_clean(model_path): - raise ValueError("The ONNX model still has graph initializers in the input graph. Use `remove_initializer_from_input` to remove them.") + raise ValueError( + "The ONNX model still has graph initializers in the input graph. Use `remove_initializer_from_input` to remove them." + ) else: - pass + pass - def __repr__(self): if self.local: return f"Class to handle ONNX model locally stored at {self.model_path}" else: return f"Class to handle a {self.model_card['model_name']} from the PathML model zoo." 
- + def inference(self, image): - # reshape the image - image = self.reshape(image) - - # load fixed model + image = self.reshape(image) + + # load fixed model onnx_model = onnx.load(self.model_path) - - # check tile dimensions match ONNX input dimensions + + # check tile dimensions match ONNX input dimensions input_node = onnx_model.graph.input - + dimensions = [] - for input in input_node: - if input.name == self.input_name: + for input in input_node: + if input.name == self.input_name: input_shape = input.type.tensor_type.shape.dim for dim in input_shape: - dimensions.append(dim.dim_value) - - assert image.shape[-1] == dimensions[-1] and image.shape[-2] == dimensions[-2], f'expecting tile shape of {dimensions[-2]} by {dimensions[-1]}, got {image.shape[-2]} by {image.shape[-1]}' - + dimensions.append(dim.dim_value) + + assert ( + image.shape[-1] == dimensions[-1] and image.shape[-2] == dimensions[-2] + ), f"expecting tile shape of {dimensions[-2]} by {dimensions[-1]}, got {image.shape[-2]} by {image.shape[-1]}" + # check onnx model onnx.checker.check_model(onnx_model) - + # start an inference session ort_sess = onnxruntime.InferenceSession(self.model_path) - - # create model output, returns a list - model_output = ort_sess.run(None, {self.input_name: image.astype('f')}) - + + # create model output, returns a list + model_output = ort_sess.run(None, {self.input_name: image.astype("f")}) + return model_output - + def F(self, image): - # run inference function - prediction_map = self.inference(image) - + prediction_map = self.inference(image) + # single task model if len(prediction_map) == 1: - # return first and only prediction array in the list - return prediction_map[0] - + # return first and only prediction array in the list + return prediction_map[0] + # multi task model else: # concatenate prediction results # assumes that the tasks all output prediction arrays of same dimension on H and W # To Do: figure out solution for way different tasks such as if a model does both segmentation and classification - result_array = np.concatenate(prediction_map, axis = 1) - return result_array - + result_array = np.concatenate(prediction_map, axis=1) + return result_array + def apply(self, tile): tile.image = self.F(tile.image) + class HaloAIInference(Inference): """Transformation to run inferrence on HALO AI ONNX model. 
- + Assumptions: - Assumes that the ONNX model returns a tensor in which there is one prediction map for each class - For example, if there are 5 classes, the ONNX model will output a (1, 5, Height, Weight) tensor - If you select to argmax the classes, the class assumes a softmax or sigmoid has already been applied - HaloAI ONNX models always have 20 class maps so you need to index into the first x maps if you have x classes - - + + Args: model_path (str): path to ONNX model w/o initializers, - num_classes (int): number of classes in the data, + num_classes (int): number of classes in the data, input_name (str): name of the input the ONNX model accepts - """ - def __init__(self, model_path = None, input_name = 'data', num_classes = None, model_type = None, local = True): - super().__init__(model_path, input_name, num_classes, model_type, local) - - self.model_card['num_classes'] = self.num_classes - self.model_card['model_type'] = self.model_type - - + """ + + def __init__( + self, + model_path=None, + input_name="data", + num_classes=None, + model_type=None, + local=True, + ): + super().__init__(model_path, input_name, num_classes, model_type, local) + + self.model_card["num_classes"] = self.num_classes + self.model_card["model_type"] = self.model_type + def __repr__(self): return f"Class to handle HALO AI ONNX model locally stored at {self.model_path}" - + def F(self, image): - prediction_map = self.inference(image) - + prediction_map = prediction_map[0][:, 0 : self.num_classes, :, :] - + return prediction_map - + def apply(self, tile): tile.image = self.F(tile.image) - -# class to handle remote onnx models + + +# class to handle remote onnx models # ToDo create function to remove model after tiling is done would be a sep line in workflow class RemoteTestHoverNet(Inference): """Transformation to run inferrence on ONNX model. - + Citation for model: - Pocock J, Graham S, Vu QD, Jahanifar M, Deshpande S, Hadjigeorghiou G, Shephard A, Bashir RM, Bilal M, Lu W, Epstein D. + Pocock J, Graham S, Vu QD, Jahanifar M, Deshpande S, Hadjigeorghiou G, Shephard A, Bashir RM, Bilal M, Lu W, Epstein D. TIAToolbox as an end-to-end library for advanced tissue image analytics. Communications medicine. 2022 Sep 24;2(1):120. 
- + Args: model_path (str): temp file name to download onnx from huggingface, input_name (str): name of the input the ONNX model accepts - """ - def __init__(self, model_path = 'temp.onnx', input_name = 'data', num_classes = 5, model_type = 'Segmentation', local = False): + """ + + def __init__( + self, + model_path="temp.onnx", + input_name="data", + num_classes=5, + model_type="Segmentation", + local=False, + ): super().__init__(model_path, input_name, num_classes, model_type, local) - + # specify URL of the model in PathML public repository - url = 'https://huggingface.co/pathml/test/resolve/main/hovernet_fast_tiatoolbox_fixed.onnx' - - # download model, save as temp.onnx - with open(self.model_path, 'wb') as out_file: + url = "https://huggingface.co/pathml/test/resolve/main/hovernet_fast_tiatoolbox_fixed.onnx" + + # download model, save as temp.onnx + with open(self.model_path, "wb") as out_file: content = requests.get(url, stream=True).content out_file.write(content) - - - self.model_card['num_classes'] = self.num_classes - self.model_card['model_type'] = self.model_type - self.model_card['name'] = 'Tiabox HoverNet Test' - self.model_card['model_input_notes'] = 'Accepts tiles of 256 x 256' - self.model_card['citation'] = 'Pocock J, Graham S, Vu QD, Jahanifar M, Deshpande S, Hadjigeorghiou G, Shephard A, Bashir RM, Bilal M, Lu W, Epstein D. TIAToolbox as an end-to-end library for advanced tissue image analytics. Communications medicine. 2022 Sep 24;2(1):120.' - - + + self.model_card["num_classes"] = self.num_classes + self.model_card["model_type"] = self.model_type + self.model_card["name"] = "Tiabox HoverNet Test" + self.model_card["model_input_notes"] = "Accepts tiles of 256 x 256" + self.model_card[ + "citation" + ] = "Pocock J, Graham S, Vu QD, Jahanifar M, Deshpande S, Hadjigeorghiou G, Shephard A, Bashir RM, Bilal M, Lu W, Epstein D. TIAToolbox as an end-to-end library for advanced tissue image analytics. Communications medicine. 2022 Sep 24;2(1):120." + def __repr__(self): return "Class to handle remote TIAToolBox HoverNet test ONNX. See model card for citation." 
- + def apply(self, tile): tile.image = self.F(tile.image) - + def remove(self): - # remove the temp.onnx model - os.remove(self.model_path) \ No newline at end of file + # remove the temp.onnx model + os.remove(self.model_path) \ No newline at end of file diff --git a/tests/inference_tests/test_inference.py b/tests/inference_tests/test_inference.py index 0d1b6ec1..a91bb1b7 100644 --- a/tests/inference_tests/test_inference.py +++ b/tests/inference_tests/test_inference.py @@ -1,14 +1,16 @@ import os import numpy as np import onnx +import onnxruntime as ort +import pytest -from pathml.inference import ( +from pathml.inference import ( remove_initializer_from_input, - check_onnx_clean, - InferenceBase, - Inference, - HaloAIInference, - RemoteTestHoverNet + check_onnx_clean, + InferenceBase, + Inference, + HaloAIInference, + RemoteTestHoverNet, ) @@ -43,12 +45,13 @@ def test_remove_initializer_from_input(): # Assert that the initializer has been removed from the new model new_model = onnx.load(new_model_path) input_names = [input.name for input in new_model.graph.input] - assert initializer.name not in input_names + assert initializer.name not in input_names # Clean up the temporary files os.remove(model_path) os.remove(new_model_path) - + + def test_check_onnx_clean(): # Create a temporary ONNX model file model_path = "test_model.onnx" @@ -76,66 +79,81 @@ def test_check_onnx_clean(): if check_onnx_clean(model_path): pass else: - raise ValueError('check_onnx_clean function is not working') + raise ValueError("check_onnx_clean function is not working") # Clean up the temporary files os.remove(model_path) - -def test_InferenceBase(): - + + +def test_InferenceBase(): # initialize InferenceBase test = InferenceBase() - - # test setter functions - test.set_name('name') - - test.set_num_classes('num_classes') - - test.set_model_type('model_type') - - test.set_notes('notes') - - test.set_model_input_notes('model_input_notes') - - test.set_model_output_notes('model_output_notes') - - test.set_citation('citation') - + + # test setter functions + test.set_name("name") + + test.set_num_classes("num_classes") + + test.set_model_type("model_type") + + test.set_notes("notes") + + test.set_model_input_notes("model_input_notes") + + test.set_model_output_notes("model_output_notes") + + test.set_citation("citation") + for key in test.model_card: assert key == test.model_card[key], f"function for {key} is not working" - - # test reshape function - random = np.random.rand(1,2,3) - assert test.reshape(random).shape == (1, 3, 1, 2), "reshape function is not working on 3d arrays" - - random = np.random.rand(1,2,3,4,5) - assert test.reshape(random).shape == (5,4,3,2,1), "reshape function is not working on 5d arrays" - -def test_Inference(tileHE): - - new_path = '../random_model.onnx' - - inference = Inference(model_path = new_path, input_name = 'data', num_classes = 1, model_type = 'segmentation') - + + # test reshape function + random = np.random.rand(1, 2, 3) + assert test.reshape(random).shape == ( + 1, + 3, + 1, + 2, + ), "reshape function is not working on 3d arrays" + + random = np.random.rand(1, 2, 3, 4, 5) + assert test.reshape(random).shape == ( + 5, + 4, + 3, + 2, + 1, + ), "reshape function is not working on 5d arrays" + + +def test_Inference(tileHE): + new_path = "../random_model.onnx" + + inference = Inference( + model_path=new_path, input_name="data", num_classes=1, model_type="segmentation" + ) + orig_im = tileHE.image inference.apply(tileHE) assert np.array_equal(tileHE.image, inference.F(orig_im)) - 
-def test_HaloAIInference(tileHE): - new_path = '../random_model.onnx' - inference = HaloAIInference(model_path = new_path, input_name = 'data', num_classes = 1, model_type = 'segmentation') +def test_HaloAIInference(tileHE): + new_path = "../random_model.onnx" + + inference = HaloAIInference( + model_path=new_path, input_name="data", num_classes=1, model_type="segmentation" + ) orig_im = tileHE.image inference.apply(tileHE) assert np.array_equal(tileHE.image, inference.F(orig_im)) -def test_RemoteTestHoverNet(tileHE): +def test_RemoteTestHoverNet(tileHE): inference = RemoteTestHoverNet() orig_im = tileHE.image inference.apply(tileHE) assert np.array_equal(tileHE.image, inference.F(orig_im)) - - inference.remove() \ No newline at end of file + + inference.remove() \ No newline at end of file From e6002d2c3e0ad0e38b9d867b956025db2b76f11a Mon Sep 17 00:00:00 2001 From: jamesgwen Date: Fri, 11 Aug 2023 11:03:56 -0700 Subject: [PATCH 14/50] fixed spacing and alpha order --- pathml/__init__.py | 7 +++---- pathml/inference/__init__.py | 10 +++++----- pathml/inference/inference.py | 7 +++++-- tests/inference_tests/test_inference.py | 14 +++++++------- 4 files changed, 20 insertions(+), 18 deletions(-) diff --git a/pathml/__init__.py b/pathml/__init__.py index 1889e4be..5865c6ea 100644 --- a/pathml/__init__.py +++ b/pathml/__init__.py @@ -1,12 +1,11 @@ """ -Copyright 2021, Dana-Farber Cancer Institute and Weill Cornell Medicine +Copyright 2023, Dana-Farber Cancer Institute and Weill Cornell Medicine License: GNU GPL 2.0 """ from . import datasets as ds -from . import ml +from . import inference, ml from . import preprocessing as pp -from . import inference from ._logging import PathMLLogger from ._version import __version__ -from .core import * # noqa: F403 \ No newline at end of file +from .core import * # noqa: F403 diff --git a/pathml/inference/__init__.py b/pathml/inference/__init__.py index bf14de1f..dd2b9a39 100644 --- a/pathml/inference/__init__.py +++ b/pathml/inference/__init__.py @@ -4,10 +4,10 @@ """ from .inference import ( - remove_initializer_from_input, - check_onnx_clean, - InferenceBase, - Inference, HaloAIInference, + Inference, + InferenceBase, RemoteTestHoverNet, -) \ No newline at end of file + check_onnx_clean, + remove_initializer_from_input, +) diff --git a/pathml/inference/inference.py b/pathml/inference/inference.py index 880a4cbf..4274bc35 100644 --- a/pathml/inference/inference.py +++ b/pathml/inference/inference.py @@ -4,12 +4,15 @@ """ import os + import numpy as np -import pathml.preprocessing.transforms as Transforms import onnx import onnxruntime import requests +import pathml.preprocessing.transforms as Transforms + + def remove_initializer_from_input(model_path, new_path): """Removes initializers from HaloAI ONNX models @@ -332,4 +335,4 @@ def apply(self, tile): def remove(self): # remove the temp.onnx model - os.remove(self.model_path) \ No newline at end of file + os.remove(self.model_path) diff --git a/tests/inference_tests/test_inference.py b/tests/inference_tests/test_inference.py index a91bb1b7..4fef292e 100644 --- a/tests/inference_tests/test_inference.py +++ b/tests/inference_tests/test_inference.py @@ -1,19 +1,19 @@ import os + import numpy as np import onnx -import onnxruntime as ort -import pytest from pathml.inference import ( - remove_initializer_from_input, - check_onnx_clean, - InferenceBase, - Inference, HaloAIInference, + Inference, + InferenceBase, RemoteTestHoverNet, + check_onnx_clean, + remove_initializer_from_input, ) + def 
test_remove_initializer_from_input(): # Create a temporary ONNX model file model_path = "test_model.onnx" @@ -156,4 +156,4 @@ def test_RemoteTestHoverNet(tileHE): inference.apply(tileHE) assert np.array_equal(tileHE.image, inference.F(orig_im)) - inference.remove() \ No newline at end of file + inference.remove() From 35a3df1611e8b85dd942ca27f06f20c3e4ee9c2a Mon Sep 17 00:00:00 2001 From: jamesgwen Date: Fri, 11 Aug 2023 11:25:06 -0700 Subject: [PATCH 15/50] added dependencies --- environment.yml | 2 ++ pathml/inference/inference.py | 1 - 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/environment.yml b/environment.yml index c140b04d..fc11831e 100644 --- a/environment.yml +++ b/environment.yml @@ -23,6 +23,8 @@ dependencies: - python-javabridge==4.0.0 - protobuf==3.20.1 - deepcell==0.11.0 + - onnx==1.14.0 + - onnxruntime==1.15.1 - opencv-contrib-python==4.5.3.56 - openslide-python==1.2.0 - scanpy==1.8.2 diff --git a/pathml/inference/inference.py b/pathml/inference/inference.py index 4274bc35..83ec0093 100644 --- a/pathml/inference/inference.py +++ b/pathml/inference/inference.py @@ -13,7 +13,6 @@ import pathml.preprocessing.transforms as Transforms - def remove_initializer_from_input(model_path, new_path): """Removes initializers from HaloAI ONNX models Taken from https://github.com/microsoft/onnxruntime/blob/main/tools/python/remove_initializer_from_input.py From c015ff866faf0a3a669404ee9c8845686291fb79 Mon Sep 17 00:00:00 2001 From: jamesgwen Date: Fri, 11 Aug 2023 11:33:40 -0700 Subject: [PATCH 16/50] re black the test inference file --- tests/inference_tests/test_inference.py | 1 - 1 file changed, 1 deletion(-) diff --git a/tests/inference_tests/test_inference.py b/tests/inference_tests/test_inference.py index 4fef292e..27446324 100644 --- a/tests/inference_tests/test_inference.py +++ b/tests/inference_tests/test_inference.py @@ -13,7 +13,6 @@ ) - def test_remove_initializer_from_input(): # Create a temporary ONNX model file model_path = "test_model.onnx" From 0dd09b8a3dda2420eaa607e61aac7f464cb8c1aa Mon Sep 17 00:00:00 2001 From: jamesgwen Date: Mon, 14 Aug 2023 07:47:53 -0700 Subject: [PATCH 17/50] update protobuf version in envi yml --- environment.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/environment.yml b/environment.yml index fc11831e..2cfddff9 100644 --- a/environment.yml +++ b/environment.yml @@ -21,7 +21,7 @@ dependencies: - pip: - python-bioformats==4.0.0 - python-javabridge==4.0.0 - - protobuf==3.20.1 + - protobuf==3.20.2 - deepcell==0.11.0 - onnx==1.14.0 - onnxruntime==1.15.1 From 5657efbe2b4444012c9d25895baf3354313143df Mon Sep 17 00:00:00 2001 From: jamesgwen Date: Mon, 14 Aug 2023 08:01:11 -0700 Subject: [PATCH 18/50] changing numpy version --- environment.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/environment.yml b/environment.yml index 2cfddff9..2541b9b5 100644 --- a/environment.yml +++ b/environment.yml @@ -6,7 +6,7 @@ channels: dependencies: - pip==21.3.1 - - numpy==1.19.5 + - numpy==1.21.6 - scipy==1.7.3 - scikit-image==0.18.3 - matplotlib==3.5.1 From 0b7df6b827ffa3ce72cdcc42417fb47168737705 Mon Sep 17 00:00:00 2001 From: jamesgwen Date: Mon, 14 Aug 2023 08:35:04 -0700 Subject: [PATCH 19/50] adjusting deepcel version --- environment.yml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/environment.yml b/environment.yml index 2541b9b5..258ed401 100644 --- a/environment.yml +++ b/environment.yml @@ -22,7 +22,8 @@ dependencies: - python-bioformats==4.0.0 - python-javabridge==4.0.0 - 
protobuf==3.20.2 - - deepcell==0.11.0 + # - deepcell==0.11.0 + - deepcell==0.12.7 - onnx==1.14.0 - onnxruntime==1.15.1 - opencv-contrib-python==4.5.3.56 From 4612a68396c9ef125ca8df4928339dfc2956b22f Mon Sep 17 00:00:00 2001 From: jamesgwen Date: Mon, 14 Aug 2023 09:03:00 -0700 Subject: [PATCH 20/50] deleted comment from yml file --- environment.yml | 1 - 1 file changed, 1 deletion(-) diff --git a/environment.yml b/environment.yml index 258ed401..cc4308a6 100644 --- a/environment.yml +++ b/environment.yml @@ -22,7 +22,6 @@ dependencies: - python-bioformats==4.0.0 - python-javabridge==4.0.0 - protobuf==3.20.2 - # - deepcell==0.11.0 - deepcell==0.12.7 - onnx==1.14.0 - onnxruntime==1.15.1 From 4cd57a5844cf2219daf09bffb98db139a28a27f1 Mon Sep 17 00:00:00 2001 From: jamesgwen Date: Mon, 14 Aug 2023 09:24:16 -0700 Subject: [PATCH 21/50] updating pytest version --- environment.yml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/environment.yml b/environment.yml index cc4308a6..d00aca02 100644 --- a/environment.yml +++ b/environment.yml @@ -15,7 +15,8 @@ dependencies: - h5py==3.1.0 - dask==2021.12.0 - pydicom==2.2.2 - - pytest==6.2.5 + # - pytest==6.2.5 + - pytest==7.4.0 - pre-commit==2.16.0 - coverage==5.5 - pip: From 05f19e76ed83e83ee549024147af47d28222ea75 Mon Sep 17 00:00:00 2001 From: jamesgwen Date: Mon, 14 Aug 2023 09:51:04 -0700 Subject: [PATCH 22/50] setting pandas version --- environment.yml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/environment.yml b/environment.yml index d00aca02..a8c7a5b9 100644 --- a/environment.yml +++ b/environment.yml @@ -27,7 +27,8 @@ dependencies: - onnx==1.14.0 - onnxruntime==1.15.1 - opencv-contrib-python==4.5.3.56 - - openslide-python==1.2.0 + - openslide-python==1.2.0 + - pandas==1.5.2 - scanpy==1.8.2 - anndata==0.7.8 - tqdm==4.62.3 From ddb1b07d537f419f3a64e7453cc9b0077704c50b Mon Sep 17 00:00:00 2001 From: jamesgwen Date: Mon, 14 Aug 2023 10:11:04 -0700 Subject: [PATCH 23/50] adding test onnx and fixing path in test file --- tests/inference_tests/test_inference.py | 4 ++-- tests/testdata/random_model.onnx | 23 +++++++++++++++++++++++ 2 files changed, 25 insertions(+), 2 deletions(-) create mode 100644 tests/testdata/random_model.onnx diff --git a/tests/inference_tests/test_inference.py b/tests/inference_tests/test_inference.py index 27446324..03e1cdc3 100644 --- a/tests/inference_tests/test_inference.py +++ b/tests/inference_tests/test_inference.py @@ -126,7 +126,7 @@ def test_InferenceBase(): def test_Inference(tileHE): - new_path = "../random_model.onnx" + new_path = "../testdata/andom_model.onnx" inference = Inference( model_path=new_path, input_name="data", num_classes=1, model_type="segmentation" @@ -138,7 +138,7 @@ def test_Inference(tileHE): def test_HaloAIInference(tileHE): - new_path = "../random_model.onnx" + new_path = "../testdata/andom_model.onnx" inference = HaloAIInference( model_path=new_path, input_name="data", num_classes=1, model_type="segmentation" diff --git a/tests/testdata/random_model.onnx b/tests/testdata/random_model.onnx new file mode 100644 index 00000000..3f028573 --- /dev/null +++ b/tests/testdata/random_model.onnx @@ -0,0 +1,23 @@ +pytorch2.0.0:ÿ +‘ +data + conv.weight + conv.bias3 +/conv/Conv"Conv* + dilations@@ * +group * + kernel_shape@@ * +pads@@@@ * +strides@@  torch_jit*…B conv.weightJlÚã>½¦=ãŒ*½…&¬= R¼N¹½Qp€=R·.¾p&r½Dè2>Ì4Å=d2#½ÝS½é‡?>˜ä<}2r½ù).<|V~;¾±ý@½HE8¾@Ó>ÅLô½/¶/>Ëñ~½Æ…˼s. 
¾*B conv.biasJ œA¾Z +data + + + +ô +ôb +3 + + + +ô +ôB \ No newline at end of file From af07fb0922080d5a033e9641158816d86b6ee500 Mon Sep 17 00:00:00 2001 From: jamesgwen Date: Mon, 14 Aug 2023 11:05:50 -0700 Subject: [PATCH 24/50] fixing remote test --- tests/inference_tests/test_inference.py | 24 ++++++++++++++++++------ 1 file changed, 18 insertions(+), 6 deletions(-) diff --git a/tests/inference_tests/test_inference.py b/tests/inference_tests/test_inference.py index 03e1cdc3..2a5d9d3d 100644 --- a/tests/inference_tests/test_inference.py +++ b/tests/inference_tests/test_inference.py @@ -3,6 +3,7 @@ import numpy as np import onnx +from pathml.core import SlideData from pathml.inference import ( HaloAIInference, Inference, @@ -126,7 +127,7 @@ def test_InferenceBase(): def test_Inference(tileHE): - new_path = "../testdata/andom_model.onnx" + new_path = "../testdata/random_model.onnx" inference = Inference( model_path=new_path, input_name="data", num_classes=1, model_type="segmentation" @@ -138,7 +139,7 @@ def test_Inference(tileHE): def test_HaloAIInference(tileHE): - new_path = "../testdata/andom_model.onnx" + new_path = "../testdata/random_model.onnx" inference = HaloAIInference( model_path=new_path, input_name="data", num_classes=1, model_type="segmentation" @@ -148,11 +149,22 @@ def test_HaloAIInference(tileHE): assert np.array_equal(tileHE.image, inference.F(orig_im)) -def test_RemoteTestHoverNet(tileHE): +def test_RemoteTestHoverNet(): inference = RemoteTestHoverNet() - orig_im = tileHE.image - inference.apply(tileHE) - assert np.array_equal(tileHE.image, inference.F(orig_im)) + wsi = SlideData("../testdata/small_HE.svs") + + tiles = wsi.generate_tiles(shape=(256, 256), pad=False) + a = 0 + test_tile = None + + while a == 0: + for tile in tiles: + test_tile = tile + a += 1 + + orig_im = test_tile.image + inference.apply(test_tile) + assert np.array_equal(test_tile.image, inference.F(orig_im)) inference.remove() From 7048a4e3566eeb88a4109b79c37d33dd3c204ab1 Mon Sep 17 00:00:00 2001 From: jamesgwen Date: Mon, 14 Aug 2023 11:26:57 -0700 Subject: [PATCH 25/50] fixing path to onnx model in test data --- tests/inference_tests/test_inference.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/tests/inference_tests/test_inference.py b/tests/inference_tests/test_inference.py index 2a5d9d3d..8d812865 100644 --- a/tests/inference_tests/test_inference.py +++ b/tests/inference_tests/test_inference.py @@ -127,7 +127,7 @@ def test_InferenceBase(): def test_Inference(tileHE): - new_path = "../testdata/random_model.onnx" + new_path = "tests/testdata/random_model.onnx" inference = Inference( model_path=new_path, input_name="data", num_classes=1, model_type="segmentation" @@ -139,7 +139,7 @@ def test_Inference(tileHE): def test_HaloAIInference(tileHE): - new_path = "../testdata/random_model.onnx" + new_path = "tests/testdata/random_model.onnx" inference = HaloAIInference( model_path=new_path, input_name="data", num_classes=1, model_type="segmentation" @@ -152,7 +152,7 @@ def test_HaloAIInference(tileHE): def test_RemoteTestHoverNet(): inference = RemoteTestHoverNet() - wsi = SlideData("../testdata/small_HE.svs") + wsi = SlideData("tests/testdata/small_HE.svs") tiles = wsi.generate_tiles(shape=(256, 256), pad=False) a = 0 From f76314a8f6d43e864f2b408ff58a01c76bf6cb41 Mon Sep 17 00:00:00 2001 From: jamesgwen Date: Tue, 15 Aug 2023 11:08:31 -0700 Subject: [PATCH 26/50] changing numpy to latest version --- environment.yml | 9 ++++----- 1 file changed, 4 insertions(+), 5 deletions(-) 
diff --git a/environment.yml b/environment.yml index a8c7a5b9..e7a65761 100644 --- a/environment.yml +++ b/environment.yml @@ -6,7 +6,7 @@ channels: dependencies: - pip==21.3.1 - - numpy==1.21.6 + - numpy==1.25.2 # orig = 1.19.5 - scipy==1.7.3 - scikit-image==0.18.3 - matplotlib==3.5.1 @@ -15,20 +15,19 @@ dependencies: - h5py==3.1.0 - dask==2021.12.0 - pydicom==2.2.2 - # - pytest==6.2.5 - - pytest==7.4.0 + - pytest==7.4.0 # orig = 6.2.5 - pre-commit==2.16.0 - coverage==5.5 - pip: - python-bioformats==4.0.0 - python-javabridge==4.0.0 - protobuf==3.20.2 - - deepcell==0.12.7 + - deepcell==0.12.7 # orig = 0.11.0 - onnx==1.14.0 - onnxruntime==1.15.1 - opencv-contrib-python==4.5.3.56 - openslide-python==1.2.0 - - pandas==1.5.2 + - pandas==1.5.2 # orig no req - scanpy==1.8.2 - anndata==0.7.8 - tqdm==4.62.3 From 69c8ce623c854cd65a6bf075a880f8b5f81dc919 Mon Sep 17 00:00:00 2001 From: jamesgwen Date: Tue, 15 Aug 2023 11:32:07 -0700 Subject: [PATCH 27/50] adjusting numpy to 1.24.0 --- environment.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/environment.yml b/environment.yml index e7a65761..bdbfdcf7 100644 --- a/environment.yml +++ b/environment.yml @@ -6,7 +6,7 @@ channels: dependencies: - pip==21.3.1 - - numpy==1.25.2 # orig = 1.19.5 + - numpy==1.24.0 # orig = 1.19.5 - scipy==1.7.3 - scikit-image==0.18.3 - matplotlib==3.5.1 From a5d28ac82a6f1a4a2f7379bb7c808881ce70808b Mon Sep 17 00:00:00 2001 From: jamesgwen Date: Tue, 15 Aug 2023 11:44:34 -0700 Subject: [PATCH 28/50] switching to numpy 1.22.4 --- environment.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/environment.yml b/environment.yml index bdbfdcf7..43d65319 100644 --- a/environment.yml +++ b/environment.yml @@ -6,7 +6,7 @@ channels: dependencies: - pip==21.3.1 - - numpy==1.24.0 # orig = 1.19.5 + - numpy==1.22.4 # orig = 1.19.5 - scipy==1.7.3 - scikit-image==0.18.3 - matplotlib==3.5.1 From c84cc247e83875eca7ab8c8af8d4565bde15a306 Mon Sep 17 00:00:00 2001 From: jamesgwen Date: Thu, 17 Aug 2023 13:28:36 -0700 Subject: [PATCH 29/50] adding pandas --- environment.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/environment.yml b/environment.yml index 43d65319..e8a5469e 100644 --- a/environment.yml +++ b/environment.yml @@ -27,8 +27,8 @@ dependencies: - onnxruntime==1.15.1 - opencv-contrib-python==4.5.3.56 - openslide-python==1.2.0 - - pandas==1.5.2 # orig no req - scanpy==1.8.2 - anndata==0.7.8 - tqdm==4.62.3 - loguru==0.5.3 + - pandas==1.5.2 # orig no req From 6c7268e59504d21119514e791e66b0586d9fd75e Mon Sep 17 00:00:00 2001 From: jamesgwen Date: Wed, 6 Sep 2023 10:32:52 -0700 Subject: [PATCH 30/50] adding repr tests --- tests/inference_tests/test_inference.py | 16 ++++++++++++++++ 1 file changed, 16 insertions(+) diff --git a/tests/inference_tests/test_inference.py b/tests/inference_tests/test_inference.py index 8d812865..806bef7f 100644 --- a/tests/inference_tests/test_inference.py +++ b/tests/inference_tests/test_inference.py @@ -107,6 +107,10 @@ def test_InferenceBase(): for key in test.model_card: assert key == test.model_card[key], f"function for {key} is not working" + assert "Base class for all ONNX models" == repr(test) + + assert test.model_card == test.get_model_card() + # test reshape function random = np.random.rand(1, 2, 3) assert test.reshape(random).shape == ( @@ -137,6 +141,8 @@ def test_Inference(tileHE): inference.apply(tileHE) assert np.array_equal(tileHE.image, inference.F(orig_im)) + assert repr(inference) == "Class to handle ONNX model locally stored at 
{new_path}" + def test_HaloAIInference(tileHE): new_path = "tests/testdata/random_model.onnx" @@ -148,6 +154,11 @@ def test_HaloAIInference(tileHE): inference.apply(tileHE) assert np.array_equal(tileHE.image, inference.F(orig_im)) + assert ( + repr(inference) + == "Class to handle HALO AI ONNX model locally stored at {new_path}" + ) + def test_RemoteTestHoverNet(): inference = RemoteTestHoverNet() @@ -167,4 +178,9 @@ def test_RemoteTestHoverNet(): inference.apply(test_tile) assert np.array_equal(test_tile.image, inference.F(orig_im)) + assert ( + repr(inference) + == "Class to handle remote TIAToolBox HoverNet test ONNX. See model card for citation." + ) + inference.remove() From c4c2795dc826f4cdfb582e64ae723237e339a76b Mon Sep 17 00:00:00 2001 From: jamesgwen Date: Wed, 6 Sep 2023 10:37:05 -0700 Subject: [PATCH 31/50] fixing print statements in repr tests --- tests/inference_tests/test_inference.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/inference_tests/test_inference.py b/tests/inference_tests/test_inference.py index 806bef7f..dd3c51fa 100644 --- a/tests/inference_tests/test_inference.py +++ b/tests/inference_tests/test_inference.py @@ -141,7 +141,7 @@ def test_Inference(tileHE): inference.apply(tileHE) assert np.array_equal(tileHE.image, inference.F(orig_im)) - assert repr(inference) == "Class to handle ONNX model locally stored at {new_path}" + assert repr(inference) == f"Class to handle ONNX model locally stored at {new_path}" def test_HaloAIInference(tileHE): @@ -156,7 +156,7 @@ def test_HaloAIInference(tileHE): assert ( repr(inference) - == "Class to handle HALO AI ONNX model locally stored at {new_path}" + == f"Class to handle HALO AI ONNX model locally stored at {new_path}" ) From 2b0263ef818b58b3c20179282c0f64c49bfe95d2 Mon Sep 17 00:00:00 2001 From: jamesgwen Date: Thu, 7 Sep 2023 08:03:50 -0700 Subject: [PATCH 32/50] adding example notebook and fixing comments in inference file --- examples/InferenceOnnx_tutorial.ipynb | 805 ++++++++++++++++++++++++++ pathml/inference/inference.py | 3 - 2 files changed, 805 insertions(+), 3 deletions(-) create mode 100644 examples/InferenceOnnx_tutorial.ipynb diff --git a/examples/InferenceOnnx_tutorial.ipynb b/examples/InferenceOnnx_tutorial.ipynb new file mode 100644 index 00000000..98c67052 --- /dev/null +++ b/examples/InferenceOnnx_tutorial.ipynb @@ -0,0 +1,805 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "id": "c4e08d2c-f53e-4366-888d-ab72819b4c2f", + "metadata": {}, + "source": [ + "# PathML ONNX Tutorial\n", + "\n", + "Written by James Wen. James_Wen@dfci.harvard.edu. \n", + "\n", + "[![View on GitHub](https://img.shields.io/badge/View-on%20GitHub-lightgrey?logo=github)](https://github.com/Dana-Farber-AIOS/pathml/blob/master/examples/)\n", + "\n", + "## Updates\n", + "August 09, 2023\n", + "- Simplified the class name structure\n", + "- Added \"model cards\" via base class\n", + "- Added setter functions to the base class to edit model card\n", + "- New fxn to check if model is clean (without initializers in the input graph) \n", + "\n", + "## Introduction\n", + "\n", + "This notebook is a tutorial on how to use the future ONNX `inference` feature in PathML. \n", + "\n", + "Some notes:\n", + "- The ONNX inference pipeline uses the existing PathML Pipeline and Transforms infrastructure.\n", + " - ONNX labels are saved to a `pathml.core.slide_data.SlideData` object as `tiles`.\n", + " - Users can iterate over the tiles as they would when using this feature for preprocessing. 
\n", + "- Preprocessing images before inference\n", + " - Users will need to create their own bespoke `pathml.preprocessing.transforms.transform` method to preprocess images before inference if necessary.\n", + " - A guide on how to create preprocessing pipelines is [here](https://pathml.readthedocs.io/en/latest/creating_pipelines.html). \n", + " - A guide on how to run preprocessing pipelines is [here](https://pathml.readthedocs.io/en/latest/running_pipelines.html). \n", + "- ONNX Model Initializers \n", + " - ONNX models often have neural network initializers stored in the input graph. This means that the user is expected to specify initializer values when running inference. To solve this issue, we have a function that removes the network initializers from the input graph. This functions is adopted from the `onnxruntime` [github](https://github.com/microsoft/onnxruntime/blob/main/tools/python/remove_initializer_from_input.py). \n", + " - We also have a function that checks if the initializers have been removed from the input graph before running inference. Both of these functions are described more below. \n", + "- When using a model stored remotely on HuggingFace, the model is *downloaded locally* before being used. The user will need to delete the model after running `Pipeline` with a method that comes with the model class. An example of how to do this is below. \n", + "\n", + "## Quick Sample Code\n", + "- Below is an example of how users would use the ONNX inference feature in PathML with a locally stored model.\n", + "```python\n", + "# load packages\n", + "from pathml.core import SlideData\n", + "\n", + "from pathml.preprocessing import Pipeline\n", + "import pathml.preprocessing.transforms as Transforms\n", + "\n", + "from pathml.inference import Inference, remove_initializer_from_input\n", + "\n", + "# Define slide path\n", + "slide_path = 'PATH TO SLIDE'\n", + "\n", + "# Set path to model \n", + "model_path = 'PATH TO ONNX MODEL'\n", + "# Define path to export fixed model\n", + "new_path = 'PATH TO SAVE NEW ONNX MODEL'\n", + "\n", + "# Fix the ONNX model by removing initializers. Save new model to `new_path`. 
\n", + "remove_initializer_from_input(model_path, new_path) \n", + "\n", + "inference = Inference(model_path = new_path, input_name = 'data', num_classes = 8, model_type = 'segmentation')\n", + "\n", + "# Create a transformation list\n", + "transformation_list = [\n", + " inference\n", + "] \n", + "\n", + "# Initialize pathml.core.slide_data.SlideData object\n", + "wsi = SlideData(slide_path, stain = 'Fluor')\n", + "\n", + "# Set up PathML pipeline\n", + "pipeline = Pipeline(transformation_list)\n", + "\n", + "# Run Inference\n", + "wsi.run(pipeline, tile_size = 1280, level = 0)\n", + "```\n", + "\n", + "- Below is an example of how users would use the ONNX inference feature in PathML with a model stored in the public HuggingFace repository.\n", + "```python\n", + "# load packages\n", + "from pathml.core import SlideData\n", + "\n", + "from pathml.preprocessing import Pipeline\n", + "import pathml.preprocessing.transforms as Transforms\n", + "\n", + "from pathml.inference import RemoteTestHoverNet\n", + "\n", + "# Define slide path\n", + "slide_path = 'PATH TO SLIDE'\n", + "\n", + "inference = RemoteTestHoverNet()\n", + "\n", + "# Create a transformation list\n", + "transformation_list = [\n", + " inference\n", + "] \n", + "\n", + "# Initialize pathml.core.slide_data.SlideData object\n", + "wsi = SlideData(slide_path)\n", + "\n", + "# Set up PathML pipeline\n", + "pipeline = Pipeline(transformation_list)\n", + "\n", + "# Run Inference\n", + "wsi.run(pipeline, tile_size = 256)\n", + "\n", + "# DELETE ONNX MODEL DOWNLOADED FROM HUGGINGFACE\n", + "inference.remove() \n", + "```" + ] + }, + { + "cell_type": "markdown", + "id": "886a74a3-b905-40dd-9b3e-4e1b90918f9b", + "metadata": {}, + "source": [ + "## Load Packages\n", + "\n", + "**NOTE**\n", + "- Please put in your environment name in the following line if you are using a jupyter notebook. If not, you may remove this line. 
\n", + " `os.environ[\"JAVA_HOME\"] = \"/opt/conda/envs/YOUR ENVIRONMENET NAME\"` " + ] + }, + { + "cell_type": "code", + "execution_count": 1, + "id": "436b91f3-6338-4043-8742-496b354544aa", + "metadata": {}, + "outputs": [], + "source": [ + "import os\n", + "os.environ[\"JAVA_HOME\"] = \"/opt/conda/envs/YOUR ENVIRONMENET NAME\" # TO DO: CHANGE THIS TO YOUR ENVIRONMENT NAME\n", + "import numpy as np \n", + "import onnx\n", + "import onnxruntime as ort \n", + "import requests\n", + "\n", + "from pathml.core import SlideData, Tile\n", + "from dask.distributed import Client\n", + "from pathml.preprocessing import Pipeline\n", + "import pathml.preprocessing.transforms as Transforms\n", + "\n", + "from pathml.inference import (\n", + " HaloAIInference,\n", + " Inference,\n", + " InferenceBase,\n", + " RemoteTestHoverNet,\n", + " check_onnx_clean,\n", + " remove_initializer_from_input,\n", + ")" + ] + }, + { + "cell_type": "markdown", + "id": "34e9fb8c-0148-4184-ba6b-cf5dae63a869", + "metadata": {}, + "source": [ + "## ONNX Inference Class and ONNX Model Fixer\n", + "\n", + "- Here is the raw code for the functions that handle the initializers in the ONNX model and the classes that run the inference.\n", + "\n", + "### Functions to remove initializers and check that initializers have been removed.\n", + "\n", + "- `remove_initializer_from_input`\n", + " - This function removes any initializers from the input graph of the ONNX model.\n", + " - Without removing the initializers from the input graph, users will not be able to run inference.\n", + " - Adapted from the `onnxruntime` [github](https://github.com/microsoft/onnxruntime/blob/main/tools/python/remove_initializer_from_input.py). \n", + " - Users specify:\n", + " - `model_path` (str): path to ONNX model,\n", + " - `new_path` (str): path to save adjusted model w/o initializers\n", + " - We will run this function on all models placed in our model zoo, so users will not have to run it unless they are working with their own local models.\n", + " \n", + "
\n", + " \n", + "- `check_onnx_clean`\n", + " - Checks if the initializers are in the input graph\n", + " - Returns `True` and a `ValueError` if there are initializers in the input graph\n", + " - Adapted from the `onnxruntime` [github](https://github.com/microsoft/onnxruntime/blob/main/tools/python/remove_initializer_from_input.py). \n", + " - Users specify:\n", + " - `model_path` (str): path to ONNX model\n", + "\n", + "### Inference Classes\n", + "\n", + "
\n", + "\n", + "- `InferenceBase`\n", + " - This class inherits from `pathml.preprocessing.transforms.transform`, similar to all of the preprocessing transformations. Inheriting from `transforms.transform` allows us to use the existing `Pipeline` function in PathML which users should be familar with. \n", + " - This is the base class for all Inference classes for ONNX modeling\n", + " - Each instance of a class also comes with a `model_card` which specifies certain details of the model in dictionary form. The default parameters are:\n", + " - ```python \n", + " self.model_card = {\n", + " 'name' : None, \n", + " 'num_classes' : None,\n", + " 'model_type' : None, \n", + " 'notes' : None, \n", + " 'model_input_notes': None, \n", + " 'model_output_notes' : None,\n", + " 'citation': None \n", + " } \n", + " ``` \n", + " - Model cards are where important information about the model should be kept. Since they are in dictionary form, the user can add keys and values as they see fit. \n", + " - This class also has getter and setter functions to adjust the `model_card`. Certain functions include `get_model_card`, `set_name`, `set_num_classes`, etc. \n", + " \n", + "
\n", + " \n", + "- `Inference` \n", + " - This class is for when the user wants to use an ONNX model stored locally. \n", + " - Calls the `check_onnx_clean` function to check if the model is clean.\n", + " - Users specify:\n", + " - `model_path` (str): path to ONNX model,\n", + " - `input_name` (str): name of input for ONNX model, *defaults to `data`* \n", + " - `num_classes` (int): number of outcome classes, \n", + " - `model_type` (str): type of model (classification, segmentation) \n", + " - `local` (bool): if you are using a local model or a remote model, *defaults to `True`* \n", + " \n", + "
\n", + " \n", + "- `HaloAIInference`\n", + " - This class inherits from `Inference`\n", + " - HaloAI ONNX models always return 20 prediction maps: this class will subset and return the necessary ones. \n", + "\n", + "
\n", + "\n", + "- `RemoteTestHoverNet` \n", + " - This class inherits from `Inference` and is the test class for public models hosted on `HuggingFace`. \n", + " - `local` is automatically set to `False` \n", + " - Our current test model is a HoverNet from [TIAToolbox](https://github.com/TissueImageAnalytics/tiatoolbox)\n", + " - Pocock J, Graham S, Vu QD, Jahanifar M, Deshpande S, Hadjigeorghiou G, Shephard A, Bashir RM, Bilal M, Lu W, Epstein D. TIAToolbox as an end-to-end library for advanced tissue image analytics. Communications medicine. 2022 Sep 24;2(1):120.\n", + " - Its `model_card` is:\n", + " - ```python \n", + " {'name': 'Tiabox HoverNet Test',\n", + " 'num_classes': 5,\n", + " 'model_type': 'Segmentation',\n", + " 'notes': None,\n", + " 'model_input_notes': 'Accepts tiles of 256 x 256',\n", + " 'model_output_notes': None,\n", + " 'citation': 'Pocock J, Graham S, Vu QD, Jahanifar M, Deshpande S, Hadjigeorghiou G, Shephard A, Bashir RM, Bilal M, Lu W, Epstein D. TIAToolbox as an end-to-end library for advanced tissue image analytics. Communications medicine. 2022 Sep 24;2(1):120.'}\n", + " ```\n", + " \n", + "### Raw Code\n", + "\n", + "Below is the raw code for your convenience. You can also find the raw code on our github. \n", + "[![View on GitHub](https://img.shields.io/badge/View-on%20GitHub-lightgrey?logo=github)](https://github.com/Dana-Farber-AIOS/pathml/tree/master/pathml)" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "id": "3339cf66-8de6-4af1-9d3e-7312cd69eb33", + "metadata": {}, + "outputs": [], + "source": [ + "def remove_initializer_from_input(model_path, new_path):\n", + " \"\"\"Removes initializers from HaloAI ONNX models\n", + " Taken from https://github.com/microsoft/onnxruntime/blob/main/tools/python/remove_initializer_from_input.py\n", + "\n", + " Args:\n", + " model_path (str): path to ONNX model,\n", + " new_path (str): path to save adjusted model w/o initializers,\n", + "\n", + " Returns:\n", + " ONNX model w/o initializers to run inference using PathML\n", + " \"\"\"\n", + "\n", + " model = onnx.load(model_path)\n", + "\n", + " inputs = model.graph.input\n", + " name_to_input = {}\n", + " for onnx_input in inputs:\n", + " name_to_input[onnx_input.name] = onnx_input\n", + "\n", + " for initializer in model.graph.initializer:\n", + " if initializer.name in name_to_input:\n", + " inputs.remove(name_to_input[initializer.name])\n", + "\n", + " onnx.save(model, new_path)\n", + "\n", + "\n", + "def check_onnx_clean(model_path):\n", + " \"\"\"Checks if the model has had it's initalizers removed from input graph.\n", + " Adapted from from https://github.com/microsoft/onnxruntime/blob/main/tools/python/remove_initializer_from_input.py\n", + "\n", + " Args:\n", + " model_path (str): path to ONNX model,\n", + "\n", + " Returns:\n", + " Boolean if there are initializers in input graph.\n", + " \"\"\"\n", + "\n", + " model = onnx.load(model_path)\n", + "\n", + " inputs = model.graph.input\n", + " name_to_input = {}\n", + " for onnx_input in inputs:\n", + " name_to_input[onnx_input.name] = onnx_input\n", + "\n", + " for initializer in model.graph.initializer:\n", + " if initializer.name in name_to_input:\n", + " return True\n", + "\n", + "\n", + "# Base class\n", + "class InferenceBase(Transforms.Transform):\n", + " \"\"\"\n", + " Base class for all ONNX Models.\n", + " Each transform must operate on a Tile.\n", + " \"\"\"\n", + "\n", + " def __init__(self):\n", + " self.model_card = {\n", + " \"name\": None,\n", + " \"num_classes\": None,\n", + " 
\"model_type\": None,\n", + " \"notes\": None,\n", + " \"model_input_notes\": None,\n", + " \"model_output_notes\": None,\n", + " \"citation\": None,\n", + " }\n", + "\n", + " def __repr__(self):\n", + " return \"Base class for all ONNX models\"\n", + "\n", + " def get_model_card(self):\n", + " return self.model_card\n", + "\n", + " def set_name(self, name):\n", + " self.model_card[\"name\"] = name\n", + "\n", + " def set_num_classes(self, num):\n", + " self.model_card[\"num_classes\"] = num\n", + "\n", + " def set_model_type(self, model_type):\n", + " self.model_card[\"model_type\"] = model_type\n", + "\n", + " def set_notes(self, note):\n", + " self.model_card[\"notes\"] = note\n", + "\n", + " def set_model_input_notes(self, note):\n", + " self.model_card[\"model_input_notes\"] = note\n", + "\n", + " def set_model_output_notes(self, note):\n", + " self.model_card[\"model_output_notes\"] = note\n", + "\n", + " def set_citation(self, citation):\n", + " self.model_card[\"citation\"] = citation\n", + "\n", + " def reshape(self, image):\n", + " \"\"\"standard reshaping of tile image\"\"\"\n", + " # flip dimensions\n", + " # follows convention used here https://github.com/Dana-Farber-AIOS/pathml/blob/master/pathml/ml/dataset.py\n", + "\n", + " if image.ndim == 3:\n", + " # swap axes from HWC to CHW\n", + " image = image.transpose(2, 0, 1)\n", + " # add a dimesion bc onnx models usually have batch size as first dim: e.g. (1, channel, height, width)\n", + " image = np.expand_dims(image, axis=0)\n", + "\n", + " return image\n", + " else:\n", + " # in this case, we assume that we have XYZCT channel order\n", + " # so we swap axes to TCZYX for batching\n", + " # note we are not adding a dim here for batch bc we assume that subsetting will create a batch \"placeholder\" dim\n", + " image = image.T\n", + "\n", + " return image\n", + "\n", + " def F(self, target):\n", + " \"\"\"functional implementation\"\"\"\n", + " raise NotImplementedError\n", + "\n", + " def apply(self, tile):\n", + " \"\"\"modify Tile object in-place\"\"\"\n", + " raise NotImplementedError\n", + "\n", + "\n", + "# class to handle local onnx models\n", + "class Inference(InferenceBase):\n", + " \"\"\"Transformation to run inferrence on ONNX model.\n", + "\n", + " Assumptions:\n", + " - The ONNX model has been cleaned by `remove_initializer_from_input` first\n", + "\n", + " Args:\n", + " model_path (str): path to ONNX model w/o initializers,\n", + " input_name (str): name of the input the ONNX model accepts\n", + " \"\"\"\n", + "\n", + " def __init__(\n", + " self,\n", + " model_path=None,\n", + " input_name=\"data\",\n", + " num_classes=None,\n", + " model_type=None,\n", + " local=True,\n", + " ):\n", + " super().__init__()\n", + "\n", + " self.input_name = input_name\n", + " self.num_classes = num_classes\n", + " self.model_type = model_type\n", + " self.local = local\n", + "\n", + " if self.local:\n", + " # using a local onnx model\n", + " self.model_path = model_path\n", + " else:\n", + " # if using a model from the model zoo, set the local path to a temp file\n", + " self.model_path = \"temp.onnx\"\n", + "\n", + " # fill in parts of the model_card with the following info\n", + " self.model_card[\"num_classes\"] = self.num_classes\n", + " self.model_card[\"model_type\"] = self.model_type\n", + "\n", + " # check if there are initializers in input graph if using a local model\n", + " if local:\n", + " if check_onnx_clean(model_path):\n", + " raise ValueError(\n", + " \"The ONNX model still has graph initializers in the input 
graph. Use `remove_initializer_from_input` to remove them.\"\n", + " )\n", + " else:\n", + " pass\n", + "\n", + " def __repr__(self):\n", + " if self.local:\n", + " return f\"Class to handle ONNX model locally stored at {self.model_path}\"\n", + " else:\n", + " return f\"Class to handle a {self.model_card['model_name']} from the PathML model zoo.\"\n", + "\n", + " def inference(self, image):\n", + " # reshape the image\n", + " image = self.reshape(image)\n", + "\n", + " # load fixed model\n", + " onnx_model = onnx.load(self.model_path)\n", + "\n", + " # check tile dimensions match ONNX input dimensions\n", + " input_node = onnx_model.graph.input\n", + "\n", + " dimensions = []\n", + " for input in input_node:\n", + " if input.name == self.input_name:\n", + " input_shape = input.type.tensor_type.shape.dim\n", + " for dim in input_shape:\n", + " dimensions.append(dim.dim_value)\n", + "\n", + " assert (\n", + " image.shape[-1] == dimensions[-1] and image.shape[-2] == dimensions[-2]\n", + " ), f\"expecting tile shape of {dimensions[-2]} by {dimensions[-1]}, got {image.shape[-2]} by {image.shape[-1]}\"\n", + "\n", + " # check onnx model\n", + " onnx.checker.check_model(onnx_model)\n", + "\n", + " # start an inference session\n", + " ort_sess = onnxruntime.InferenceSession(self.model_path)\n", + "\n", + " # create model output, returns a list\n", + " model_output = ort_sess.run(None, {self.input_name: image.astype(\"f\")})\n", + "\n", + " return model_output\n", + "\n", + " def F(self, image):\n", + " # run inference function\n", + " prediction_map = self.inference(image)\n", + "\n", + " # single task model\n", + " if len(prediction_map) == 1:\n", + " # return first and only prediction array in the list\n", + " return prediction_map[0]\n", + "\n", + " # multi task model\n", + " else:\n", + " # concatenate prediction results\n", + " # assumes that the tasks all output prediction arrays of same dimension on H and W\n", + " result_array = np.concatenate(prediction_map, axis=1)\n", + " return result_array\n", + "\n", + " def apply(self, tile):\n", + " tile.image = self.F(tile.image)\n", + "\n", + "\n", + "class HaloAIInference(Inference):\n", + " \"\"\"Transformation to run inferrence on HALO AI ONNX model.\n", + "\n", + " Assumptions:\n", + " - Assumes that the ONNX model returns a tensor in which there is one prediction map for each class\n", + " - For example, if there are 5 classes, the ONNX model will output a (1, 5, Height, Weight) tensor\n", + " - If you select to argmax the classes, the class assumes a softmax or sigmoid has already been applied\n", + " - HaloAI ONNX models always have 20 class maps so you need to index into the first x maps if you have x classes\n", + "\n", + "\n", + " Args:\n", + " model_path (str): path to ONNX model w/o initializers,\n", + " num_classes (int): number of classes in the data,\n", + " input_name (str): name of the input the ONNX model accepts\n", + " \"\"\"\n", + "\n", + " def __init__(\n", + " self,\n", + " model_path=None,\n", + " input_name=\"data\",\n", + " num_classes=None,\n", + " model_type=None,\n", + " local=True,\n", + " ):\n", + " super().__init__(model_path, input_name, num_classes, model_type, local)\n", + "\n", + " self.model_card[\"num_classes\"] = self.num_classes\n", + " self.model_card[\"model_type\"] = self.model_type\n", + "\n", + " def __repr__(self):\n", + " return f\"Class to handle HALO AI ONNX model locally stored at {self.model_path}\"\n", + "\n", + " def F(self, image):\n", + " prediction_map = self.inference(image)\n", + "\n", + 
" prediction_map = prediction_map[0][:, 0 : self.num_classes, :, :]\n", + "\n", + " return prediction_map\n", + "\n", + " def apply(self, tile):\n", + " tile.image = self.F(tile.image)\n", + "\n", + "\n", + "# class to handle remote onnx models\n", + "class RemoteTestHoverNet(Inference):\n", + " \"\"\"Transformation to run inferrence on ONNX model.\n", + "\n", + " Citation for model:\n", + " Pocock J, Graham S, Vu QD, Jahanifar M, Deshpande S, Hadjigeorghiou G, Shephard A, Bashir RM, Bilal M, Lu W, Epstein D.\n", + " TIAToolbox as an end-to-end library for advanced tissue image analytics. Communications medicine. 2022 Sep 24;2(1):120.\n", + "\n", + " Args:\n", + " model_path (str): temp file name to download onnx from huggingface,\n", + " input_name (str): name of the input the ONNX model accepts\n", + " \"\"\"\n", + "\n", + " def __init__(\n", + " self,\n", + " model_path=\"temp.onnx\",\n", + " input_name=\"data\",\n", + " num_classes=5,\n", + " model_type=\"Segmentation\",\n", + " local=False,\n", + " ):\n", + " super().__init__(model_path, input_name, num_classes, model_type, local)\n", + "\n", + " # specify URL of the model in PathML public repository\n", + " url = \"https://huggingface.co/pathml/test/resolve/main/hovernet_fast_tiatoolbox_fixed.onnx\"\n", + "\n", + " # download model, save as temp.onnx\n", + " with open(self.model_path, \"wb\") as out_file:\n", + " content = requests.get(url, stream=True).content\n", + " out_file.write(content)\n", + "\n", + " self.model_card[\"num_classes\"] = self.num_classes\n", + " self.model_card[\"model_type\"] = self.model_type\n", + " self.model_card[\"name\"] = \"Tiabox HoverNet Test\"\n", + " self.model_card[\"model_input_notes\"] = \"Accepts tiles of 256 x 256\"\n", + " self.model_card[\n", + " \"citation\"\n", + " ] = \"Pocock J, Graham S, Vu QD, Jahanifar M, Deshpande S, Hadjigeorghiou G, Shephard A, Bashir RM, Bilal M, Lu W, Epstein D. TIAToolbox as an end-to-end library for advanced tissue image analytics. Communications medicine. 2022 Sep 24;2(1):120.\"\n", + "\n", + " def __repr__(self):\n", + " return \"Class to handle remote TIAToolBox HoverNet test ONNX. See model card for citation.\"\n", + "\n", + " def apply(self, tile):\n", + " tile.image = self.F(tile.image)\n", + "\n", + " def remove(self):\n", + " # remove the temp.onnx model\n", + " os.remove(self.model_path)\n" + ] + }, + { + "cell_type": "markdown", + "id": "8b28c79e-2453-42e5-9280-6c0d3ee082c0", + "metadata": {}, + "source": [ + "## Try it Yourself!\n", + "\n", + "- What you need:\n", + " - An ONNX model stored locally\n", + " - An image with which you want to run inference stored locally\n", + " - PathML already downloaded \n", + "\n", + "- Make sure to define the `Inference` class and `remove_initializer_from_input` above in the previous seciton if you have not downloaded the latest version of PathML.\n", + "\n", + "- You will need to define the following variables: \n", + " - `slide_path`: 'PATH TO SLIDE'\n", + " - `model_path`: 'PATH TO ONNX MODEL'\n", + " - `new_path`: 'PATH TO SAVE FIXED ONNX MODEL'\n", + " - `num_classes`: 'NUMBER OF CLASSES IN YOUR DATASET'\n", + " - `tile_size`: 'TILE SIZE THAT YOUR ONNX MODEL ACCEPTS'\n", + " \n", + "- The code in the cell below assumes you want the images passed in as is. If you need to select channels, you will need to add another `transform` method to do so before the inference transform. The following code provides an example if you want to subset into the first channel of an image. 
*Remember that PathML reads images in as XYZCT.* \n", + "\n", + "```python \n", + "class convert_format(Transforms.Transform):\n", + " def F(self, image):\n", + " # orig = (1280, 1280, 1, 6, 1) = (XYZCT)\n", + " image = image[:, :, :, 0, ...] # this will make the tile (1280, 1280, 1, 1)\n", + " return image\n", + "\n", + " def apply(self, tile):\n", + " tile.image = self.F(tile.image)\n", + " \n", + "convert = convert_format()\n", + "inference = Inference(\n", + " model_path = 'PATH TO LOCAL MODEL', \n", + " input_name = 'data', \n", + " num_classes = 'NUMBER OF CLASSES' , \n", + " model_type = 'CLASSIFICATION OR SEGMENTATION', \n", + " local = True)\n", + "\n", + "transformation_list = [convert, inference] \n", + "\n", + "```" + ] + }, + { + "cell_type": "markdown", + "id": "bcdeaac3-80ae-4e67-8aa9-8f4c637a92eb", + "metadata": {}, + "source": [ + "### Local ONNX Model Using the `Inference` Class" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "0bc2f84e-e554-4770-aad9-c51fa1890ea6", + "metadata": {}, + "outputs": [], + "source": [ + "# Define slide path\n", + "slide_path = 'PATH TO SLIDE'\n", + "\n", + "# Set path to model \n", + "model_path = 'PATH TO ONNX MODEL'\n", + "# Define path to export fixed model\n", + "new_path = 'PATH TO SAVE NEW ONNX MODEL'\n", + "\n", + "\n", + "# Fix the ONNX model\n", + "remove_initializer_from_input(model_path, new_path) \n", + "\n", + "inference = Inference(model_path = new_path, input_name = 'data', num_classes = 'NUMBER OF CLASSES' , model_type = 'CLASSIFICATION OR SEGMENTATION', local = True)\n", + "\n", + "transformation_list = [inference] \n", + "\n", + "# Initialize pathml.core.slide_data.SlideData object\n", + "wsi = SlideData(slide_path)\n", + "\n", + "# Set up PathML pipeline\n", + "pipeline = Pipeline(transformation_list)\n", + "\n", + "# Run Inference\n", + "# Level is equal to 0 for highest resolution (Note that this is the default setting)\n", + "wsi.run(pipeline, tile_size = 'TILE SIZE THAT YOUR ONNX MODEL ACCEPTS', level = 0)" + ] + }, + { + "cell_type": "markdown", + "id": "bc7902dc-0113-4604-abe4-6f3a8588c0b5", + "metadata": {}, + "source": [ + "### Local ONNX Model Using the `HaloAIInference` Class" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "d2eedbf1-be61-440e-a044-6dce4c8de04e", + "metadata": {}, + "outputs": [], + "source": [ + "# Define slide path\n", + "slide_path = 'PATH TO SLIDE'\n", + "\n", + "# Set path to model \n", + "model_path = 'PATH TO ONNX MODEL'\n", + "# Define path to export fixed model\n", + "new_path = 'PATH TO SAVE NEW ONNX MODEL'\n", + "\n", + "\n", + "# Fix the ONNX model\n", + "remove_initializer_from_input(model_path, new_path) \n", + "\n", + "inference = HaloAIInference(model_path = new_path, input_name = 'data', num_classes = 'NUMBER OF CLASSES' , model_type = 'CLASSIFICATION OR SEGMENTATION', local = True)\n", + "\n", + "transformation_list = [inference] \n", + "\n", + "# Initialize pathml.core.slide_data.SlideData object\n", + "wsi = SlideData(slide_path)\n", + "\n", + "# Set up PathML pipeline\n", + "pipeline = Pipeline(transformation_list)\n", + "\n", + "# Run Inference\n", + "# Level is equal to 0 for highest resolution (Note that this is the default setting)\n", + "wsi.run(pipeline, tile_size = 'TILE SIZE THAT YOUR ONNX MODEL ACCEPTS', level = 0)" + ] + }, + { + "cell_type": "markdown", + "id": "431abad0-10ff-44fe-ba56-eb6402ce8e4c", + "metadata": {}, + "source": [ + "### Remote ONNX Using our `RemoteTestHoverNet` Class\n", + "- Uses a Hovernet from 
[TIAToolbox](https://github.com/TissueImageAnalytics/tiatoolbox) \n", + "- Note that the purpose of this model is to illustrate how PathML will handle future remote models. We plan on release more public models to our model zoo on HuggingFace in the future.\n", + "- Citation for model:\n", + " - Pocock J, Graham S, Vu QD, Jahanifar M, Deshpande S, Hadjigeorghiou G, Shephard A, Bashir RM, Bilal M, Lu W, Epstein D. TIAToolbox as an end-to-end library for advanced tissue image analytics. Communications medicine. 2022 Sep 24;2(1):120.\n", + "- Make sure your image has 3 channels! \n", + "- When the `RemoteTestHoverNet` is first initialized, it downloads the HoverNet from HuggingFace and saves it locally on your own system as `temp.onnx`. \n", + " - **You will need to remove it manually by calling the `remove()` method** An example of how to call this method is in the last line in the code below. " + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "8976d60b-6e78-42ca-a52d-489911e580f4", + "metadata": {}, + "outputs": [], + "source": [ + "# Define slide path\n", + "slide_path = 'PATH TO SLIDE'\n", + "\n", + "inference = RemoteTestHoverNet()\n", + "\n", + "# Create a transformation list\n", + "transformation_list = [\n", + " inference\n", + "] \n", + "\n", + "# Initialize pathml.core.slide_data.SlideData object\n", + "wsi = SlideData(slide_path)\n", + "\n", + "# Set up PathML pipeline\n", + "pipeline = Pipeline(transformation_list)\n", + "\n", + "# Run Inference\n", + "wsi.run(pipeline, tile_size = 256)\n", + "\n", + "# DELETE ONNX MODEL DOWNLOADED FROM HUGGINGFACE\n", + "inference.remove() " + ] + }, + { + "cell_type": "markdown", + "id": "318ae957-73d8-4c7f-b87c-b012750eda10", + "metadata": {}, + "source": [ + "## Iterate over the tiles\n", + "\n", + "Now that you have your tiles saved to your SlideData object, you can now iterate over them.\n", + "\n", + "For example, if you wanted to check the shape of the tiles you could run the following code: \n", + "\n", + "```python\n", + "for tile in wsi.tiles: \n", + " print(tile.image.shape) \n", + "```" + ] + }, + { + "cell_type": "markdown", + "id": "fc5c89ae-400e-4380-a717-12800fb77d97", + "metadata": {}, + "source": [ + "## References\n", + "\n", + "- Pocock J, Graham S, Vu QD, Jahanifar M, Deshpande S, Hadjigeorghiou G, Shephard A, Bashir RM, Bilal M, Lu W, Epstein D. TIAToolbox as an end-to-end library for advanced tissue image analytics. Communications medicine. 
2022 Sep 24;2(1):120.\n", + "\n", + "- https://github.com/microsoft/onnxruntime/blob/main/tools/python/remove_initializer_from_input.py" + ] + } + ], + "metadata": { + "environment": { + "kernel": "james_test2", + "name": "pytorch-gpu.1-13.m105", + "type": "gcloud", + "uri": "gcr.io/deeplearning-platform-release/pytorch-gpu.1-13:m105" + }, + "kernelspec": { + "display_name": "james_test2", + "language": "python", + "name": "james_test2" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.8.15" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/pathml/inference/inference.py b/pathml/inference/inference.py index 83ec0093..958f97d6 100644 --- a/pathml/inference/inference.py +++ b/pathml/inference/inference.py @@ -63,7 +63,6 @@ def check_onnx_clean(model_path): # Base class -# I think this should still inherit from Transforms to make the tiling easier/so we don't have to rewrite so much existing code class InferenceBase(Transforms.Transform): """ Base class for all ONNX Models. @@ -235,7 +234,6 @@ def F(self, image): else: # concatenate prediction results # assumes that the tasks all output prediction arrays of same dimension on H and W - # To Do: figure out solution for way different tasks such as if a model does both segmentation and classification result_array = np.concatenate(prediction_map, axis=1) return result_array @@ -287,7 +285,6 @@ def apply(self, tile): # class to handle remote onnx models -# ToDo create function to remove model after tiling is done would be a sep line in workflow class RemoteTestHoverNet(Inference): """Transformation to run inferrence on ONNX model. From 1c462d2137f705e66017c362d72b1ff30fbba94c Mon Sep 17 00:00:00 2001 From: jamesgwen Date: Thu, 7 Sep 2023 10:20:10 -0700 Subject: [PATCH 33/50] adding model w/ initializers, modified tests and notebook --- examples/InferenceOnnx_tutorial.ipynb | 33 +++++++++++----------- tests/inference_tests/test_inference.py | 10 +++++++ tests/testdata/model_with_initalizers.onnx | 3 ++ 3 files changed, 30 insertions(+), 16 deletions(-) create mode 100644 tests/testdata/model_with_initalizers.onnx diff --git a/examples/InferenceOnnx_tutorial.ipynb b/examples/InferenceOnnx_tutorial.ipynb index 98c67052..c2787b7a 100644 --- a/examples/InferenceOnnx_tutorial.ipynb +++ b/examples/InferenceOnnx_tutorial.ipynb @@ -1,6 +1,7 @@ { "cells": [ { + "attachments": {}, "cell_type": "markdown", "id": "c4e08d2c-f53e-4366-888d-ab72819b4c2f", "metadata": {}, @@ -11,13 +12,6 @@ "\n", "[![View on GitHub](https://img.shields.io/badge/View-on%20GitHub-lightgrey?logo=github)](https://github.com/Dana-Farber-AIOS/pathml/blob/master/examples/)\n", "\n", - "## Updates\n", - "August 09, 2023\n", - "- Simplified the class name structure\n", - "- Added \"model cards\" via base class\n", - "- Added setter functions to the base class to edit model card\n", - "- New fxn to check if model is clean (without initializers in the input graph) \n", - "\n", "## Introduction\n", "\n", "This notebook is a tutorial on how to use the future ONNX `inference` feature in PathML. 
\n", @@ -109,6 +103,7 @@ ] }, { + "attachments": {}, "cell_type": "markdown", "id": "886a74a3-b905-40dd-9b3e-4e1b90918f9b", "metadata": {}, @@ -150,6 +145,7 @@ ] }, { + "attachments": {}, "cell_type": "markdown", "id": "34e9fb8c-0148-4184-ba6b-cf5dae63a869", "metadata": {}, @@ -187,15 +183,14 @@ " - This is the base class for all Inference classes for ONNX modeling\n", " - Each instance of a class also comes with a `model_card` which specifies certain details of the model in dictionary form. The default parameters are:\n", " - ```python \n", - " self.model_card = {\n", - " 'name' : None, \n", - " 'num_classes' : None,\n", - " 'model_type' : None, \n", - " 'notes' : None, \n", - " 'model_input_notes': None, \n", - " 'model_output_notes' : None,\n", - " 'citation': None \n", - " } \n", + " self.model_card = {\n", + " 'name' : None, \n", + " 'num_classes' : None,\n", + " 'model_type' : None, \n", + " 'notes' : None, \n", + " 'model_input_notes': None, \n", + " 'model_output_notes' : None,\n", + " 'citation': None } \n", " ``` \n", " - Model cards are where important information about the model should be kept. Since they are in dictionary form, the user can add keys and values as they see fit. \n", " - This class also has getter and setter functions to adjust the `model_card`. Certain functions include `get_model_card`, `set_name`, `set_num_classes`, etc. \n", @@ -571,6 +566,7 @@ ] }, { + "attachments": {}, "cell_type": "markdown", "id": "8b28c79e-2453-42e5-9280-6c0d3ee082c0", "metadata": {}, @@ -617,6 +613,7 @@ ] }, { + "attachments": {}, "cell_type": "markdown", "id": "bcdeaac3-80ae-4e67-8aa9-8f4c637a92eb", "metadata": {}, @@ -659,6 +656,7 @@ ] }, { + "attachments": {}, "cell_type": "markdown", "id": "bc7902dc-0113-4604-abe4-6f3a8588c0b5", "metadata": {}, @@ -701,6 +699,7 @@ ] }, { + "attachments": {}, "cell_type": "markdown", "id": "431abad0-10ff-44fe-ba56-eb6402ce8e4c", "metadata": {}, @@ -746,6 +745,7 @@ ] }, { + "attachments": {}, "cell_type": "markdown", "id": "318ae957-73d8-4c7f-b87c-b012750eda10", "metadata": {}, @@ -763,6 +763,7 @@ ] }, { + "attachments": {}, "cell_type": "markdown", "id": "fc5c89ae-400e-4380-a717-12800fb77d97", "metadata": {}, diff --git a/tests/inference_tests/test_inference.py b/tests/inference_tests/test_inference.py index dd3c51fa..8a8c64fd 100644 --- a/tests/inference_tests/test_inference.py +++ b/tests/inference_tests/test_inference.py @@ -104,11 +104,14 @@ def test_InferenceBase(): test.set_citation("citation") + # test model card for key in test.model_card: assert key == test.model_card[key], f"function for {key} is not working" + # test repr function assert "Base class for all ONNX models" == repr(test) + # test get model card fxn assert test.model_card == test.get_model_card() # test reshape function @@ -143,6 +146,13 @@ def test_Inference(tileHE): assert repr(inference) == f"Class to handle ONNX model locally stored at {new_path}" + # test initializer catching + new_path = "tests/testdata/model_with_initalizers.onnx" + try: + inference = Inference(model_path=model_path, input_name="data", num_classes=1, model_type="segmentation") + except Exception as e: + assert str(e) == "The ONNX model still has graph initializers in the input graph. Use `remove_initializer_from_input` to remove them." 
+ def test_HaloAIInference(tileHE): new_path = "tests/testdata/random_model.onnx" diff --git a/tests/testdata/model_with_initalizers.onnx b/tests/testdata/model_with_initalizers.onnx new file mode 100644 index 00000000..36e68494 --- /dev/null +++ b/tests/testdata/model_with_initalizers.onnx @@ -0,0 +1,3 @@ +:!* Binput_2Z +input_1Z +input_2 \ No newline at end of file From 640de9eade160e6fd1c40d3de7b5e28ae541cd1e Mon Sep 17 00:00:00 2001 From: jamesgwen Date: Thu, 7 Sep 2023 10:20:48 -0700 Subject: [PATCH 34/50] fixed test format --- tests/inference_tests/test_inference.py | 33 ++++++++++++++++++++++--- 1 file changed, 29 insertions(+), 4 deletions(-) diff --git a/tests/inference_tests/test_inference.py b/tests/inference_tests/test_inference.py index 8a8c64fd..6fefa48c 100644 --- a/tests/inference_tests/test_inference.py +++ b/tests/inference_tests/test_inference.py @@ -147,11 +147,36 @@ def test_Inference(tileHE): assert repr(inference) == f"Class to handle ONNX model locally stored at {new_path}" # test initializer catching - new_path = "tests/testdata/model_with_initalizers.onnx" - try: - inference = Inference(model_path=model_path, input_name="data", num_classes=1, model_type="segmentation") + bad_model = "tests/testdata/model_with_initalizers.onnx" + try: + inference = Inference( + model_path=bad_model, + input_name="data", + num_classes=1, + model_type="segmentation", + ) except Exception as e: - assert str(e) == "The ONNX model still has graph initializers in the input graph. Use `remove_initializer_from_input` to remove them." + assert ( + str(e) + == "The ONNX model still has graph initializers in the input graph. Use `remove_initializer_from_input` to remove them." + ) + + # test repr function with local set to False + inference = Inference( + model_path=new_path, + input_name="data", + num_classes=1, + model_type="segmentation", + local=False, + ) + + fake_model_name = "test model" + inference.set_name(fake_model_name) + + assert ( + repr(inference) + == f"Class to handle a {fake_model_name} from the PathML model zoo." + ) def test_HaloAIInference(tileHE): From 575ff2b7e11f44cd2859fd73b3dd79c796f8a5bb Mon Sep 17 00:00:00 2001 From: jamesgwen Date: Thu, 7 Sep 2023 11:10:40 -0700 Subject: [PATCH 35/50] fixing name in card --- pathml/inference/inference.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pathml/inference/inference.py b/pathml/inference/inference.py index 958f97d6..b360dda2 100644 --- a/pathml/inference/inference.py +++ b/pathml/inference/inference.py @@ -187,7 +187,7 @@ def __repr__(self): if self.local: return f"Class to handle ONNX model locally stored at {self.model_path}" else: - return f"Class to handle a {self.model_card['model_name']} from the PathML model zoo." + return f"Class to handle a {self.model_card['name']} from the PathML model zoo." 
def inference(self, image): # reshape the image From e1ffa6261403af7705a64941fc89107d016b71c3 Mon Sep 17 00:00:00 2001 From: jamesgwen Date: Tue, 17 Oct 2023 10:22:46 -0700 Subject: [PATCH 36/50] adding comment to adj openslide --- environment.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/environment.yml b/environment.yml index e8a5469e..bb83d68a 100644 --- a/environment.yml +++ b/environment.yml @@ -32,3 +32,4 @@ dependencies: - tqdm==4.62.3 - loguru==0.5.3 - pandas==1.5.2 # orig no req + # to do: adjust openslide From 4f1fea50da5ed95acb2f41ded84bcef107b520d7 Mon Sep 17 00:00:00 2001 From: jamesgwen Date: Wed, 18 Oct 2023 10:34:20 -0700 Subject: [PATCH 37/50] changing readthedocs recs --- docs/readthedocs-requirements.txt | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/docs/readthedocs-requirements.txt b/docs/readthedocs-requirements.txt index b672810f..342675b2 100644 --- a/docs/readthedocs-requirements.txt +++ b/docs/readthedocs-requirements.txt @@ -1,8 +1,7 @@ -sphinx==4.3.2 +sphinx==6.2.1 nbsphinx==0.8.8 nbsphinx-link==1.3.0 sphinx-rtd-theme==1.0.0 -sphinx-autoapi==1.8.4 +sphinx-autoapi==3.0.0 ipython==8.10.0 sphinx-copybutton==0.4.0 - From 0f2b1935f676e15194bd1431d1c010c28197eb0c Mon Sep 17 00:00:00 2001 From: jamesgwen Date: Wed, 18 Oct 2023 11:37:32 -0700 Subject: [PATCH 38/50] chaning mamba to miniforge --- .github/workflows/tests-conda.yml | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/.github/workflows/tests-conda.yml b/.github/workflows/tests-conda.yml index 53a8d93a..c93c4213 100644 --- a/.github/workflows/tests-conda.yml +++ b/.github/workflows/tests-conda.yml @@ -40,7 +40,10 @@ jobs: auto-activate-base: false activate-environment: pathml environment-file: environment.yml - mamba-version: "*" + # mamba-version: "*" + miniforge-version: latest + use-mamba: true + channels: conda-forge python-version: ${{ matrix.python-version }} - name: Debugging run: | From 7e165eaacc4715ff35d803dfcfa329ef1a8df5e0 Mon Sep 17 00:00:00 2001 From: jamesgwen Date: Wed, 18 Oct 2023 11:57:54 -0700 Subject: [PATCH 39/50] addjusting sphinx version --- docs/readthedocs-requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/readthedocs-requirements.txt b/docs/readthedocs-requirements.txt index 342675b2..ba3a28a5 100644 --- a/docs/readthedocs-requirements.txt +++ b/docs/readthedocs-requirements.txt @@ -1,4 +1,4 @@ -sphinx==6.2.1 +sphinx==7.2.6 nbsphinx==0.8.8 nbsphinx-link==1.3.0 sphinx-rtd-theme==1.0.0 From 2730b7e92e2227bb2a16dd86f94ae06c850dbf46 Mon Sep 17 00:00:00 2001 From: jamesgwen Date: Wed, 18 Oct 2023 12:36:20 -0700 Subject: [PATCH 40/50] changing sphinx-rtd-theme version --- docs/readthedocs-requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/readthedocs-requirements.txt b/docs/readthedocs-requirements.txt index ba3a28a5..1efef52e 100644 --- a/docs/readthedocs-requirements.txt +++ b/docs/readthedocs-requirements.txt @@ -1,7 +1,7 @@ sphinx==7.2.6 nbsphinx==0.8.8 nbsphinx-link==1.3.0 -sphinx-rtd-theme==1.0.0 +sphinx-rtd-theme==1.3.0 sphinx-autoapi==3.0.0 ipython==8.10.0 sphinx-copybutton==0.4.0 From 5e0f0f935293f7a6fa0af064d7148be272549968 Mon Sep 17 00:00:00 2001 From: jamesgwen Date: Wed, 18 Oct 2023 12:55:30 -0700 Subject: [PATCH 41/50] adjusting sphynx --- docs/readthedocs-requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/readthedocs-requirements.txt b/docs/readthedocs-requirements.txt index 1efef52e..0e8f22d3 100644 --- 
a/docs/readthedocs-requirements.txt +++ b/docs/readthedocs-requirements.txt @@ -1,4 +1,4 @@ -sphinx==7.2.6 +sphinx==7.1.2 nbsphinx==0.8.8 nbsphinx-link==1.3.0 sphinx-rtd-theme==1.3.0 From 995409c86085fb829771738fd0c6b1a6c2f28767 Mon Sep 17 00:00:00 2001 From: jamesgwen Date: Wed, 1 Nov 2023 16:53:32 +0100 Subject: [PATCH 42/50] adding pytorch to onnx function --- pathml/inference/__init__.py | 1 + pathml/inference/inference.py | 41 +++++++++++++++++++ tests/inference_tests/test_inference.py | 51 ++++++++++++++++++++++++ tests/testdata/test.pt | Bin 0 -> 2310 bytes 4 files changed, 93 insertions(+) create mode 100644 tests/testdata/test.pt diff --git a/pathml/inference/__init__.py b/pathml/inference/__init__.py index dd2b9a39..3ee73dac 100644 --- a/pathml/inference/__init__.py +++ b/pathml/inference/__init__.py @@ -9,5 +9,6 @@ InferenceBase, RemoteTestHoverNet, check_onnx_clean, + convert_pytorch_onnx, remove_initializer_from_input, ) diff --git a/pathml/inference/inference.py b/pathml/inference/inference.py index b360dda2..a63fd6de 100644 --- a/pathml/inference/inference.py +++ b/pathml/inference/inference.py @@ -9,6 +9,7 @@ import onnx import onnxruntime import requests +import torch import pathml.preprocessing.transforms as Transforms @@ -62,6 +63,46 @@ def check_onnx_clean(model_path): return True +def convert_pytorch_onnx( + model, dummy_tensor, model_name, opset_version=10, input_name="data" +): + """Converts a Pytorch Model to ONNX + Adjusted from https://pytorch.org/tutorials/advanced/super_resolution_with_onnxruntime.html + + You need to define the model class and load the weights before exporting. See URL above for full steps. + + Args: + model_path (torch.nn.Module Model): Pytorch model to be converted, + dummy_tensor (torch.tensor): dummy input tensor that is an example of what will be passed into the model, + model_name (str): name of ONNX model created with .onnx at the end, + opset_version (int): which opset version you want to use to export + input_name (str): name assigned to dummy_tensor + + Returns: + Exports ONNX model converted from Pytorch + """ + + if not isinstance(model, torch.nn.Module): + raise ValueError( + f"The model is not of type torch.nn.Module. Received {type(model)}." + ) + + if not torch.is_tensor(dummy_tensor): + raise ValueError( + f"The dummy tensor needs to be a torch tensor. Received {type(dummy_tensor)}." 
+ ) + + torch.onnx.export( + model, + dummy_tensor, + model_name, + export_params=True, + opset_version=opset_version, + do_constant_folding=True, + input_names=[input_name], + ) + + # Base class class InferenceBase(Transforms.Transform): """ diff --git a/tests/inference_tests/test_inference.py b/tests/inference_tests/test_inference.py index 6fefa48c..535e3f4d 100644 --- a/tests/inference_tests/test_inference.py +++ b/tests/inference_tests/test_inference.py @@ -2,6 +2,7 @@ import numpy as np import onnx +import torch from pathml.core import SlideData from pathml.inference import ( @@ -10,6 +11,7 @@ InferenceBase, RemoteTestHoverNet, check_onnx_clean, + convert_pytorch_onnx, remove_initializer_from_input, ) @@ -219,3 +221,52 @@ def test_RemoteTestHoverNet(): ) inference.remove() + + +def test_convert_pytorch_onnx(): + class SimpleModel(torch.nn.Module): + def __init__(self): + super(SimpleModel, self).__init__() + self.linear = torch.nn.Linear(10, 1) + torch.nn.init.xavier_uniform_(self.linear.weight) + + def forward(self, x): + y = self.linear(x) + return y + + test_tensor = torch.randn(1, 10) + model_test = torch.load("tests/testdata/test.pt") + + model_test.eval() + + convert_pytorch_onnx( + model=model_test, dummy_tensor=test_tensor, model_name="test_export.onnx" + ) + + os.remove("test_export.onnx") + + # test Value Error Statements + + # test lines to check model input + try: + convert_pytorch_onnx( + model=None, dummy_tensor=test_tensor, model_name="test_export.onnx" + ) + + except Exception as e: + assert ( + str(e) + == f"The model is not of type torch.nn.Module. Received {type(None)}." + ) + + # test lines to check model dummy input + try: + convert_pytorch_onnx( + model=model_test, dummy_tensor=None, model_name="test_export.onnx" + ) + + except Exception as e: + assert ( + str(e) + == f"The dummy tensor needs to be a torch tensor. Received {type(None)}." 
+ ) diff --git a/tests/testdata/test.pt b/tests/testdata/test.pt new file mode 100644 index 0000000000000000000000000000000000000000..a4047a54695774620baab6726947d43e70480d37 GIT binary patch literal 2310 zcmb7GOK%%h6duQUb@FJFHi0%JEwA8|#2!DA*d=kODW)(@>N*sLNYya*+~kfj@%YT# zX;h?2Ae1I_2Lg#zS7p-`8xRXb5PO8W;1{rC&qHFveb^a4;s}m(<;*?b`OdlbeD~fI z)Nr>X_4i9{HYANn1bHN88pMcJx9ogwL|SjP54*~`zHL|)U61Y=(k=qTw@y%xd}vNkb+nHq)e6U+8w-9=0+R0?%n$}rmjLZ#+8u6`#WLo^c6`e zMacVD9HBSx2~1DKwOx3#2ybDbd=(3oU&o`XSm<^I3t_Hep}QMc2&{mGdTwH&lOh&s zSi_>-Z)4HUTUd1P9e(iXY1}{t@m<>4cU4&%r0a5g5=?4p$S-+*DOynb`u{OM{(7A* zosB}rn`&6PByOtGU4k5Tf8=vh?N?Rn)y2PUix3;JT)RuZon;zhqjRnS=}K@b#*}<+ z=|3hqGW+^^-{XaY_rL!uyZ7x+KdcOWvv~RUsioUbl;y7;{F43o)Ti0Mv2?KfdFp^w z_@ymOe*jj_?0dr-;=?mO(#bt?H>_Biqm zYi^uQ$}Ibnhnu&$Xl6D#XP=PfMD{3ql42fmEyK1xFxbPPTV_5N|Nq6rlZpAHGM}E$ zq%+C%TvACZbBdzSuXrYz%48PO>68*rq*#@!e-8f`?0<+Ad-!rTDs_lCnyl#^EvU_* zjlyHEM_`X>H+blwuv=prCBfd+z{Y!Ov3#$T*1*PoHwS*#6c}#}Y-|-fyn&seCx=&C z6Gv-gYK`_Fuv8Qqcea literal 0 HcmV?d00001 From 8c6ab73d8c94ca92d5923c010e6028d5688d863b Mon Sep 17 00:00:00 2001 From: jamesgwen Date: Wed, 1 Nov 2023 17:36:05 +0100 Subject: [PATCH 43/50] attempt to get pytorch class to load --- examples/InferenceOnnx_tutorial.ipynb | 102 ++++++++++++++++++++++++ tests/inference_tests/test_inference.py | 13 +-- tests/testdata/simple_model.py | 12 +++ 3 files changed, 117 insertions(+), 10 deletions(-) create mode 100644 tests/testdata/simple_model.py diff --git a/examples/InferenceOnnx_tutorial.ipynb b/examples/InferenceOnnx_tutorial.ipynb index c2787b7a..5d1bb2b9 100644 --- a/examples/InferenceOnnx_tutorial.ipynb +++ b/examples/InferenceOnnx_tutorial.ipynb @@ -128,6 +128,7 @@ "import onnx\n", "import onnxruntime as ort \n", "import requests\n", + "import torch\n", "\n", "from pathml.core import SlideData, Tile\n", "from dask.distributed import Client\n", @@ -174,6 +175,19 @@ " - Users specify:\n", " - `model_path` (str): path to ONNX model\n", "\n", + "
\n", + "\n", + " - `convert_pytorch_onnx` \n", + " - Converts a PyTorch `.pt` file to `.onnx`\n", + " - Wrapper function of the [PyTorch](https://pytorch.org/tutorials/advanced/super_resolution_with_onnxruntime.html) function to handle the conversion.\n", + " - Users specify:\n", + " - model_path (torch.nn.Module Model): Pytorch model to be converted,\n", + " - dummy_tensor (torch.tensor): dummy input tensor that is an example of what will be passed into the model,\n", + " - model_name (str): name of ONNX model created with .onnx at the end,\n", + " - opset_version (int): which opset version you want to use to export\n", + " - input_name (str): name assigned to dummy_tensor\n", + " - Note that the model class must be defined before loading the `.pt` file and set to eval before calling this function. \n", + "\n", "### Inference Classes\n", "\n", "
\n", @@ -293,6 +307,46 @@ " return True\n", "\n", "\n", + "def convert_pytorch_onnx(\n", + " model, dummy_tensor, model_name, opset_version=10, input_name=\"data\"\n", + "):\n", + " \"\"\"Converts a Pytorch Model to ONNX\n", + " Adjusted from https://pytorch.org/tutorials/advanced/super_resolution_with_onnxruntime.html\n", + "\n", + " You need to define the model class and load the weights before exporting. See URL above for full steps.\n", + "\n", + " Args:\n", + " model_path (torch.nn.Module Model): Pytorch model to be converted,\n", + " dummy_tensor (torch.tensor): dummy input tensor that is an example of what will be passed into the model,\n", + " model_name (str): name of ONNX model created with .onnx at the end,\n", + " opset_version (int): which opset version you want to use to export\n", + " input_name (str): name assigned to dummy_tensor\n", + "\n", + " Returns:\n", + " Exports ONNX model converted from Pytorch\n", + " \"\"\"\n", + "\n", + " if not isinstance(model, torch.nn.Module):\n", + " raise ValueError(\n", + " f\"The model is not of type torch.nn.Module. Received {type(model)}.\"\n", + " )\n", + "\n", + " if not torch.is_tensor(dummy_tensor):\n", + " raise ValueError(\n", + " f\"The dummy tensor needs to be a torch tensor. Received {type(dummy_tensor)}.\"\n", + " )\n", + "\n", + " torch.onnx.export(\n", + " model,\n", + " dummy_tensor,\n", + " model_name,\n", + " export_params=True,\n", + " opset_version=opset_version,\n", + " do_constant_folding=True,\n", + " input_names=[input_name],\n", + " )\n", + "\n", + "\n", "# Base class\n", "class InferenceBase(Transforms.Transform):\n", " \"\"\"\n", @@ -612,6 +666,54 @@ "```" ] }, + { + "attachments": {}, + "cell_type": "markdown", + "id": "afe45989", + "metadata": {}, + "source": [ + "### Converting a Pytorch Model to ONNX\n", + "\n", + "Note the following:\n", + "- Similar to PyTorch, you will need to define and create an instance of you model class before loading the `.pt` file. Then you will need to set it to eval mode before calling the conversion function. The code to do these steps is below." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "aa8f41f7", + "metadata": {}, + "outputs": [], + "source": [ + "# Define your model class\n", + "class SimpleModel(torch.nn.Module):\n", + " def __init__(self):\n", + " super(SimpleModel, self).__init__()\n", + " self.linear = torch.nn.Linear(10, 1)\n", + " torch.nn.init.xavier_uniform_(self.linear.weight)\n", + " def forward(self, x):\n", + " y = self.linear(x)\n", + " return y\n", + "\n", + "# Define your model var\n", + "model = SimpleModel()\n", + "\n", + "# Export model as .pt if you haven't already done so\n", + "# If you have already exported a .pt file, you will still need to define a model class, initialize it, and set it to eval mode. 
\n", + "torch.save(model, \"test.pt\")\n", + "\n", + "# Load .pt file\n", + "model_test = torch.load(\"test.pt\")\n", + "# Set model to eval mode\n", + "model_test.eval()\n", + "\n", + "# Define a dummy tensor (this is an example of what the ONNX should expect during inference)\n", + "x = torch.randn(1, 10)\n", + "\n", + "# Run conversion function\n", + "convert_pytorch_onnx(model = model_test, dummy_tensor = x, model_name = \"NAME_OF_OUTPUT_MODEL_HERE.onnx\")" + ] + }, { "attachments": {}, "cell_type": "markdown", diff --git a/tests/inference_tests/test_inference.py b/tests/inference_tests/test_inference.py index 535e3f4d..bbb8e013 100644 --- a/tests/inference_tests/test_inference.py +++ b/tests/inference_tests/test_inference.py @@ -14,6 +14,7 @@ convert_pytorch_onnx, remove_initializer_from_input, ) +from pathml.tests.testdata.simple_model import SimpleModel def test_remove_initializer_from_input(): @@ -224,18 +225,10 @@ def test_RemoteTestHoverNet(): def test_convert_pytorch_onnx(): - class SimpleModel(torch.nn.Module): - def __init__(self): - super(SimpleModel, self).__init__() - self.linear = torch.nn.Linear(10, 1) - torch.nn.init.xavier_uniform_(self.linear.weight) - - def forward(self, x): - y = self.linear(x) - return y + model_test = SimpleModel() test_tensor = torch.randn(1, 10) - model_test = torch.load("tests/testdata/test.pt") + model_test.load_state_dict(torch.load("tests/testdata/test.pt")) model_test.eval() diff --git a/tests/testdata/simple_model.py b/tests/testdata/simple_model.py new file mode 100644 index 00000000..bd9a0219 --- /dev/null +++ b/tests/testdata/simple_model.py @@ -0,0 +1,12 @@ +import torch + + +class SimpleModel(torch.nn.Module): + def __init__(self): + super(SimpleModel, self).__init__() + self.linear = torch.nn.Linear(10, 1) + torch.nn.init.xavier_uniform_(self.linear.weight) + + def forward(self, x): + y = self.linear(x) + return y From 1ae9f8c78ce31d68247240c6e6eac01256bcfc6a Mon Sep 17 00:00:00 2001 From: jamesgwen Date: Wed, 1 Nov 2023 18:12:23 +0100 Subject: [PATCH 44/50] deleting simple model module --- tests/testdata/simple_model.py | 12 ------------ 1 file changed, 12 deletions(-) delete mode 100644 tests/testdata/simple_model.py diff --git a/tests/testdata/simple_model.py b/tests/testdata/simple_model.py deleted file mode 100644 index bd9a0219..00000000 --- a/tests/testdata/simple_model.py +++ /dev/null @@ -1,12 +0,0 @@ -import torch - - -class SimpleModel(torch.nn.Module): - def __init__(self): - super(SimpleModel, self).__init__() - self.linear = torch.nn.Linear(10, 1) - torch.nn.init.xavier_uniform_(self.linear.weight) - - def forward(self, x): - y = self.linear(x) - return y From 8fa3bebb317c5d3d7d09b41815fb63e4ae990437 Mon Sep 17 00:00:00 2001 From: jamesgwen Date: Wed, 1 Nov 2023 18:15:59 +0100 Subject: [PATCH 45/50] adding new .pt file and new test --- tests/inference_tests/test_inference.py | 5 +---- tests/testdata/test.pt | Bin 2310 -> 3646 bytes 2 files changed, 1 insertion(+), 4 deletions(-) diff --git a/tests/inference_tests/test_inference.py b/tests/inference_tests/test_inference.py index bbb8e013..d8106d43 100644 --- a/tests/inference_tests/test_inference.py +++ b/tests/inference_tests/test_inference.py @@ -14,7 +14,6 @@ convert_pytorch_onnx, remove_initializer_from_input, ) -from pathml.tests.testdata.simple_model import SimpleModel def test_remove_initializer_from_input(): @@ -225,10 +224,8 @@ def test_RemoteTestHoverNet(): def test_convert_pytorch_onnx(): - model_test = SimpleModel() - test_tensor = torch.randn(1, 10) - 
model_test.load_state_dict(torch.load("tests/testdata/test.pt")) + model_test = torch.jit.load("tests/testdata/test.pt") model_test.eval() diff --git a/tests/testdata/test.pt b/tests/testdata/test.pt index a4047a54695774620baab6726947d43e70480d37..f52fcb123abbfc92f9982bb5f86ec4395d12ac21 100644 GIT binary patch literal 3646 zcmbtX3se(V8lDg|5FP@`%SaW)5`pGH!XqLmS%|hH$U`&=b__`%kz~Ro6D*HivAhvj zq}F9ug@p>NRTL0vX?d(*RXC{C)y0a4^#!i09;6gpD(nm)5)fECdvorYN#=gvcjtfI zKS3Z-9{>ytfOm!wU=5&R6-pNgPywC6<1PkvzB_i;GMY=+cI)>y7c`CiVOjH4k1v{h z9}PF9gb|yMxp_1%{Y~MsmS*b{1d7hT9bG1mOPNVmyOTqe#p`hH zyxdp{_96g2XXiEnd?&eSu`x0p*LyB@t;+1zWIZ7Sp$er?0zouLdn*^f(K0c_B!^1n zu`+RhLL`=v5n#>!-&6UxsZpgs3QOT=gpkR%!oEXN6%?hG$)HGqFlMhnDS{*lMGT@B z$X|#(l}TZ-K#34_l*SclFievxL~5B>MboSyZysMtM)b#}aPVHSG+Kfp3xpa7q(-GO z6&X^BBh^xw2tviMN}+_}T*=5nDwR*d5Grh?AXhqwF*=C`WiBCPO7sHNk%$F07t?UpBdg>E z`>;*n6LDP``+JV(@$h3;ppxtoiFqSa9*4Y3Gq!=Jb`F9b?zpz^WvdmIvbH zm-j@OebE=|%dR_O*0J@h-@whno;x+s4y3@^f6G@^Cwe@LIV$sH{d_Kd1U_;F7&0|o zQYtZm)<3%)7G-iarAF-?Zdg_;Nq%Bj_eA}sDJ^aQvm43a-bIh@OPEjm=d`0MU^{O+ z(nMI4MML9xW;wG(z1z2dsT*uA^}a(4GI+UsRbX2m z%j-tMGk1m8m;BG}@9*gyiP@N#nxE16_u{nvk#$MgJuSOOU)f}w?dnZ*2)ya)d-9qM z7=N)Q**b&PwgEbYP4IqXJ*`J7_!T`ZE1F^q`CXF z->treL#Ok@D!W;G*Lq*6Y3q+yGCB3&QHu-Nsr!%$P|uMa+P=vh!vTt~gZ-?S^U zto+-@NuCj;4aqjk2G4kpU2gXx7|Cpar`h&7#xOC3NIcyC@>J3>%JGXMMBK(UnLbv} zFdLiAX=56|nbF4MDNKi9`b5^#HCaC%v2HWlclOGqd7=*b9&TkA-yN__b)>|m#ku9@ z9lI0w)8`xVR>B-qm_x2i^W|;h1r1#Tm;T4>yIg>-|neDe`jGd$n5^jlAkeqQ2fo7+~$Jvfno3 zUUH5|FaF(8tI99^{ExqQ_4)CJBtQ8dpVohW6#_@=OH(R3z3o!F&-ifbwuP@wUDn*{ zX3bc6=Hsf|?x?y4Jui8Uzo;`5xDOgE0~20iKKSdLKJc8SUd-+T-Dsb8{qPO|16vtp z>X0T50i=hXs5`)4p(gBEz0)Z)pX*TL8|)J(+;j9cvQ?dO`?XDkrQ@MIUIJ^tXW%(+ zJCOVXSXGqP-{q8)1GVlRY{+T->9?o8dh2W0`GM?eK>oNk*}p6iWaAZQrz|yYsg@IaiwvpKT9^7wvV@ed!D^1V%RrKILP}nKe2{=!R^`d zignq!4eUN^+(R*u!u#f!ceYJw3A_?*3aG1aO^#-m~dex&~Y`Z6hs z%Eb5{(!3wq;tc~nXWs(_U}`Kt8*d9$Vx?3dlO_o8a~_h4c-)WP&mc^u2Z!y+WU<}Z zOivGNaM&D%JA=vMxH6d>Hp`Rk;l^ZeT=5-}xVA2n{UZT4*4xfix3@IHA50lKeQXdo z6{y=dP6d93QA`4v(}B8ehshMRo4PzC`z=#b0Q0T*F>!Tyq{>%5+xUv&D6i z61%g;B7459lT~awzi!>6P2o>mFf;$`wQ@SQZpM2~;U2@{ai+b`&OQANCWAyb$t|X+ zlwb|xgo;ir(d=>(ubZaEQ}9a+C-4)6VLDPbxlE@Z3$)0$Nj)8_8}^hbSS#{`>deU) z1TI|YG6DF2_|aRaHq5nG2fUNV#N*sLNYya*+~kfj@%YT# zX;h?2Ae1I_2Lg#zS7p-`8xRXb5PO8W;1{rC&qHFveb^a4;s}m(<;*?b`OdlbeD~fI z)Nr>X_4i9{HYANn1bHN88pMcJx9ogwL|SjP54*~`zHL|)U61Y=(k=qTw@y%xd}vNkb+nHq)e6U+8w-9=0+R0?%n$}rmjLZ#+8u6`#WLo^c6`e zMacVD9HBSx2~1DKwOx3#2ybDbd=(3oU&o`XSm<^I3t_Hep}QMc2&{mGdTwH&lOh&s zSi_>-Z)4HUTUd1P9e(iXY1}{t@m<>4cU4&%r0a5g5=?4p$S-+*DOynb`u{OM{(7A* zosB}rn`&6PByOtGU4k5Tf8=vh?N?Rn)y2PUix3;JT)RuZon;zhqjRnS=}K@b#*}<+ z=|3hqGW+^^-{XaY_rL!uyZ7x+KdcOWvv~RUsioUbl;y7;{F43o)Ti0Mv2?KfdFp^w z_@ymOe*jj_?0dr-;=?mO(#bt?H>_Biqm zYi^uQ$}Ibnhnu&$Xl6D#XP=PfMD{3ql42fmEyK1xFxbPPTV_5N|Nq6rlZpAHGM}E$ zq%+C%TvACZbBdzSuXrYz%48PO>68*rq*#@!e-8f`?0<+Ad-!rTDs_lCnyl#^EvU_* zjlyHEM_`X>H+blwuv=prCBfd+z{Y!Ov3#$T*1*PoHwS*#6c}#}Y-|-fyn&seCx=&C z6Gv-gYK`_Fuv8Qqcea From 2e84cf7f0a6629568e46a79acfc710d748ed35e2 Mon Sep 17 00:00:00 2001 From: jamesgwen Date: Wed, 1 Nov 2023 18:36:25 +0100 Subject: [PATCH 46/50] adding new tutorial --- examples/InferenceOnnx_tutorial.ipynb | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/examples/InferenceOnnx_tutorial.ipynb b/examples/InferenceOnnx_tutorial.ipynb index 5d1bb2b9..fe4de75a 100644 --- a/examples/InferenceOnnx_tutorial.ipynb +++ b/examples/InferenceOnnx_tutorial.ipynb @@ -672,7 +672,7 @@ "id": "afe45989", "metadata": {}, "source": [ - "### Converting a Pytorch Model to ONNX\n", + "### Converting a Pytorch Model to ONNX Using the 
`convert_pytorch_onnx` Function\n", "\n", "Note the following:\n", "- Similar to PyTorch, you will need to define and create an instance of you model class before loading the `.pt` file. Then you will need to set it to eval mode before calling the conversion function. The code to do these steps is below." @@ -686,10 +686,12 @@ "outputs": [], "source": [ "# Define your model class\n", + "num_input, num_output, batch_size = 10, 1, 1\n", + "\n", "class SimpleModel(torch.nn.Module):\n", " def __init__(self):\n", " super(SimpleModel, self).__init__()\n", - " self.linear = torch.nn.Linear(10, 1)\n", + " self.linear = torch.nn.Linear(num_input, num_output)\n", " torch.nn.init.xavier_uniform_(self.linear.weight)\n", " def forward(self, x):\n", " y = self.linear(x)\n", @@ -700,6 +702,7 @@ "\n", "# Export model as .pt if you haven't already done so\n", "# If you have already exported a .pt file, you will still need to define a model class, initialize it, and set it to eval mode. \n", + "# If you saved your model using `torch.jit.script`, you will not need to define your model class and instead load it using `torch.jit.load` then set it to eval mode.\n", "torch.save(model, \"test.pt\")\n", "\n", "# Load .pt file\n", @@ -708,7 +711,7 @@ "model_test.eval()\n", "\n", "# Define a dummy tensor (this is an example of what the ONNX should expect during inference)\n", - "x = torch.randn(1, 10)\n", + "x = torch.randn(batch_size, num_input)\n", "\n", "# Run conversion function\n", "convert_pytorch_onnx(model = model_test, dummy_tensor = x, model_name = \"NAME_OF_OUTPUT_MODEL_HERE.onnx\")" From 91d8f41b49d1b2b21a274db7c3fbabe187430f21 Mon Sep 17 00:00:00 2001 From: jamesgwen Date: Mon, 6 Nov 2023 15:51:16 +0100 Subject: [PATCH 47/50] new example book --- examples/InferenceOnnx_tutorial.ipynb | 574 +++++++++----------------- 1 file changed, 188 insertions(+), 386 deletions(-) diff --git a/examples/InferenceOnnx_tutorial.ipynb b/examples/InferenceOnnx_tutorial.ipynb index fe4de75a..9e1d8f19 100644 --- a/examples/InferenceOnnx_tutorial.ipynb +++ b/examples/InferenceOnnx_tutorial.ipynb @@ -117,7 +117,7 @@ }, { "cell_type": "code", - "execution_count": 1, + "execution_count": 8, "id": "436b91f3-6338-4043-8742-496b354544aa", "metadata": {}, "outputs": [], @@ -126,7 +126,7 @@ "os.environ[\"JAVA_HOME\"] = \"/opt/conda/envs/YOUR ENVIRONMENET NAME\" # TO DO: CHANGE THIS TO YOUR ENVIRONMENT NAME\n", "import numpy as np \n", "import onnx\n", - "import onnxruntime as ort \n", + "import onnxruntime\n", "import requests\n", "import torch\n", "\n", @@ -135,14 +135,11 @@ "from pathml.preprocessing import Pipeline\n", "import pathml.preprocessing.transforms as Transforms\n", "\n", - "from pathml.inference import (\n", - " HaloAIInference,\n", - " Inference,\n", - " InferenceBase,\n", - " RemoteTestHoverNet,\n", - " check_onnx_clean,\n", - " remove_initializer_from_input,\n", - ")" + "import matplotlib.pyplot as plt\n", + "import matplotlib \n", + "\n", + "from PIL import Image\n", + "\n" ] }, { @@ -243,380 +240,7 @@ " 'model_input_notes': 'Accepts tiles of 256 x 256',\n", " 'model_output_notes': None,\n", " 'citation': 'Pocock J, Graham S, Vu QD, Jahanifar M, Deshpande S, Hadjigeorghiou G, Shephard A, Bashir RM, Bilal M, Lu W, Epstein D. TIAToolbox as an end-to-end library for advanced tissue image analytics. Communications medicine. 2022 Sep 24;2(1):120.'}\n", - " ```\n", - " \n", - "### Raw Code\n", - "\n", - "Below is the raw code for your convenience. You can also find the raw code on our github. 
\n", - "[![View on GitHub](https://img.shields.io/badge/View-on%20GitHub-lightgrey?logo=github)](https://github.com/Dana-Farber-AIOS/pathml/tree/master/pathml)" - ] - }, - { - "cell_type": "code", - "execution_count": 2, - "id": "3339cf66-8de6-4af1-9d3e-7312cd69eb33", - "metadata": {}, - "outputs": [], - "source": [ - "def remove_initializer_from_input(model_path, new_path):\n", - " \"\"\"Removes initializers from HaloAI ONNX models\n", - " Taken from https://github.com/microsoft/onnxruntime/blob/main/tools/python/remove_initializer_from_input.py\n", - "\n", - " Args:\n", - " model_path (str): path to ONNX model,\n", - " new_path (str): path to save adjusted model w/o initializers,\n", - "\n", - " Returns:\n", - " ONNX model w/o initializers to run inference using PathML\n", - " \"\"\"\n", - "\n", - " model = onnx.load(model_path)\n", - "\n", - " inputs = model.graph.input\n", - " name_to_input = {}\n", - " for onnx_input in inputs:\n", - " name_to_input[onnx_input.name] = onnx_input\n", - "\n", - " for initializer in model.graph.initializer:\n", - " if initializer.name in name_to_input:\n", - " inputs.remove(name_to_input[initializer.name])\n", - "\n", - " onnx.save(model, new_path)\n", - "\n", - "\n", - "def check_onnx_clean(model_path):\n", - " \"\"\"Checks if the model has had it's initalizers removed from input graph.\n", - " Adapted from from https://github.com/microsoft/onnxruntime/blob/main/tools/python/remove_initializer_from_input.py\n", - "\n", - " Args:\n", - " model_path (str): path to ONNX model,\n", - "\n", - " Returns:\n", - " Boolean if there are initializers in input graph.\n", - " \"\"\"\n", - "\n", - " model = onnx.load(model_path)\n", - "\n", - " inputs = model.graph.input\n", - " name_to_input = {}\n", - " for onnx_input in inputs:\n", - " name_to_input[onnx_input.name] = onnx_input\n", - "\n", - " for initializer in model.graph.initializer:\n", - " if initializer.name in name_to_input:\n", - " return True\n", - "\n", - "\n", - "def convert_pytorch_onnx(\n", - " model, dummy_tensor, model_name, opset_version=10, input_name=\"data\"\n", - "):\n", - " \"\"\"Converts a Pytorch Model to ONNX\n", - " Adjusted from https://pytorch.org/tutorials/advanced/super_resolution_with_onnxruntime.html\n", - "\n", - " You need to define the model class and load the weights before exporting. See URL above for full steps.\n", - "\n", - " Args:\n", - " model_path (torch.nn.Module Model): Pytorch model to be converted,\n", - " dummy_tensor (torch.tensor): dummy input tensor that is an example of what will be passed into the model,\n", - " model_name (str): name of ONNX model created with .onnx at the end,\n", - " opset_version (int): which opset version you want to use to export\n", - " input_name (str): name assigned to dummy_tensor\n", - "\n", - " Returns:\n", - " Exports ONNX model converted from Pytorch\n", - " \"\"\"\n", - "\n", - " if not isinstance(model, torch.nn.Module):\n", - " raise ValueError(\n", - " f\"The model is not of type torch.nn.Module. Received {type(model)}.\"\n", - " )\n", - "\n", - " if not torch.is_tensor(dummy_tensor):\n", - " raise ValueError(\n", - " f\"The dummy tensor needs to be a torch tensor. 
Received {type(dummy_tensor)}.\"\n", - " )\n", - "\n", - " torch.onnx.export(\n", - " model,\n", - " dummy_tensor,\n", - " model_name,\n", - " export_params=True,\n", - " opset_version=opset_version,\n", - " do_constant_folding=True,\n", - " input_names=[input_name],\n", - " )\n", - "\n", - "\n", - "# Base class\n", - "class InferenceBase(Transforms.Transform):\n", - " \"\"\"\n", - " Base class for all ONNX Models.\n", - " Each transform must operate on a Tile.\n", - " \"\"\"\n", - "\n", - " def __init__(self):\n", - " self.model_card = {\n", - " \"name\": None,\n", - " \"num_classes\": None,\n", - " \"model_type\": None,\n", - " \"notes\": None,\n", - " \"model_input_notes\": None,\n", - " \"model_output_notes\": None,\n", - " \"citation\": None,\n", - " }\n", - "\n", - " def __repr__(self):\n", - " return \"Base class for all ONNX models\"\n", - "\n", - " def get_model_card(self):\n", - " return self.model_card\n", - "\n", - " def set_name(self, name):\n", - " self.model_card[\"name\"] = name\n", - "\n", - " def set_num_classes(self, num):\n", - " self.model_card[\"num_classes\"] = num\n", - "\n", - " def set_model_type(self, model_type):\n", - " self.model_card[\"model_type\"] = model_type\n", - "\n", - " def set_notes(self, note):\n", - " self.model_card[\"notes\"] = note\n", - "\n", - " def set_model_input_notes(self, note):\n", - " self.model_card[\"model_input_notes\"] = note\n", - "\n", - " def set_model_output_notes(self, note):\n", - " self.model_card[\"model_output_notes\"] = note\n", - "\n", - " def set_citation(self, citation):\n", - " self.model_card[\"citation\"] = citation\n", - "\n", - " def reshape(self, image):\n", - " \"\"\"standard reshaping of tile image\"\"\"\n", - " # flip dimensions\n", - " # follows convention used here https://github.com/Dana-Farber-AIOS/pathml/blob/master/pathml/ml/dataset.py\n", - "\n", - " if image.ndim == 3:\n", - " # swap axes from HWC to CHW\n", - " image = image.transpose(2, 0, 1)\n", - " # add a dimesion bc onnx models usually have batch size as first dim: e.g. 
(1, channel, height, width)\n", - " image = np.expand_dims(image, axis=0)\n", - "\n", - " return image\n", - " else:\n", - " # in this case, we assume that we have XYZCT channel order\n", - " # so we swap axes to TCZYX for batching\n", - " # note we are not adding a dim here for batch bc we assume that subsetting will create a batch \"placeholder\" dim\n", - " image = image.T\n", - "\n", - " return image\n", - "\n", - " def F(self, target):\n", - " \"\"\"functional implementation\"\"\"\n", - " raise NotImplementedError\n", - "\n", - " def apply(self, tile):\n", - " \"\"\"modify Tile object in-place\"\"\"\n", - " raise NotImplementedError\n", - "\n", - "\n", - "# class to handle local onnx models\n", - "class Inference(InferenceBase):\n", - " \"\"\"Transformation to run inferrence on ONNX model.\n", - "\n", - " Assumptions:\n", - " - The ONNX model has been cleaned by `remove_initializer_from_input` first\n", - "\n", - " Args:\n", - " model_path (str): path to ONNX model w/o initializers,\n", - " input_name (str): name of the input the ONNX model accepts\n", - " \"\"\"\n", - "\n", - " def __init__(\n", - " self,\n", - " model_path=None,\n", - " input_name=\"data\",\n", - " num_classes=None,\n", - " model_type=None,\n", - " local=True,\n", - " ):\n", - " super().__init__()\n", - "\n", - " self.input_name = input_name\n", - " self.num_classes = num_classes\n", - " self.model_type = model_type\n", - " self.local = local\n", - "\n", - " if self.local:\n", - " # using a local onnx model\n", - " self.model_path = model_path\n", - " else:\n", - " # if using a model from the model zoo, set the local path to a temp file\n", - " self.model_path = \"temp.onnx\"\n", - "\n", - " # fill in parts of the model_card with the following info\n", - " self.model_card[\"num_classes\"] = self.num_classes\n", - " self.model_card[\"model_type\"] = self.model_type\n", - "\n", - " # check if there are initializers in input graph if using a local model\n", - " if local:\n", - " if check_onnx_clean(model_path):\n", - " raise ValueError(\n", - " \"The ONNX model still has graph initializers in the input graph. 
Use `remove_initializer_from_input` to remove them.\"\n", - " )\n", - " else:\n", - " pass\n", - "\n", - " def __repr__(self):\n", - " if self.local:\n", - " return f\"Class to handle ONNX model locally stored at {self.model_path}\"\n", - " else:\n", - " return f\"Class to handle a {self.model_card['model_name']} from the PathML model zoo.\"\n", - "\n", - " def inference(self, image):\n", - " # reshape the image\n", - " image = self.reshape(image)\n", - "\n", - " # load fixed model\n", - " onnx_model = onnx.load(self.model_path)\n", - "\n", - " # check tile dimensions match ONNX input dimensions\n", - " input_node = onnx_model.graph.input\n", - "\n", - " dimensions = []\n", - " for input in input_node:\n", - " if input.name == self.input_name:\n", - " input_shape = input.type.tensor_type.shape.dim\n", - " for dim in input_shape:\n", - " dimensions.append(dim.dim_value)\n", - "\n", - " assert (\n", - " image.shape[-1] == dimensions[-1] and image.shape[-2] == dimensions[-2]\n", - " ), f\"expecting tile shape of {dimensions[-2]} by {dimensions[-1]}, got {image.shape[-2]} by {image.shape[-1]}\"\n", - "\n", - " # check onnx model\n", - " onnx.checker.check_model(onnx_model)\n", - "\n", - " # start an inference session\n", - " ort_sess = onnxruntime.InferenceSession(self.model_path)\n", - "\n", - " # create model output, returns a list\n", - " model_output = ort_sess.run(None, {self.input_name: image.astype(\"f\")})\n", - "\n", - " return model_output\n", - "\n", - " def F(self, image):\n", - " # run inference function\n", - " prediction_map = self.inference(image)\n", - "\n", - " # single task model\n", - " if len(prediction_map) == 1:\n", - " # return first and only prediction array in the list\n", - " return prediction_map[0]\n", - "\n", - " # multi task model\n", - " else:\n", - " # concatenate prediction results\n", - " # assumes that the tasks all output prediction arrays of same dimension on H and W\n", - " result_array = np.concatenate(prediction_map, axis=1)\n", - " return result_array\n", - "\n", - " def apply(self, tile):\n", - " tile.image = self.F(tile.image)\n", - "\n", - "\n", - "class HaloAIInference(Inference):\n", - " \"\"\"Transformation to run inferrence on HALO AI ONNX model.\n", - "\n", - " Assumptions:\n", - " - Assumes that the ONNX model returns a tensor in which there is one prediction map for each class\n", - " - For example, if there are 5 classes, the ONNX model will output a (1, 5, Height, Weight) tensor\n", - " - If you select to argmax the classes, the class assumes a softmax or sigmoid has already been applied\n", - " - HaloAI ONNX models always have 20 class maps so you need to index into the first x maps if you have x classes\n", - "\n", - "\n", - " Args:\n", - " model_path (str): path to ONNX model w/o initializers,\n", - " num_classes (int): number of classes in the data,\n", - " input_name (str): name of the input the ONNX model accepts\n", - " \"\"\"\n", - "\n", - " def __init__(\n", - " self,\n", - " model_path=None,\n", - " input_name=\"data\",\n", - " num_classes=None,\n", - " model_type=None,\n", - " local=True,\n", - " ):\n", - " super().__init__(model_path, input_name, num_classes, model_type, local)\n", - "\n", - " self.model_card[\"num_classes\"] = self.num_classes\n", - " self.model_card[\"model_type\"] = self.model_type\n", - "\n", - " def __repr__(self):\n", - " return f\"Class to handle HALO AI ONNX model locally stored at {self.model_path}\"\n", - "\n", - " def F(self, image):\n", - " prediction_map = self.inference(image)\n", - "\n", - " 
prediction_map = prediction_map[0][:, 0 : self.num_classes, :, :]\n", - "\n", - " return prediction_map\n", - "\n", - " def apply(self, tile):\n", - " tile.image = self.F(tile.image)\n", - "\n", - "\n", - "# class to handle remote onnx models\n", - "class RemoteTestHoverNet(Inference):\n", - " \"\"\"Transformation to run inferrence on ONNX model.\n", - "\n", - " Citation for model:\n", - " Pocock J, Graham S, Vu QD, Jahanifar M, Deshpande S, Hadjigeorghiou G, Shephard A, Bashir RM, Bilal M, Lu W, Epstein D.\n", - " TIAToolbox as an end-to-end library for advanced tissue image analytics. Communications medicine. 2022 Sep 24;2(1):120.\n", - "\n", - " Args:\n", - " model_path (str): temp file name to download onnx from huggingface,\n", - " input_name (str): name of the input the ONNX model accepts\n", - " \"\"\"\n", - "\n", - " def __init__(\n", - " self,\n", - " model_path=\"temp.onnx\",\n", - " input_name=\"data\",\n", - " num_classes=5,\n", - " model_type=\"Segmentation\",\n", - " local=False,\n", - " ):\n", - " super().__init__(model_path, input_name, num_classes, model_type, local)\n", - "\n", - " # specify URL of the model in PathML public repository\n", - " url = \"https://huggingface.co/pathml/test/resolve/main/hovernet_fast_tiatoolbox_fixed.onnx\"\n", - "\n", - " # download model, save as temp.onnx\n", - " with open(self.model_path, \"wb\") as out_file:\n", - " content = requests.get(url, stream=True).content\n", - " out_file.write(content)\n", - "\n", - " self.model_card[\"num_classes\"] = self.num_classes\n", - " self.model_card[\"model_type\"] = self.model_type\n", - " self.model_card[\"name\"] = \"Tiabox HoverNet Test\"\n", - " self.model_card[\"model_input_notes\"] = \"Accepts tiles of 256 x 256\"\n", - " self.model_card[\n", - " \"citation\"\n", - " ] = \"Pocock J, Graham S, Vu QD, Jahanifar M, Deshpande S, Hadjigeorghiou G, Shephard A, Bashir RM, Bilal M, Lu W, Epstein D. TIAToolbox as an end-to-end library for advanced tissue image analytics. Communications medicine. 2022 Sep 24;2(1):120.\"\n", - "\n", - " def __repr__(self):\n", - " return \"Class to handle remote TIAToolBox HoverNet test ONNX. See model card for citation.\"\n", - "\n", - " def apply(self, tile):\n", - " tile.image = self.F(tile.image)\n", - "\n", - " def remove(self):\n", - " # remove the temp.onnx model\n", - " os.remove(self.model_path)\n" + " ```" ] }, { @@ -811,6 +435,7 @@ "source": [ "### Remote ONNX Using our `RemoteTestHoverNet` Class\n", "- Uses a Hovernet from [TIAToolbox](https://github.com/TissueImageAnalytics/tiatoolbox) \n", + "- This version of Hovernet was trained on the [MoNuSAC](https://monusac-2020.grand-challenge.org/) dataset.\n", "- Note that the purpose of this model is to illustrate how PathML will handle future remote models. We plan on release more public models to our model zoo on HuggingFace in the future.\n", "- Citation for model:\n", " - Pocock J, Graham S, Vu QD, Jahanifar M, Deshpande S, Hadjigeorghiou G, Shephard A, Bashir RM, Bilal M, Lu W, Epstein D. TIAToolbox as an end-to-end library for advanced tissue image analytics. Communications medicine. 2022 Sep 24;2(1):120.\n", @@ -864,7 +489,180 @@ "```python\n", "for tile in wsi.tiles: \n", " print(tile.image.shape) \n", - "```" + "```\n", + "\n", + "To see how to use these tiles to make visualizations, see below." 
+ ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "id": "251a9099-8e6f-4e4c-b685-7087191fe9fe", + "metadata": {}, + "source": [ + "## Full Example With Vizualization of Output\n", + "\n", + "The `RemoteTestHoverNet()` uses a pretrained HoverNet from TIAToolBox trained on the [MoNuSAC](https://monusac-2020.grand-challenge.org/) dataset. **The model was trained to accept tiles of 256x256 to create a prediction matrix of size 164x164 with 9 channels.** The first 5 channels correspond to the Nuclei Types (TP), the next two channels correspond to the Nuclei Pixels (NP), and the last two channels correspond to the Hover (HV). The documention for these channels can be found here on TIAToolBox's [website](https://tia-toolbox.readthedocs.io/en/v1.0.1/_modules/tiatoolbox/models/architecture/hovernet.html#HoVerNet.infer_batch). \n", + "\n", + "In this example we use an taken from the [MoNuSAC](https://monusac-2020.grand-challenge.org/) dataset. See citation in the `References` section.\n", + "\n", + "See citation in the `References` section.\n", + "\n", + "### Load Packages" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "id": "925d4ebd-3803-409a-82be-780115ffb152", + "metadata": {}, + "source": [ + "### Run Code as Demonstrated Above\n", + "\n", + "Note that to run the following code, you will need to download and save the image titled `TCGA-5P-A9K0-01Z-00-DX1_1.svs` in the same directory as the notebook." + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "id": "23951050-b47f-4b38-b0b6-786081fc69f0", + "metadata": {}, + "outputs": [], + "source": [ + "# Define slide path\n", + "slide_path = 'TCGA-5P-A9K0-01Z-00-DX1_1.svs'\n", + "\n", + "inference = RemoteTestHoverNet()\n", + "\n", + "# Create a transformation list\n", + "transformation_list = [\n", + " inference\n", + "] \n", + "\n", + "# Initialize pathml.core.slide_data.SlideData object\n", + "wsi = SlideData(slide_path)\n", + "\n", + "# Set up PathML pipeline\n", + "pipeline = Pipeline(transformation_list)\n", + "\n", + "# Run Inference\n", + "wsi.run(pipeline, tile_size = 256, tile_stride = 164, tile_pad=True)\n", + "\n", + "# DELETE ONNX MODEL DOWNLOADED FROM HUGGINGFACE\n", + "inference.remove() " + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "id": "2921a180-20bc-4ce1-960d-7005892f4585", + "metadata": {}, + "source": [ + "Let's look at the first tile which comes from the top left corner (0,0) and Nucleus Pixel predictions." + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "id": "a607bb7d-de3e-4444-8829-75d7da9505fb", + "metadata": {}, + "outputs": [], + "source": [ + "for tile in wsi.tiles:\n", + " # Create empty numpy array\n", + " a = np.empty((2, 164, 164), dtype=object)\n", + " # Get Nucleus Predictions\n", + " classes = tile.image[0, 5:7, :, :] \n", + " a = classes\n", + " # Take the argmax to make the predictions binary\n", + " image = np.argmax(a, axis = 0) \n", + " # Multiple values by 255 to make the array image friendly\n", + " image = image * (255/1) \n", + " # Make a grey scale image\n", + " img = Image.fromarray(image.astype('uint8'), \"L\")\n", + " # Save Image\n", + " img.save('test_array_1.png')\n", + " # Can break after one iteration since we are using at the tile at (0, 0).\n", + " break " + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "id": "aa6fbb49-7173-4a65-9b1f-e7b90a5228c5", + "metadata": {}, + "source": [ + "Lets visualize the tile vs the tile predictions. 
Since the model uses a 256x256 tile to create a prediction map of size 164x164, we need to take our tile located at (0,0) and crop it down to the center 164x164 pixes. " + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "id": "e29e98f3-c04c-4d77-8681-c837181bf415", + "metadata": {}, + "outputs": [], + "source": [ + "prediction_dim = 164\n", + "tile_dim = 256\n", + "crop_amount = int((256 - 164) / 2) \n", + "wsi = SlideData(slide_path)\n", + "\n", + "generator = wsi.generate_tiles(shape = (tile_dim, tile_dim), level = 0)\n", + "\n", + "for tile in generator:\n", + " # Extract array from tile\n", + " image = tile.image\n", + " # Crop tile\n", + " image = image[crop_amount: crop_amount + prediction_dim, crop_amount: crop_amount + prediction_dim] \n", + " # Convert array to image\n", + " img = Image.fromarray(image)\n", + " # Save Image\n", + " img.save('raw_tile.png')\n", + " break" + ] + }, + { + "cell_type": "code", + "execution_count": 12, + "id": "98ab9eb0-455d-4353-b760-3d65820e81de", + "metadata": {}, + "outputs": [ + { + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAABB0AAAIlCAYAAACZ/IBkAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjcuMSwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/bCgiHAAAACXBIWXMAAA9hAAAPYQGoP6dpAAEAAElEQVR4nOy9ebQlWVmm/0XEOfdm1ggFBcjQVYCAYJcgICIICrqapQyCAjJIMwmorUg3sH4oAqKgKE2DogjYTMqwkEEEEWccWKg4Y8tCAasQCqgpK+d77zkRsX9/ZObZz7fP/m7em5WnJt7HxXKfODHs2LEjzs2o9/3eJqWUTAghhBBCCCGEEOI0017XHRBCCCGEEEIIIcSNE710EEIIIYQQQgghxErQSwchhBBCCCGEEEKsBL10EEIIIYQQQgghxErQSwchhBBCCCGEEEKsBL10EEIIIYQQQgghxErQSwchhBBCCCGEEEKsBL10EEIIIYQQQgghxErQSwchhBBCCCGEEEKsBL10EOI64qd/+qetaZpT2vatb32rNU1jl1xyyentFLjkkkusaRp761vfurJjCCGEEGJnNE1jP/3TP73r7W5Iv+ff/u3fbt/+7d+++LyKvl944YX2lKc85bTtTwhxcvTSQYhd8q//+q/2Az/wA3ab29zG1tfX7da3vrU98YlPtH/913+9rrt2nfBnf/Zn1jSNvfe9772uuyKEEEKslBMv/ZumsY997GNL36eU7Ha3u501TWMPe9jDroMenjonfs9P/G86ndod7nAH++///b/bf/zHf1zX3dsVH//4x+2nf/qnbf/+/dd1V4QQppcOQuyK97///XbPe97T/uRP/sSe+tSn2ute9zp7+tOfbh/96Eftnve8p/32b//2jvf1Uz/1U7axsXFK/XjSk55kGxsbdsEFF5zS9kIIIYQ4dfbs2WPvfOc7l5b/+Z//uX3xi1+09fX166BXp4dnP/vZ9pu/+Zv2xje+0R760Ifau9/9bvumb/om+9KXvnSt9+WCCy6wjY0Ne9KTnrSr7T7+8Y/bS1/60upLh3/7t3+zX//1Xz9NPRRC7ITJdd0BIW4ofO5zn7MnPelJdoc73MH+4i/+ws4///zFdz/+4z9uD3jAA+xJT3qSffKTn7Q73OEO4X6OHDliZ555pk0mE5tMTu0W7LrOuq47pW2FEEIIcc347u/+bnvPe95jv/zLv+x+y9/5znfave51L7vyyiuvw95dMx7wgAfYox/9aDMze+pTn2p3vvOd7dnPfra97W1vs5/4iZ+obnPib5vTTdM0tmfPntO6zxvyCyEhbqhI6SDEDnnlK19pR48etTe+8Y3uhYOZ2c1vfnN7wxveYEeOHLFf/MVfXCw/UbfhU5/6lD3hCU+wm970pvat3/qt7juysbFhz372s+3mN7+5nX322faIRzzCLr300iUfZ62mw4UXXmgPe9jD7GMf+5jd5z73sT179tgd7nAH+43f+A13jH379tnznvc8u+iii+yss86yc845x77ru77L/vmf//k0jVQ+t3//93+3H/iBH7Bzzz3Xzj//fHvRi15kKSX7whe+YN/zPd9j55xzjt3qVreyV73qVW772WxmL37xi+1e97qXnXvuuXbmmWfaAx7wAPvoRz+6dKyrrrrKnvSkJ9k555xjN7nJTezJT36y/fM//3PVA/rpT3/aHv3oR9t5551ne/bssXvf+972wQ9+8LSdtxBCiK8OHv/4x9tVV11lf/RHf7RYNpvN7L3vfa894QlPqG5z5MgRe+5zn2u3u93tbH193e5yl7vY//7f/9tSSm69ra0t+5//83/a+eefv/hb4Itf/GJ1n5deeqk97WlPs1ve8pa2vr5uX//1X29vfvObT9+JmtmDH/xgMzO7+OKLzWz7v23MzN7+9rfbve51L9u7d6+dd9559rjHPc6+8IUvLO33jW98o93xjne0vXv32n3ucx/7y7/8y6V1opoOn/70p+2xj32snX/++bZ37167y13uYi984QsX/Xv+859vZma3v/3tF3aRE38z1Wo6/Md//Ic95jGPsfPOO8/OOOMMu+9972sf/vCH3Ton7Ce/9Vu/ZS9/+cvttre9re3Zs8e+4zu+wz772c+6dT/zmc/Y933f99mtbnUr27Nnj932tre1xz3ucXbgwIGTjLYQN06kdBBih3zoQx+yCy+80B7wgAdUv3/gAx9oF1544dKPlJnZYx7zGLvTne5kP/dzP7f0xwV5ylOeYr/1W79lT3rSk+y+972v/fmf/7k99KEP3XEfP/vZz9qjH/1oe/rTn25PfvKT7c1vfrM95SlPsXvd61729V//9WZ27If1Ax/4gD3mMY+x29/+9nbZZZfZG97wBvu2b/s2+9SnPmW3vvWtd3y8k/H93//9dte73tVe8YpX2Ic//GF72cteZuedd5694Q1vsAc/+MH2C7/wC/aOd7zDnve859k3fdM32QMf
+EAzMzt48KD93//7f+3xj3+8PeMZz7BDhw7Zm970JnvIQx5in/jEJ+we97iHmZmN42gPf/jD7ROf+IT98A//sH3d132d/c7v/I49+clPXurLv/7rv9r9739/u81tbmMveMEL7Mwzz7Tf+q3fskc+8pH2vve9zx71qEedtvMWQghx4+bCCy+0b/mWb7F3vetd9l3f9V1mZvaRj3zEDhw4YI973OPsl3/5l936KSV7xCMeYR/96Eft6U9/ut3jHvewP/iDP7DnP//5dumll9qrX/3qxbo/+IM/aG9/+9vtCU94gt3vfvezP/3TP63+LXDZZZfZfe97X2uaxn70R3/Uzj//fPvIRz5iT3/60+3gwYP2nOc857Sc6+c+9zkzM7vZzW7mltf+tnn5y19uL3rRi+yxj32s/eAP/qBdccUV9trXvtYe+MAH2j/+4z/aTW5yEzMze9Ob3mTPetaz7H73u5895znPsf/4j/+wRzziEXbeeefZ7W53u23788lPftIe8IAH2HQ6tWc+85l24YUX2uc+9zn70Ic+ZC9/+cvte7/3e+3f//3f7V3vepe9+tWvtpvf/OZmZkv/wegEl112md3vfvezo0eP2rOf/Wy72c1uZm9729vsEY94hL33ve9d+vvgFa94hbVta8973vPswIED9ou/+Iv2xCc+0f7mb/7GzI69fHrIQx5iW1tb9mM/9mN2q1vdyi699FL73d/9Xdu/f7+de+65u7sAQtwYSEKIk7J///5kZul7vud7tl3vEY94RDKzdPDgwZRSSi95yUuSmaXHP/7xS+ue+O4Ef//3f5/MLD3nOc9x6z3lKU9JZpZe8pKXLJa95S1vSWaWLr744sWyCy64IJlZ+ou/+IvFsssvvzytr6+n5z73uYtlm5ubaRgGd4yLL744ra+vp5/5mZ9xy8wsveUtb9n2nD/60Y8mM0vvec97ls7tmc985mJZ3/fptre9bWqaJr3iFa9YLL/66qvT3r1705Of/GS37tbWljvO1VdfnW55y1umpz3taYtl73vf+5KZpde85jWLZcMwpAc/+MFLff+O7/iOdNFFF6XNzc3FsnEc0/3ud790pzvdadtzFEIIIVLKv79/+7d/m37lV34lnX322eno0aMppZQe85jHpAc96EEppWO/yQ996EMX233gAx9IZpZe9rKXuf09+tGPTk3TpM9+9rMppZT+6Z/+KZlZ+pEf+RG33hOe8ISlvwWe/vSnp6/5mq9JV155pVv3cY97XDr33HMX/drt7/mb3/zmdMUVV6QvfelL6cMf/nC68MILU9M06W//9m9TSvHfNpdccknqui69/OUvd8v/5V/+JU0mk8Xy2WyWbnGLW6R73OMe7rf+jW98YzKz9G3f9m2LZbW+P/CBD0xnn312+vznP++OM47jov3KV75y6e+kE1xwwQXub47nPOc5yczSX/7lXy6WHTp0KN3+9rdPF1544eJvphPjc9e73tX1+5d+6ZeSmaV/+Zd/SSml9I//+I9LfxcJ8dWO7BVC7IBDhw6ZmdnZZ5+97Xonvj948KBb/kM/9EMnPcbv//7vm5nZj/zIj7jlP/ZjP7bjft7tbndzSozzzz/f7nKXu7iq0+vr69a2x279YRjsqquusrPOOsvucpe72D/8wz/s+Fg74Qd/8AcX7a7r7N73vrellOzpT3/6YvlNbnKTpT52XWdra2tmdkzNsG/fPuv73u5973u7Pv7+7/++TadTe8YznrFY1rat/Y//8T9cP/bt22d/+qd/ao997GPt0KFDduWVV9qVV15pV111lT3kIQ+xz3zmM3bppZee1nMXQghx4+axj32sbWxs2O/+7u/aoUOH7Hd/93dDa8Xv/d7vWdd19uxnP9stf+5zn2spJfvIRz6yWM/MltYrVQspJXvf+95nD3/4wy2ltPhdu/LKK+0hD3mIHThw4JR/05/2tKfZ+eefb7e+9a3toQ99qB05csTe9ra32b3vfW+3Xvm3zfvf/34bx9Ee+9jHuv7c6la3sjvd6U4Li+Tf/d3f2eWXX24/9EM/tPitNzum9jyZCuCKK66wv/iLv7CnPe1p9l/+y39x351qDPnv/d7v2X3ucx9nETnrrLPsmc98pl1yySX2qU99yq3/1Kc+1fX7xN9dJ/6OOXEOf/AHf2BHjx49pT4JcWND9gohdsCJlwknXj5ERC8nbn/725/0GJ///Oetbduldb/2a792x/0sf4DNzG5605va1Vdfvfg8jqP90i/9kr3uda+ziy++2IZhWHxXSievKWV/zj33XNuzZ89C6sjlV111lVv2tre9zV71qlfZpz/9aZvP54vlHJ/Pf/7z9jVf8zV2xhlnuG3LMfvsZz9rKSV70YteZC960Yuqfb388svtNre5zc5PTgghxFc1559/vn3nd36nvfOd77SjR4/aMAyLAowln//85+3Wt7710t8Hd73rXRffn/j/bdvaHe94R7feXe5yF/f5iiuusP3799sb3/hGe+Mb31g95uWXX35K5/XiF7/YHvCAB1jXdXbzm9/c7nrXu1YLX5d/r3zmM5+xlJLd6U53qu53Op2aWT7Xcr0TEZ3bceIf9v/1v/7XnZ3MDvj85z9v3/zN37y0nNeGxyv/trnpTW9qZrb4W+v2t7+9/a//9b/s//yf/2PveMc77AEPeIA94hGPWNS4EuKrEb10EGIHnHvuufY1X/M19slPfnLb9T75yU/abW5zGzvnnHPc8r17966yewuiRIuEOhI/93M/Zy960YvsaU97mv3sz/6snXfeeda2rT3nOc+xcRxX3p+d9PHtb3+7PeUpT7FHPvKR9vznP99ucYtbWNd19vM///MLb+luOHFez3ve8+whD3lIdZ3dvNwRQgghzMye8IQn2DOe8Qz7yle+Yt/1Xd+1qFmwak78rv3AD/xAtY6Rmdk3fMM3nNK+L7roIvvO7/zOk65X/m0zjqM1TWMf+chHqr/1Z5111in15/rGTv6OedWrXmVPecpT7Hd+53fsD//wD+3Zz362/fzP/7z99V//td32tre9troqxPUGvXQQYoc87GEPs1//9V+3j33sY06Cd4K//Mu/tEsuucSe9axnndL+L7jgAhvH0S6++GL39r+siHxNee9732sPetCD7E1vepNbvn///iUFwnXFe9/7XrvDHe5g73//+51c8iUveYlb74ILLrCPfvSjdvToUad2KMfsxH85mU6nO/pDSgghhNgJj3rUo+xZz3qW/fVf/7W9+93vDte74IIL7I//+I/t0KFDTu3w6U9/evH9if8/jqN97nOfc+qGf/u3f3P7O5FsMQzD9eZ37Y53vKOllOz2t7+93fnOdw7XO3Gun/nMZxbJGGZm8/ncLr74Yrv73e8ebnvi9/z//b//t21fdmO1uOCCC5bG12z52uyWiy66yC666CL7qZ/6Kfv4xz9u97///e31r3+9vexlLzul/QlxQ0Y1HYTYIc9//vNt79699qxnPWvJCrBv3z77oR/6ITvjjDMWMU275cR/gX/d617nlr/2ta89tQ4HdF23lKDxnve853pV0+D
Ef0VgP//mb/7G/uqv/sqt95CHPMTm87n9+q//+mLZOI72q7/6q269W9ziFvbt3/7t9oY3vMG+/OUvLx3viiuuOJ3dF0II8VXCWWedZb/2a79mP/3TP20Pf/jDw/W++7u/24ZhsF/5lV9xy1/96ldb0zSLBIwT/79Mv3jNa17jPnddZ9/3fd9n73vf+6r/AL8ufte+93u/17qus5e+9KVLf2eklBZ/O9373ve2888/317/+tfbbDZbrPPWt77V9u/fv+0xzj//fHvgAx9ob37zm+0///M/l45xgjPPPNPM7KT7Mzt2bT7xiU+4vzGOHDlib3zjG+3CCy+0u93tbifdBzl48KD1fe+WXXTRRda2rW1tbe1qX0LcWJDSQYgdcqc73cne9ra32ROf+ES76KKL7OlPf7rd/va3t0suucTe9KY32ZVXXmnvete7lnyYO+Ve97qXfd/3fZ+95jWvsauuumoRmfnv//7vZnbqBZJKHvawh9nP/MzP2FOf+lS73/3uZ//yL/9i73jHO07qo7w2edjDHmbvf//77VGPepQ99KEPtYsvvthe//rX293udjc7fPjwYr1HPvKRdp/73Mee+9zn2mc/+1n7uq/7OvvgBz9o+/btMzM/Zr/6q79q3/qt32oXXXSRPeMZz7A73OEOdtlll9lf/dVf2Re/+EX753/+52v9PIUQQtzwiewN5OEPf7g96EEPshe+8IV2ySWX2N3vfnf7wz/8Q/ud3/kde85znrP42+Ee97iHPf7xj7fXve51duDAAbvf/e5nf/Inf1JVPb7iFa+wj370o/bN3/zN9oxnPMPudre72b59++wf/uEf7I//+I8Xv4XXFne84x3tZS97mf3ET/yEXXLJJfbIRz7Szj77bLv44ovtt3/7t+2Zz3ymPe95z7PpdGove9nL7FnPepY9+MEPtu///u+3iy++2N7ylrfs6G+RX/7lX7Zv/dZvtXve8572zGc+c/G32Ic//GH7p3/6JzM79jeVmdkLX/hCe9zjHmfT6dQe/vCHL15GkBe84AWL6NNnP/vZdt5559nb3vY2u/jii+1973vfovj2TvnTP/1T+9Ef/VF7zGMeY3e+852t73v7zd/8zcWLIiG+GtFLByF2wWMe8xj7uq/7Ovv5n//5xYuGm93sZvagBz3IfvInf/IaFzb6jd/4DbvVrW5l73rXu+y3f/u37Tu/8zvt3e9+t93lLnexPXv2nJZz+Mmf/Ek7cuSIvfOd77R3v/vdds973tM+/OEP2wte8ILTsv/TwVOe8hT7yle+Ym94wxvsD/7gD+xud7ubvf3tb7f3vOc99md/9meL9bqusw9/+MP24z/+4/a2t73N2ra1Rz3qUfaSl7zE7n//+7sxu9vd7mZ/93d/Zy996UvtrW99q1111VV2i1vcwr7xG7/RXvziF18HZymEEOKrhbZt7YMf/KC9+MUvtne/+932lre8xS688EJ75Stfac997nPdum9+85vt/PPPt3e84x32gQ98wB784Afbhz/8Ybvd7W7n1rvlLW9pn/jEJ+xnfuZn7P3vf7+97nWvs5vd7Gb29V//9fYLv/AL1+bpLXjBC15gd77zne3Vr361vfSlLzUzs9vd7nb23/7bf7NHPOIRi/We+cxn2jAM9spXvtKe//zn20UXXWQf/OAHw2LP5O53v7v99V//tb3oRS+yX/u1X7PNzU274IIL7LGPfexinW/6pm+yn/3Zn7XXv/719vu///sL+2rtpcMtb3lL+/jHP27/3//3/9lrX/ta29zctG/4hm+wD33oQ/bQhz5012Nw97vf3R7ykIfYhz70Ibv00kvtjDPOsLvf/e72kY98xO573/vuen9C3BhoUql/EkJcr/inf/on+8Zv/EZ7+9vfbk984hOv6+7cIPjABz5gj3rUo+xjH/uY3f/+97+uuyOEEEIIIcRXLarpIMT1iI2NjaVlr3nNa6xtW3vgAx94HfTo+k85ZsMw2Gtf+1o755xz7J73vOd11CshhBBCCCGEmewVQlyv+MVf/EX7+7//e3vQgx5kk8nEPvKRj9hHPvIRe+Yzn7kkqxTH+LEf+zHb2Niwb/mWb7GtrS17//vfbx//+Mft537u5661qFIhhBBCCCFEHdkrhLge8Ud/9Ef20pe+1D71qU/Z4cOH7b/8l/9iT3rSk+yFL3yhTSZ6R1jjne98p73qVa+yz372s7a5uWlf+7Vfaz/8wz9sP/qjP3pdd00IIYQQQoivevTSQQghhBBCCCGEECtBNR2EEEIIIYQQQgixEvTSQQghhBBCCCGEECthRybxcRztS1/6kp199tnWNM2q+ySEEEKIa5mUkh06dMhufetbW9te8/8mob8dhBBCiBs3O/3bYUcvHb70pS+pcr4QQgjxVcAXvvAFu+1tb3uN96O/HYQQQoivDk72t8OOXjqcffbZZmb2n/95iZ1zzjnbrFn/Lxmn9b9vnMb/WhJV0Nz9EVLQri8el77KR2ya/IaoGbDOgbzV7PKji/bl//6lRfvKz+R2OjJftCd93k+HfRqWm5kdObKxaB88nNtHN2eL9sYstw8fyv3Y2tjMx2jztFqfri/aa9NpXqfrFu3NrS3Xj83NfOxxzOfNcePyfsjt+ZBPqu/zGGyh37M+rzPywhQXfsTn1KRgOTbA3Gza3O6wygRvAM+Yri3aZ+45Y9G+ybnnuX6cdda5i/a8z+d6GNfryquvXrQv23dlXmcrr7PR5zHocfF5brMxj1mf/ARxY4UXmfwvmO6/ZrrbAutg4w6rt10+t67lRDVLaQtt9H3kG1XcOynPtbbJ47zW5XHe0525aK9PcrTmmeu4Fufk+WtmdlN8PmNvPsGz9uQTOe+cm+T22flanrv37OrxZpjvG5tHcr8xcbqJf3PcuIHL3/UJ9wVqBPO6TDA319byOE1xfybsc8Chh9Y/38YJ5vl67vDa2WvYJl+vm9/65nmdW+fxmN4yj3lax72GedokDMicd5WZbeZ+zPfnObx5WX5GXfKPn12091+Me+TS/fkcZvnY0yYfo2W95SHPzTT4p3nC82fo83o9njmzrU20cU/ieTUOfv6foIl+K4r1WB96THlffG4OWN4fX745zuzFl79l8Zt/TTld+xFCrJ4DBw5c11240XDuueeefCUhbmSc7Dd/Ry8dTvzBes455+ilw0n3tKKXDvjHxOxovmwbZxxctDf35H9EJfwBO+E/Xvi3bFf0dZ77MUyxfMAOxtweJrlP+PvcJnzpMMFLh0n+hwhfOjRDMeId/4EfvHTA8h7fdJbPe85/kLZ8IYB/UF3LLx2m+JfkepfHY0+Xx2kv/kFqZnbGFP9Axbn2uHvXu/wP17U272uKSdSjr/yHvxtjNwZ+QKIXNOFLB8cOXjqgH13j/5GdcN7+XQbXw5wyvnTAP6zxAmLa4mUExozt9W6P68eeCa9T7sneaT6RM9by9TprLf9j+qz1fH/uwTXeShiPMe/TvXSYli8dOLejlw64P91Lh7w+XzqwHb106Ld76bAHc3tvHie+dDjnzPyDtHYWXjqcg2fXqbx0wP
jP8exbO5zXOwvPx/lafhkxTvMLrQ7/WF/jS4eRcx8vHRr/NB8bvHQw3HsWvATGDdDjecUXBeQav3Sw4KVD8at0uqwQslQIccNh+7/vhRBie072m7+jlw47h3+QruiPDf4Xp9P6B02qtIqzCMNF2adgpW3+Mef/5RocgS8txvzH4oA/sPthjpXyOh0uc4M/pMvJ0eEfBy3/oOVq+EO1wR/cLc478Y9Z/Nd19pt/MM/m6LeZbeHzgP/i17t2X10+cGxSvc1/ZPNSjMVl6LGvOY+R+CIE7cQ5lNv4d5lNcd7zWf5H3nye/+vnWKTYbm7mfxT1eHOwuZnHaQOqh36Of/jgHPw/PvIx+M+blPgPn+If/pzb7iZpqov5qeE/alJ9zLhxKv5xy5cIlibumwz+IcqXEU1en/929Nc3z9PNed726IafFGs4dMILOBvyF90A5c5m/i9HW3vydTl7T37pcMbe/GLjJufk/zrSo09bc68GYt/5D98RLwsavLXgf+GejfhXb/QCDfsZ8QItdX5O8Nq32NnAZ1HHl1pY3vB5UH/+JnSKCqq2KX66cL0PHjy0aF/1pSsW7au/ktUNWwfyOlOohzoej/9w7/nMze22eJa3eNYmTDa8i7AWD5oJ+t1NsC2VLSOfrVHbdcO9pCN8GWp4iXPixXdnXbmJEGLFpECVdm0fW5w+ynHVC1ghlF4hhBBCCCGEEEKIFaGXDkIIIYQQQgghhFgJp9leQa7/VosmsFREhyi+2cERdlg1oql/41wNlPrSQgCpNCXNjZPk8jLTNlG+cwrk6UHNig5S3Qk85wPkynNnr8h7GdC/fvRS4Dnky/Rnz2G74HIWaRtpAXBycZxB0C4FyezXHOPcu/Gve6R5vTqMGcpmWN/Di097Re97srmRx9BGWDJQ9G5rlrdPY11aTfm8czUEbUvl/KiPbXyn85ux2na2IRabHL3Mu8HcpLTeSRZZWyLwvrMGBK/dHJ57nvXRTXOwBoUN2RbRoi5JM0NBwI1cGLI5O6+zDpvTGoquoNSJs+iU0kwWGpzjPFoUg2y73OYzg9Yk3i+s3cC6J7RasD6GmVkbPOJms2wHWVunjN/5DHDAatOakfYN2lmKuYlb5NBVuc7N5V/4yqJ94PJ9i3Z3OF+jvbAZ0F7BR0mPe7Kf57GctH6eTmhfw/3Ddsd1YKloXHVWWFJQrJJ2M86BoShoSfuUe5I39V+YE6dd1lIRQlwzdmtfiNaXPF8IcUNHf2EIIYQQQgghhBBiJeilgxBCCCGEEEIIIVbCCu0VpJSLrUAmlgJZ/aqOQcLjXbOO+IAASK2pwg38GJRHt3i3lCBLHorTGcdUbfuhjeIRc5vb9rBKUPU/Yv15IQ3m5xk6ORvr7Tn6x3PydoJlKXHZqTH5fgwuIQM2FqYvcMx8liM/5KZLgHCZqHmVxmv6R1gN2pRjHgdcS2cxcVGJdTvM6GwNQeRrqts0ytWi+6Jx6RWB9cEdj3MujvBLzNtwsYEnvzFGFymZx4xJA2wvpxPU0xooS0dYg62hT1uwXWxuZT/AmWflJIvpnnx9nXehiJXtt/DdjOPBa8+YUc5zWC3cRMChW9g0YCFoJ0W6AewBGzMmgORIynPPQhwm0xOYs0v7DG0UuF5pA/f8fn+PbFyWbSz7P59TKg588aq8zUFYkDZhK4GFo6P9jNfaWX/ie4QJOKNLjqlbSVpnd6vfLy5hxwKWHHv17f2tWvnNlIJbiB1xbac+yHZxw+a6TCgR4vqClA5CCCGEEEIIIYRYCXrpIIQQQgghhBBCiJVwWu0VOxcM7TDV4VQ5heCMQAy/s2O4HVE6vtN9MkWjHgvgZPIdrBMTyHNdm/vJy8cesuIiJWE2sMI9LQRYqQnk0UwFgIyMSRQ8tyhFwMxsHlTmn2OdGfrBdk9FurNzsF2X8afEI5gZ5PdO5u0cAZT91+X9CXL2ET3p2SsmEPRFP1Caf+LmRx5/JmfwbKlmn+AD1d7jQFn3WG0fOw+eeG42VI4z9ABj0Lr7op6Mwv2n8tjOqsG7iftCKkB099FNEKR88Hy2igdIh/OYwsK0DpvBHkRQ8Ai8p2iH4f3S4N7u1hmzUtgrWtpEkFrjbklcyxb3IZYPLu0lH2+K50e7nq0W3TrsH2bWrOXz3tzcWLQ30KezeO07/OSgs7R9NUx3yCEYNjuQPxz5Uk6oMDO7+pJsozjw+Svyepfl9cYNpD1gv7wPO1haWtwvzlbj3BW878wGo3WFlidaYGjf4bwGO4qXAaVcl4/pYJNrWx4uxA2R6/t9Itm+EOKGgpQOQgghhBBCCCGEWAl66SCEEEIIIYQQQoiVcEr2itUJuOoy6Pjgq+mJF2ZHfap7OHyPIqtFeXKRrJxS37y0g41iMs1S5A7thOr2wxxSbFTQnx3Nsn0zs6NHszx6C9X1KQunnYBV7btJlmA3DaXj+XjuzCAJnBXpFVuwZMxGWi0oXYZUGtLlOSvzu6SCekoFK8yXlegpfW6jmBBnd6i/wxsjCwHbrGg/eMk2xzO1+ZbtnF2lnlQQ2SWaFLTd+n6etju4PynldukOLe+FuvVhbHiN/AEaF6FSn4+NG2esHly7KOGlxfLC1WA9LBlMfqBTydmIsF+GHtBCtAFbwnQzX98z13OqxeSMfH+Zma116/nYmB9jcGGS99nk9bl8mpev7c02iukZuR/dxNsr3B3T0Q6Snw2TtdzXtfUz8uqWzylt4NoPePYcyj6Iw7BK7P/PbKcwM9v/+fx567JD+YvDeWxo1RjxTNyCjYp2tQmTOphogpMeCntFP57cXuHvK0ijMbcS2oOzuvE+j2XfLu2F8z+wFAkhMtd3S8XpxD0rvorO+7pElhjx1YqUDkIIIYQQQgghhFgJeukghBBCCCGEEEKIlaCXDkIIIYQQQgghhFgJu6rp0NgKqijs1kLmSimk6mK/PjzcO+183XLrP0SxnDuI6ywXl/GAtZ0xirCD93qCyLpumj3SfZtrKfRD9hZvbmZj88aRTXe0jY38eWuOWgwuyhDRdqgv0HY088Mzjv1zP4Pzt5c1HXJ/nUc6sY5DX22P8FEzSnCe6jUgRtarKC7DBOc3YdQf5xRN/+nkdQtYe2FkHQHGXBa+yhZj0GL7puV+o0mLORTVcUCbdRu64q4ag+w9VzEh1es4dJjAYZxlFB27tF792L6GSj3usEE/WrTdtUatknapTkfe15jYRk85J5xfNi/uUbfk6Eau6dCgHMH0zNyn9bVcV8HMbA33W98y6rZ+vzQj7tUU1KxAXZY9Nzl70b7JuTfDOfgRn6FGzLiW+7Te5v6eccY5i3bX5poQaRPRulv5uTRuoO7MVUcX7QNfvHLR3odYTDOzw186sGj3+/J4TlC2JvWI1kW27hzPxynGZrB6TQdeyLGov9KjHgVr0IyI0uyi3wWsn7DfgW3WdHCPnjIyM6g742o91PshhLhhUtZkUL0AIcT1CSkdhBBCCCGEEEIIsRL00kEIIYQQQgghhBAr4ZQiM68xpymVZ0fpPpSgFuvH0rPAI9FE61AWDjl7EK1Y2ikSZdANp
eBYCfFt7RSWikluT9Gej1nmy/jLzRnkv4U0eHARjnVJbtHx6vrRZaG9gm1Gyx37DFvESBsFZcaMo4PEG9smLDdYMDjeTJBLhZSeFhAbaQ8AzlKRaQNvjZsT9MzAijAU7wLndK4wasnlQuZtug7RqSPj9rC6i7bk/nO7vI6NBXPTzdm6paJr+KjBPHARfhzjUiZat2fQIsGoy86183g0kMw3DSJf0b8W65TXkfaRBMn9OMfcRJvRjAOyFoeOMba5Pd/CtrgtuuJR3azBwoSoyhnnP+T6axjPScO5hn3CQrTnjDMX7emebJUYe39dxjHvaw+6uD7Zk5ev5X01Q7ZwHL0627kOHjy8aG/sy+3NK7Lf5MgV2UJxFMvNzOZXZ0tFgj1jihMckGvaMoYSN/qI+NGe8533UWDVMvNWtj54do38TeK9k+oWjsG1GZm5nXSa91X92eyi8o5/s10MpxA3VBQFKYQQ1z1SOgghhBBCCCGEEGIl6KWDEEIIIYQQQgghVsK1Y684jcq2aFdxekVulnaKFHzn98WV6uuPjD1gm9JZJ4X1ktzkPter67uS59Ms/56sZbnyFNXnKVvfQmX4OZIhljIzIElvIbV2tgucH5ezav7g1qkv70faK2CDKD73A1M0huo6Pr2C69NqAa06JdG+fL/rx8gq/5CqN4GFpgnSF2IrQltdXtorWvSRQR8D9uuSGDAPEiTlPTagbaBjIkNgCTKLpdlNcE7OsgArwxhJXWkdKb7iONM60QYWCSZQRPYKS7RUMN2B9gqPS8Jw0n1I6edozyCrZ6oL2sME98gWrtcm5vuWv1vbvbBVIc1mxHnwHqH1pGl5Ldxe83LMlSOHcoLE5lGfeLOJxJu9ZyOx4twzFu0Bm+y/7OCifehQtlEc2JetE1tXH6m2N/dlS8XsQO6TmVm7CesELCATzhvvpVo0+fxOsEfMjQkv3BTXqLRXuGdUPVXHWdEa3pMZPkuGoO0l42WOy8n/e4L7PWyK/y/EDRDZKDwcj50kWbi/gzWWQojTjJQOQgghhBBCCCGEWAl66SCEEEIIIYQQQoiVsDp7xbWhzNplesVyOf4m/m6xOJB8u/3W7QesdG9OtlakV0D22yDFIEq/YNLBTW52s0V7frNso9i8Mmua98+ydNlQWX9SpDUkvINykl5IhjchI6dtY3MjV48/ijbXcfYKjNm8z+uYmfVDTtigRNlLl9F2+8V+cH49JPO0L3iBsh8Pn/CwO3uFu95Wn2du2zFY3/xUm+ADZxHtBA1l/JCLcz7SsuHsFU0w58xCa0hsr4BNoaXsn5YbjmWU/GLWulQMWirqdiSeUxuMuUseGXkduU/fj473JyeP871Qro82kizGBnO2xf3V5XvhAOwEqejHXtgXJmeu5UM3dUm/T0qB/aDNVhyOXw/bxMYs94N2CjOz2RaSZg7n+34GS8YEdjAO2hasGvNDSJ84mvvdHcV5H8Uz6Yi3ZDW48TG0tjbJYzPDc6LlNOjqz+ahh3XC2enqz0azMpmonmo0BraIOPWHtrTIvlfA3yFaONr6fbv4DZOkWtyAkAVACCFuOEjpIIQQQgghhBBCiJWglw5CCCGEEEIIIYRYCddOesWK2EmShas6XqrFd7vjiKZur7DIHlHkRnhLBt8DOX1/XgptcDvNl3COpIeBaQvYP6u5TyBTN/PWDsp4R5S4n83zMY5uQYK9keXUG5u5PcM6tGyMaM9hpzAz6xPtFbkfcyczHqrL57RXWN1SQUE0FcZtceE5OsnJqClxbqvLR1o4UlNtm7Mo8NhFP7CJTxKBpBzXbtLhvJni4OZg/dhpmwQJDpZLVnHnUU+vYGoEZ3/rPtVl+Mf2VbdtdK76f90+4q4r5bi0XQTD33Z+FFzCAMeKsnckuRgk+ompEYi+6J0MH2kXmNebW96CdM7WOYv2mTc5K2/T4l5AeoVLSsE4bcDyMZ/BmjHAQuCScPy9Osfz4Ajm3XQtX/tuWk8S4TWyWT7GDFaLDdg0aPlovbvC2rF+jSdNfR7QZkDrA9N9mMgz0omA4w7Fs3y0+kTybry61cL/PiGxJli/vvbxz9vZChfrVOx/O6hwL8R1iSwVp4aSLIQQ1zVSOgghhBBCCCGEEGIl6KWDEEIIIYQQQgghVsLq7BU7kHfulGhzf4htZOHBnnwRcVbgT7XF5oThTqbNSuHVXRYMxeesFU7GivOQm3c4Bq7abDySP7RZ+tx0eT+TXKDeurX8nmlt8Je/xXmPGMU5Ei82MQQdi7sn2jHysWewToyU8cMOwEr+Zl5mPKLS/giZ94BtmIoxOqm6Vdve9LKTeWPueru0Bre8rS6PkiycFYe2msJawPFgSMIc0v0pNp9gglBevt7lSv49Ug4GHtslPZRQqo60DKtbLVqra9JdWgb203IeFAenHL5z92qQ9uLuz0DyzvsW9zb70RTPDCZhuFQM2itg/THcC6nnefNeo9Qf912f753NrWw5MDMbZtluMTuSLQi8v/lM7GEboI2C93YPq8Uw1BNGlp/FTHVAUgfm8HQtP4DWpnkOTjiHMGTzDdir0ObknxTvy90nWKFoB+MccvDS0c7V522de4PPpyK9Yhzr9pgUtLnfNkiOScH4u/Sbxp9blHwUhF/YpDt2jSbFs1gIIYQQ4nQgpYMQQgghhBBCCCFWgl46CCGEEEIIIYQQYiXs0l7R2ElE6PFmEan+YUeOjCaQxjtp+8726ZMAoncxlMgiHYKSbdeR+n6apuwJLBUW6HgpxUeVeGuzzPqMc7N0+YxzcvvomVnezMr/68O668VkxnHLfacceyPl7SdObs+uQrI90l5RT24YiiszYHxc29iGFJwV/wN/gBcbU5KPivbmaeq7Cts+iaFuLWiCmyG5zvrxcDYijO0wYAzZ+W7Pojmh3WGa50GPDZhy4KpULymteb3riRUuFYOpHewez4gpAim4YFbYKJylgpvsoCK30QpFS0UeV6aWtGU/2E7sB88b9/NAu1RupiDphPvsZ7jWG0VKwmZOcpgdzharPeu41yd5vxtIfjh6hFYNzI82/xzwevHIZQIQ7zdaGXqc99p6fhbt2ZOfObQBtbD4DDzved7PlBab8hlNaxg6vzWH5czNtfqzhGk5tKTEFpPiJtmBpSKNXI5+O2tSYHliKgvXL5OInD2D/at21abHPXj98k0vxHWO0hOuW5RkIYQ4HUjpIIQQQgghhBBCiJWglw5CCCGEEEIIIYRYCatLr9gpTnJcj7zwDowdWCoCI0WcJmGFpB2SaMpqWfqble+pwS5krjXa5NeZWpYfN0gk8PELbbU92Zul9Gfe7NxF+5xbZdtFj8iJ9nDu62TLX/4ZJdxH8vYb6FN7NFfKT9AxOykyK7hTukxJ85D7sdXTXmK2yUr72Nc80bbB6vqsms891e0cnAd861a6XtrAeuEtGfX0BHYjMF248XO9bv1yl9YQhjJMsE4+4hSS+abJ82yO9IqG0711pfyLjtHiUE+vcLLuoGo+oUQ8bWeFcs6LwF4RJFm4/mEdd31hteiwvLybnYsFfWdKSMtEA1hX
/ETAhz7vxz1iOK8L2fvAlAUkTbRbtFfk3o+wPjQz3JMD7zVYEaIYnuIh6lKDsK8GfRp79BXWCcbqTGjt6HH1sS2m+NL8GHE8nhOlwG3w+zLyuYQxH3B/jtEELq1QzvKH+RylV4ycg7h3wvSnus2oXUqvYKJM3eplle1Lm4YQ4sZHCtK4doKsFkKIU0VKByGEEEIIIYQQQqwEvXQQQgghhBBCCCHESlihvWKnkq26PCuyWuxk21IAHBJ0caQUdqDdIUo68OL76qEppR+9hLUdsc08Hy/1lPTSXgFZ7NreRXvPTbNc+ZyjkDfPsP+DSN3IRe/NzKw7nGXJfZePN93MVot2kqcM0yTmsET0Y26zun3vrBJ5+Sak32ZmG/hMS4WTPrvq/xjblpLhPM5dU5cxB1du2+/ccif7r1t/3PJUl1yTphD1U/of9anBOLXY7wTj0bV5v05q7irrQ+beBpJyK5Mi6lYGytapQuexXUDLNgke0V3sUy0iG0WQfMH72aVX0Crhj+zsFbhOHVMgmJDBJAve57SVcBDwuGmczaMcgXpawxxWqKGrzxamzgwDrAWzfN+1SHqYMC2n6AbltZNUn6dpVrdapDXsjL9EnHZMesDYLCXe0JJV2LVOEKWepGAsk7tXq7us/IQEz/9oebTfyCYGWjfnCnsF74XAalG3KZ1COpUQK0DS/es/sloIIXaDlA5CCCGEEEIIIYRYCXrpIIQQQgghhBBCiJWwS3tFY6dfflm3UfilTsNeW91VDfe7p5R7O8k2pLQu3gDV/93q2C80uWkYq8up2x2HQho8RzX5TaRAQJbcdXmbaZf7tEZh7ASWiL05qWDPOWfkfaY52oUMGX23Lew3B2RYu4eSaPS7oSWibrVglX3aK5hQcaxfqd4OKs5Tnc63aK6Na8rUAf/WrahEnyg6jpIV6nOTc5a2ECrHk5NAU6boDuAk3/6ceB71vjKhYVJX91uCDD+NlP0XoxPKJ52PCP3A8YLx4y53oDpf2nETnjctFVjfHbBu/wg7WH7X8RiwLdGShbmdWl5jJEvw+eESK+q2ITOz1HN+wc6E5UzVYdqIeybCjtHAyuAsSO45Gz//R1pr3PXmmKNPA9fBOHH5yDQJWi18P/oBz7Uhsle01bZfB+fdMfkljI1x26dg5jaJxw5sX7A/dS3vw/o+2yCRx6xMdYlsX+j3iXkXWL6EuDaQRF8IIW68SOkghBBCCCGEEEKIlaCXDkIIIYQQQgghhFgJK0yv2D07yatodmC1sGDxcv33SDIP2WpLHXq94jwrp/eoAM/lLoliLN71QCo830B7k72aLVqTNh9jz6RutaAcm1YLQ1AEbR3HOo/tc2CFNXtptYA0ewrrAxIWekjs55COz8f68qGQVDoLQtRu6vNgJwkGnUsw4JG30SgH+BGs94lumpH2g0DunYp3gT120CIxgekcTi5OOXxgtXDqclqFYBmwwl4x0DqUonsnw60pt3ftwF6xUxtXY7QE0K4SJM0EiQSYvs5OUKZXuBQOytZHrsN7HW1aJ3jesGCMaPPR0xTXgrsdo2cf5z+TS5BGwa629ceH70eh43f2FtpVok7x3uPQJNorYEfiieLQYzE9aONy9gpakEJ7Bec8nhMcp8jaVZ6nu7D1+UhLBV0vXcfrxfuZ8yZKvCk/0+bERJ+6JSPf27JXCCF2j5Isds52NkUhbsxI6SCEEEIIIYQQQoiVoJcOQgghhBBCCCGEWAl66SCEEEIIIYQQQoiVsMLITGcs32a1kxdj2MkRffReUPchPkThJ6eXl3GCeWewEFs7TBftyZjb3Rxe7Y1cTGHz0KY72sbhXECh3+qr7XHINR0M8XxnnbmOds62nODE956VazqsI1Zzc63wZ+/J3w3wGh/Y2Mgf1vKUGeAT7zFm/ZjXmQ9oo67FDB7uvvBF+4jJoHYAveHRNa4nOXpftPNdl9R3zPnlHND0NGIxA0FH1pZoWTsEc66IVGVNhwn84Ky/4KzeGE/WF2gb+rzR7aiOQ/Ln37M2BWMe3dnWx4Ae/MEVcqBnn/GXyy71Kq4uA+IfXe0Mrs8oSPj9uRz3/KSYXBMUO9iDSMU903zfT1k/gYdm7QbMCtaG6FBQonWRl95r349Dte3qOHSMyczNdkfPR/TVjU0ZERlEvUaWXs6bpn5vj6xhwJhMPgtSOR54VrIOBO/VsKYDa/XkpazXwLjOwUV6ljUdeM3y/GBMLw/trr2r6YBdst4FxyPxyVLUHnH1ObjfVF1+onW9KvIkhLhBsl3Ngq/Weg+q4yCElA5CCCGEEEIIIYRYEXrpIIQQQgghhBBCiJVw/VJTRqqrdPKVkpOCUyNLKbF/x0Lp+Zjq21BaPN+CxHYzt6ewEHRwThy98uiifegrVy/aB67Y7/pxaN+hRZuWijRAPpvy8g5RlWeemy0V55531qJ9k5vk9llnnZH7uofjAfm1mbWTvK8x5e/W9+fzsEm2aqQ2S8qHJi8fEtpOBo7xY9s8ofgusFQ0gSy/YRSkk3VjW6fPL+ZHU587oXKcUvOGsm5K0Cn7z/OGkYYJUatmZkMPqXvrtNl5X4wr5DGwvLO6dL+jJLzD/oto17bFNePYRvdkYDfp2Y8dJvQ5ZSI/BBGYNL5wHjB6krGQlL9P2nxdpoUkcg3XiZaKM6aIroVMnnYJWkk4/1uuj/Fv3fAXtgbEQvY9cnBpqcAJ0lqTLLK0WHUd2kKGppDxu7hJNMdgubPB1b/wtgtEVfJZMvinBr8bU91eYYGlwrfr/R5gDetxP6bCgtTQmsP7E9HGHe893s7OWlOf186G4qwWUehzMc/rj49FlOZE/x1CCLFCbszRmrJQCLE9+gtDCCGEEEIIIYQQK0EvHYQQQgghhBBCCLESdmevOBFesVtF1NL69Wr3fAVCFa93TtTlS/HbE6xfSrmZXIDvmEwxzlDB/HD+4uiBbDk4ekW2Rxy57MCiffDSqxbtA1/el5dfvt914+jBvK9xK0t3zz4zWyRGc3EZi+b6WVnifda5exftm97s7EX7vJufu2jvOSOvf5Ob3sT1Y63NNoyz9uT1ztxzZt5+LVswptOcnDGdILWDUmK0WwxsY6zw7y9MLBQGgYotkns72wXbsJi0bWknYPX/uiyfIm92ydl1sEEK+mFjoEE3sxafecMyfIQ2AN/GOtwn2snZVtBuvf2Gyv2hiaTdTrOd18HyAev3Y91fkeJYhULCePKHEddmMkWLOdg5WXxePi3mhLdX1NtrOG+mjTSc/xwzDgHtGLQoFLJNZxNB3ynv73gtXdoIkzOYqlBPcqG9omnK8Y6SH3J7GGBjG+rXK0WnijbnSt+X6RU4RjCnOFd8GgjvbSZc1OdZ627b4ljl5wq8RhxzWp7YpoXDtWkXKZ8Z6DtTcibueLx2x7aXvUKIGz/XFxvAbvtxOu0Y15cxEOKrCf2FIYQQQgghhBBCiJWglw5CCCGEEEIIIYRYCaeWXhGqkoIy29f4GNTeBrJkV9He6uuUyldq41EAftjMX8wOzRbto/sOL9r7L8tpFF/+7BcW7cs+98VFe/P
KbLsYD27l/W/4yuvNkPu+eXhj0V4/m/J0bIOS/1tX5D4dmmYLx37YLq6+GZIszskWjO5r7+D6cfa5rNqfLRVrEOavIbFiDUkWtFdMaK+AJp/tlraGVNor6rJm2hEos3Pi/rqrwVpsS+kxLRRdYSdoWeqdUmZu7+TfnJtIJ7B6p5zEHtX420Iezpt0iu3XII9e5/Km3qbtwtkr3NhQ+u3HY4L1xqC6vrt0HDP0lfaKgbJ/LB+3kVH6R0M9fcEFj/D8gnPtnC0ht5fsFZjD65gftFRMkUYxcQkekLPjejNdg5aKyPZy7JwwbzHp3XzGPeasD7AmNBPI8KP7i/O6fPazX1iPNgqmrzAFwl1h3hcc8+D+6gubBj+7+9Adgs8SHIJWIZfuEFj53Mbls2sMvsPzG9elwd3dNHWbDEeKZ+bXKKxh7hnF525bXedEqk67ja1JCCGuS2SJEOKGjZQOQgghhBBCCCGEWAl66SCEEEIIIYQQQoiVcGr2Ckcgg95pgfm0E5k2V6+vT1moE2BB4Zrm5phvwKZwONsftg7l9pGrjizal3/+K4v2V/7j0rz8ki8v2ge+fGXuB2waayMkroN/19NRKp2yTWG+kavdj6ggbym3x8TK8pAJH8on2xzK68zP2ly0L93MVhAzs5vePG+zftZNF+2NfdkmMmxlu4lLVWgp4d2BDYLS8VS++2KleH6X26MF86AenlDI6tEPSsqLXgyUcGNsm7YugTdU6Xf2Cid1hsSe1xTrd4W1gAkUtEvQUuHsFc5qUd9PZFXx4+TtFaljtXss5zou/aJu1XCic1oqtkkgYB+bnVgq3JjXLQTOXsF1sHxSyM3hnDA4E6zjfchbFdvSZtO0dcm7SzZIHG/fD16nro3OtW5TsOC+cM9ilzjBuV9YkNytSzsIljb15W7e0JnEY2Mdn3riumEDxmdkKpGzRXE+8noH/eYBUvAsaLxVjjYKnwSTlyf0bxjyM3ccabuoJ4lEiTz+OVladtgc68uPP4uGoTwfIa49muBvByGEEDd8pHQQQgghhBBCCCHEStBLByGEEEIIIYQQQqyE02Cv2AFlwVlIPxNl1ImVzZva6q6qPTd10mXKkmGp6IvUiI0D2WpwCEkT+7+ckyn2ffGKRfvLn8l2hCsu/tKiPTuYEyfSFmS0W9keMZ+zGrx5ULHeKe6H3KdJUFF/DUkRaxNIn2f5eBtH83luXnF00T7wxQOuG7e4TbaS3PRWX7NoH5rlQdw6nLcf53k5rRaNq3w/VpczZaIpVJSFUBhfUHqeF3M4KX/3kvlo/6Coej+meqX91tkGWm6Ql6NN+Tvb7DeVpF1xwzB1gokVPpmira7P9IRJYEHyYS91qf7SRkGVe2evQNIDZflRuAyl3dvJvL2loi6NdzaDwN7SheugXUh8/Ryuz+0EWw5tQExvmXZ49FIaz2ejk9iX6RVR363a9uNU9xAwpWKAxWFwSRtWwP3ieruEGNii8IhqgrQSJpr4pBPD+n48aKlw92qQxtLBKsR70j2c+TAOPIJtMSAJ1osWCS/+XsrtAclFtF00gUWka5AMRItOGz413XV1P7e4+4bx2G9VP+TfLCHEjQclPwghrmukdBBCCCGEEEIIIcRK0EsHIYQQQgghhBBCrITTYK84BckWJamUsG9muX46ABn/oU2sz2rkOelhK+VT6anh7SFnP+qlo4evPLho77s02yi+9Ln/XLS/jJSKowez/WC+kfs6P5LTLuabOd1hnOXjUeLaFe96KJll1XJfaDyQPmMoXdgCd8NS76i2vrXpJexfuDjbR774pZzC0UM2jcAPO7yVr0u/mcegwfE6XN8WcmW2O/N+E6/ir88VyquhYvbSeOwmSq+wIAWj3D5RJo/lnZO5Y/0gDaHDWA44N+5/Wki2J9hm0lJejTbWaV0CAlNC8j6jivitW6e0FgQpBIFssw2sGrzaTKlwUvpyX4Fc3zsFuE792E1gMeG1bgKrkJkV6QFcTpl8vU8NpfHdGnZZT+1wyQjFELdhKow7k2rbpUa4Nu0OuR+9s7EVuIQG90Xer/e95LazM/EgqbqOG6fiukTJOG6CBBaJ3f6CeadEOSL5GjvrhbMF0n4WX40TdJjv3hbC50qcAOTmbBARdeKZ0eq/Q4jrCUqyEEKIGxf6C0MIIYQQQgghhBArQS8dhBBCCCGEEEIIsRKunfSKApdMAatA2sqWBUpQB0hKZ7O87dY8bztnOyv9bX442x02rj7s+rHv0ssX7Su+8CW0v5LX+cpVuR+HYfNASsXYo+o+5NE90h16rDNtKdA3W5vkbSb4jrL6hlXHqT5m8gXbLq2B0t5s+RhGbzfZZH8Pwd7SZRtLDxnvFtafbyLBY57HvENnabXgsUsZPyXYlFUm2iuwerK6fJ5SYSf1jwTVxeIUSJRpo5g4nwH3hWOzjT4NuL4jXv9Ni8r8TKbo2nwtWtiLmBwQGQdSYA2IrBYlLiGGCna3Y9hKsL6bg7RRuASIui3HzFfqZ9pAmeqwOF6QWOHsNGP9+jq2VfUGoxikfLS8dminBBsWLRUtxqb1HUnOzkHLGfsRJIYE58T5PmD/bPMZY1a43UZeF+6XVgvMx8Zf47wt5lnrPGZ526UIINxjbn4EqUFNfc66W562I27q7u0iVcS9xq9bw5ylkPPfnQOTNvCb0NG+wedKSWQaqdtNTvzEdsn/NgkhbrgosUIIcX1CSgchhBBCCCGEEEKsBL10EEIIIYQQQgghxEq4VuwVpZqXVcgbWCea9VzRvYHMs2lQ6R0yYyY6pCHLyOawMhy6av+iffnnv+z6cfklOZniSlgqDl6Zt9k8mG0GaROWABaWZ7V7HgBxEt1IqXmR1tBjv229en3nrAJIKhjQEQ40oyxarDPO6m3zUt/EBATLUvAOVot2zPaKZsjWk6afoZ3PbeJSJiD9LmXWLsCjLst3CQiBsj0qmh8pDpcLZNeryU+w4tTJptkpXm/2g/YD2ivyWtNCGj116RX5XmjbIPWEpLp0PMJZXQodvhvPHVg1OE6tq96POR60y2rlU7bb6FxxPGevoBWBFqT6gER5Asuf62fOVIGJSx7AWdBmkNiGfB57L2X8yZ0T+9dU2+5+CWxDI9Yf0Kd+rFsXzMxSYKnYSSQEp9fgUioCKw6TTkb/zGjheWIfk0uKgA2OXXVzrW6D4PDT3jOZ+J9QWiEs6LvBTkNLUcP9BpYKLk/BPXVsv2hzeWBFO2HFaZdSMIS47lGSxc6QnUIIcX1Gf2EIIYQQQgghhBBiJeilgxBCCCGEEEIIIVbCyuwVlMimQpRMeVwDuWizF3LxKeSy69h+AxLsFmkXAyWrOb5itnFk0T68/2rXj4P79qGdvzt6MG/Tb+AYM0hhoc+dUBaLtpegQ35dVF5vYYVgsXZXJR1SencM7opJIPyiRUrFCBtEYa+YQBncTfA+CukaPfY1afLYtAmWigHLIWmmbYDXvSj57uwWAyXKlF0HEmzqoCOrRSxBLGWb9fV409Ay0oxOq76AinTOiZHnDXn4pHgXuObsFaxej/slsJiEqQXQXz
t7kJuz25gJdmCvYGJFl+rHM9ornE/AH3sNJ+jsFak+D1xaCaW52KdrB+uUSQ9j0EVu30FyP50wbaT+jtelTxgtM7BRmU8VcH1saIuqp71E5+QcWWj3Lr2ivq1Z8fwxHg/3YVOfISPTaDAPRtdvWiqQGJTYW58Kw0norBrO4sBBgJWM8xTj2nbcP9Nk/E/odAIrIPfV1y2FA0adto0OD+MJ2i3m/gjr2jj68SC0XvnnBJcf+0L2CnF9R1YLjywVQogbCvoLQwghhBBCCCGEECtBLx2EEEIIIYQQQgixEnZlr0jH/8+sLmFPoXi5SCfgd5CIex26VZdTctxA95u2sm2gP5LtEfPDBxft2aEDrhdb+Lx1+FDeZmMj93wLfWcVd/bDnWq9+rlRDlxIAqORagLJt7nllJrn5gDp7WDZ+pBgqUipSK/gqaIqfd/n8ZjhGFtwbTA9hErkKaTBTnLt1OLFeDR1SXQfSLDdmLm5yeuFtpMQO8+BeahFhlUA++oC6bJTe+N47F/icrQnhcR56r6DbQB9p+ya/XBj6aT3TFxh6gP6XaREhPOUqSRcP5DxOwsMbQk8B/Pwu84lNNTlte7x4a4j+uQSJGjTqO5yccQTDIEdxE0D9+xikkL9Wvhdon/Fo9rb17hN3eIwBjYUb50IkiICO4yZWYskhsiM03hvE/rKfuTnY3LHRuIE7/ligozBPB9pz3DJFHWLjrsXovlrwXPZ/PhbZNVwThDMf1qs0EbTJR/xWE1p2XMWkCCxgvdt4/+/EDcEtrMW3NisF7JRCCFu6EjpIIQQQgghhBBCiJWglw5CCCGEEEIIIYRYCXrpIIQQQgghhBBCiJVwSjUdoki+bQLp3FpNENdniGlzpnHWDkAZgnEDqxzKNR22DuxftOeo2zBuHva9nR3N7XnePvU5cnOE6bnDcDkPPc8vrOMAz3IxHs6NG8Q/0pfrrkCq+9ITYivTuIX1MZjmY9a4fQ/P+Rb2tTnkbfomxwFas46+5oINa6zpwBoGfN1VWBV9zB0i5RAJyloP9MfT98ioxA5FJNrIG9kUHtBUfyfn4hijmg4p8LG7MUBdkKBmgZmv6cB6GS3vI7a7ul/d1yCAHxzH6joey58/Pf9NUMOAhwjrKnA5xwDLS/8q55SrdQJzfwoeOW5Pro5GEL3Gegvls8t95L3OY3Ns63UffI0F1irJq8c1Bco6DvVnw4jlo6uTYGgzkrLedhGWRUcG9wyp185owjjLzOj6gWhMtnngYn74uhPcnufBSVGfLO3SSJ9Yg/dwvd/H+o7oZtdj1k3hvYrfF0YvN+xTfZ5x/2W/3b5a/kZwV5Xnpmzj4kbCbmsgnM4aEKq/IIQQy0jpIIQQQgghhBBCiJWglw5CCCGEEEIIIYRYCbuyVxzTdkYhdYWUeBt7hSOwUSDl0YZNyP6P5Pb8UJb9b+zP0ZhH9l2Rlx+8Mq+/4SMzh1mO1kwj7BWwI4zUO7uIMizGPjucUMeYRUpqlyTb9fg8L8Fu0a7bUNJAWTekt+xH58T0rh/zATGbc9hBcGzaFIz2ijZPpQH9652EF6tDLt8WOW0NrSQY/7GlpaLeDlIrrQtsF9vOzUAh2boIzHoUJ5ePkcUBsmfadUr7B2/SLpBdcxu6IlwMKucZrwsPxv10fn44ewDnWhQ56Gwo9fGnLcSdQxEb6uM765GFUURnio5N2TktFYw3tAJ3T+fFYyTv7/Mzys8PwuvCWEhK9aMtiueEsybldXhPRjaKnhaHVLcllHYCJ0l2loq6vaiYbNV+97Ao9LBz+ahVf2UwHW0ePBv4rBzd5lE0MdYPnH+jlfaK+nO35ZyKkqG5Iyf1rkdeuijNib9XJ7h32R56XONh+b4tf5uE+GpBlgghhFgtUjoIIYQQQgghhBBiJeilgxBCCCGEEEIIIVbCNU6v8N9nmqBtZr5EO+SetgW57VGkJBzKctvZwbz+xtU5feLwVfsX7UNXXb5oHz14Vd52I1swzMyGHvEXkPQmJDzQXhFVxw+Uuq6CONXA7VKVZCeWxvF4bPaPsnAmOuQxm+DYVMlPJ0xx8JLcYQvHw3XpYJ3oJmuLds+Tgnx77qqq1yu4UxrcFTJ+JhVwcFPrvALoeL2KO2X/PEITWAvSNvLKJpB2NxhDn7CQ1+kpZ4dc3AVc4HqVbwKdjNrZVdhuq8vHJpDeB5W6Kf0ei/QKZwMaA1m9S1/AHOR+AhtFs43FpLRboJfcc30Vp/Rvqm0OukvaGItxYvpCE5w4kmqGgcegpYjXC+eAdoLfzIv4fWIF0yjcbcHl+MJZKlwb9iqXGkHbhU+8YToEcWMbpsXUEyFo82D/mrY+x83MxuHkz+w2sHy0hc1ssQr376wgmWHpPkJ/2Udnr6vbJRqXfFT/BeV97udQYcnq8Mzm85SP0CFIHhFCCCGEOM1I6SCEEEIIIYQQQoiVoJcOQgghhBBCCCGEWAm7slc0x/+vXHpSfKlwS3NaKrAclorhcK763h/MK23uyykTR67MdonDV+7L7X378zoHDuVtj2Q7hpnZfDMfY4SdIFESTdW0q44fmUm4PuXoQYV088psjq+rtM+NguSADttOsC2TG/o5xhWV9c3MesiSW7yPGtD3AddupBafdgl8mFKiT0kzzqcrJOwtU0K8nh1t7paWEfaD3eO5uS/qx1r6sm4J8CkcdXvFAOn3PJhDTkxdSMe9HaR+Ho0xbQRzLQV2HaszBm2zwkHg7A719BC/ryCZoq2P8ZIk340/+tEF/aC1yQ1u3SPlk1U4fkU33LUIbD3R+v5GR7tu+aB9o7wWPhEFy7Ei72daJ+ZsD/PqOiNsFO5YY2mvCGYSzyMw27lEErSZtDG6xI/6/X+sX+hjYBvoeN9ijejNu5/LtCZZtV320c354Fni7BXO8mHV5RYsX057CRJD8OB1QUTH52YT2K6EEEIIIa4JUjoIIYQQQgghhBBiJeilgxBCCCGEEEIIIVbCNbZXOJMB5e8sxN2bI80gvd2AXP8IqpYfmi3a8/05ZeLoVdlScfCKK3P7qpxScejqvM7RA0cW7Y3D2ZphZjbfQrV29DHRDuIk/Sevjs8PLEbuq5z7/XSBrNalFiCxwhW7D+wYlPPSyjCdrC/ae9b2uH70WI8OmDnOe0ZnDJMYXIJEZq2p2yso420HLx5vg/H0roOm0vIS6s6lIXDLIPVhW6sQZdonT6zgnnqXaJLbo7NEBNEoS/2ty/g52dJQ3HC7IDmpftmP+pgnXHFaRigvT4G83Nr6e88lkbfbpK2uSZuNc2rwBmhOnrbgLA7F6pTZN8E1a11fA5uH9wdxA6yT22Px7Bmc5aHe7nF/eksFnq0j7BWwWgzoX3JWDn9l+N0YJC5wfng7U8ZfUcynNphDxQxhv/pUv0eawK4S5Z/QpOEsFW5bf10a9j3V++7Pr55A0QTPyiKOAwcufpedEwU9pr2i8mhulm48IYQQQohrjpQOQgghhBBCCCGEWAl66SCEEEIIIYQQQoiVsCt7xTHxaSEnpRwTKQfUlDfzQpKLxIphI8t+Z0cg+z2cV9o4lFMnD
l199aJ99ZWXL9r7r8z2isP7DizaRw5me8XmEW+vmG0hLQOJFSlxWIJkCsqPg+rsrPpOVfeyopw2gHrqRFtXcrv1J5TqutgNbst1fEdoR6BsuGd5/BQlZ5C6NNvJqXme5nGf3WlEqQJM7cjnMOnydWzrCm9X9b0pKsA7TTX0yt62gd46FXQeJ84mnvfo5NeBhNrMUhOMvyuvn6rrR3PTUx/X0l7RugPSUlG3LPDYzmrBedpSjk6pfrFPZ5eoJxI0wU3CbvsED1iCAmvGuDRkQRoFu0q3ROAqcWarQFY/Bs8Ss8JGgciKfmB7wDqwkiGZgu0R4+Hbhrbvh7fQGNqRLQLjTPtI9P57O8cTj+c8Z3XrCq0ao7sPq6u73YRWi8L2wufJiHPl+bnkl6Z+LzgLEa5RQnpImNBSdN7dS7RnMb1i+WshhBBCiNOGlA5CCCGEEEIIIYRYCXrpIIQQQgghhBBCiJVwiukVlM/je+pwoUdN86KydnZRWL+ZV6S9YvNwTqw4cihbJA7sz/aK/fuuWLQPXr0vr3/g8KK9cSjvZ3MjV2c3M5ujH8OYtabJCfzrMvIRkldXeZ3yWspamdZQarYDuTmluhbsi5YAJxFHXxPk13MkGyxJtqFlHrAvtr0VJC9l0gb7yjFgogYnXlcMRxdKtk/epr1i2tXHNbJXtKVGOSiW77Zx1epx3tBgN2jXcxe8bL1UOAdOEi+h5vpOTl23j8RJHZGFxcvkmx3ZNoLkEicpx9wa+dAorwW2p6wcx/DScMx/c3r4vNw9vHioeqJA0Q03Hi6xwu0rGBuXQlIfV3ZvKKwuA67xQHvFyMQK2CjQ7lNgo2jqc9C1SwtSlNCQ+Dxtq21n7+L1jtJrovQP85Yd/6OE5bTyYGe0Tvhna17MZyXv1TK9YgzOlZY9n+bB/uH5zdSOsT65nO1qyRnGswpsQLWH0Q7tLEIIIYQQu0FKByGEEEIIIYQQQqwEvXQQQgghhBBCCCHESthlesVxIu1tYK8Y507AasNWtjmMW9njMM5yYsXm4UOL9sF9Vy7ah/fnlIqjh7PVYmsjWzBms7z/OY7d974ifo/EChR9t9E5GepSbkJJ7YD120CCXkpyByf1rUv3vfQf8l5uSwl6Ct4nOXm4l92y2v0c7QTZb0IiBLs0ad2OM7QZUPJOufJQph8woYGSY1Z3ry+fwFLRBdaJJlByN4WfoA1k9m5fvBZu4mDOO99R3RLhFeWFHQljQHl7ef3yckqw61JuP58C60MhpfdTsB7L4MYWc4LV+LuuQxuPICYNWLYBlX33tg3K4YNzwjgNkdUiSBfg3D+2s7oEPuzrDuTq4XV0dh1/j9Bu4awWro2ECxyCj8HBPZc4t0CUqmD+/mTIzQhLQM++umcrP3Cf0Q1qIe55wq67zes2lsi1wefBiGQUF2pTJs0EaTvROoN7PjKlgv0+eSrLWE6hIIWmMAsttRReIYQQQohVIKWDEEIIIYQQQgghVoJeOgghhBBCCCGEEGIlnJq9AjjpZ2C1SH1hr4D9YUCExDDbXLS3jhxctA8fyMkURw/n9tZGtmDMZjmlou+zNLuH5rfohpP6UgJMqaqXo9atD1zH1wyvS9CHptwr7RV1O0fHdANWLS+ryVf6VMrk2UPSTXBOmBnzgdXxIXunfJ62Bic/5uFor6hbW8x8RX2quSdOrp/blOt7e0W1q4Wcui7xLrfpKDF3a/FaQB7dFPaA2sE5HpRAFxJ2doRDxXnq3S20V9TnYDQ3XRpKW4qx65J0t9yNOa5FW7dUTCa5zak1LunF66kAXm5eT3Xxe6nbD2hhcTaDprBXdNPcpm2G15IpCaHE3un4sZy7ZPqEv0nGwLLAYWPiAq0PtIANsGE52xcTQprADmNmLa4f7QjOygaL25jq6TkurISXPkz/KD6XUSsnIXZtcFxpb2ESRZw0E3kPk2vXn4P8raINqMMY4PHmUi1GH91SzKOm+kXNMpYiW54QQgghxDVAf2EIIYQQQgghhBBiJeilgxBCCCGEEEIIIVbCLu0VyZYEpU5NCgmp1/n6bSBbHbZyYsXWkcOL9gxpFOM8WycGtOf90bw+rBmbSMTYmvVY3/djPlJmTOl5HZdeEaRJWCi9pVw5tlf0gfCXMvm6uNxLaplOQGuGS2RY6iGkt7hGndf0LpojkxgS7QSQBkOd7pwM2H8qpNJe+o/1ArlyNP6Ue3OMabtgKsA4eP+NS34wnEiQXtFgDBqmLwQS8ZRYrZ7n6aG8fcRgse38FYENhfMuRUkPtJGU/Y5Sa9yhaXtpq+2JS69AGgTTJJrCcxMK4utj2wSJGskCS0RTv77l7r1ro74vnxBDAum9c6hhPtKKMBYWNWepqD/HuEVkqXDPPWfzwPg5eX5hLnLPzaa6Gq+lDzji8QIrQhDxUj5D+WW7zfWr9cPbrerPEmcF2Ta9gtevbqOIElDYD5fV4tJD6n0t7Uj8/R0HHgPH9l6o4/u5xo5LIYQQQoglpHQQQgghhBBCCCHEStBLByGEEEIIIYQQQqyEU9RSBokVgbS9KSSoDdMQZrBXbGZLxTDPdok05rSLEe2hz9vOkIKxiXSMLVROn/Vea8s0i2EHku02FMCO1bY7a1bWD40XHg7bSNk67QHuyHmDSWDncHLeUjoOOTC/64LK8GnI8m9W2nfHaOvy61I87/brJN9YHsmxIxkz7SbOXlE3qIxFQoC1vADYwr2q49hQ0B6cIS0OiVX9eYAiNSJlSbQbG0rgE20zlJpjuYvwiNIrKMVeigjANljcsslK+2y3J207y0c559w1C6whXEo7EtM/Uv1cmYjBFIdU3iR0WFn9WkSpBeV9n+H8ZWIC5sfo01BGJhcwgcK1eb+xzTGv9aJwxDkXSWENG8s5snxsd0u6S0z7QpAqkupzrkwn4Ty3wDrk+sdtnU1sB/aK4Dm0tJ6zkNVvmMj+xLnSBpYnPn9LewXdOGlwZg200J40yxsKIYQQQpwmpHQQQgghhBBCCCHEStBLByGEEEIIIYQQQqwEvXQQQgghhBBCCCHESthdTYeUjv3PeXxZuyGIBhy8v71HzQV+1zqDMfy0KL4wn7NeQ27P5vAEj9kDz0i4IjHT+sQoQvicU90nngKffnKebMQgNjTWRnmDxVGCLDfGN3KYEPTnljeB97wJbPnlksZ53Fm7AXUfnH9/gvUzLJPg6xHUj2tWxuEFsZVRVCLPj75+VxODNSdyv5tJ3I+2rdeBcB58xh26CxDkSyJG0tfd4HkWnwODfFRHw9vb6z7xZgf1Eo4dD/e6q1FSH1vWzuD4u3ovuM/XJvlarHV+DEY8J/jMcJGlTTDPg9oerAPg7uxt7tWmPvw7qAhTPEui8g5BuZwydZh1D3q2sdUQRaSyxgV27GopBPGUqTi7VDwha/st+75Yu14+Iaz1wE61jd9pqt+eRUhpvUbDEDwneN4sXcFxaosL6epDhG2rtltGxro4TMaxsiO5mYoft5RyjSNrttBGbRDWrJlMl9cVQgghhDhNSOkghBBCCCGEEEKIlaCXDkIIIYQQQgghhFgJ19he
[... base64-encoded image/png output truncated; the rendered figure shows the original tile ("Original Image") next to the model output ("Model Predictions") ...]",
      "text/plain": [
       "<Figure size 1100x800 with 2 Axes>"
      ]
     },
     "metadata": {},
     "output_type": "display_data"
    }
   ],
   "source": [
    "# Set figure size\n",
    "plt.rcParams['figure.figsize'] = 11, 8\n",
    "\n",
    "# Read images\n",
    "img_A = matplotlib.image.imread('raw_tile.png')\n",
    "img_B = matplotlib.image.imread('test_array_1.png')\n",
    "\n",
    "# Set up plots\n",
    "fig, ax = plt.subplots(1,2)\n",
    "plt.xticks([])\n",
    "plt.yticks([])\n",
    "ax[0].imshow(img_A)\n",
    "ax[1].imshow(img_B, cmap='gray')\n",
    "ax[0].set_title(\"Original Image\")\n",
    "ax[1].set_title(\"Model Predictions\")\n",
    "plt.tight_layout()\n",
    "\n",
    "# Get rid of tick marks\n",
    "for a in ax.ravel():\n",
    "    a.set_xticks([])\n",
    "    a.set_yticks([])\n",
    "\n",
    "# Show images\n",
    "plt.show()"
   ]
  },
  {
   "\n",
   "- Pocock J, Graham S, Vu QD, Jahanifar M, Deshpande S, Hadjigeorghiou G, Shephard A, Bashir RM, Bilal M, Lu W, Epstein D. TIAToolbox as an end-to-end library for advanced tissue image analytics. Communications medicine. 2022 Sep 24;2(1):120.\n",
   "\n",
   "- R. Verma, et al. \"MoNuSAC2020: A Multi-organ Nuclei Segmentation and Classification Challenge.\" IEEE Transactions on Medical Imaging (2021).\n",
   "\n",
   "- https://github.com/microsoft/onnxruntime/blob/main/tools/python/remove_initializer_from_input.py\n",
   "\n",
   "- https://pytorch.org/tutorials/advanced/super_resolution_with_onnxruntime.html"
  ]
 }
],

From c69e4a28949cfd376b4e0509e66ad54fc740e475 Mon Sep 17 00:00:00 2001
From: jamesgwen
Date: Wed, 8 Nov 2023 21:43:17 +0100
Subject: [PATCH 48/50] further edits

---
 examples/InferenceOnnx_tutorial.ipynb | 2 --
 1 file changed, 2 deletions(-)

diff --git a/examples/InferenceOnnx_tutorial.ipynb b/examples/InferenceOnnx_tutorial.ipynb
index 9e1d8f19..2245643f 100644
--- a/examples/InferenceOnnx_tutorial.ipynb
+++ b/examples/InferenceOnnx_tutorial.ipynb
@@ -506,8 +506,6 @@
    "\n",
    "In this example we use an image taken from the [MoNuSAC](https://monusac-2020.grand-challenge.org/) dataset. See citation in the `References` section.\n",
    "\n",
-   "See citation in the `References` section.\n",
-   "\n",
    "### Load Packages"

From a3011b318e157eebe86a6d8eff38266cd6e1f9a9 Mon Sep 17 00:00:00 2001
From: jamesgwen
Date: Wed, 22 Nov 2023 08:44:14 -0800
Subject: [PATCH 49/50] changing torch version

---
 environment.yml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/environment.yml b/environment.yml
index bb83d68a..1b8973df 100644
--- a/environment.yml
+++ b/environment.yml
@@ -11,7 +11,7 @@ dependencies:
   - scikit-image==0.18.3
   - matplotlib==3.5.1
   - openjdk==8.0.152
-  - pytorch==1.10.1
+  - pytorch==1.13.1 # orig = 1.10.1
   - h5py==3.1.0
   - dask==2021.12.0
   - pydicom==2.2.2

From 591caaf930717d8a59aedb70f138fb2164081ac5 Mon Sep 17 00:00:00 2001
From: jamesgwen
Date: Thu, 30 Nov 2023 12:02:16 -0800
Subject: [PATCH 50/50] fixed typos in example notebook

---
 examples/InferenceOnnx_tutorial.ipynb | 9 ++-------
 1 file changed, 2 insertions(+), 7 deletions(-)

diff --git a/examples/InferenceOnnx_tutorial.ipynb b/examples/InferenceOnnx_tutorial.ipynb
index 2245643f..23452c56 100644
--- a/examples/InferenceOnnx_tutorial.ipynb
+++ b/examples/InferenceOnnx_tutorial.ipynb
@@ -8,8 +8,6 @@
    "source": [
     "# PathML ONNX Tutorial\n",
     "\n",
-    "Written by James Wen. James_Wen@dfci.harvard.edu. \n",
-    "\n",
     "[![View on GitHub](https://img.shields.io/badge/View-on%20GitHub-lightgrey?logo=github)](https://github.com/Dana-Farber-AIOS/pathml/blob/master/examples/)\n",
     "\n",
     "## Introduction\n",
@@ -138,8 +136,7 @@
     "import matplotlib.pyplot as plt\n",
     "import matplotlib \n",
     "\n",
-    "from PIL import Image\n",
-    "\n"
+    "from PIL import Image"
    ]
   },
   {
@@ -504,9 +501,7 @@
     "\n",
     "The `RemoteTestHoverNet()` uses a pretrained HoverNet from TIAToolBox trained on the [MoNuSAC](https://monusac-2020.grand-challenge.org/) dataset. **The model was trained to accept tiles of 256x256 to create a prediction matrix of size 164x164 with 9 channels.** The first 5 channels correspond to the Nuclei Types (TP), the next two channels correspond to the Nuclei Pixels (NP), and the last two channels correspond to the Hover (HV). The documentation for these channels can be found on TIAToolBox's [website](https://tia-toolbox.readthedocs.io/en/v1.0.1/_modules/tiatoolbox/models/architecture/hovernet.html#HoVerNet.infer_batch). \n",
     "\n",
-    "In this example we use an image taken from the [MoNuSAC](https://monusac-2020.grand-challenge.org/) dataset. See citation in the `References` section.\n",
-    "\n",
-    "### Load Packages"
+    "In this example we use an image taken from the [MoNuSAC](https://monusac-2020.grand-challenge.org/) dataset. See citation in the `References` section."
    ]
   },
   {
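The hunk above carries the tutorial's description of the HoverNet output layout: a 164x164 prediction map with 9 channels (5 Nuclei Type, 2 Nuclei Pixel, 2 Hover). As a minimal sketch of that layout only — not code from any commit in this series, assuming a channels-last array, with hypothetical variable names and a dummy array standing in for a real prediction — the channel groups could be separated like this:

    import numpy as np

    # hypothetical HoverNet output for one 256x256 input tile: shape (164, 164, 9)
    pred = np.zeros((164, 164, 9), dtype=np.float32)

    tp = pred[:, :, 0:5]       # Nuclei Type (TP) channels
    np_mask = pred[:, :, 5:7]  # Nuclei Pixel (NP) channels
    hv = pred[:, :, 7:9]       # Hover (HV) channels

    # e.g. a per-pixel nucleus-type call from the TP block
    type_map = tp.argmax(axis=-1)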