diff --git a/.github/workflows/deploy.yaml b/.github/workflows/deploy.yaml index 29bfc57..6e3ff49 100644 --- a/.github/workflows/deploy.yaml +++ b/.github/workflows/deploy.yaml @@ -1,14 +1,13 @@ name: Deploy to GitHub Pages - -permissions: - contents: write - pages: write - on: push: - branches: [ "main", "master" ] + branches: [main] workflow_dispatch: +env: + OBJC_DISABLE_INITIALIZE_FORK_SAFETY: YES jobs: deploy: runs-on: ubuntu-latest - steps: [uses: fastai/workflows/quarto-ghp@master] + steps: + - uses: answerdotai/workflows/quarto-ghp@master +# with: {pre: 1} \ No newline at end of file diff --git a/.github/workflows/test.yaml b/.github/workflows/test.yaml index 5608592..500912c 100644 --- a/.github/workflows/test.yaml +++ b/.github/workflows/test.yaml @@ -1,7 +1,38 @@ name: CI -on: [workflow_dispatch, pull_request, push] +on: + workflow_dispatch: + pull_request: + push: + branches: [master] + +env: + OBJC_DISABLE_INITIALIZE_FORK_SAFETY: YES jobs: test: - runs-on: ubuntu-latest - steps: [uses: fastai/workflows/nbdev-ci@master] + strategy: + fail-fast: true + matrix: + os: [ubuntu, macos] + version: ["3.9", "3.10", "3.11", "3.12", "3.13"] + runs-on: ${{ matrix.os }}-latest + steps: + - uses: answerdotai/workflows/nbdev-ci@master + with: + version: ${{ matrix.version }} + pre: 1 + - name: test docs build + if: ${{ (github.event_name == 'pull_request' || github.event_name == 'workflow_dispatch') && matrix.version == '3.10' && matrix.os == 'ubuntu' }} + run: | + set -ux + wget -q $(curl https://latest.fast.ai/pre/quarto-dev/quarto-cli/linux-amd64.deb) + sudo dpkg -i quarto*.deb + nbdev_docs + if [ -f "_docs/index.html" ]; then + echo "docs built successfully." + else + echo "index page not found in rendered docs." 
+ ls -la + ls -la _docs + exit 1 + fi \ No newline at end of file diff --git a/.gitignore b/.gitignore index 5f112e2..bb2297e 100644 --- a/.gitignore +++ b/.gitignore @@ -149,3 +149,5 @@ checklink/cookies.txt # Quarto .quarto + +saves \ No newline at end of file diff --git a/LICENSE.txt b/LICENSE.txt index 261eeb9..ef865b5 100644 --- a/LICENSE.txt +++ b/LICENSE.txt @@ -1,192 +1,4 @@ - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. 
- - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. 
Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. 
You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. 
- - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. 
In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. 
- - Copyright [yyyy] [name of copyright owner] + Copyright 2025 Florian Fürrutter Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. diff --git a/README.md b/README.md index 80df770..4b2718a 100644 --- a/README.md +++ b/README.md @@ -2,94 +2,110 @@ +

-awesome -generative-models -diffusion-models -python-3.10 + +python-3.12 DOI +https://florianfuerrutter.github.io/genQC +huggingface.co/collections/Floki00 +Online_Demo

Code repository for generating quantum circuits with diffusion models. -[\[Arxiv\]](https://arxiv.org/abs/2311.02041) -[\[Demo\]](https://huggingface.co/spaces/Floki00/genQC) -![](https://github.com/FlorianFuerrutter/genQC/blob/main/src/assets/inference.png?raw=true) +![Generation process for 4-qubit +QFT.](https://github.com/FlorianFuerrutter/genQC/blob/main/src/webpage/assets/qft_4qubit_circuit_15s_wpause.gif?raw=true) + + + + + +## 📰 News + +- 🔥 \[2025-06-01\] *Discrete-continuous circuits with multimodal + diffusion* - model released on [Hugging Face: + huggingface.co/collections/Floki00](https://huggingface.co/collections/Floki00/discrete-continuous-circuits-with-multimodal-diffusion-6839c4e4553e56b957bbd5bf). ## The codebase The code contained within this repo allows the sampling of pre-trained diffusion models and includes our pipeline to fine-tune and train models -from scratch. Pre-trained weights can be found on [Hugging -Face](https://huggingface.co/collections/Floki00/generative-quantum-circuits-6550e926c67f60a368b02bc3) -and can be downloaded automatically via our code (see minimal example). -For the CLIP model weights we use the -[OpenCLIP](https://github.com/mlfoundations/open_clip) library, which +from scratch. Pre-trained weights can be found on [\[Hugging +Face\]](https://huggingface.co/collections/Floki00/) and can be +downloaded automatically via our code (see minimal example). For the +text CLIP model weights we use the +[`OpenCLIP`](https://github.com/mlfoundations/open_clip) library, which will download (and cache) the CLIP model on first usage of our pipeline. -In case you prefer reading a documentation rather than notebooks or code -see [\[Documentation\]](https://florianfuerrutter.github.io/genQC/). +In case you prefer reading a documentation, rather than notebooks or +code, see the project page under +[\[Documentation\]](https://florianfuerrutter.github.io/genQC/). -The repo inlcudes: +This repo inlcudes: 1. 
`genQC/` a full release of our used diffusion pipeline. -2. `src/examples` examples how to reproduce some figures of the - [Paper](https://arxiv.org/abs/2311.02041). +2. `src/examples/` examples and tutorials to show how to use the + library. 3. `src/` the source notebooks for - [nbdev](https://github.com/fastai/nbdev). + [`nbdev`](https://github.com/fastai/nbdev). ## Examples #### Minimal example -A minimal example to generate a 5 qubit circuit conditioned on a SRV of -$[1,1,1,2,2]$. You can try it out on your own with our -[\[Demo\]](https://huggingface.co/spaces/Floki00/genQC), no coding -required. +A minimal example to compile the 4-qubit Quantum Fourier transform (QFT) +unitary, using parameterized circuits. ``` python -from genQC.pipeline.diffusion_pipeline import DiffusionPipeline -from genQC.inference.infer_srv import generate_srv_tensors, convert_tensors_to_srvs - -pipeline = DiffusionPipeline.from_pretrained("Floki00/qc_srv_3to8qubit", "cpu") -pipeline.scheduler.set_timesteps(20) - -out_tensor = generate_srv_tensors(pipeline, "Generate SRV: [1,1,2,2,2]", samples=1, system_size=5, num_of_qubits=5, max_gates=16, g=10) -qc_list, _, srv_list = convert_tensors_to_srvs(out_tensor, pipeline.gate_pool) +import torch +from genQC.pipeline.multimodal_diffusion_pipeline import MultimodalDiffusionPipeline_ParametrizedCompilation +from genQC.inference.sampling import generate_compilation_tensors, decode_tensors_to_backend +from genQC.utils.misc_utils import infer_torch_device, set_seed +from genQC.platform.tokenizer.circuits_tokenizer import CircuitTokenizer +from genQC.benchmark.bench_compilation import SpecialUnitaries +from genQC.platform.simulation import Simulator, CircuitBackendType + +device = infer_torch_device() +set_seed(0) + +pipeline = MultimodalDiffusionPipeline_ParametrizedCompilation.from_pretrained( + repo_id="Floki00/cirdit_multimodal_compile_3to5qubit", + device=device) + +pipeline.scheduler.set_timesteps(40) +pipeline.scheduler_w.set_timesteps(40) + 
+pipeline.g_h, pipeline.g_w = 0.3, 0.1 +pipeline.lambda_h, pipeline.lambda_w = 1.0, 0.35 + +U = SpecialUnitaries.QFT(num_qubits=4).to(torch.complex64) + +out_tensor, params = generate_compilation_tensors(pipeline, + prompt="Compile 4 qubits using: ['h', 'cx', 'ccx', 'swap', 'rx', 'ry', 'rz', 'cp']", + U=U, + samples=8, + system_size=5, + num_of_qubits=4, + max_gates=32) ``` - [INFO]: `genQC.models.unet_qc.QC_Cond_UNet` instantiated from given config on cpu. - [INFO]: `genQC.models.frozen_open_clip.CachedFrozenOpenCLIPEmbedder` instantiated from given config on cpu. - [INFO]: `genQC.models.frozen_open_clip.CachedFrozenOpenCLIPEmbedder`. No save_path` provided. No state dict loaded. - ``` python -print(f"Circuit is SRV {srv_list[0]}") +vocabulary = {g:i+1 for i, g in enumerate(pipeline.gate_pool)} +tokenizer = CircuitTokenizer(vocabulary) +simulator = Simulator(CircuitBackendType.QISKIT) + +qc_list, _ = decode_tensors_to_backend(simulator, tokenizer, out_tensor, params) qc_list[0].draw("mpl") ``` - Circuit is SRV [1, 1, 2, 2, 2] - -![](https://github.com/FlorianFuerrutter/genQC/blob/main/index_files/figure-commonmark/cell-3-output-2.png?raw=true) +![](https://github.com/FlorianFuerrutter/genQC/blob/main/get_started_files/figure-commonmark/cell-3-output-1.png?raw=true) -#### Included examples +#### Further examples -Example notebooks are provided in the directory `src/examples/`. 
- -- `0_hello_circuit` - [\[doc\]](https://florianfuerrutter.github.io/genQC/examples/hello_circuit.html) - [\[notebook\]](https://github.com/FlorianFuerrutter/genQC/blob/main/src/examples/0_hello_circuit.ipynb): - How to sample a circuit (conditioned on a SRV) -- `1_editing_and_masking` - [\[doc\]](https://florianfuerrutter.github.io/genQC/examples/editing_and_masking.html) - [\[notebook\]](https://github.com/FlorianFuerrutter/genQC/blob/main/src/examples/1_editing_and_masking.ipynb): - Presents editing and masking of circuits -- `2_unitary_compilation` - [\[doc\]](https://florianfuerrutter.github.io/genQC/examples/unitary_compilation.html) - [\[notebook\]](https://github.com/FlorianFuerrutter/genQC/blob/main/src/examples/2_unitary_compilation.ipynb): - Compile unitaries and transpile circuits -- `3_dataset_and_fineTune` - [\[doc\]](https://florianfuerrutter.github.io/genQC/examples/dataset_and_finetune.html) - [\[notebook\]](https://github.com/FlorianFuerrutter/genQC/blob/main/src/examples/3_dataset_and_fineTune.ipynb): - How to create a dataset and fine-tune a pre-trained model +More detailed examples and tutorial notebooks are provided on the +project page +[\[tutorials\]](https://florianfuerrutter.github.io/genQC/examples/tutorials.html) +or in the directory `src/examples/`. ## Installation @@ -106,20 +122,21 @@ pip install genQC Note, this will install missing requirements automatically. You may want to install some of them manually beforehand, e.g. `torch` for specific -cuda support, see -[pytorch.org/get-started/locally](https://pytorch.org/get-started/locally/). - -**Requirements:** `genQC` depends on `python` (min. version 3.10) and -the libraries: `torch`, `numpy`, `matplotlib`, `scipy`, `pandas`, -`omegaconf`, `qiskit`, `tqdm`, `joblib`, `open_clip_torch`, -`ipywidgets`, `pylatexenc` and `huggingface_hub`. All can be installed -with `pip`. In `src/RELEASES.md` +cuda support, see . + +**Requirements:** `genQC` depends on `python` (min. 
version 3.12) and +the libraries: `torch`, `numpy`, `matplotlib`, `scipy`, `omegaconf`, +`qiskit`, `tqdm`, `joblib`, `open_clip_torch`, `ipywidgets`, +`pylatexenc`, `safetensors`, `tensordict` and `huggingface_hub`. All can +be installed with `pip install`. In `src/RELEASES.md` [\[doc\]](https://florianfuerrutter.github.io/genQC/RELEASES.html) and -the release descriptions specific tested-on versions are listed. +the [GitHub release +descriptions](https://github.com/FlorianFuerrutter/genQC/releases), +specific tested-on versions are listed. #### Method 2: clone the repository -To use the latest GitHub code you can clone the repository by running: +To use the latest GitHub code, you can clone the repository by running: ``` sh git clone https://github.com/FlorianFuerrutter/genQC.git @@ -136,9 +153,10 @@ pip install -e . #### Test installation -You can run the provided `0_hello_circuit` -[\[doc\]](https://florianfuerrutter.github.io/genQC/examples/hello_circuit.html) -[\[notebook\]](https://github.com/FlorianFuerrutter/genQC/blob/main/src/examples/0_hello_circuit.ipynb) +You can run the provided +`src/examples/Quantum circuit synthesis with diffusion models/0_hello_circuit` +[\[doc\]](https://florianfuerrutter.github.io/genQC/examples/Quantum%20circuit%20synthesis%20with%20diffusion%20models/hello_circuit.html) +[\[notebook\]](https://github.com/FlorianFuerrutter/genQC/blob/main/src/examples/Quantum%20circuit%20synthesis%20with%20diffusion%20models/0_hello_circuit.ipynb) example to test your installation. On a computer with a moderate GPU this inference example notebook should run under half a minute. @@ -153,6 +171,8 @@ License We kindly ask you to cite our paper if any of the previous material was useful for your work. 
+#### Quantum circuit synthesis with diffusion models + ``` latex @article{furrutter2024quantum, title={Quantum circuit synthesis with diffusion models}, diff --git a/genQC/__init__.py b/genQC/__init__.py index 485f44a..d3ec452 100644 --- a/genQC/__init__.py +++ b/genQC/__init__.py @@ -1 +1 @@ -__version__ = "0.1.1" +__version__ = "0.2.0" diff --git a/genQC/_modidx.py b/genQC/_modidx.py index c05ae31..7a3cb50 100644 --- a/genQC/_modidx.py +++ b/genQC/_modidx.py @@ -5,250 +5,403 @@ 'doc_host': 'https://FlorianFuerrutter.github.io', 'git_url': 'https://github.com/FlorianFuerrutter/genQC', 'lib_path': 'genQC'}, - 'syms': { 'genQC.config_loader': { 'genQC.config_loader.class_to_str': ('config_loader.html#class_to_str', 'genQC/config_loader.py'), - 'genQC.config_loader.config_to_dict': ('config_loader.html#config_to_dict', 'genQC/config_loader.py'), - 'genQC.config_loader.get_obj_from_str': ( 'config_loader.html#get_obj_from_str', - 'genQC/config_loader.py'), - 'genQC.config_loader.instantiate_from_config': ( 'config_loader.html#instantiate_from_config', - 'genQC/config_loader.py'), - 'genQC.config_loader.load_config': ('config_loader.html#load_config', 'genQC/config_loader.py'), - 'genQC.config_loader.load_model_from_config': ( 'config_loader.html#load_model_from_config', - 'genQC/config_loader.py'), - 'genQC.config_loader.save_dataclass_yaml': ( 'config_loader.html#save_dataclass_yaml', - 'genQC/config_loader.py'), - 'genQC.config_loader.save_dict_yaml': ('config_loader.html#save_dict_yaml', 'genQC/config_loader.py')}, - 'genQC.dataset.cached_qc_dataset': { 'genQC.dataset.cached_qc_dataset.Cached_OpenClip_Dataset': ( 'dataset/cached_qc_dataset.html#cached_openclip_dataset', - 'genQC/dataset/cached_qc_dataset.py'), - 'genQC.dataset.cached_qc_dataset.Cached_OpenClip_Dataset.caching': ( 'dataset/cached_qc_dataset.html#cached_openclip_dataset.caching', - 'genQC/dataset/cached_qc_dataset.py'), - 'genQC.dataset.cached_qc_dataset.Cached_OpenClip_Dataset.from_config_file': ( 
'dataset/cached_qc_dataset.html#cached_openclip_dataset.from_config_file', - 'genQC/dataset/cached_qc_dataset.py'), - 'genQC.dataset.cached_qc_dataset.Cached_OpenClip_Dataset.get_dataloaders': ( 'dataset/cached_qc_dataset.html#cached_openclip_dataset.get_dataloaders', - 'genQC/dataset/cached_qc_dataset.py'), - 'genQC.dataset.cached_qc_dataset.Cached_OpenClip_Dataset.x_y_preprocess': ( 'dataset/cached_qc_dataset.html#cached_openclip_dataset.x_y_preprocess', - 'genQC/dataset/cached_qc_dataset.py')}, - 'genQC.dataset.config_dataset': { 'genQC.dataset.config_dataset.Config_Dataset': ( 'dataset/config_dataset.html#config_dataset', - 'genQC/dataset/config_dataset.py'), - 'genQC.dataset.config_dataset.Config_Dataset.__init__': ( 'dataset/config_dataset.html#config_dataset.__init__', - 'genQC/dataset/config_dataset.py'), - 'genQC.dataset.config_dataset.Config_Dataset.from_config': ( 'dataset/config_dataset.html#config_dataset.from_config', - 'genQC/dataset/config_dataset.py'), - 'genQC.dataset.config_dataset.Config_Dataset.from_config_file': ( 'dataset/config_dataset.html#config_dataset.from_config_file', - 'genQC/dataset/config_dataset.py'), - 'genQC.dataset.config_dataset.Config_Dataset.from_huggingface': ( 'dataset/config_dataset.html#config_dataset.from_huggingface', - 'genQC/dataset/config_dataset.py'), - 'genQC.dataset.config_dataset.Config_Dataset.get_config': ( 'dataset/config_dataset.html#config_dataset.get_config', + 'syms': { 'genQC.benchmark.bench_compilation': { 'genQC.benchmark.bench_compilation.BaseHamiltonian': ( 'benchmark/bench_compilation.html#basehamiltonian', + 'genQC/benchmark/bench_compilation.py'), + 'genQC.benchmark.bench_compilation.BaseHamiltonian.__init__': ( 'benchmark/bench_compilation.html#basehamiltonian.__init__', + 'genQC/benchmark/bench_compilation.py'), + 'genQC.benchmark.bench_compilation.BaseHamiltonian._generate_matrix': ( 'benchmark/bench_compilation.html#basehamiltonian._generate_matrix', + 'genQC/benchmark/bench_compilation.py'), + 
'genQC.benchmark.bench_compilation.BaseHamiltonian.get_evolution': ( 'benchmark/bench_compilation.html#basehamiltonian.get_evolution', + 'genQC/benchmark/bench_compilation.py'), + 'genQC.benchmark.bench_compilation.IsingHamiltonian': ( 'benchmark/bench_compilation.html#isinghamiltonian', + 'genQC/benchmark/bench_compilation.py'), + 'genQC.benchmark.bench_compilation.IsingHamiltonian.__init__': ( 'benchmark/bench_compilation.html#isinghamiltonian.__init__', + 'genQC/benchmark/bench_compilation.py'), + 'genQC.benchmark.bench_compilation.IsingHamiltonian._generate_matrix': ( 'benchmark/bench_compilation.html#isinghamiltonian._generate_matrix', + 'genQC/benchmark/bench_compilation.py'), + 'genQC.benchmark.bench_compilation.SpecialUnitaries': ( 'benchmark/bench_compilation.html#specialunitaries', + 'genQC/benchmark/bench_compilation.py'), + 'genQC.benchmark.bench_compilation.SpecialUnitaries.QFT': ( 'benchmark/bench_compilation.html#specialunitaries.qft', + 'genQC/benchmark/bench_compilation.py'), + 'genQC.benchmark.bench_compilation.XXZHamiltonian': ( 'benchmark/bench_compilation.html#xxzhamiltonian', + 'genQC/benchmark/bench_compilation.py'), + 'genQC.benchmark.bench_compilation.XXZHamiltonian.__init__': ( 'benchmark/bench_compilation.html#xxzhamiltonian.__init__', + 'genQC/benchmark/bench_compilation.py'), + 'genQC.benchmark.bench_compilation.XXZHamiltonian._generate_matrix': ( 'benchmark/bench_compilation.html#xxzhamiltonian._generate_matrix', + 'genQC/benchmark/bench_compilation.py'), + 'genQC.benchmark.bench_compilation.qubit_tensor_product': ( 'benchmark/bench_compilation.html#qubit_tensor_product', + 'genQC/benchmark/bench_compilation.py')}, + 'genQC.dataset.balancing': { 'genQC.dataset.balancing.add_balance_fn_quantile_qc_length': ( 'dataset/balancing.html#add_balance_fn_quantile_qc_length', + 'genQC/dataset/balancing.py'), + 'genQC.dataset.balancing.get_tensor_gate_length': ( 'dataset/balancing.html#get_tensor_gate_length', + 'genQC/dataset/balancing.py')}, + 
'genQC.dataset.cached_dataset': { 'genQC.dataset.cached_dataset.CachedOpenCLIPDataset': ( 'dataset/cached_dataset.html#cachedopenclipdataset', + 'genQC/dataset/cached_dataset.py'), + 'genQC.dataset.cached_dataset.CachedOpenCLIPDataset.caching': ( 'dataset/cached_dataset.html#cachedopenclipdataset.caching', + 'genQC/dataset/cached_dataset.py'), + 'genQC.dataset.cached_dataset.CachedOpenCLIPDataset.get_dataloaders': ( 'dataset/cached_dataset.html#cachedopenclipdataset.get_dataloaders', + 'genQC/dataset/cached_dataset.py'), + 'genQC.dataset.cached_dataset.CachedOpenCLIPDataset.x_y_preprocess': ( 'dataset/cached_dataset.html#cachedopenclipdataset.x_y_preprocess', + 'genQC/dataset/cached_dataset.py'), + 'genQC.dataset.cached_dataset.CachedOpenCLIPDatasetConfig': ( 'dataset/cached_dataset.html#cachedopenclipdatasetconfig', + 'genQC/dataset/cached_dataset.py')}, + 'genQC.dataset.circuits_dataset': { 'genQC.dataset.circuits_dataset.CircuitsConfigDataset': ( 'dataset/circuits_dataset.html#circuitsconfigdataset', + 'genQC/dataset/circuits_dataset.py'), + 'genQC.dataset.circuits_dataset.CircuitsConfigDataset.__init__': ( 'dataset/circuits_dataset.html#circuitsconfigdataset.__init__', + 'genQC/dataset/circuits_dataset.py'), + 'genQC.dataset.circuits_dataset.CircuitsConfigDataset.params_config': ( 'dataset/circuits_dataset.html#circuitsconfigdataset.params_config', + 'genQC/dataset/circuits_dataset.py'), + 'genQC.dataset.circuits_dataset.CircuitsConfigDatasetConfig': ( 'dataset/circuits_dataset.html#circuitsconfigdatasetconfig', + 'genQC/dataset/circuits_dataset.py'), + 'genQC.dataset.circuits_dataset.MixedCircuitsConfigDataset': ( 'dataset/circuits_dataset.html#mixedcircuitsconfigdataset', + 'genQC/dataset/circuits_dataset.py'), + 'genQC.dataset.circuits_dataset.MixedCircuitsConfigDataset._cut': ( 'dataset/circuits_dataset.html#mixedcircuitsconfigdataset._cut', + 'genQC/dataset/circuits_dataset.py'), + 
'genQC.dataset.circuits_dataset.MixedCircuitsConfigDataset._cut_compilation_params': ( 'dataset/circuits_dataset.html#mixedcircuitsconfigdataset._cut_compilation_params', + 'genQC/dataset/circuits_dataset.py'), + 'genQC.dataset.circuits_dataset.MixedCircuitsConfigDataset._get_cut_sizes': ( 'dataset/circuits_dataset.html#mixedcircuitsconfigdataset._get_cut_sizes', + 'genQC/dataset/circuits_dataset.py'), + 'genQC.dataset.circuits_dataset.MixedCircuitsConfigDataset._preprocess_dataset': ( 'dataset/circuits_dataset.html#mixedcircuitsconfigdataset._preprocess_dataset', + 'genQC/dataset/circuits_dataset.py'), + 'genQC.dataset.circuits_dataset.MixedCircuitsConfigDataset.cut_padding_Bucket_collate_fn': ( 'dataset/circuits_dataset.html#mixedcircuitsconfigdataset.cut_padding_bucket_collate_fn', + 'genQC/dataset/circuits_dataset.py'), + 'genQC.dataset.circuits_dataset.MixedCircuitsConfigDataset.cut_padding_Bucket_collate_fn_compilation': ( 'dataset/circuits_dataset.html#mixedcircuitsconfigdataset.cut_padding_bucket_collate_fn_compilation', + 'genQC/dataset/circuits_dataset.py'), + 'genQC.dataset.circuits_dataset.MixedCircuitsConfigDataset.cut_padding_Bucket_collate_fn_compilation_params': ( 'dataset/circuits_dataset.html#mixedcircuitsconfigdataset.cut_padding_bucket_collate_fn_compilation_params', + 'genQC/dataset/circuits_dataset.py'), + 'genQC.dataset.circuits_dataset.MixedCircuitsConfigDataset.cut_padding_collate_fn': ( 'dataset/circuits_dataset.html#mixedcircuitsconfigdataset.cut_padding_collate_fn', + 'genQC/dataset/circuits_dataset.py'), + 'genQC.dataset.circuits_dataset.MixedCircuitsConfigDataset.cut_padding_collate_fn_compilation': ( 'dataset/circuits_dataset.html#mixedcircuitsconfigdataset.cut_padding_collate_fn_compilation', + 'genQC/dataset/circuits_dataset.py'), + 'genQC.dataset.circuits_dataset.MixedCircuitsConfigDataset.cut_padding_collate_fn_compilation_params': ( 
'dataset/circuits_dataset.html#mixedcircuitsconfigdataset.cut_padding_collate_fn_compilation_params', + 'genQC/dataset/circuits_dataset.py'), + 'genQC.dataset.circuits_dataset.MixedCircuitsConfigDataset.from_datasets': ( 'dataset/circuits_dataset.html#mixedcircuitsconfigdataset.from_datasets', + 'genQC/dataset/circuits_dataset.py'), + 'genQC.dataset.circuits_dataset.MixedCircuitsConfigDataset.params_config': ( 'dataset/circuits_dataset.html#mixedcircuitsconfigdataset.params_config', + 'genQC/dataset/circuits_dataset.py'), + 'genQC.dataset.circuits_dataset.MixedCircuitsConfigDatasetConfig': ( 'dataset/circuits_dataset.html#mixedcircuitsconfigdatasetconfig', + 'genQC/dataset/circuits_dataset.py')}, + 'genQC.dataset.config_dataset': { 'genQC.dataset.config_dataset.ConfigDataset': ( 'dataset/config_dataset.html#configdataset', + 'genQC/dataset/config_dataset.py'), + 'genQC.dataset.config_dataset.ConfigDataset.__init__': ( 'dataset/config_dataset.html#configdataset.__init__', + 'genQC/dataset/config_dataset.py'), + 'genQC.dataset.config_dataset.ConfigDataset.check_save_type': ( 'dataset/config_dataset.html#configdataset.check_save_type', + 'genQC/dataset/config_dataset.py'), + 'genQC.dataset.config_dataset.ConfigDataset.from_config': ( 'dataset/config_dataset.html#configdataset.from_config', 'genQC/dataset/config_dataset.py'), - 'genQC.dataset.config_dataset.Config_Dataset.load_x_y': ( 'dataset/config_dataset.html#config_dataset.load_x_y', - 'genQC/dataset/config_dataset.py'), - 'genQC.dataset.config_dataset.Config_Dataset.params_config': ( 'dataset/config_dataset.html#config_dataset.params_config', + 'genQC.dataset.config_dataset.ConfigDataset.from_config_file': ( 'dataset/config_dataset.html#configdataset.from_config_file', + 'genQC/dataset/config_dataset.py'), + 'genQC.dataset.config_dataset.ConfigDataset.from_huggingface': ( 'dataset/config_dataset.html#configdataset.from_huggingface', + 'genQC/dataset/config_dataset.py'), + 
'genQC.dataset.config_dataset.ConfigDataset.get_config': ( 'dataset/config_dataset.html#configdataset.get_config', + 'genQC/dataset/config_dataset.py'), + 'genQC.dataset.config_dataset.ConfigDataset.get_dataloaders': ( 'dataset/config_dataset.html#configdataset.get_dataloaders', + 'genQC/dataset/config_dataset.py'), + 'genQC.dataset.config_dataset.ConfigDataset.load_x_y': ( 'dataset/config_dataset.html#configdataset.load_x_y', + 'genQC/dataset/config_dataset.py'), + 'genQC.dataset.config_dataset.ConfigDataset.memory_summary': ( 'dataset/config_dataset.html#configdataset.memory_summary', 'genQC/dataset/config_dataset.py'), - 'genQC.dataset.config_dataset.Config_Dataset.save_dataset': ( 'dataset/config_dataset.html#config_dataset.save_dataset', + 'genQC.dataset.config_dataset.ConfigDataset.params_config': ( 'dataset/config_dataset.html#configdataset.params_config', 'genQC/dataset/config_dataset.py'), - 'genQC.dataset.config_dataset.Config_Dataset.store_x_y': ( 'dataset/config_dataset.html#config_dataset.store_x_y', - 'genQC/dataset/config_dataset.py'), - 'genQC.dataset.config_dataset.Config_Dataset.to': ( 'dataset/config_dataset.html#config_dataset.to', - 'genQC/dataset/config_dataset.py'), - 'genQC.dataset.config_dataset.Config_Dataset_config': ( 'dataset/config_dataset.html#config_dataset_config', - 'genQC/dataset/config_dataset.py')}, + 'genQC.dataset.config_dataset.ConfigDataset.save_dataset': ( 'dataset/config_dataset.html#configdataset.save_dataset', + 'genQC/dataset/config_dataset.py'), + 'genQC.dataset.config_dataset.ConfigDataset.store_x_y': ( 'dataset/config_dataset.html#configdataset.store_x_y', + 'genQC/dataset/config_dataset.py'), + 'genQC.dataset.config_dataset.ConfigDataset.to': ( 'dataset/config_dataset.html#configdataset.to', + 'genQC/dataset/config_dataset.py'), + 'genQC.dataset.config_dataset.ConfigDataset.valid_split': ( 'dataset/config_dataset.html#configdataset.valid_split', + 'genQC/dataset/config_dataset.py'), + 
'genQC.dataset.config_dataset.ConfigDataset.x_y_preprocess': ( 'dataset/config_dataset.html#configdataset.x_y_preprocess', + 'genQC/dataset/config_dataset.py'), + 'genQC.dataset.config_dataset.ConfigDatasetConfig': ( 'dataset/config_dataset.html#configdatasetconfig', + 'genQC/dataset/config_dataset.py')}, 'genQC.dataset.dataset_helper': { 'genQC.dataset.dataset_helper.balance_tensor_dataset': ( 'dataset/dataset_helper.html#balance_tensor_dataset', 'genQC/dataset/dataset_helper.py'), 'genQC.dataset.dataset_helper.check_duplicate_in_dataset': ( 'dataset/dataset_helper.html#check_duplicate_in_dataset', 'genQC/dataset/dataset_helper.py'), 'genQC.dataset.dataset_helper.check_duplicates_in_dataset': ( 'dataset/dataset_helper.html#check_duplicates_in_dataset', 'genQC/dataset/dataset_helper.py'), - 'genQC.dataset.dataset_helper.check_duplicates_in_dataset_python': ( 'dataset/dataset_helper.html#check_duplicates_in_dataset_python', - 'genQC/dataset/dataset_helper.py'), 'genQC.dataset.dataset_helper.get_unique_elements_indices': ( 'dataset/dataset_helper.html#get_unique_elements_indices', 'genQC/dataset/dataset_helper.py'), - 'genQC.dataset.dataset_helper.map_old_tensor_to_new': ( 'dataset/dataset_helper.html#map_old_tensor_to_new', - 'genQC/dataset/dataset_helper.py'), 'genQC.dataset.dataset_helper.shuffle_tensor_dataset': ( 'dataset/dataset_helper.html#shuffle_tensor_dataset', 'genQC/dataset/dataset_helper.py'), 'genQC.dataset.dataset_helper.uniquify_tensor_dataset': ( 'dataset/dataset_helper.html#uniquify_tensor_dataset', 'genQC/dataset/dataset_helper.py')}, - 'genQC.dataset.mixed_cached_qc_dataset': { 'genQC.dataset.mixed_cached_qc_dataset.Mixed_Cached_OpenClip_Dataset': ( 'dataset/mixed_cached_qc_dataset.html#mixed_cached_openclip_dataset', - 'genQC/dataset/mixed_cached_qc_dataset.py'), - 'genQC.dataset.mixed_cached_qc_dataset.Mixed_Cached_OpenClip_Dataset.cut_padding_Bucket_collate_fn': ( 
'dataset/mixed_cached_qc_dataset.html#mixed_cached_openclip_dataset.cut_padding_bucket_collate_fn', - 'genQC/dataset/mixed_cached_qc_dataset.py'), - 'genQC.dataset.mixed_cached_qc_dataset.Mixed_Cached_OpenClip_Dataset.cut_padding_Bucket_collate_fn_compilation': ( 'dataset/mixed_cached_qc_dataset.html#mixed_cached_openclip_dataset.cut_padding_bucket_collate_fn_compilation', - 'genQC/dataset/mixed_cached_qc_dataset.py'), - 'genQC.dataset.mixed_cached_qc_dataset.Mixed_Cached_OpenClip_Dataset.cut_padding_Bucket_collate_fn_compilation_params': ( 'dataset/mixed_cached_qc_dataset.html#mixed_cached_openclip_dataset.cut_padding_bucket_collate_fn_compilation_params', - 'genQC/dataset/mixed_cached_qc_dataset.py'), - 'genQC.dataset.mixed_cached_qc_dataset.Mixed_Cached_OpenClip_Dataset.cut_padding_collate_fn': ( 'dataset/mixed_cached_qc_dataset.html#mixed_cached_openclip_dataset.cut_padding_collate_fn', - 'genQC/dataset/mixed_cached_qc_dataset.py'), - 'genQC.dataset.mixed_cached_qc_dataset.Mixed_Cached_OpenClip_Dataset.cut_padding_collate_fn_compilation': ( 'dataset/mixed_cached_qc_dataset.html#mixed_cached_openclip_dataset.cut_padding_collate_fn_compilation', - 'genQC/dataset/mixed_cached_qc_dataset.py'), - 'genQC.dataset.mixed_cached_qc_dataset.Mixed_Cached_OpenClip_Dataset.cut_padding_collate_fn_compilation_params': ( 'dataset/mixed_cached_qc_dataset.html#mixed_cached_openclip_dataset.cut_padding_collate_fn_compilation_params', - 'genQC/dataset/mixed_cached_qc_dataset.py'), - 'genQC.dataset.mixed_cached_qc_dataset.Mixed_Cached_OpenClip_Dataset.flexPadAttn_TimeOnly_padding_collate_fn': ( 'dataset/mixed_cached_qc_dataset.html#mixed_cached_openclip_dataset.flexpadattn_timeonly_padding_collate_fn', - 'genQC/dataset/mixed_cached_qc_dataset.py'), - 'genQC.dataset.mixed_cached_qc_dataset.Mixed_Cached_OpenClip_Dataset.flexPadAttn_padding_collate_fn': ( 'dataset/mixed_cached_qc_dataset.html#mixed_cached_openclip_dataset.flexpadattn_padding_collate_fn', - 
'genQC/dataset/mixed_cached_qc_dataset.py'), - 'genQC.dataset.mixed_cached_qc_dataset.Mixed_Cached_OpenClip_Dataset.from_config_file': ( 'dataset/mixed_cached_qc_dataset.html#mixed_cached_openclip_dataset.from_config_file', - 'genQC/dataset/mixed_cached_qc_dataset.py'), - 'genQC.dataset.mixed_cached_qc_dataset.Mixed_Cached_OpenClip_Dataset.from_datasets': ( 'dataset/mixed_cached_qc_dataset.html#mixed_cached_openclip_dataset.from_datasets', - 'genQC/dataset/mixed_cached_qc_dataset.py'), - 'genQC.dataset.mixed_cached_qc_dataset.Mixed_Cached_OpenClip_Dataset.get_dataloaders': ( 'dataset/mixed_cached_qc_dataset.html#mixed_cached_openclip_dataset.get_dataloaders', - 'genQC/dataset/mixed_cached_qc_dataset.py'), - 'genQC.dataset.mixed_cached_qc_dataset.Mixed_Cached_OpenClip_Dataset.params_config': ( 'dataset/mixed_cached_qc_dataset.html#mixed_cached_openclip_dataset.params_config', - 'genQC/dataset/mixed_cached_qc_dataset.py'), - 'genQC.dataset.mixed_cached_qc_dataset.Mixed_Cached_OpenClip_Dataset_config': ( 'dataset/mixed_cached_qc_dataset.html#mixed_cached_openclip_dataset_config', - 'genQC/dataset/mixed_cached_qc_dataset.py')}, - 'genQC.dataset.qc_dataset': { 'genQC.dataset.qc_dataset.Qc_Config_Dataset': ( 'dataset/qc_dataset.html#qc_config_dataset', - 'genQC/dataset/qc_dataset.py'), - 'genQC.dataset.qc_dataset.Qc_Config_Dataset.__init__': ( 'dataset/qc_dataset.html#qc_config_dataset.__init__', - 'genQC/dataset/qc_dataset.py'), - 'genQC.dataset.qc_dataset.Qc_Config_Dataset.get_dataloaders': ( 'dataset/qc_dataset.html#qc_config_dataset.get_dataloaders', - 'genQC/dataset/qc_dataset.py'), - 'genQC.dataset.qc_dataset.Qc_Config_Dataset.params_config': ( 'dataset/qc_dataset.html#qc_config_dataset.params_config', - 'genQC/dataset/qc_dataset.py'), - 'genQC.dataset.qc_dataset.Qc_Config_Dataset.plot_distribution': ( 'dataset/qc_dataset.html#qc_config_dataset.plot_distribution', - 'genQC/dataset/qc_dataset.py'), - 'genQC.dataset.qc_dataset.Qc_Config_Dataset.plot_example': ( 
'dataset/qc_dataset.html#qc_config_dataset.plot_example', - 'genQC/dataset/qc_dataset.py'), - 'genQC.dataset.qc_dataset.Qc_Config_Dataset.valid_split': ( 'dataset/qc_dataset.html#qc_config_dataset.valid_split', - 'genQC/dataset/qc_dataset.py'), - 'genQC.dataset.qc_dataset.Qc_Config_Dataset.x_y_preprocess': ( 'dataset/qc_dataset.html#qc_config_dataset.x_y_preprocess', - 'genQC/dataset/qc_dataset.py'), - 'genQC.dataset.qc_dataset.Qc_Config_Dataset_config': ( 'dataset/qc_dataset.html#qc_config_dataset_config', - 'genQC/dataset/qc_dataset.py')}, + 'genQC.dataset.mixed_cached_dataset': { 'genQC.dataset.mixed_cached_dataset.MixedCachedOpenCLIPDataset': ( 'dataset/mixed_cached_dataset.html#mixedcachedopenclipdataset', + 'genQC/dataset/mixed_cached_dataset.py'), + 'genQC.dataset.mixed_cached_dataset.MixedCachedOpenCLIPDataset._add_missing_conditions': ( 'dataset/mixed_cached_dataset.html#mixedcachedopenclipdataset._add_missing_conditions', + 'genQC/dataset/mixed_cached_dataset.py'), + 'genQC.dataset.mixed_cached_dataset.MixedCachedOpenCLIPDataset._create_train_valid_datasets': ( 'dataset/mixed_cached_dataset.html#mixedcachedopenclipdataset._create_train_valid_datasets', + 'genQC/dataset/mixed_cached_dataset.py'), + 'genQC.dataset.mixed_cached_dataset.MixedCachedOpenCLIPDataset._pad_conditions': ( 'dataset/mixed_cached_dataset.html#mixedcachedopenclipdataset._pad_conditions', + 'genQC/dataset/mixed_cached_dataset.py'), + 'genQC.dataset.mixed_cached_dataset.MixedCachedOpenCLIPDataset._preprocess_datasets': ( 'dataset/mixed_cached_dataset.html#mixedcachedopenclipdataset._preprocess_datasets', + 'genQC/dataset/mixed_cached_dataset.py'), + 'genQC.dataset.mixed_cached_dataset.MixedCachedOpenCLIPDataset._reorder_to_buckets': ( 'dataset/mixed_cached_dataset.html#mixedcachedopenclipdataset._reorder_to_buckets', + 'genQC/dataset/mixed_cached_dataset.py'), + 'genQC.dataset.mixed_cached_dataset.MixedCachedOpenCLIPDataset.from_datasets': ( 
'dataset/mixed_cached_dataset.html#mixedcachedopenclipdataset.from_datasets', + 'genQC/dataset/mixed_cached_dataset.py'), + 'genQC.dataset.mixed_cached_dataset.MixedCachedOpenCLIPDataset.get_dataloaders': ( 'dataset/mixed_cached_dataset.html#mixedcachedopenclipdataset.get_dataloaders', + 'genQC/dataset/mixed_cached_dataset.py'), + 'genQC.dataset.mixed_cached_dataset.MixedCachedOpenCLIPDataset.params_config': ( 'dataset/mixed_cached_dataset.html#mixedcachedopenclipdataset.params_config', + 'genQC/dataset/mixed_cached_dataset.py'), + 'genQC.dataset.mixed_cached_dataset.MixedCachedOpenCLIPDatasetConfig': ( 'dataset/mixed_cached_dataset.html#mixedcachedopenclipdatasetconfig', + 'genQC/dataset/mixed_cached_dataset.py')}, 'genQC.imports': {}, - 'genQC.inference.export_cudaq': { 'genQC.inference.export_cudaq.CircuitInstruction': ( 'inference/export_cudaq.html#circuitinstruction', - 'genQC/inference/export_cudaq.py'), - 'genQC.inference.export_cudaq.CircuitInstructions': ( 'inference/export_cudaq.html#circuitinstructions', - 'genQC/inference/export_cudaq.py'), - 'genQC.inference.export_cudaq.CircuitInstructions.__init__': ( 'inference/export_cudaq.html#circuitinstructions.__init__', - 'genQC/inference/export_cudaq.py'), - 'genQC.inference.export_cudaq.CircuitInstructions.__repr__': ( 'inference/export_cudaq.html#circuitinstructions.__repr__', - 'genQC/inference/export_cudaq.py'), - 'genQC.inference.export_cudaq.CircuitInstructions.add_instruction': ( 'inference/export_cudaq.html#circuitinstructions.add_instruction', - 'genQC/inference/export_cudaq.py'), - 'genQC.inference.export_cudaq.CircuitInstructions.data': ( 'inference/export_cudaq.html#circuitinstructions.data', - 'genQC/inference/export_cudaq.py'), - 'genQC.inference.export_cudaq.CircuitInstructions.length': ( 'inference/export_cudaq.html#circuitinstructions.length', - 'genQC/inference/export_cudaq.py'), - 'genQC.inference.export_cudaq.CircuitInstructions.max_gates': ( 
'inference/export_cudaq.html#circuitinstructions.max_gates', - 'genQC/inference/export_cudaq.py'), - 'genQC.inference.export_cudaq.CircuitInstructions.num_qubits': ( 'inference/export_cudaq.html#circuitinstructions.num_qubits', - 'genQC/inference/export_cudaq.py'), - 'genQC.inference.export_cudaq.CircuitInstructions.print': ( 'inference/export_cudaq.html#circuitinstructions.print', - 'genQC/inference/export_cudaq.py'), - 'genQC.inference.export_cudaq.CircuitsCudaqBackend': ( 'inference/export_cudaq.html#circuitscudaqbackend', - 'genQC/inference/export_cudaq.py'), - 'genQC.inference.export_cudaq.CircuitsCudaqBackend._construct_kernel': ( 'inference/export_cudaq.html#circuitscudaqbackend._construct_kernel', - 'genQC/inference/export_cudaq.py'), - 'genQC.inference.export_cudaq.CircuitsCudaqBackend.check_error_circuit': ( 'inference/export_cudaq.html#circuitscudaqbackend.check_error_circuit', - 'genQC/inference/export_cudaq.py'), - 'genQC.inference.export_cudaq.CircuitsCudaqBackend.draw': ( 'inference/export_cudaq.html#circuitscudaqbackend.draw', - 'genQC/inference/export_cudaq.py'), - 'genQC.inference.export_cudaq.CircuitsCudaqBackend.export_cudaq': ( 'inference/export_cudaq.html#circuitscudaqbackend.export_cudaq', - 'genQC/inference/export_cudaq.py'), - 'genQC.inference.export_cudaq.CircuitsCudaqBackend.get_unitary': ( 'inference/export_cudaq.html#circuitscudaqbackend.get_unitary', - 'genQC/inference/export_cudaq.py'), - 'genQC.inference.export_cudaq.genqc_to_cudaq': ( 'inference/export_cudaq.html#genqc_to_cudaq', - 'genQC/inference/export_cudaq.py'), - 'genQC.inference.export_cudaq.tensor_to_instructions': ( 'inference/export_cudaq.html#tensor_to_instructions', - 'genQC/inference/export_cudaq.py')}, - 'genQC.inference.infer_compilation': { 'genQC.inference.infer_compilation.check_correct_gates': ( 'inference/infer_compilation.html#check_correct_gates', - 'genQC/inference/infer_compilation.py'), - 'genQC.inference.infer_compilation.check_correct_unitary_distance': ( 
'inference/infer_compilation.html#check_correct_unitary_distance', - 'genQC/inference/infer_compilation.py'), - 'genQC.inference.infer_compilation.check_correct_unitary_exact': ( 'inference/infer_compilation.html#check_correct_unitary_exact', - 'genQC/inference/infer_compilation.py'), - 'genQC.inference.infer_compilation.generate_comp_tensors': ( 'inference/infer_compilation.html#generate_comp_tensors', - 'genQC/inference/infer_compilation.py'), - 'genQC.inference.infer_compilation.get_gate_and_U_acc': ( 'inference/infer_compilation.html#get_gate_and_u_acc', - 'genQC/inference/infer_compilation.py'), - 'genQC.inference.infer_compilation.get_new_unitary_indices': ( 'inference/infer_compilation.html#get_new_unitary_indices', - 'genQC/inference/infer_compilation.py'), - 'genQC.inference.infer_compilation.get_new_unitary_indices_batch': ( 'inference/infer_compilation.html#get_new_unitary_indices_batch', - 'genQC/inference/infer_compilation.py'), - 'genQC.inference.infer_compilation.plot_hist_overview': ( 'inference/infer_compilation.html#plot_hist_overview', - 'genQC/inference/infer_compilation.py'), - 'genQC.inference.infer_compilation.split_U_to_tensor': ( 'inference/infer_compilation.html#split_u_to_tensor', - 'genQC/inference/infer_compilation.py'), - 'genQC.inference.infer_compilation.test_comp_acc': ( 'inference/infer_compilation.html#test_comp_acc', - 'genQC/inference/infer_compilation.py'), - 'genQC.inference.infer_compilation.test_comp_acc_on_rnd_samples': ( 'inference/infer_compilation.html#test_comp_acc_on_rnd_samples', - 'genQC/inference/infer_compilation.py'), - 'genQC.inference.infer_compilation.test_comp_acc_on_testset': ( 'inference/infer_compilation.html#test_comp_acc_on_testset', - 'genQC/inference/infer_compilation.py')}, - 'genQC.inference.infer_gate_hist': { 'genQC.inference.infer_gate_hist.get_circuit_gate_length': ( 'inference/infer_gate_hist.html#get_circuit_gate_length', - 'genQC/inference/infer_gate_hist.py'), - 
'genQC.inference.infer_gate_hist.get_tensor_gate_length': ( 'inference/infer_gate_hist.html#get_tensor_gate_length', - 'genQC/inference/infer_gate_hist.py')}, - 'genQC.inference.infer_misc': { 'genQC.inference.infer_misc.convert_tensors_to_circuits': ( 'inference/infer_misc.html#convert_tensors_to_circuits', - 'genQC/inference/infer_misc.py'), - 'genQC.inference.infer_misc.get_rnd_gatepool_subset': ( 'inference/infer_misc.html#get_rnd_gatepool_subset', - 'genQC/inference/infer_misc.py')}, - 'genQC.inference.infer_srv': { 'genQC.inference.infer_srv.convert_tensors_to_srvs': ( 'inference/infer_srv.html#convert_tensors_to_srvs', - 'genQC/inference/infer_srv.py'), - 'genQC.inference.infer_srv.generate_srv_tensors': ( 'inference/infer_srv.html#generate_srv_tensors', - 'genQC/inference/infer_srv.py'), - 'genQC.inference.infer_srv.get_all_srvs': ( 'inference/infer_srv.html#get_all_srvs', - 'genQC/inference/infer_srv.py'), - 'genQC.inference.infer_srv.get_srv_accuracy': ( 'inference/infer_srv.html#get_srv_accuracy', - 'genQC/inference/infer_srv.py'), - 'genQC.inference.infer_srv.plot_guidance_dep': ( 'inference/infer_srv.html#plot_guidance_dep', - 'genQC/inference/infer_srv.py'), - 'genQC.inference.infer_srv.plot_srv_acc_vs_length': ( 'inference/infer_srv.html#plot_srv_acc_vs_length', - 'genQC/inference/infer_srv.py'), - 'genQC.inference.infer_srv.plot_srv_acc_vs_maxLength': ( 'inference/infer_srv.html#plot_srv_acc_vs_maxlength', - 'genQC/inference/infer_srv.py'), - 'genQC.inference.infer_srv.plot_srv_clr_distribution_bin_accuracy': ( 'inference/infer_srv.html#plot_srv_clr_distribution_bin_accuracy', - 'genQC/inference/infer_srv.py'), - 'genQC.inference.infer_srv.plot_srv_clr_distribution_hist': ( 'inference/infer_srv.html#plot_srv_clr_distribution_hist', - 'genQC/inference/infer_srv.py'), - 'genQC.inference.infer_srv.test_guidance_dep': ( 'inference/infer_srv.html#test_guidance_dep', - 'genQC/inference/infer_srv.py'), - 'genQC.inference.infer_srv.test_srv_acc_vs_length': 
( 'inference/infer_srv.html#test_srv_acc_vs_length', - 'genQC/inference/infer_srv.py'), - 'genQC.inference.infer_srv.test_srv_acc_vs_maxLength': ( 'inference/infer_srv.html#test_srv_acc_vs_maxlength', - 'genQC/inference/infer_srv.py'), - 'genQC.inference.infer_srv.test_srv_clr_distribution': ( 'inference/infer_srv.html#test_srv_clr_distribution', - 'genQC/inference/infer_srv.py'), - 'genQC.inference.infer_srv.test_srv_clr_distribution_bin_samples': ( 'inference/infer_srv.html#test_srv_clr_distribution_bin_samples', - 'genQC/inference/infer_srv.py'), - 'genQC.inference.infer_srv.test_srv_length_distribution': ( 'inference/infer_srv.html#test_srv_length_distribution', - 'genQC/inference/infer_srv.py'), - 'genQC.inference.infer_srv.true_sample_bin_dist': ( 'inference/infer_srv.html#true_sample_bin_dist', - 'genQC/inference/infer_srv.py')}, - 'genQC.metrics': { 'genQC.metrics.Accuracy': ('metrics.html#accuracy', 'genQC/metrics.py'), - 'genQC.metrics.Accuracy._eval': ('metrics.html#accuracy._eval', 'genQC/metrics.py'), - 'genQC.metrics.Mean': ('metrics.html#mean', 'genQC/metrics.py'), - 'genQC.metrics.Mean.__init__': ('metrics.html#mean.__init__', 'genQC/metrics.py'), - 'genQC.metrics.Mean._eval': ('metrics.html#mean._eval', 'genQC/metrics.py'), - 'genQC.metrics.Mean.reset_state': ('metrics.html#mean.reset_state', 'genQC/metrics.py'), - 'genQC.metrics.Mean.result': ('metrics.html#mean.result', 'genQC/metrics.py'), - 'genQC.metrics.Mean.update_state': ('metrics.html#mean.update_state', 'genQC/metrics.py'), - 'genQC.metrics.Metric': ('metrics.html#metric', 'genQC/metrics.py'), - 'genQC.metrics.Metric.__init__': ('metrics.html#metric.__init__', 'genQC/metrics.py'), - 'genQC.metrics.Metric.__repr__': ('metrics.html#metric.__repr__', 'genQC/metrics.py'), - 'genQC.metrics.Metric._eval': ('metrics.html#metric._eval', 'genQC/metrics.py'), - 'genQC.metrics.Metric.reset_state': ('metrics.html#metric.reset_state', 'genQC/metrics.py'), - 'genQC.metrics.Metric.result': 
('metrics.html#metric.result', 'genQC/metrics.py'), - 'genQC.metrics.Metric.update_state': ('metrics.html#metric.update_state', 'genQC/metrics.py')}, - 'genQC.models.config_model': { 'genQC.models.config_model.Config_Model': ( 'models/config_model.html#config_model', - 'genQC/models/config_model.py'), - 'genQC.models.config_model.Config_Model.__init__': ( 'models/config_model.html#config_model.__init__', - 'genQC/models/config_model.py'), - 'genQC.models.config_model.Config_Model.from_config': ( 'models/config_model.html#config_model.from_config', - 'genQC/models/config_model.py'), - 'genQC.models.config_model.Config_Model.from_config_file': ( 'models/config_model.html#config_model.from_config_file', - 'genQC/models/config_model.py'), - 'genQC.models.config_model.Config_Model.get_config': ( 'models/config_model.html#config_model.get_config', + 'genQC.inference.eval_metrics': { 'genQC.inference.eval_metrics.BaseNorm': ( 'inference/eval_metrics.html#basenorm', + 'genQC/inference/eval_metrics.py'), + 'genQC.inference.eval_metrics.BaseNorm.distance': ( 'inference/eval_metrics.html#basenorm.distance', + 'genQC/inference/eval_metrics.py'), + 'genQC.inference.eval_metrics.BaseNorm.name': ( 'inference/eval_metrics.html#basenorm.name', + 'genQC/inference/eval_metrics.py'), + 'genQC.inference.eval_metrics.UnitaryFrobeniusNorm': ( 'inference/eval_metrics.html#unitaryfrobeniusnorm', + 'genQC/inference/eval_metrics.py'), + 'genQC.inference.eval_metrics.UnitaryFrobeniusNorm.__call__': ( 'inference/eval_metrics.html#unitaryfrobeniusnorm.__call__', + 'genQC/inference/eval_metrics.py'), + 'genQC.inference.eval_metrics.UnitaryFrobeniusNorm.distance': ( 'inference/eval_metrics.html#unitaryfrobeniusnorm.distance', + 'genQC/inference/eval_metrics.py'), + 'genQC.inference.eval_metrics.UnitaryFrobeniusNorm.name': ( 'inference/eval_metrics.html#unitaryfrobeniusnorm.name', + 'genQC/inference/eval_metrics.py'), + 'genQC.inference.eval_metrics.UnitaryInfidelityNorm': ( 
'inference/eval_metrics.html#unitaryinfidelitynorm', + 'genQC/inference/eval_metrics.py'), + 'genQC.inference.eval_metrics.UnitaryInfidelityNorm.__call__': ( 'inference/eval_metrics.html#unitaryinfidelitynorm.__call__', + 'genQC/inference/eval_metrics.py'), + 'genQC.inference.eval_metrics.UnitaryInfidelityNorm.distance': ( 'inference/eval_metrics.html#unitaryinfidelitynorm.distance', + 'genQC/inference/eval_metrics.py'), + 'genQC.inference.eval_metrics.UnitaryInfidelityNorm.name': ( 'inference/eval_metrics.html#unitaryinfidelitynorm.name', + 'genQC/inference/eval_metrics.py')}, + 'genQC.inference.evaluation_helper': { 'genQC.inference.evaluation_helper.get_srvs': ( 'inference/evaluation_helper.html#get_srvs', + 'genQC/inference/evaluation_helper.py'), + 'genQC.inference.evaluation_helper.get_unitaries': ( 'inference/evaluation_helper.html#get_unitaries', + 'genQC/inference/evaluation_helper.py')}, + 'genQC.inference.sampling': { 'genQC.inference.sampling.batched_sampling': ( 'inference/sampling.html#batched_sampling', + 'genQC/inference/sampling.py'), + 'genQC.inference.sampling.decode_tensors_to_backend': ( 'inference/sampling.html#decode_tensors_to_backend', + 'genQC/inference/sampling.py'), + 'genQC.inference.sampling.generate_compilation_tensors': ( 'inference/sampling.html#generate_compilation_tensors', + 'genQC/inference/sampling.py'), + 'genQC.inference.sampling.generate_tensors': ( 'inference/sampling.html#generate_tensors', + 'genQC/inference/sampling.py'), + 'genQC.inference.sampling.get_batch_samples': ( 'inference/sampling.html#get_batch_samples', + 'genQC/inference/sampling.py'), + 'genQC.inference.sampling.prepare_prompts': ( 'inference/sampling.html#prepare_prompts', + 'genQC/inference/sampling.py')}, + 'genQC.models.clip.frozen_open_clip': { 'genQC.models.clip.frozen_open_clip.CachedFrozenOpenCLIPEmbedder': ( 'models/clip/frozen_open_clip.html#cachedfrozenopenclipembedder', + 'genQC/models/clip/frozen_open_clip.py'), + 
'genQC.models.clip.frozen_open_clip.CachedFrozenOpenCLIPEmbedder.__init__': ( 'models/clip/frozen_open_clip.html#cachedfrozenopenclipembedder.__init__', + 'genQC/models/clip/frozen_open_clip.py'), + 'genQC.models.clip.frozen_open_clip.CachedFrozenOpenCLIPEmbedder.forward': ( 'models/clip/frozen_open_clip.html#cachedfrozenopenclipembedder.forward', + 'genQC/models/clip/frozen_open_clip.py'), + 'genQC.models.clip.frozen_open_clip.CachedFrozenOpenCLIPEmbedder.generate_cache': ( 'models/clip/frozen_open_clip.html#cachedfrozenopenclipembedder.generate_cache', + 'genQC/models/clip/frozen_open_clip.py'), + 'genQC.models.clip.frozen_open_clip.CachedFrozenOpenCLIPEmbedder.get_token_count': ( 'models/clip/frozen_open_clip.html#cachedfrozenopenclipembedder.get_token_count', + 'genQC/models/clip/frozen_open_clip.py'), + 'genQC.models.clip.frozen_open_clip.CachedFrozenOpenCLIPEmbedder.look_up_cos_sim_cached_index': ( 'models/clip/frozen_open_clip.html#cachedfrozenopenclipembedder.look_up_cos_sim_cached_index', + 'genQC/models/clip/frozen_open_clip.py'), + 'genQC.models.clip.frozen_open_clip.CachedFrozenOpenCLIPEmbedderConfig': ( 'models/clip/frozen_open_clip.html#cachedfrozenopenclipembedderconfig', + 'genQC/models/clip/frozen_open_clip.py'), + 'genQC.models.clip.frozen_open_clip.FrozenOpenCLIPEmbedder': ( 'models/clip/frozen_open_clip.html#frozenopenclipembedder', + 'genQC/models/clip/frozen_open_clip.py'), + 'genQC.models.clip.frozen_open_clip.FrozenOpenCLIPEmbedder.__init__': ( 'models/clip/frozen_open_clip.html#frozenopenclipembedder.__init__', + 'genQC/models/clip/frozen_open_clip.py'), + 'genQC.models.clip.frozen_open_clip.FrozenOpenCLIPEmbedder.encode_with_transformer': ( 'models/clip/frozen_open_clip.html#frozenopenclipembedder.encode_with_transformer', + 'genQC/models/clip/frozen_open_clip.py'), + 'genQC.models.clip.frozen_open_clip.FrozenOpenCLIPEmbedder.forward': ( 'models/clip/frozen_open_clip.html#frozenopenclipembedder.forward', + 
'genQC/models/clip/frozen_open_clip.py'), + 'genQC.models.clip.frozen_open_clip.FrozenOpenCLIPEmbedder.freeze': ( 'models/clip/frozen_open_clip.html#frozenopenclipembedder.freeze', + 'genQC/models/clip/frozen_open_clip.py'), + 'genQC.models.clip.frozen_open_clip.FrozenOpenCLIPEmbedder.from_config': ( 'models/clip/frozen_open_clip.html#frozenopenclipembedder.from_config', + 'genQC/models/clip/frozen_open_clip.py'), + 'genQC.models.clip.frozen_open_clip.FrozenOpenCLIPEmbedder.get_config': ( 'models/clip/frozen_open_clip.html#frozenopenclipembedder.get_config', + 'genQC/models/clip/frozen_open_clip.py'), + 'genQC.models.clip.frozen_open_clip.FrozenOpenCLIPEmbedder.store_model': ( 'models/clip/frozen_open_clip.html#frozenopenclipembedder.store_model', + 'genQC/models/clip/frozen_open_clip.py'), + 'genQC.models.clip.frozen_open_clip.FrozenOpenCLIPEmbedder.text_transformer_forward': ( 'models/clip/frozen_open_clip.html#frozenopenclipembedder.text_transformer_forward', + 'genQC/models/clip/frozen_open_clip.py'), + 'genQC.models.clip.frozen_open_clip.FrozenOpenCLIPEmbedder.to': ( 'models/clip/frozen_open_clip.html#frozenopenclipembedder.to', + 'genQC/models/clip/frozen_open_clip.py'), + 'genQC.models.clip.frozen_open_clip.FrozenOpenCLIPEmbedder.tokenize_and_push_to_device': ( 'models/clip/frozen_open_clip.html#frozenopenclipembedder.tokenize_and_push_to_device', + 'genQC/models/clip/frozen_open_clip.py'), + 'genQC.models.clip.frozen_open_clip.FrozenOpenCLIPEmbedderConfig': ( 'models/clip/frozen_open_clip.html#frozenopenclipembedderconfig', + 'genQC/models/clip/frozen_open_clip.py')}, + 'genQC.models.clip.unitary_clip': { 'genQC.models.clip.unitary_clip.CircuitEncoder': ( 'models/clip/unitary_clip.html#circuitencoder', + 'genQC/models/clip/unitary_clip.py'), + 'genQC.models.clip.unitary_clip.CircuitEncoder.__init__': ( 'models/clip/unitary_clip.html#circuitencoder.__init__', + 'genQC/models/clip/unitary_clip.py'), + 
'genQC.models.clip.unitary_clip.CircuitEncoder._init_weights': ( 'models/clip/unitary_clip.html#circuitencoder._init_weights', + 'genQC/models/clip/unitary_clip.py'), + 'genQC.models.clip.unitary_clip.CircuitEncoder.forward': ( 'models/clip/unitary_clip.html#circuitencoder.forward', + 'genQC/models/clip/unitary_clip.py'), + 'genQC.models.clip.unitary_clip.CircuitEncoderConfig': ( 'models/clip/unitary_clip.html#circuitencoderconfig', + 'genQC/models/clip/unitary_clip.py'), + 'genQC.models.clip.unitary_clip.CoreTransformer': ( 'models/clip/unitary_clip.html#coretransformer', + 'genQC/models/clip/unitary_clip.py'), + 'genQC.models.clip.unitary_clip.CoreTransformer.__init__': ( 'models/clip/unitary_clip.html#coretransformer.__init__', + 'genQC/models/clip/unitary_clip.py'), + 'genQC.models.clip.unitary_clip.CoreTransformer.forward': ( 'models/clip/unitary_clip.html#coretransformer.forward', + 'genQC/models/clip/unitary_clip.py'), + 'genQC.models.clip.unitary_clip.FeedForwardBlock': ( 'models/clip/unitary_clip.html#feedforwardblock', + 'genQC/models/clip/unitary_clip.py'), + 'genQC.models.clip.unitary_clip.FeedForwardBlock.__init__': ( 'models/clip/unitary_clip.html#feedforwardblock.__init__', + 'genQC/models/clip/unitary_clip.py'), + 'genQC.models.clip.unitary_clip.FeedForwardBlock._init_weights': ( 'models/clip/unitary_clip.html#feedforwardblock._init_weights', + 'genQC/models/clip/unitary_clip.py'), + 'genQC.models.clip.unitary_clip.FeedForwardBlock.forward': ( 'models/clip/unitary_clip.html#feedforwardblock.forward', + 'genQC/models/clip/unitary_clip.py'), + 'genQC.models.clip.unitary_clip.FeedForwardBlock.siglu': ( 'models/clip/unitary_clip.html#feedforwardblock.siglu', + 'genQC/models/clip/unitary_clip.py'), + 'genQC.models.clip.unitary_clip.PackingTransformer': ( 'models/clip/unitary_clip.html#packingtransformer', + 'genQC/models/clip/unitary_clip.py'), + 'genQC.models.clip.unitary_clip.PackingTransformer.__init__': ( 
'models/clip/unitary_clip.html#packingtransformer.__init__', + 'genQC/models/clip/unitary_clip.py'), + 'genQC.models.clip.unitary_clip.PackingTransformer.forward': ( 'models/clip/unitary_clip.html#packingtransformer.forward', + 'genQC/models/clip/unitary_clip.py'), + 'genQC.models.clip.unitary_clip.RotaryMultiheadAttention': ( 'models/clip/unitary_clip.html#rotarymultiheadattention', + 'genQC/models/clip/unitary_clip.py'), + 'genQC.models.clip.unitary_clip.RotaryMultiheadAttention.__init__': ( 'models/clip/unitary_clip.html#rotarymultiheadattention.__init__', + 'genQC/models/clip/unitary_clip.py'), + 'genQC.models.clip.unitary_clip.RotaryMultiheadAttention._init_weights': ( 'models/clip/unitary_clip.html#rotarymultiheadattention._init_weights', + 'genQC/models/clip/unitary_clip.py'), + 'genQC.models.clip.unitary_clip.RotaryMultiheadAttention.forward': ( 'models/clip/unitary_clip.html#rotarymultiheadattention.forward', + 'genQC/models/clip/unitary_clip.py'), + 'genQC.models.clip.unitary_clip.SelfAttnBlock': ( 'models/clip/unitary_clip.html#selfattnblock', + 'genQC/models/clip/unitary_clip.py'), + 'genQC.models.clip.unitary_clip.SelfAttnBlock.__init__': ( 'models/clip/unitary_clip.html#selfattnblock.__init__', + 'genQC/models/clip/unitary_clip.py'), + 'genQC.models.clip.unitary_clip.SelfAttnBlock._init_weights': ( 'models/clip/unitary_clip.html#selfattnblock._init_weights', + 'genQC/models/clip/unitary_clip.py'), + 'genQC.models.clip.unitary_clip.SelfAttnBlock.forward': ( 'models/clip/unitary_clip.html#selfattnblock.forward', + 'genQC/models/clip/unitary_clip.py'), + 'genQC.models.clip.unitary_clip.UnitaryCLIP': ( 'models/clip/unitary_clip.html#unitaryclip', + 'genQC/models/clip/unitary_clip.py'), + 'genQC.models.clip.unitary_clip.UnitaryCLIP.__init__': ( 'models/clip/unitary_clip.html#unitaryclip.__init__', + 'genQC/models/clip/unitary_clip.py'), + 'genQC.models.clip.unitary_clip.UnitaryCLIP._init_weights': ( 
'models/clip/unitary_clip.html#unitaryclip._init_weights', + 'genQC/models/clip/unitary_clip.py'), + 'genQC.models.clip.unitary_clip.UnitaryCLIP.forward': ( 'models/clip/unitary_clip.html#unitaryclip.forward', + 'genQC/models/clip/unitary_clip.py'), + 'genQC.models.clip.unitary_clip.UnitaryCLIPConfig': ( 'models/clip/unitary_clip.html#unitaryclipconfig', + 'genQC/models/clip/unitary_clip.py'), + 'genQC.models.clip.unitary_clip.UnitaryEncoderAttnBlock': ( 'models/clip/unitary_clip.html#unitaryencoderattnblock', + 'genQC/models/clip/unitary_clip.py'), + 'genQC.models.clip.unitary_clip.UnitaryEncoderAttnBlock.__init__': ( 'models/clip/unitary_clip.html#unitaryencoderattnblock.__init__', + 'genQC/models/clip/unitary_clip.py'), + 'genQC.models.clip.unitary_clip.UnitaryEncoderAttnBlock._init_weights': ( 'models/clip/unitary_clip.html#unitaryencoderattnblock._init_weights', + 'genQC/models/clip/unitary_clip.py'), + 'genQC.models.clip.unitary_clip.UnitaryEncoderAttnBlock.forward': ( 'models/clip/unitary_clip.html#unitaryencoderattnblock.forward', + 'genQC/models/clip/unitary_clip.py'), + 'genQC.models.clip.unitary_clip.UnitaryTextEncoder': ( 'models/clip/unitary_clip.html#unitarytextencoder', + 'genQC/models/clip/unitary_clip.py'), + 'genQC.models.clip.unitary_clip.UnitaryTextEncoder.__init__': ( 'models/clip/unitary_clip.html#unitarytextencoder.__init__', + 'genQC/models/clip/unitary_clip.py'), + 'genQC.models.clip.unitary_clip.UnitaryTextEncoder._init_weights': ( 'models/clip/unitary_clip.html#unitarytextencoder._init_weights', + 'genQC/models/clip/unitary_clip.py'), + 'genQC.models.clip.unitary_clip.UnitaryTextEncoder.forward': ( 'models/clip/unitary_clip.html#unitarytextencoder.forward', + 'genQC/models/clip/unitary_clip.py'), + 'genQC.models.clip.unitary_clip.UnitaryTextEncoder.preproc_text': ( 'models/clip/unitary_clip.html#unitarytextencoder.preproc_text', + 'genQC/models/clip/unitary_clip.py'), + 'genQC.models.clip.unitary_clip.UnitaryTextEncoder.preproc_unitary': 
( 'models/clip/unitary_clip.html#unitarytextencoder.preproc_unitary', + 'genQC/models/clip/unitary_clip.py'), + 'genQC.models.clip.unitary_clip.UnitaryTextEncoderConfig': ( 'models/clip/unitary_clip.html#unitarytextencoderconfig', + 'genQC/models/clip/unitary_clip.py')}, + 'genQC.models.config_model': { 'genQC.models.config_model.ConfigModel': ( 'models/config_model.html#configmodel', + 'genQC/models/config_model.py'), + 'genQC.models.config_model.ConfigModel.__init__': ( 'models/config_model.html#configmodel.__init__', + 'genQC/models/config_model.py'), + 'genQC.models.config_model.ConfigModel.check_save_type': ( 'models/config_model.html#configmodel.check_save_type', + 'genQC/models/config_model.py'), + 'genQC.models.config_model.ConfigModel.freeze': ( 'models/config_model.html#configmodel.freeze', + 'genQC/models/config_model.py'), + 'genQC.models.config_model.ConfigModel.from_config': ( 'models/config_model.html#configmodel.from_config', 'genQC/models/config_model.py'), - 'genQC.models.config_model.Config_Model.store_model': ( 'models/config_model.html#config_model.store_model', - 'genQC/models/config_model.py')}, + 'genQC.models.config_model.ConfigModel.from_config_file': ( 'models/config_model.html#configmodel.from_config_file', + 'genQC/models/config_model.py'), + 'genQC.models.config_model.ConfigModel.get_config': ( 'models/config_model.html#configmodel.get_config', + 'genQC/models/config_model.py'), + 'genQC.models.config_model.ConfigModel.store_model': ( 'models/config_model.html#configmodel.store_model', + 'genQC/models/config_model.py'), + 'genQC.models.config_model.ConfigModel.unfreeze': ( 'models/config_model.html#configmodel.unfreeze', + 'genQC/models/config_model.py')}, + 'genQC.models.embedding.base_embedder': { 'genQC.models.embedding.base_embedder.BaseEmbedder': ( 'models/embedding/base_embedder.html#baseembedder', + 'genQC/models/embedding/base_embedder.py'), + 'genQC.models.embedding.base_embedder.BaseEmbedder.__init__': ( 
'models/embedding/base_embedder.html#baseembedder.__init__', + 'genQC/models/embedding/base_embedder.py'), + 'genQC.models.embedding.base_embedder.BaseEmbedder.embed': ( 'models/embedding/base_embedder.html#baseembedder.embed', + 'genQC/models/embedding/base_embedder.py'), + 'genQC.models.embedding.base_embedder.BaseEmbedder.forward': ( 'models/embedding/base_embedder.html#baseembedder.forward', + 'genQC/models/embedding/base_embedder.py'), + 'genQC.models.embedding.base_embedder.BaseEmbedder.invert': ( 'models/embedding/base_embedder.html#baseembedder.invert', + 'genQC/models/embedding/base_embedder.py')}, + 'genQC.models.embedding.rotational_preset_embedder': { 'genQC.models.embedding.rotational_preset_embedder.MultimodialEmbedder': ( 'models/embedding/rotational_preset_embedder.html#multimodialembedder', + 'genQC/models/embedding/rotational_preset_embedder.py'), + 'genQC.models.embedding.rotational_preset_embedder.MultimodialEmbedder.__init__': ( 'models/embedding/rotational_preset_embedder.html#multimodialembedder.__init__', + 'genQC/models/embedding/rotational_preset_embedder.py'), + 'genQC.models.embedding.rotational_preset_embedder.MultimodialEmbedder.invert_scale_emb': ( 'models/embedding/rotational_preset_embedder.html#multimodialembedder.invert_scale_emb', + 'genQC/models/embedding/rotational_preset_embedder.py'), + 'genQC.models.embedding.rotational_preset_embedder.MultimodialEmbedder.scale_emb': ( 'models/embedding/rotational_preset_embedder.html#multimodialembedder.scale_emb', + 'genQC/models/embedding/rotational_preset_embedder.py'), + 'genQC.models.embedding.rotational_preset_embedder.MultimodialEmbedder.set_scaling': ( 'models/embedding/rotational_preset_embedder.html#multimodialembedder.set_scaling', + 'genQC/models/embedding/rotational_preset_embedder.py'), + 'genQC.models.embedding.rotational_preset_embedder.MultimodialPresetEmbedder': ( 'models/embedding/rotational_preset_embedder.html#multimodialpresetembedder', + 
'genQC/models/embedding/rotational_preset_embedder.py'), + 'genQC.models.embedding.rotational_preset_embedder.MultimodialPresetEmbedder.__init__': ( 'models/embedding/rotational_preset_embedder.html#multimodialpresetembedder.__init__', + 'genQC/models/embedding/rotational_preset_embedder.py'), + 'genQC.models.embedding.rotational_preset_embedder.MultimodialPresetEmbedder._init_weights': ( 'models/embedding/rotational_preset_embedder.html#multimodialpresetembedder._init_weights', + 'genQC/models/embedding/rotational_preset_embedder.py'), + 'genQC.models.embedding.rotational_preset_embedder.MultimodialPresetEmbedder._prepare_params': ( 'models/embedding/rotational_preset_embedder.html#multimodialpresetembedder._prepare_params', + 'genQC/models/embedding/rotational_preset_embedder.py'), + 'genQC.models.embedding.rotational_preset_embedder.MultimodialPresetEmbedder._reduce_params_spatial': ( 'models/embedding/rotational_preset_embedder.html#multimodialpresetembedder._reduce_params_spatial', + 'genQC/models/embedding/rotational_preset_embedder.py'), + 'genQC.models.embedding.rotational_preset_embedder.MultimodialPresetEmbedder.embed': ( 'models/embedding/rotational_preset_embedder.html#multimodialpresetembedder.embed', + 'genQC/models/embedding/rotational_preset_embedder.py'), + 'genQC.models.embedding.rotational_preset_embedder.MultimodialPresetEmbedder.embed_discrete': ( 'models/embedding/rotational_preset_embedder.html#multimodialpresetembedder.embed_discrete', + 'genQC/models/embedding/rotational_preset_embedder.py'), + 'genQC.models.embedding.rotational_preset_embedder.MultimodialPresetEmbedder.get_discrete_sim': ( 'models/embedding/rotational_preset_embedder.html#multimodialpresetembedder.get_discrete_sim', + 'genQC/models/embedding/rotational_preset_embedder.py'), + 'genQC.models.embedding.rotational_preset_embedder.MultimodialPresetEmbedder.get_parametrized_mask': ( 
'models/embedding/rotational_preset_embedder.html#multimodialpresetembedder.get_parametrized_mask', + 'genQC/models/embedding/rotational_preset_embedder.py'), + 'genQC.models.embedding.rotational_preset_embedder.MultimodialPresetEmbedder.invert': ( 'models/embedding/rotational_preset_embedder.html#multimodialpresetembedder.invert', + 'genQC/models/embedding/rotational_preset_embedder.py'), + 'genQC.models.embedding.rotational_preset_embedder.MultimodialPresetEmbedder.invert_discrete': ( 'models/embedding/rotational_preset_embedder.html#multimodialpresetembedder.invert_discrete', + 'genQC/models/embedding/rotational_preset_embedder.py'), + 'genQC.models.embedding.rotational_preset_embedder.MultimodialPresetEmbedder.print_emb_matrix': ( 'models/embedding/rotational_preset_embedder.html#multimodialpresetembedder.print_emb_matrix', + 'genQC/models/embedding/rotational_preset_embedder.py'), + 'genQC.models.embedding.rotational_preset_embedder.MultimodialPresetEmbedder.tokens_to_unique_class_values': ( 'models/embedding/rotational_preset_embedder.html#multimodialpresetembedder.tokens_to_unique_class_values', + 'genQC/models/embedding/rotational_preset_embedder.py'), + 'genQC.models.embedding.rotational_preset_embedder.MultimodialPresetEmbedder.unique_class_values_to_tokens': ( 'models/embedding/rotational_preset_embedder.html#multimodialpresetembedder.unique_class_values_to_tokens', + 'genQC/models/embedding/rotational_preset_embedder.py'), + 'genQC.models.embedding.rotational_preset_embedder.MultimodialPresetEmbedderConfig': ( 'models/embedding/rotational_preset_embedder.html#multimodialpresetembedderconfig', + 'genQC/models/embedding/rotational_preset_embedder.py'), + 'genQC.models.embedding.rotational_preset_embedder.RotationalMultimodialPresetEmbedder': ( 'models/embedding/rotational_preset_embedder.html#rotationalmultimodialpresetembedder', + 'genQC/models/embedding/rotational_preset_embedder.py'), + 
'genQC.models.embedding.rotational_preset_embedder.RotationalMultimodialPresetEmbedder.__init__': ( 'models/embedding/rotational_preset_embedder.html#rotationalmultimodialpresetembedder.__init__', + 'genQC/models/embedding/rotational_preset_embedder.py'), + 'genQC.models.embedding.rotational_preset_embedder.RotationalMultimodialPresetEmbedder.embed_continuous': ( 'models/embedding/rotational_preset_embedder.html#rotationalmultimodialpresetembedder.embed_continuous', + 'genQC/models/embedding/rotational_preset_embedder.py'), + 'genQC.models.embedding.rotational_preset_embedder.RotationalMultimodialPresetEmbedder.invert_continuous': ( 'models/embedding/rotational_preset_embedder.html#rotationalmultimodialpresetembedder.invert_continuous', + 'genQC/models/embedding/rotational_preset_embedder.py'), + 'genQC.models.embedding.rotational_preset_embedder.RotationalMultimodialPresetEmbedderTiny': ( 'models/embedding/rotational_preset_embedder.html#rotationalmultimodialpresetembeddertiny', + 'genQC/models/embedding/rotational_preset_embedder.py'), + 'genQC.models.embedding.rotational_preset_embedder.RotationalMultimodialPresetEmbedderTiny.__init__': ( 'models/embedding/rotational_preset_embedder.html#rotationalmultimodialpresetembeddertiny.__init__', + 'genQC/models/embedding/rotational_preset_embedder.py'), + 'genQC.models.embedding.rotational_preset_embedder.RotationalMultimodialPresetEmbedderTiny.embed_continuous': ( 'models/embedding/rotational_preset_embedder.html#rotationalmultimodialpresetembeddertiny.embed_continuous', + 'genQC/models/embedding/rotational_preset_embedder.py'), + 'genQC.models.embedding.rotational_preset_embedder.RotationalMultimodialPresetEmbedderTiny.invert_continuous': ( 'models/embedding/rotational_preset_embedder.html#rotationalmultimodialpresetembeddertiny.invert_continuous', + 'genQC/models/embedding/rotational_preset_embedder.py')}, 'genQC.models.frozen_open_clip': { 'genQC.models.frozen_open_clip.CachedFrozenOpenCLIPEmbedder': ( 
'models/frozen_open_clip.html#cachedfrozenopenclipembedder', 'genQC/models/frozen_open_clip.py'), + 'genQC.models.frozen_open_clip.CachedFrozenOpenCLIPEmbedder.__init__': ( 'models/frozen_open_clip.html#cachedfrozenopenclipembedder.__init__', + 'genQC/models/frozen_open_clip.py'), 'genQC.models.frozen_open_clip.CachedFrozenOpenCLIPEmbedder.forward': ( 'models/frozen_open_clip.html#cachedfrozenopenclipembedder.forward', 'genQC/models/frozen_open_clip.py'), 'genQC.models.frozen_open_clip.CachedFrozenOpenCLIPEmbedder.generate_cache': ( 'models/frozen_open_clip.html#cachedfrozenopenclipembedder.generate_cache', 'genQC/models/frozen_open_clip.py'), + 'genQC.models.frozen_open_clip.CachedFrozenOpenCLIPEmbedder.get_token_count': ( 'models/frozen_open_clip.html#cachedfrozenopenclipembedder.get_token_count', + 'genQC/models/frozen_open_clip.py'), 'genQC.models.frozen_open_clip.CachedFrozenOpenCLIPEmbedder.look_up_cos_sim_cached_index': ( 'models/frozen_open_clip.html#cachedfrozenopenclipembedder.look_up_cos_sim_cached_index', 'genQC/models/frozen_open_clip.py'), + 'genQC.models.frozen_open_clip.CachedFrozenOpenCLIPEmbedderConfig': ( 'models/frozen_open_clip.html#cachedfrozenopenclipembedderconfig', + 'genQC/models/frozen_open_clip.py'), 'genQC.models.frozen_open_clip.FrozenOpenCLIPEmbedder': ( 'models/frozen_open_clip.html#frozenopenclipembedder', 'genQC/models/frozen_open_clip.py'), 'genQC.models.frozen_open_clip.FrozenOpenCLIPEmbedder.__init__': ( 'models/frozen_open_clip.html#frozenopenclipembedder.__init__', @@ -271,8 +424,8 @@ 'genQC/models/frozen_open_clip.py'), 'genQC.models.frozen_open_clip.FrozenOpenCLIPEmbedder.tokenize_and_push_to_device': ( 'models/frozen_open_clip.html#frozenopenclipembedder.tokenize_and_push_to_device', 'genQC/models/frozen_open_clip.py'), - 'genQC.models.frozen_open_clip.FrozenOpenCLIPEmbedder_config': ( 'models/frozen_open_clip.html#frozenopenclipembedder_config', - 'genQC/models/frozen_open_clip.py')}, + 
'genQC.models.frozen_open_clip.FrozenOpenCLIPEmbedderConfig': ( 'models/frozen_open_clip.html#frozenopenclipembedderconfig', + 'genQC/models/frozen_open_clip.py')}, 'genQC.models.layers': { 'genQC.models.layers.DownBlock2D': ('models/layers.html#downblock2d', 'genQC/models/layers.py'), 'genQC.models.layers.DownBlock2D.__init__': ( 'models/layers.html#downblock2d.__init__', 'genQC/models/layers.py'), @@ -312,12 +465,12 @@ 'genQC/models/layers.py'), 'genQC.models.layers.ResBlock2D.forward': ( 'models/layers.html#resblock2d.forward', 'genQC/models/layers.py'), - 'genQC.models.layers.ResBlock2D_Conditional': ( 'models/layers.html#resblock2d_conditional', - 'genQC/models/layers.py'), - 'genQC.models.layers.ResBlock2D_Conditional.__init__': ( 'models/layers.html#resblock2d_conditional.__init__', - 'genQC/models/layers.py'), - 'genQC.models.layers.ResBlock2D_Conditional.forward': ( 'models/layers.html#resblock2d_conditional.forward', + 'genQC.models.layers.ResBlock2DConditional': ( 'models/layers.html#resblock2dconditional', + 'genQC/models/layers.py'), + 'genQC.models.layers.ResBlock2DConditional.__init__': ( 'models/layers.html#resblock2dconditional.__init__', 'genQC/models/layers.py'), + 'genQC.models.layers.ResBlock2DConditional.forward': ( 'models/layers.html#resblock2dconditional.forward', + 'genQC/models/layers.py'), 'genQC.models.layers.ResDownBlock2D': ('models/layers.html#resdownblock2d', 'genQC/models/layers.py'), 'genQC.models.layers.ResDownBlock2D.__init__': ( 'models/layers.html#resdownblock2d.__init__', 'genQC/models/layers.py'), @@ -338,30 +491,170 @@ 'genQC/models/layers.py'), 'genQC.models.layers.UpBlock2D.forward': ( 'models/layers.html#upblock2d.forward', 'genQC/models/layers.py')}, - 'genQC.models.transformers': { 'genQC.models.transformers.BasisCrossAttnBlock': ( 'models/transformers.html#basiscrossattnblock', - 'genQC/models/transformers.py'), - 'genQC.models.transformers.BasisCrossAttnBlock.__init__': ( 
'models/transformers.html#basiscrossattnblock.__init__', - 'genQC/models/transformers.py'), - 'genQC.models.transformers.BasisCrossAttnBlock.forward': ( 'models/transformers.html#basiscrossattnblock.forward', - 'genQC/models/transformers.py'), - 'genQC.models.transformers.BasisSelfAttnBlock': ( 'models/transformers.html#basisselfattnblock', - 'genQC/models/transformers.py'), - 'genQC.models.transformers.BasisSelfAttnBlock.__init__': ( 'models/transformers.html#basisselfattnblock.__init__', - 'genQC/models/transformers.py'), - 'genQC.models.transformers.BasisSelfAttnBlock.forward': ( 'models/transformers.html#basisselfattnblock.forward', - 'genQC/models/transformers.py'), - 'genQC.models.transformers.SpatialTransformer': ( 'models/transformers.html#spatialtransformer', - 'genQC/models/transformers.py'), - 'genQC.models.transformers.SpatialTransformer.__init__': ( 'models/transformers.html#spatialtransformer.__init__', - 'genQC/models/transformers.py'), - 'genQC.models.transformers.SpatialTransformer.forward': ( 'models/transformers.html#spatialtransformer.forward', - 'genQC/models/transformers.py'), - 'genQC.models.transformers.SpatialTransformerSelfAttn': ( 'models/transformers.html#spatialtransformerselfattn', - 'genQC/models/transformers.py'), - 'genQC.models.transformers.SpatialTransformerSelfAttn.__init__': ( 'models/transformers.html#spatialtransformerselfattn.__init__', - 'genQC/models/transformers.py'), - 'genQC.models.transformers.SpatialTransformerSelfAttn.forward': ( 'models/transformers.html#spatialtransformerselfattn.forward', - 'genQC/models/transformers.py')}, + 'genQC.models.position_encoding': { 'genQC.models.position_encoding.LearnedPositionalEmbedding': ( 'models/position_encoding.html#learnedpositionalembedding', + 'genQC/models/position_encoding.py'), + 'genQC.models.position_encoding.LearnedPositionalEmbedding.__init__': ( 'models/position_encoding.html#learnedpositionalembedding.__init__', + 'genQC/models/position_encoding.py'), + 
'genQC.models.position_encoding.LearnedPositionalEmbedding._init_weights': ( 'models/position_encoding.html#learnedpositionalembedding._init_weights', + 'genQC/models/position_encoding.py'), + 'genQC.models.position_encoding.LearnedPositionalEmbedding.forward': ( 'models/position_encoding.html#learnedpositionalembedding.forward', + 'genQC/models/position_encoding.py'), + 'genQC.models.position_encoding.RotaryPositionalEmbedding': ( 'models/position_encoding.html#rotarypositionalembedding', + 'genQC/models/position_encoding.py'), + 'genQC.models.position_encoding.RotaryPositionalEmbedding.__init__': ( 'models/position_encoding.html#rotarypositionalembedding.__init__', + 'genQC/models/position_encoding.py'), + 'genQC.models.position_encoding.RotaryPositionalEmbedding.forward': ( 'models/position_encoding.html#rotarypositionalembedding.forward', + 'genQC/models/position_encoding.py'), + 'genQC.models.position_encoding.RotaryPositionalEmbedding.rebuild_rope_cache': ( 'models/position_encoding.html#rotarypositionalembedding.rebuild_rope_cache', + 'genQC/models/position_encoding.py'), + 'genQC.models.position_encoding.RotaryPositionalEmbedding2D': ( 'models/position_encoding.html#rotarypositionalembedding2d', + 'genQC/models/position_encoding.py'), + 'genQC.models.position_encoding.RotaryPositionalEmbedding2D.__init__': ( 'models/position_encoding.html#rotarypositionalembedding2d.__init__', + 'genQC/models/position_encoding.py'), + 'genQC.models.position_encoding.RotaryPositionalEmbedding2D.forward': ( 'models/position_encoding.html#rotarypositionalembedding2d.forward', + 'genQC/models/position_encoding.py')}, + 'genQC.models.transformers.attention': { 'genQC.models.transformers.attention.BasisCrossAttnBlock': ( 'models/transformers/attention.html#basiscrossattnblock', + 'genQC/models/transformers/attention.py'), + 'genQC.models.transformers.attention.BasisCrossAttnBlock.__init__': ( 'models/transformers/attention.html#basiscrossattnblock.__init__', + 
'genQC/models/transformers/attention.py'), + 'genQC.models.transformers.attention.BasisCrossAttnBlock.forward': ( 'models/transformers/attention.html#basiscrossattnblock.forward', + 'genQC/models/transformers/attention.py'), + 'genQC.models.transformers.attention.BasisSelfAttnBlock': ( 'models/transformers/attention.html#basisselfattnblock', + 'genQC/models/transformers/attention.py'), + 'genQC.models.transformers.attention.BasisSelfAttnBlock.__init__': ( 'models/transformers/attention.html#basisselfattnblock.__init__', + 'genQC/models/transformers/attention.py'), + 'genQC.models.transformers.attention.BasisSelfAttnBlock.forward': ( 'models/transformers/attention.html#basisselfattnblock.forward', + 'genQC/models/transformers/attention.py'), + 'genQC.models.transformers.attention.FeedForwardBlock': ( 'models/transformers/attention.html#feedforwardblock', + 'genQC/models/transformers/attention.py'), + 'genQC.models.transformers.attention.FeedForwardBlock.__init__': ( 'models/transformers/attention.html#feedforwardblock.__init__', + 'genQC/models/transformers/attention.py'), + 'genQC.models.transformers.attention.FeedForwardBlock.forward': ( 'models/transformers/attention.html#feedforwardblock.forward', + 'genQC/models/transformers/attention.py'), + 'genQC.models.transformers.attention.FeedForwardBlock.siglu': ( 'models/transformers/attention.html#feedforwardblock.siglu', + 'genQC/models/transformers/attention.py'), + 'genQC.models.transformers.attention.SpatialTransformer': ( 'models/transformers/attention.html#spatialtransformer', + 'genQC/models/transformers/attention.py'), + 'genQC.models.transformers.attention.SpatialTransformer.__init__': ( 'models/transformers/attention.html#spatialtransformer.__init__', + 'genQC/models/transformers/attention.py'), + 'genQC.models.transformers.attention.SpatialTransformer.forward': ( 'models/transformers/attention.html#spatialtransformer.forward', + 'genQC/models/transformers/attention.py'), + 
'genQC.models.transformers.attention.SpatialTransformerSelfAttn': ( 'models/transformers/attention.html#spatialtransformerselfattn', + 'genQC/models/transformers/attention.py'), + 'genQC.models.transformers.attention.SpatialTransformerSelfAttn.__init__': ( 'models/transformers/attention.html#spatialtransformerselfattn.__init__', + 'genQC/models/transformers/attention.py'), + 'genQC.models.transformers.attention.SpatialTransformerSelfAttn.forward': ( 'models/transformers/attention.html#spatialtransformerselfattn.forward', + 'genQC/models/transformers/attention.py')}, + 'genQC.models.transformers.cirdit_multimodal': { 'genQC.models.transformers.cirdit_multimodal.AdaptiveSelfAttnBlock': ( 'models/transformers/cirdit_multimodal.html#adaptiveselfattnblock', + 'genQC/models/transformers/cirdit_multimodal.py'), + 'genQC.models.transformers.cirdit_multimodal.AdaptiveSelfAttnBlock.__init__': ( 'models/transformers/cirdit_multimodal.html#adaptiveselfattnblock.__init__', + 'genQC/models/transformers/cirdit_multimodal.py'), + 'genQC.models.transformers.cirdit_multimodal.AdaptiveSelfAttnBlock._init_weights': ( 'models/transformers/cirdit_multimodal.html#adaptiveselfattnblock._init_weights', + 'genQC/models/transformers/cirdit_multimodal.py'), + 'genQC.models.transformers.cirdit_multimodal.AdaptiveSelfAttnBlock.forward': ( 'models/transformers/cirdit_multimodal.html#adaptiveselfattnblock.forward', + 'genQC/models/transformers/cirdit_multimodal.py'), + 'genQC.models.transformers.cirdit_multimodal.CirDiT': ( 'models/transformers/cirdit_multimodal.html#cirdit', + 'genQC/models/transformers/cirdit_multimodal.py'), + 'genQC.models.transformers.cirdit_multimodal.CirDiT.__init__': ( 'models/transformers/cirdit_multimodal.html#cirdit.__init__', + 'genQC/models/transformers/cirdit_multimodal.py'), + 'genQC.models.transformers.cirdit_multimodal.CirDiT._init_weights': ( 'models/transformers/cirdit_multimodal.html#cirdit._init_weights', + 'genQC/models/transformers/cirdit_multimodal.py'), + 
'genQC.models.transformers.cirdit_multimodal.CirDiT.forward': ( 'models/transformers/cirdit_multimodal.html#cirdit.forward', + 'genQC/models/transformers/cirdit_multimodal.py'), + 'genQC.models.transformers.cirdit_multimodal.CirDiT.main_pass': ( 'models/transformers/cirdit_multimodal.html#cirdit.main_pass', + 'genQC/models/transformers/cirdit_multimodal.py'), + 'genQC.models.transformers.cirdit_multimodal.CirDiTConfig': ( 'models/transformers/cirdit_multimodal.html#cirditconfig', + 'genQC/models/transformers/cirdit_multimodal.py'), + 'genQC.models.transformers.cirdit_multimodal.CoreTransformer': ( 'models/transformers/cirdit_multimodal.html#coretransformer', + 'genQC/models/transformers/cirdit_multimodal.py'), + 'genQC.models.transformers.cirdit_multimodal.CoreTransformer.__init__': ( 'models/transformers/cirdit_multimodal.html#coretransformer.__init__', + 'genQC/models/transformers/cirdit_multimodal.py'), + 'genQC.models.transformers.cirdit_multimodal.CoreTransformer.forward': ( 'models/transformers/cirdit_multimodal.html#coretransformer.forward', + 'genQC/models/transformers/cirdit_multimodal.py'), + 'genQC.models.transformers.cirdit_multimodal.CrossAttnBlock': ( 'models/transformers/cirdit_multimodal.html#crossattnblock', + 'genQC/models/transformers/cirdit_multimodal.py'), + 'genQC.models.transformers.cirdit_multimodal.CrossAttnBlock.__init__': ( 'models/transformers/cirdit_multimodal.html#crossattnblock.__init__', + 'genQC/models/transformers/cirdit_multimodal.py'), + 'genQC.models.transformers.cirdit_multimodal.CrossAttnBlock._init_weights': ( 'models/transformers/cirdit_multimodal.html#crossattnblock._init_weights', + 'genQC/models/transformers/cirdit_multimodal.py'), + 'genQC.models.transformers.cirdit_multimodal.CrossAttnBlock.forward': ( 'models/transformers/cirdit_multimodal.html#crossattnblock.forward', + 'genQC/models/transformers/cirdit_multimodal.py'), + 'genQC.models.transformers.cirdit_multimodal.FeedForwardBlock': ( 
'models/transformers/cirdit_multimodal.html#feedforwardblock', + 'genQC/models/transformers/cirdit_multimodal.py'), + 'genQC.models.transformers.cirdit_multimodal.FeedForwardBlock.__init__': ( 'models/transformers/cirdit_multimodal.html#feedforwardblock.__init__', + 'genQC/models/transformers/cirdit_multimodal.py'), + 'genQC.models.transformers.cirdit_multimodal.FeedForwardBlock._init_weights': ( 'models/transformers/cirdit_multimodal.html#feedforwardblock._init_weights', + 'genQC/models/transformers/cirdit_multimodal.py'), + 'genQC.models.transformers.cirdit_multimodal.FeedForwardBlock.forward': ( 'models/transformers/cirdit_multimodal.html#feedforwardblock.forward', + 'genQC/models/transformers/cirdit_multimodal.py'), + 'genQC.models.transformers.cirdit_multimodal.FeedForwardBlock.siglu': ( 'models/transformers/cirdit_multimodal.html#feedforwardblock.siglu', + 'genQC/models/transformers/cirdit_multimodal.py'), + 'genQC.models.transformers.cirdit_multimodal.PackingTransformer': ( 'models/transformers/cirdit_multimodal.html#packingtransformer', + 'genQC/models/transformers/cirdit_multimodal.py'), + 'genQC.models.transformers.cirdit_multimodal.PackingTransformer.__init__': ( 'models/transformers/cirdit_multimodal.html#packingtransformer.__init__', + 'genQC/models/transformers/cirdit_multimodal.py'), + 'genQC.models.transformers.cirdit_multimodal.PackingTransformer.forward': ( 'models/transformers/cirdit_multimodal.html#packingtransformer.forward', + 'genQC/models/transformers/cirdit_multimodal.py'), + 'genQC.models.transformers.cirdit_multimodal.RotaryMultiheadAttention': ( 'models/transformers/cirdit_multimodal.html#rotarymultiheadattention', + 'genQC/models/transformers/cirdit_multimodal.py'), + 'genQC.models.transformers.cirdit_multimodal.RotaryMultiheadAttention.__init__': ( 'models/transformers/cirdit_multimodal.html#rotarymultiheadattention.__init__', + 'genQC/models/transformers/cirdit_multimodal.py'), + 
'genQC.models.transformers.cirdit_multimodal.RotaryMultiheadAttention._init_weights': ( 'models/transformers/cirdit_multimodal.html#rotarymultiheadattention._init_weights', + 'genQC/models/transformers/cirdit_multimodal.py'), + 'genQC.models.transformers.cirdit_multimodal.RotaryMultiheadAttention.forward': ( 'models/transformers/cirdit_multimodal.html#rotarymultiheadattention.forward', + 'genQC/models/transformers/cirdit_multimodal.py'), + 'genQC.models.transformers.cirdit_multimodal.SelfAttnBlock': ( 'models/transformers/cirdit_multimodal.html#selfattnblock', + 'genQC/models/transformers/cirdit_multimodal.py'), + 'genQC.models.transformers.cirdit_multimodal.SelfAttnBlock.__init__': ( 'models/transformers/cirdit_multimodal.html#selfattnblock.__init__', + 'genQC/models/transformers/cirdit_multimodal.py'), + 'genQC.models.transformers.cirdit_multimodal.SelfAttnBlock._init_weights': ( 'models/transformers/cirdit_multimodal.html#selfattnblock._init_weights', + 'genQC/models/transformers/cirdit_multimodal.py'), + 'genQC.models.transformers.cirdit_multimodal.SelfAttnBlock.forward': ( 'models/transformers/cirdit_multimodal.html#selfattnblock.forward', + 'genQC/models/transformers/cirdit_multimodal.py'), + 'genQC.models.transformers.cirdit_multimodal.TimeEmbedding': ( 'models/transformers/cirdit_multimodal.html#timeembedding', + 'genQC/models/transformers/cirdit_multimodal.py'), + 'genQC.models.transformers.cirdit_multimodal.TimeEmbedding.__init__': ( 'models/transformers/cirdit_multimodal.html#timeembedding.__init__', + 'genQC/models/transformers/cirdit_multimodal.py'), + 'genQC.models.transformers.cirdit_multimodal.TimeEmbedding.forward': ( 'models/transformers/cirdit_multimodal.html#timeembedding.forward', + 'genQC/models/transformers/cirdit_multimodal.py'), + 'genQC.models.transformers.cirdit_multimodal.UnitaryCLIPPartialNoiseCompilationCirDiT': ( 'models/transformers/cirdit_multimodal.html#unitaryclippartialnoisecompilationcirdit', + 
'genQC/models/transformers/cirdit_multimodal.py'), + 'genQC.models.transformers.cirdit_multimodal.UnitaryCLIPPartialNoiseCompilationCirDiT.__init__': ( 'models/transformers/cirdit_multimodal.html#unitaryclippartialnoisecompilationcirdit.__init__', + 'genQC/models/transformers/cirdit_multimodal.py'), + 'genQC.models.transformers.cirdit_multimodal.UnitaryCLIPPartialNoiseCompilationCirDiT.forward': ( 'models/transformers/cirdit_multimodal.html#unitaryclippartialnoisecompilationcirdit.forward', + 'genQC/models/transformers/cirdit_multimodal.py'), + 'genQC.models.transformers.cirdit_multimodal.UnitaryCLIPPartialNoiseCompilationCirDiTConfig': ( 'models/transformers/cirdit_multimodal.html#unitaryclippartialnoisecompilationcirditconfig', + 'genQC/models/transformers/cirdit_multimodal.py'), + 'genQC.models.transformers.cirdit_multimodal.UnpackingTransformer': ( 'models/transformers/cirdit_multimodal.html#unpackingtransformer', + 'genQC/models/transformers/cirdit_multimodal.py'), + 'genQC.models.transformers.cirdit_multimodal.UnpackingTransformer.__init__': ( 'models/transformers/cirdit_multimodal.html#unpackingtransformer.__init__', + 'genQC/models/transformers/cirdit_multimodal.py'), + 'genQC.models.transformers.cirdit_multimodal.UnpackingTransformer.forward': ( 'models/transformers/cirdit_multimodal.html#unpackingtransformer.forward', + 'genQC/models/transformers/cirdit_multimodal.py')}, + 'genQC.models.transformers.transformers': { 'genQC.models.transformers.transformers.BasisCrossAttnBlock': ( 'models/transformers/transformers.html#basiscrossattnblock', + 'genQC/models/transformers/transformers.py'), + 'genQC.models.transformers.transformers.BasisCrossAttnBlock.__init__': ( 'models/transformers/transformers.html#basiscrossattnblock.__init__', + 'genQC/models/transformers/transformers.py'), + 'genQC.models.transformers.transformers.BasisCrossAttnBlock.forward': ( 'models/transformers/transformers.html#basiscrossattnblock.forward', + 
'genQC/models/transformers/transformers.py'), + 'genQC.models.transformers.transformers.BasisSelfAttnBlock': ( 'models/transformers/transformers.html#basisselfattnblock', + 'genQC/models/transformers/transformers.py'), + 'genQC.models.transformers.transformers.BasisSelfAttnBlock.__init__': ( 'models/transformers/transformers.html#basisselfattnblock.__init__', + 'genQC/models/transformers/transformers.py'), + 'genQC.models.transformers.transformers.BasisSelfAttnBlock.forward': ( 'models/transformers/transformers.html#basisselfattnblock.forward', + 'genQC/models/transformers/transformers.py'), + 'genQC.models.transformers.transformers.SpatialTransformer': ( 'models/transformers/transformers.html#spatialtransformer', + 'genQC/models/transformers/transformers.py'), + 'genQC.models.transformers.transformers.SpatialTransformer.__init__': ( 'models/transformers/transformers.html#spatialtransformer.__init__', + 'genQC/models/transformers/transformers.py'), + 'genQC.models.transformers.transformers.SpatialTransformer.forward': ( 'models/transformers/transformers.html#spatialtransformer.forward', + 'genQC/models/transformers/transformers.py'), + 'genQC.models.transformers.transformers.SpatialTransformerSelfAttn': ( 'models/transformers/transformers.html#spatialtransformerselfattn', + 'genQC/models/transformers/transformers.py'), + 'genQC.models.transformers.transformers.SpatialTransformerSelfAttn.__init__': ( 'models/transformers/transformers.html#spatialtransformerselfattn.__init__', + 'genQC/models/transformers/transformers.py'), + 'genQC.models.transformers.transformers.SpatialTransformerSelfAttn.forward': ( 'models/transformers/transformers.html#spatialtransformerselfattn.forward', + 'genQC/models/transformers/transformers.py')}, 'genQC.models.unet_qc': { 'genQC.models.unet_qc.Decoder': ('models/unet_qc.html#decoder', 'genQC/models/unet_qc.py'), 'genQC.models.unet_qc.Decoder.__init__': ( 'models/unet_qc.html#decoder.__init__', 'genQC/models/unet_qc.py'), @@ -385,12 
+678,12 @@ 'genQC/models/unet_qc.py'), 'genQC.models.unet_qc.QC_Cond_UNet._init_weights': ( 'models/unet_qc.html#qc_cond_unet._init_weights', 'genQC/models/unet_qc.py'), - 'genQC.models.unet_qc.QC_Cond_UNet.embedd_clrs': ( 'models/unet_qc.html#qc_cond_unet.embedd_clrs', - 'genQC/models/unet_qc.py'), + 'genQC.models.unet_qc.QC_Cond_UNet.embed': ( 'models/unet_qc.html#qc_cond_unet.embed', + 'genQC/models/unet_qc.py'), 'genQC.models.unet_qc.QC_Cond_UNet.forward': ( 'models/unet_qc.html#qc_cond_unet.forward', 'genQC/models/unet_qc.py'), - 'genQC.models.unet_qc.QC_Cond_UNet.invert_clr': ( 'models/unet_qc.html#qc_cond_unet.invert_clr', - 'genQC/models/unet_qc.py'), + 'genQC.models.unet_qc.QC_Cond_UNet.invert': ( 'models/unet_qc.html#qc_cond_unet.invert', + 'genQC/models/unet_qc.py'), 'genQC.models.unet_qc.QC_Cond_UNet_config': ( 'models/unet_qc.html#qc_cond_unet_config', 'genQC/models/unet_qc.py'), 'genQC.models.unet_qc.UNet_block': ('models/unet_qc.html#unet_block', 'genQC/models/unet_qc.py'), @@ -410,6 +703,30 @@ 'genQC/models/unitary_encoder.py'), 'genQC.models.unitary_encoder.Unitary_encoder_config': ( 'models/unitary_encoder.html#unitary_encoder_config', 'genQC/models/unitary_encoder.py')}, + 'genQC.pipeline.callbacks': { 'genQC.pipeline.callbacks.Callback': ( 'pipeline/callbacks.html#callback', + 'genQC/pipeline/callbacks.py'), + 'genQC.pipeline.callbacks.CancelBatchException': ( 'pipeline/callbacks.html#cancelbatchexception', + 'genQC/pipeline/callbacks.py'), + 'genQC.pipeline.callbacks.CancelEpochException': ( 'pipeline/callbacks.html#cancelepochexception', + 'genQC/pipeline/callbacks.py'), + 'genQC.pipeline.callbacks.CancelFitException': ( 'pipeline/callbacks.html#cancelfitexception', + 'genQC/pipeline/callbacks.py'), + 'genQC.pipeline.callbacks.run_cbs': ( 'pipeline/callbacks.html#run_cbs', + 'genQC/pipeline/callbacks.py')}, + 'genQC.pipeline.compilation_diffusion_pipeline': { 'genQC.pipeline.compilation_diffusion_pipeline.DiffusionPipeline_Compilation': ( 
'pipeline/compilation_diffusion_pipeline.html#diffusionpipeline_compilation', + 'genQC/pipeline/compilation_diffusion_pipeline.py'), + 'genQC.pipeline.compilation_diffusion_pipeline.DiffusionPipeline_Compilation.__call__': ( 'pipeline/compilation_diffusion_pipeline.html#diffusionpipeline_compilation.__call__', + 'genQC/pipeline/compilation_diffusion_pipeline.py'), + 'genQC.pipeline.compilation_diffusion_pipeline.DiffusionPipeline_Compilation.denoising': ( 'pipeline/compilation_diffusion_pipeline.html#diffusionpipeline_compilation.denoising', + 'genQC/pipeline/compilation_diffusion_pipeline.py'), + 'genQC.pipeline.compilation_diffusion_pipeline.DiffusionPipeline_Compilation.denoising_step': ( 'pipeline/compilation_diffusion_pipeline.html#diffusionpipeline_compilation.denoising_step', + 'genQC/pipeline/compilation_diffusion_pipeline.py'), + 'genQC.pipeline.compilation_diffusion_pipeline.DiffusionPipeline_Compilation.empty_unitary_fn': ( 'pipeline/compilation_diffusion_pipeline.html#diffusionpipeline_compilation.empty_unitary_fn', + 'genQC/pipeline/compilation_diffusion_pipeline.py'), + 'genQC.pipeline.compilation_diffusion_pipeline.DiffusionPipeline_Compilation.get_guidance_U': ( 'pipeline/compilation_diffusion_pipeline.html#diffusionpipeline_compilation.get_guidance_u', + 'genQC/pipeline/compilation_diffusion_pipeline.py'), + 'genQC.pipeline.compilation_diffusion_pipeline.DiffusionPipeline_Compilation.train_step': ( 'pipeline/compilation_diffusion_pipeline.html#diffusionpipeline_compilation.train_step', + 'genQC/pipeline/compilation_diffusion_pipeline.py')}, 'genQC.pipeline.diffusion_pipeline': { 'genQC.pipeline.diffusion_pipeline.DiffusionPipeline': ( 'pipeline/diffusion_pipeline.html#diffusionpipeline', 'genQC/pipeline/diffusion_pipeline.py'), 'genQC.pipeline.diffusion_pipeline.DiffusionPipeline.CFG': ( 'pipeline/diffusion_pipeline.html#diffusionpipeline.cfg', @@ -418,14 +735,14 @@ 'genQC/pipeline/diffusion_pipeline.py'), 
'genQC.pipeline.diffusion_pipeline.DiffusionPipeline.__init__': ( 'pipeline/diffusion_pipeline.html#diffusionpipeline.__init__', 'genQC/pipeline/diffusion_pipeline.py'), + 'genQC.pipeline.diffusion_pipeline.DiffusionPipeline.cfg_drop': ( 'pipeline/diffusion_pipeline.html#diffusionpipeline.cfg_drop', + 'genQC/pipeline/diffusion_pipeline.py'), 'genQC.pipeline.diffusion_pipeline.DiffusionPipeline.denoising': ( 'pipeline/diffusion_pipeline.html#diffusionpipeline.denoising', 'genQC/pipeline/diffusion_pipeline.py'), 'genQC.pipeline.diffusion_pipeline.DiffusionPipeline.denoising_step': ( 'pipeline/diffusion_pipeline.html#diffusionpipeline.denoising_step', 'genQC/pipeline/diffusion_pipeline.py'), 'genQC.pipeline.diffusion_pipeline.DiffusionPipeline.from_config_file': ( 'pipeline/diffusion_pipeline.html#diffusionpipeline.from_config_file', 'genQC/pipeline/diffusion_pipeline.py'), - 'genQC.pipeline.diffusion_pipeline.DiffusionPipeline.from_pretrained': ( 'pipeline/diffusion_pipeline.html#diffusionpipeline.from_pretrained', - 'genQC/pipeline/diffusion_pipeline.py'), 'genQC.pipeline.diffusion_pipeline.DiffusionPipeline.get_guidance_condition': ( 'pipeline/diffusion_pipeline.html#diffusionpipeline.get_guidance_condition', 'genQC/pipeline/diffusion_pipeline.py'), 'genQC.pipeline.diffusion_pipeline.DiffusionPipeline.latent_filling': ( 'pipeline/diffusion_pipeline.html#diffusionpipeline.latent_filling', @@ -434,6 +751,8 @@ 'genQC/pipeline/diffusion_pipeline.py'), 'genQC.pipeline.diffusion_pipeline.DiffusionPipeline.prepare_c_emb': ( 'pipeline/diffusion_pipeline.html#diffusionpipeline.prepare_c_emb', 'genQC/pipeline/diffusion_pipeline.py'), + 'genQC.pipeline.diffusion_pipeline.DiffusionPipeline.sample_timesteps_low_variance': ( 'pipeline/diffusion_pipeline.html#diffusionpipeline.sample_timesteps_low_variance', + 'genQC/pipeline/diffusion_pipeline.py'), 'genQC.pipeline.diffusion_pipeline.DiffusionPipeline.store_pipeline': ( 
'pipeline/diffusion_pipeline.html#diffusionpipeline.store_pipeline', 'genQC/pipeline/diffusion_pipeline.py'), 'genQC.pipeline.diffusion_pipeline.DiffusionPipeline.train_on_epoch': ( 'pipeline/diffusion_pipeline.html#diffusionpipeline.train_on_epoch', @@ -451,17 +770,68 @@ 'genQC.pipeline.diffusion_pipeline_special.DiffusionPipeline_Compilation.get_guidance_U': ( 'pipeline/diffusion_pipeline_special.html#diffusionpipeline_compilation.get_guidance_u', 'genQC/pipeline/diffusion_pipeline_special.py'), 'genQC.pipeline.diffusion_pipeline_special.DiffusionPipeline_Compilation.train_step': ( 'pipeline/diffusion_pipeline_special.html#diffusionpipeline_compilation.train_step', - 'genQC/pipeline/diffusion_pipeline_special.py'), - 'genQC.pipeline.diffusion_pipeline_special.DiffusionPipeline_attnPadded': ( 'pipeline/diffusion_pipeline_special.html#diffusionpipeline_attnpadded', - 'genQC/pipeline/diffusion_pipeline_special.py'), - 'genQC.pipeline.diffusion_pipeline_special.DiffusionPipeline_attnPadded.train_step': ( 'pipeline/diffusion_pipeline_special.html#diffusionpipeline_attnpadded.train_step', - 'genQC/pipeline/diffusion_pipeline_special.py')}, - 'genQC.pipeline.pipeline': { 'genQC.pipeline.pipeline.Pipeline': ( 'pipeline/pipeline.html#pipeline', + 'genQC/pipeline/diffusion_pipeline_special.py')}, + 'genQC.pipeline.metrics': { 'genQC.pipeline.metrics.Accuracy': ('pipeline/metrics.html#accuracy', 'genQC/pipeline/metrics.py'), + 'genQC.pipeline.metrics.Accuracy._eval': ( 'pipeline/metrics.html#accuracy._eval', + 'genQC/pipeline/metrics.py'), + 'genQC.pipeline.metrics.Mean': ('pipeline/metrics.html#mean', 'genQC/pipeline/metrics.py'), + 'genQC.pipeline.metrics.Mean.__init__': ( 'pipeline/metrics.html#mean.__init__', + 'genQC/pipeline/metrics.py'), + 'genQC.pipeline.metrics.Mean._eval': ( 'pipeline/metrics.html#mean._eval', + 'genQC/pipeline/metrics.py'), + 'genQC.pipeline.metrics.Mean.reset_state': ( 'pipeline/metrics.html#mean.reset_state', + 'genQC/pipeline/metrics.py'), + 
'genQC.pipeline.metrics.Mean.result': ( 'pipeline/metrics.html#mean.result', + 'genQC/pipeline/metrics.py'), + 'genQC.pipeline.metrics.Mean.update_state': ( 'pipeline/metrics.html#mean.update_state', + 'genQC/pipeline/metrics.py'), + 'genQC.pipeline.metrics.Metric': ('pipeline/metrics.html#metric', 'genQC/pipeline/metrics.py'), + 'genQC.pipeline.metrics.Metric.__init__': ( 'pipeline/metrics.html#metric.__init__', + 'genQC/pipeline/metrics.py'), + 'genQC.pipeline.metrics.Metric.__repr__': ( 'pipeline/metrics.html#metric.__repr__', + 'genQC/pipeline/metrics.py'), + 'genQC.pipeline.metrics.Metric._eval': ( 'pipeline/metrics.html#metric._eval', + 'genQC/pipeline/metrics.py'), + 'genQC.pipeline.metrics.Metric.reset_state': ( 'pipeline/metrics.html#metric.reset_state', + 'genQC/pipeline/metrics.py'), + 'genQC.pipeline.metrics.Metric.result': ( 'pipeline/metrics.html#metric.result', + 'genQC/pipeline/metrics.py'), + 'genQC.pipeline.metrics.Metric.update_state': ( 'pipeline/metrics.html#metric.update_state', + 'genQC/pipeline/metrics.py')}, + 'genQC.pipeline.multimodal_diffusion_pipeline': { 'genQC.pipeline.multimodal_diffusion_pipeline.MultimodalDiffusionPipeline_ParametrizedCompilation': ( 'pipeline/multimodal_diffusion_pipeline.html#multimodaldiffusionpipeline_parametrizedcompilation', + 'genQC/pipeline/multimodal_diffusion_pipeline.py'), + 'genQC.pipeline.multimodal_diffusion_pipeline.MultimodalDiffusionPipeline_ParametrizedCompilation.__init__': ( 'pipeline/multimodal_diffusion_pipeline.html#multimodaldiffusionpipeline_parametrizedcompilation.__init__', + 'genQC/pipeline/multimodal_diffusion_pipeline.py'), + 'genQC.pipeline.multimodal_diffusion_pipeline.MultimodalDiffusionPipeline_ParametrizedCompilation._get_guidance_scales': ( 'pipeline/multimodal_diffusion_pipeline.html#multimodaldiffusionpipeline_parametrizedcompilation._get_guidance_scales', + 'genQC/pipeline/multimodal_diffusion_pipeline.py'), + 
'genQC.pipeline.multimodal_diffusion_pipeline.MultimodalDiffusionPipeline_ParametrizedCompilation.denoising': ( 'pipeline/multimodal_diffusion_pipeline.html#multimodaldiffusionpipeline_parametrizedcompilation.denoising', + 'genQC/pipeline/multimodal_diffusion_pipeline.py'), + 'genQC.pipeline.multimodal_diffusion_pipeline.MultimodalDiffusionPipeline_ParametrizedCompilation.denoising_step': ( 'pipeline/multimodal_diffusion_pipeline.html#multimodaldiffusionpipeline_parametrizedcompilation.denoising_step', + 'genQC/pipeline/multimodal_diffusion_pipeline.py'), + 'genQC.pipeline.multimodal_diffusion_pipeline.MultimodalDiffusionPipeline_ParametrizedCompilation.denoising_step_joint': ( 'pipeline/multimodal_diffusion_pipeline.html#multimodaldiffusionpipeline_parametrizedcompilation.denoising_step_joint', + 'genQC/pipeline/multimodal_diffusion_pipeline.py'), + 'genQC.pipeline.multimodal_diffusion_pipeline.MultimodalDiffusionPipeline_ParametrizedCompilation.denoising_step_single_mode_w': ( 'pipeline/multimodal_diffusion_pipeline.html#multimodaldiffusionpipeline_parametrizedcompilation.denoising_step_single_mode_w', + 'genQC/pipeline/multimodal_diffusion_pipeline.py'), + 'genQC.pipeline.multimodal_diffusion_pipeline.MultimodalDiffusionPipeline_ParametrizedCompilation.from_config_file': ( 'pipeline/multimodal_diffusion_pipeline.html#multimodaldiffusionpipeline_parametrizedcompilation.from_config_file', + 'genQC/pipeline/multimodal_diffusion_pipeline.py'), + 'genQC.pipeline.multimodal_diffusion_pipeline.MultimodalDiffusionPipeline_ParametrizedCompilation.params_config': ( 'pipeline/multimodal_diffusion_pipeline.html#multimodaldiffusionpipeline_parametrizedcompilation.params_config', + 'genQC/pipeline/multimodal_diffusion_pipeline.py'), + 'genQC.pipeline.multimodal_diffusion_pipeline.MultimodalDiffusionPipeline_ParametrizedCompilation.train_step': ( 'pipeline/multimodal_diffusion_pipeline.html#multimodaldiffusionpipeline_parametrizedcompilation.train_step', + 
'genQC/pipeline/multimodal_diffusion_pipeline.py')}, + 'genQC.pipeline.pipeline': { 'genQC.pipeline.pipeline.CheckpointCB': ( 'pipeline/pipeline.html#checkpointcb', + 'genQC/pipeline/pipeline.py'), + 'genQC.pipeline.pipeline.CheckpointCB.__init__': ( 'pipeline/pipeline.html#checkpointcb.__init__', + 'genQC/pipeline/pipeline.py'), + 'genQC.pipeline.pipeline.CheckpointCB.after_epoch': ( 'pipeline/pipeline.html#checkpointcb.after_epoch', + 'genQC/pipeline/pipeline.py'), + 'genQC.pipeline.pipeline.Pipeline': ( 'pipeline/pipeline.html#pipeline', 'genQC/pipeline/pipeline.py'), 'genQC.pipeline.pipeline.Pipeline.__call__': ( 'pipeline/pipeline.html#pipeline.__call__', 'genQC/pipeline/pipeline.py'), 'genQC.pipeline.pipeline.Pipeline.__init__': ( 'pipeline/pipeline.html#pipeline.__init__', 'genQC/pipeline/pipeline.py'), + 'genQC.pipeline.pipeline.Pipeline._get_parameters': ( 'pipeline/pipeline.html#pipeline._get_parameters', + 'genQC/pipeline/pipeline.py'), 'genQC.pipeline.pipeline.Pipeline._reset_opt': ( 'pipeline/pipeline.html#pipeline._reset_opt', 'genQC/pipeline/pipeline.py'), 'genQC.pipeline.pipeline.Pipeline._set_opt_param': ( 'pipeline/pipeline.html#pipeline._set_opt_param', @@ -488,75 +858,210 @@ 'genQC/pipeline/pipeline.py'), 'genQC.pipeline.pipeline.Pipeline.train_step': ( 'pipeline/pipeline.html#pipeline.train_step', 'genQC/pipeline/pipeline.py'), - 'genQC.pipeline.pipeline.Pipeline_IO': ( 'pipeline/pipeline.html#pipeline_io', - 'genQC/pipeline/pipeline.py'), - 'genQC.pipeline.pipeline.Pipeline_IO.from_config_file': ( 'pipeline/pipeline.html#pipeline_io.from_config_file', - 'genQC/pipeline/pipeline.py'), - 'genQC.pipeline.pipeline.Pipeline_IO.get_config': ( 'pipeline/pipeline.html#pipeline_io.get_config', - 'genQC/pipeline/pipeline.py'), - 'genQC.pipeline.pipeline.Pipeline_IO.params_config': ( 'pipeline/pipeline.html#pipeline_io.params_config', - 'genQC/pipeline/pipeline.py'), - 'genQC.pipeline.pipeline.Pipeline_IO.store_pipeline': ( 
'pipeline/pipeline.html#pipeline_io.store_pipeline', - 'genQC/pipeline/pipeline.py')}, - 'genQC.platform.qcircuit_dataset_construction': { 'genQC.platform.qcircuit_dataset_construction.decode_circuit': ( 'platform/qcircuit_dataset_construction.html#decode_circuit', - 'genQC/platform/qcircuit_dataset_construction.py'), - 'genQC.platform.qcircuit_dataset_construction.encode_circuit': ( 'platform/qcircuit_dataset_construction.html#encode_circuit', - 'genQC/platform/qcircuit_dataset_construction.py'), - 'genQC.platform.qcircuit_dataset_construction.gen_compilation_rndGates_dataset': ( 'platform/qcircuit_dataset_construction.html#gen_compilation_rndgates_dataset', - 'genQC/platform/qcircuit_dataset_construction.py'), - 'genQC.platform.qcircuit_dataset_construction.gen_qc_dataset': ( 'platform/qcircuit_dataset_construction.html#gen_qc_dataset', - 'genQC/platform/qcircuit_dataset_construction.py'), - 'genQC.platform.qcircuit_dataset_construction.get_rnd_encoded_circuit': ( 'platform/qcircuit_dataset_construction.html#get_rnd_encoded_circuit', - 'genQC/platform/qcircuit_dataset_construction.py'), - 'genQC.platform.qcircuit_dataset_construction.get_rnd_encoded_circuits': ( 'platform/qcircuit_dataset_construction.html#get_rnd_encoded_circuits', - 'genQC/platform/qcircuit_dataset_construction.py'), - 'genQC.platform.qcircuit_dataset_construction.get_specific_rnd_srv_circuit': ( 'platform/qcircuit_dataset_construction.html#get_specific_rnd_srv_circuit', - 'genQC/platform/qcircuit_dataset_construction.py'), - 'genQC.platform.qcircuit_dataset_construction.get_target_control_qubits': ( 'platform/qcircuit_dataset_construction.html#get_target_control_qubits', - 'genQC/platform/qcircuit_dataset_construction.py')}, - 'genQC.platform.qcircuit_evaluation': { 'genQC.platform.qcircuit_evaluation.extract_gate_number': ( 'platform/qcircuit_evaluation.html#extract_gate_number', - 'genQC/platform/qcircuit_evaluation.py'), - 'genQC.platform.qcircuit_evaluation.get_gate_stat_from_circuits': ( 
'platform/qcircuit_evaluation.html#get_gate_stat_from_circuits', - 'genQC/platform/qcircuit_evaluation.py'), - 'genQC.platform.qcircuit_evaluation.get_gate_stat_from_tensors': ( 'platform/qcircuit_evaluation.html#get_gate_stat_from_tensors', - 'genQC/platform/qcircuit_evaluation.py'), - 'genQC.platform.qcircuit_evaluation.sort_into_bins': ( 'platform/qcircuit_evaluation.html#sort_into_bins', - 'genQC/platform/qcircuit_evaluation.py')}, - 'genQC.platform.qcircuit_metrics': { 'genQC.platform.qcircuit_metrics.Unitary_FrobeniusNorm': ( 'platform/qcircuit_metrics.html#unitary_frobeniusnorm', - 'genQC/platform/qcircuit_metrics.py'), - 'genQC.platform.qcircuit_metrics.Unitary_FrobeniusNorm.distance': ( 'platform/qcircuit_metrics.html#unitary_frobeniusnorm.distance', - 'genQC/platform/qcircuit_metrics.py'), - 'genQC.platform.qcircuit_metrics.Unitary_FrobeniusNorm.name': ( 'platform/qcircuit_metrics.html#unitary_frobeniusnorm.name', - 'genQC/platform/qcircuit_metrics.py')}, - 'genQC.platform.qcircuit_util': { 'genQC.platform.qcircuit_util.get_element_matching_indices': ( 'platform/qcircuit_util.html#get_element_matching_indices', - 'genQC/platform/qcircuit_util.py'), - 'genQC.platform.qcircuit_util.get_entanglement_bins': ( 'platform/qcircuit_util.html#get_entanglement_bins', - 'genQC/platform/qcircuit_util.py')}, - 'genQC.platform.simulation.qcircuit_sim': { 'genQC.platform.simulation.qcircuit_sim.gate_pool_to_gate_classes': ( 'platform/simulation/qcircuit_sim.html#gate_pool_to_gate_classes', - 'genQC/platform/simulation/qcircuit_sim.py'), - 'genQC.platform.simulation.qcircuit_sim.get_number_of_gate_params': ( 'platform/simulation/qcircuit_sim.html#get_number_of_gate_params', - 'genQC/platform/simulation/qcircuit_sim.py'), - 'genQC.platform.simulation.qcircuit_sim.instruction_name_to_qiskit_gate': ( 'platform/simulation/qcircuit_sim.html#instruction_name_to_qiskit_gate', - 'genQC/platform/simulation/qcircuit_sim.py'), - 
'genQC.platform.simulation.qcircuit_sim.optimize_circuit': ( 'platform/simulation/qcircuit_sim.html#optimize_circuit', - 'genQC/platform/simulation/qcircuit_sim.py'), - 'genQC.platform.simulation.qcircuit_sim.plot_svr_stat': ( 'platform/simulation/qcircuit_sim.html#plot_svr_stat', - 'genQC/platform/simulation/qcircuit_sim.py'), - 'genQC.platform.simulation.qcircuit_sim.rnd_circuit': ( 'platform/simulation/qcircuit_sim.html#rnd_circuit', - 'genQC/platform/simulation/qcircuit_sim.py'), - 'genQC.platform.simulation.qcircuit_sim.schmidt_rank_vector': ( 'platform/simulation/qcircuit_sim.html#schmidt_rank_vector', - 'genQC/platform/simulation/qcircuit_sim.py')}, - 'genQC.printing': { 'genQC.printing.display_colums': ('printing.html#display_colums', 'genQC/printing.py'), - 'genQC.printing.ndarray_to_latex': ('printing.html#ndarray_to_latex', 'genQC/printing.py'), - 'genQC.printing.print_markdown': ('printing.html#print_markdown', 'genQC/printing.py'), - 'genQC.printing.print_table': ('printing.html#print_table', 'genQC/printing.py'), - 'genQC.printing.tensor_to_latex': ('printing.html#tensor_to_latex', 'genQC/printing.py')}, + 'genQC.pipeline.pipeline.PipelineIO': ( 'pipeline/pipeline.html#pipelineio', + 'genQC/pipeline/pipeline.py'), + 'genQC.pipeline.pipeline.PipelineIO.from_config_file': ( 'pipeline/pipeline.html#pipelineio.from_config_file', + 'genQC/pipeline/pipeline.py'), + 'genQC.pipeline.pipeline.PipelineIO.from_pretrained': ( 'pipeline/pipeline.html#pipelineio.from_pretrained', + 'genQC/pipeline/pipeline.py'), + 'genQC.pipeline.pipeline.PipelineIO.get_config': ( 'pipeline/pipeline.html#pipelineio.get_config', + 'genQC/pipeline/pipeline.py'), + 'genQC.pipeline.pipeline.PipelineIO.params_config': ( 'pipeline/pipeline.html#pipelineio.params_config', + 'genQC/pipeline/pipeline.py'), + 'genQC.pipeline.pipeline.PipelineIO.store_pipeline': ( 'pipeline/pipeline.html#pipelineio.store_pipeline', + 'genQC/pipeline/pipeline.py')}, + 'genQC.pipeline.unitary_clip_pipeline': { 
'genQC.pipeline.unitary_clip_pipeline.UnitaryCLIPPipeline': ( 'pipeline/unitary_clip_pipeline.html#unitaryclippipeline', + 'genQC/pipeline/unitary_clip_pipeline.py'), + 'genQC.pipeline.unitary_clip_pipeline.UnitaryCLIPPipeline.__call__': ( 'pipeline/unitary_clip_pipeline.html#unitaryclippipeline.__call__', + 'genQC/pipeline/unitary_clip_pipeline.py'), + 'genQC.pipeline.unitary_clip_pipeline.UnitaryCLIPPipeline.__init__': ( 'pipeline/unitary_clip_pipeline.html#unitaryclippipeline.__init__', + 'genQC/pipeline/unitary_clip_pipeline.py'), + 'genQC.pipeline.unitary_clip_pipeline.UnitaryCLIPPipeline.from_config_file': ( 'pipeline/unitary_clip_pipeline.html#unitaryclippipeline.from_config_file', + 'genQC/pipeline/unitary_clip_pipeline.py'), + 'genQC.pipeline.unitary_clip_pipeline.UnitaryCLIPPipeline.get_loss': ( 'pipeline/unitary_clip_pipeline.html#unitaryclippipeline.get_loss', + 'genQC/pipeline/unitary_clip_pipeline.py'), + 'genQC.pipeline.unitary_clip_pipeline.UnitaryCLIPPipeline.params_config': ( 'pipeline/unitary_clip_pipeline.html#unitaryclippipeline.params_config', + 'genQC/pipeline/unitary_clip_pipeline.py'), + 'genQC.pipeline.unitary_clip_pipeline.UnitaryCLIPPipeline.store_pipeline': ( 'pipeline/unitary_clip_pipeline.html#unitaryclippipeline.store_pipeline', + 'genQC/pipeline/unitary_clip_pipeline.py'), + 'genQC.pipeline.unitary_clip_pipeline.UnitaryCLIPPipeline.train_step': ( 'pipeline/unitary_clip_pipeline.html#unitaryclippipeline.train_step', + 'genQC/pipeline/unitary_clip_pipeline.py')}, + 'genQC.platform.backends.base_backend': { 'genQC.platform.backends.base_backend.BaseBackend': ( 'platform/backends/base_backend.html#basebackend', + 'genQC/platform/backends/base_backend.py'), + 'genQC.platform.backends.base_backend.BaseBackend.backend_to_genqc': ( 'platform/backends/base_backend.html#basebackend.backend_to_genqc', + 'genQC/platform/backends/base_backend.py'), + 'genQC.platform.backends.base_backend.BaseBackend.draw': ( 
'platform/backends/base_backend.html#basebackend.draw', + 'genQC/platform/backends/base_backend.py'), + 'genQC.platform.backends.base_backend.BaseBackend.genqc_to_backend': ( 'platform/backends/base_backend.html#basebackend.genqc_to_backend', + 'genQC/platform/backends/base_backend.py'), + 'genQC.platform.backends.base_backend.BaseBackend.get_unitary': ( 'platform/backends/base_backend.html#basebackend.get_unitary', + 'genQC/platform/backends/base_backend.py')}, + 'genQC.platform.backends.circuits_cudaq': { 'genQC.platform.backends.circuits_cudaq.CircuitsCudaqBackend': ( 'platform/backends/circuits_cudaq.html#circuitscudaqbackend', + 'genQC/platform/backends/circuits_cudaq.py'), + 'genQC.platform.backends.circuits_cudaq.CircuitsCudaqBackend.__init__': ( 'platform/backends/circuits_cudaq.html#circuitscudaqbackend.__init__', + 'genQC/platform/backends/circuits_cudaq.py'), + 'genQC.platform.backends.circuits_cudaq.CircuitsCudaqBackend._construct_kernel': ( 'platform/backends/circuits_cudaq.html#circuitscudaqbackend._construct_kernel', + 'genQC/platform/backends/circuits_cudaq.py'), + 'genQC.platform.backends.circuits_cudaq.CircuitsCudaqBackend.backend_to_genqc': ( 'platform/backends/circuits_cudaq.html#circuitscudaqbackend.backend_to_genqc', + 'genQC/platform/backends/circuits_cudaq.py'), + 'genQC.platform.backends.circuits_cudaq.CircuitsCudaqBackend.check_error_circuit': ( 'platform/backends/circuits_cudaq.html#circuitscudaqbackend.check_error_circuit', + 'genQC/platform/backends/circuits_cudaq.py'), + 'genQC.platform.backends.circuits_cudaq.CircuitsCudaqBackend.draw': ( 'platform/backends/circuits_cudaq.html#circuitscudaqbackend.draw', + 'genQC/platform/backends/circuits_cudaq.py'), + 'genQC.platform.backends.circuits_cudaq.CircuitsCudaqBackend.genqc_to_backend': ( 'platform/backends/circuits_cudaq.html#circuitscudaqbackend.genqc_to_backend', + 'genQC/platform/backends/circuits_cudaq.py'), + 'genQC.platform.backends.circuits_cudaq.CircuitsCudaqBackend.get_unitary': 
( 'platform/backends/circuits_cudaq.html#circuitscudaqbackend.get_unitary', + 'genQC/platform/backends/circuits_cudaq.py'), + 'genQC.platform.backends.circuits_cudaq.ParametrizedCudaqKernel': ( 'platform/backends/circuits_cudaq.html#parametrizedcudaqkernel', + 'genQC/platform/backends/circuits_cudaq.py')}, + 'genQC.platform.backends.circuits_pennylane': { 'genQC.platform.backends.circuits_pennylane.CircuitsPennylaneBackend': ( 'platform/backends/circuits_pennylane.html#circuitspennylanebackend', + 'genQC/platform/backends/circuits_pennylane.py'), + 'genQC.platform.backends.circuits_pennylane.CircuitsPennylaneBackend.backend_to_genqc': ( 'platform/backends/circuits_pennylane.html#circuitspennylanebackend.backend_to_genqc', + 'genQC/platform/backends/circuits_pennylane.py'), + 'genQC.platform.backends.circuits_pennylane.CircuitsPennylaneBackend.draw': ( 'platform/backends/circuits_pennylane.html#circuitspennylanebackend.draw', + 'genQC/platform/backends/circuits_pennylane.py'), + 'genQC.platform.backends.circuits_pennylane.CircuitsPennylaneBackend.genqc_to_backend': ( 'platform/backends/circuits_pennylane.html#circuitspennylanebackend.genqc_to_backend', + 'genQC/platform/backends/circuits_pennylane.py'), + 'genQC.platform.backends.circuits_pennylane.CircuitsPennylaneBackend.get_unitary': ( 'platform/backends/circuits_pennylane.html#circuitspennylanebackend.get_unitary', + 'genQC/platform/backends/circuits_pennylane.py'), + 'genQC.platform.backends.circuits_pennylane.ParametrizedPennylaneCircuit': ( 'platform/backends/circuits_pennylane.html#parametrizedpennylanecircuit', + 'genQC/platform/backends/circuits_pennylane.py'), + 'genQC.platform.backends.circuits_pennylane.instruction_name_to_pennylane_name': ( 'platform/backends/circuits_pennylane.html#instruction_name_to_pennylane_name', + 'genQC/platform/backends/circuits_pennylane.py')}, + 'genQC.platform.backends.circuits_qiskit': { 'genQC.platform.backends.circuits_qiskit.CircuitsQiskitBackend': ( 
'platform/backends/circuits_qiskit.html#circuitsqiskitbackend', + 'genQC/platform/backends/circuits_qiskit.py'), + 'genQC.platform.backends.circuits_qiskit.CircuitsQiskitBackend.backend_to_genqc': ( 'platform/backends/circuits_qiskit.html#circuitsqiskitbackend.backend_to_genqc', + 'genQC/platform/backends/circuits_qiskit.py'), + 'genQC.platform.backends.circuits_qiskit.CircuitsQiskitBackend.draw': ( 'platform/backends/circuits_qiskit.html#circuitsqiskitbackend.draw', + 'genQC/platform/backends/circuits_qiskit.py'), + 'genQC.platform.backends.circuits_qiskit.CircuitsQiskitBackend.genqc_to_backend': ( 'platform/backends/circuits_qiskit.html#circuitsqiskitbackend.genqc_to_backend', + 'genQC/platform/backends/circuits_qiskit.py'), + 'genQC.platform.backends.circuits_qiskit.CircuitsQiskitBackend.get_unitary': ( 'platform/backends/circuits_qiskit.html#circuitsqiskitbackend.get_unitary', + 'genQC/platform/backends/circuits_qiskit.py'), + 'genQC.platform.backends.circuits_qiskit.CircuitsQiskitBackend.optimize_circuit': ( 'platform/backends/circuits_qiskit.html#circuitsqiskitbackend.optimize_circuit', + 'genQC/platform/backends/circuits_qiskit.py'), + 'genQC.platform.backends.circuits_qiskit.CircuitsQiskitBackend.randomize_params': ( 'platform/backends/circuits_qiskit.html#circuitsqiskitbackend.randomize_params', + 'genQC/platform/backends/circuits_qiskit.py'), + 'genQC.platform.backends.circuits_qiskit.CircuitsQiskitBackend.rnd_circuit': ( 'platform/backends/circuits_qiskit.html#circuitsqiskitbackend.rnd_circuit', + 'genQC/platform/backends/circuits_qiskit.py'), + 'genQC.platform.backends.circuits_qiskit.CircuitsQiskitBackend.schmidt_rank_vector': ( 'platform/backends/circuits_qiskit.html#circuitsqiskitbackend.schmidt_rank_vector', + 'genQC/platform/backends/circuits_qiskit.py'), + 'genQC.platform.backends.circuits_qiskit.get_number_of_gate_params': ( 'platform/backends/circuits_qiskit.html#get_number_of_gate_params', + 'genQC/platform/backends/circuits_qiskit.py'), + 
'genQC.platform.backends.circuits_qiskit.get_target_control_qubits': ( 'platform/backends/circuits_qiskit.html#get_target_control_qubits', + 'genQC/platform/backends/circuits_qiskit.py'), + 'genQC.platform.backends.circuits_qiskit.instruction_name_to_qiskit_gate': ( 'platform/backends/circuits_qiskit.html#instruction_name_to_qiskit_gate', + 'genQC/platform/backends/circuits_qiskit.py')}, + 'genQC.platform.circuits_generation': { 'genQC.platform.circuits_generation.CircuitConditionType': ( 'platform/circuits_generation.html#circuitconditiontype', + 'genQC/platform/circuits_generation.py'), + 'genQC.platform.circuits_generation.generate_circuit_dataset': ( 'platform/circuits_generation.html#generate_circuit_dataset', + 'genQC/platform/circuits_generation.py'), + 'genQC.platform.circuits_generation.get_rnd_encoded_circuit': ( 'platform/circuits_generation.html#get_rnd_encoded_circuit', + 'genQC/platform/circuits_generation.py'), + 'genQC.platform.circuits_generation.get_rnd_encoded_circuits': ( 'platform/circuits_generation.html#get_rnd_encoded_circuits', + 'genQC/platform/circuits_generation.py')}, + 'genQC.platform.circuits_instructions': { 'genQC.platform.circuits_instructions.CircuitInstruction': ( 'platform/circuits_instructions.html#circuitinstruction', + 'genQC/platform/circuits_instructions.py'), + 'genQC.platform.circuits_instructions.CircuitInstructions': ( 'platform/circuits_instructions.html#circuitinstructions', + 'genQC/platform/circuits_instructions.py'), + 'genQC.platform.circuits_instructions.CircuitInstructions.__init__': ( 'platform/circuits_instructions.html#circuitinstructions.__init__', + 'genQC/platform/circuits_instructions.py'), + 'genQC.platform.circuits_instructions.CircuitInstructions.__repr__': ( 'platform/circuits_instructions.html#circuitinstructions.__repr__', + 'genQC/platform/circuits_instructions.py'), + 'genQC.platform.circuits_instructions.CircuitInstructions.add_instruction': ( 
'platform/circuits_instructions.html#circuitinstructions.add_instruction', + 'genQC/platform/circuits_instructions.py'), + 'genQC.platform.circuits_instructions.CircuitInstructions.data': ( 'platform/circuits_instructions.html#circuitinstructions.data', + 'genQC/platform/circuits_instructions.py'), + 'genQC.platform.circuits_instructions.CircuitInstructions.length': ( 'platform/circuits_instructions.html#circuitinstructions.length', + 'genQC/platform/circuits_instructions.py'), + 'genQC.platform.circuits_instructions.CircuitInstructions.max_gates': ( 'platform/circuits_instructions.html#circuitinstructions.max_gates', + 'genQC/platform/circuits_instructions.py'), + 'genQC.platform.circuits_instructions.CircuitInstructions.num_qubits': ( 'platform/circuits_instructions.html#circuitinstructions.num_qubits', + 'genQC/platform/circuits_instructions.py'), + 'genQC.platform.circuits_instructions.CircuitInstructions.print': ( 'platform/circuits_instructions.html#circuitinstructions.print', + 'genQC/platform/circuits_instructions.py')}, + 'genQC.platform.simulation': { 'genQC.platform.simulation.CircuitBackendType': ( 'platform/simulation.html#circuitbackendtype', + 'genQC/platform/simulation.py'), + 'genQC.platform.simulation.Simulator': ( 'platform/simulation.html#simulator', + 'genQC/platform/simulation.py'), + 'genQC.platform.simulation.Simulator.__init__': ( 'platform/simulation.html#simulator.__init__', + 'genQC/platform/simulation.py'), + 'genQC.platform.simulation.Simulator.backend_to_genqc': ( 'platform/simulation.html#simulator.backend_to_genqc', + 'genQC/platform/simulation.py'), + 'genQC.platform.simulation.Simulator.genqc_to_backend': ( 'platform/simulation.html#simulator.genqc_to_backend', + 'genQC/platform/simulation.py'), + 'genQC.platform.simulation.TensorEncodingType': ( 'platform/simulation.html#tensorencodingtype', + 'genQC/platform/simulation.py'), + 'genQC.platform.simulation.is_circuit_type': ( 'platform/simulation.html#is_circuit_type', + 
'genQC/platform/simulation.py')}, + 'genQC.platform.tokenizer.base_tokenizer': { 'genQC.platform.tokenizer.base_tokenizer.BaseTokenizer': ( 'platform/tokenizer/base_tokenizer.html#basetokenizer', + 'genQC/platform/tokenizer/base_tokenizer.py'), + 'genQC.platform.tokenizer.base_tokenizer.BaseTokenizer.__init__': ( 'platform/tokenizer/base_tokenizer.html#basetokenizer.__init__', + 'genQC/platform/tokenizer/base_tokenizer.py'), + 'genQC.platform.tokenizer.base_tokenizer.BaseTokenizer.decode': ( 'platform/tokenizer/base_tokenizer.html#basetokenizer.decode', + 'genQC/platform/tokenizer/base_tokenizer.py'), + 'genQC.platform.tokenizer.base_tokenizer.BaseTokenizer.encode': ( 'platform/tokenizer/base_tokenizer.html#basetokenizer.encode', + 'genQC/platform/tokenizer/base_tokenizer.py'), + 'genQC.platform.tokenizer.base_tokenizer.BaseTokenizer.tokenize': ( 'platform/tokenizer/base_tokenizer.html#basetokenizer.tokenize', + 'genQC/platform/tokenizer/base_tokenizer.py'), + 'genQC.platform.tokenizer.base_tokenizer.invert_vocabulary': ( 'platform/tokenizer/base_tokenizer.html#invert_vocabulary', + 'genQC/platform/tokenizer/base_tokenizer.py')}, + 'genQC.platform.tokenizer.circuits_tokenizer': { 'genQC.platform.tokenizer.circuits_tokenizer.CircuitTokenizer': ( 'platform/tokenizer/circuits_tokenizer.html#circuittokenizer', + 'genQC/platform/tokenizer/circuits_tokenizer.py'), + 'genQC.platform.tokenizer.circuits_tokenizer.CircuitTokenizer.__init__': ( 'platform/tokenizer/circuits_tokenizer.html#circuittokenizer.__init__', + 'genQC/platform/tokenizer/circuits_tokenizer.py'), + 'genQC.platform.tokenizer.circuits_tokenizer.CircuitTokenizer.decode': ( 'platform/tokenizer/circuits_tokenizer.html#circuittokenizer.decode', + 'genQC/platform/tokenizer/circuits_tokenizer.py'), + 'genQC.platform.tokenizer.circuits_tokenizer.CircuitTokenizer.encode': ( 'platform/tokenizer/circuits_tokenizer.html#circuittokenizer.encode', + 'genQC/platform/tokenizer/circuits_tokenizer.py'), + 
'genQC.platform.tokenizer.circuits_tokenizer.CircuitTokenizer.get_parametrized_tokens': ( 'platform/tokenizer/circuits_tokenizer.html#circuittokenizer.get_parametrized_tokens', + 'genQC/platform/tokenizer/circuits_tokenizer.py'), + 'genQC.platform.tokenizer.circuits_tokenizer.CircuitTokenizer.tokenize': ( 'platform/tokenizer/circuits_tokenizer.html#circuittokenizer.tokenize', + 'genQC/platform/tokenizer/circuits_tokenizer.py')}, + 'genQC.platform.tokenizer.tensor_tokenizer': { 'genQC.platform.tokenizer.tensor_tokenizer.GatePairTokenizer': ( 'platform/tokenizer/tensor_tokenizer.html#gatepairtokenizer', + 'genQC/platform/tokenizer/tensor_tokenizer.py'), + 'genQC.platform.tokenizer.tensor_tokenizer.GatePairTokenizer.__init__': ( 'platform/tokenizer/tensor_tokenizer.html#gatepairtokenizer.__init__', + 'genQC/platform/tokenizer/tensor_tokenizer.py'), + 'genQC.platform.tokenizer.tensor_tokenizer.GatePairTokenizer.decode': ( 'platform/tokenizer/tensor_tokenizer.html#gatepairtokenizer.decode', + 'genQC/platform/tokenizer/tensor_tokenizer.py'), + 'genQC.platform.tokenizer.tensor_tokenizer.GatePairTokenizer.encode': ( 'platform/tokenizer/tensor_tokenizer.html#gatepairtokenizer.encode', + 'genQC/platform/tokenizer/tensor_tokenizer.py'), + 'genQC.platform.tokenizer.tensor_tokenizer.GatePairTokenizer.extract_current_gate_overlap_pairs': ( 'platform/tokenizer/tensor_tokenizer.html#gatepairtokenizer.extract_current_gate_overlap_pairs', + 'genQC/platform/tokenizer/tensor_tokenizer.py'), + 'genQC.platform.tokenizer.tensor_tokenizer.GatePairTokenizer.extract_new_gate_overlap_pairs': ( 'platform/tokenizer/tensor_tokenizer.html#gatepairtokenizer.extract_new_gate_overlap_pairs', + 'genQC/platform/tokenizer/tensor_tokenizer.py'), + 'genQC.platform.tokenizer.tensor_tokenizer.GatePairTokenizer.get_topk_pairs': ( 'platform/tokenizer/tensor_tokenizer.html#gatepairtokenizer.get_topk_pairs', + 'genQC/platform/tokenizer/tensor_tokenizer.py'), + 
'genQC.platform.tokenizer.tensor_tokenizer.GatePairTokenizer.learn': ( 'platform/tokenizer/tensor_tokenizer.html#gatepairtokenizer.learn', + 'genQC/platform/tokenizer/tensor_tokenizer.py'), + 'genQC.platform.tokenizer.tensor_tokenizer.GatePairTokenizer.learn_step': ( 'platform/tokenizer/tensor_tokenizer.html#gatepairtokenizer.learn_step', + 'genQC/platform/tokenizer/tensor_tokenizer.py'), + 'genQC.platform.tokenizer.tensor_tokenizer.GatePairTokenizer.replace_current_overlap_pairs': ( 'platform/tokenizer/tensor_tokenizer.html#gatepairtokenizer.replace_current_overlap_pairs', + 'genQC/platform/tokenizer/tensor_tokenizer.py'), + 'genQC.platform.tokenizer.tensor_tokenizer.GatePairTokenizer.standardize_overlap_pairs': ( 'platform/tokenizer/tensor_tokenizer.html#gatepairtokenizer.standardize_overlap_pairs', + 'genQC/platform/tokenizer/tensor_tokenizer.py'), + 'genQC.platform.tokenizer.tensor_tokenizer.GatePairTokenizer.standardize_vocab_pair': ( 'platform/tokenizer/tensor_tokenizer.html#gatepairtokenizer.standardize_vocab_pair', + 'genQC/platform/tokenizer/tensor_tokenizer.py'), + 'genQC.platform.tokenizer.tensor_tokenizer.GatePairTokenizer.to': ( 'platform/tokenizer/tensor_tokenizer.html#gatepairtokenizer.to', + 'genQC/platform/tokenizer/tensor_tokenizer.py'), + 'genQC.platform.tokenizer.tensor_tokenizer.GatePairTokenizer.tokenize': ( 'platform/tokenizer/tensor_tokenizer.html#gatepairtokenizer.tokenize', + 'genQC/platform/tokenizer/tensor_tokenizer.py'), + 'genQC.platform.tokenizer.tensor_tokenizer.GatePairTokenizer.unpack_col': ( 'platform/tokenizer/tensor_tokenizer.html#gatepairtokenizer.unpack_col', + 'genQC/platform/tokenizer/tensor_tokenizer.py'), + 'genQC.platform.tokenizer.tensor_tokenizer.get_topk_depth_unpacked': ( 'platform/tokenizer/tensor_tokenizer.html#get_topk_depth_unpacked', + 'genQC/platform/tokenizer/tensor_tokenizer.py'), + 'genQC.platform.tokenizer.tensor_tokenizer.sort_config': ( 'platform/tokenizer/tensor_tokenizer.html#sort_config', + 
'genQC/platform/tokenizer/tensor_tokenizer.py')}, 'genQC.scheduler.scheduler': { 'genQC.scheduler.scheduler.Scheduler': ( 'scheduler/scheduler.html#scheduler', 'genQC/scheduler/scheduler.py'), 'genQC.scheduler.scheduler.Scheduler.__init__': ( 'scheduler/scheduler.html#scheduler.__init__', 'genQC/scheduler/scheduler.py'), 'genQC.scheduler.scheduler.Scheduler.add_noise': ( 'scheduler/scheduler.html#scheduler.add_noise', 'genQC/scheduler/scheduler.py'), + 'genQC.scheduler.scheduler.Scheduler.from_config': ( 'scheduler/scheduler.html#scheduler.from_config', + 'genQC/scheduler/scheduler.py'), + 'genQC.scheduler.scheduler.Scheduler.from_scheduler': ( 'scheduler/scheduler.html#scheduler.from_scheduler', + 'genQC/scheduler/scheduler.py'), 'genQC.scheduler.scheduler.Scheduler.get_config': ( 'scheduler/scheduler.html#scheduler.get_config', 'genQC/scheduler/scheduler.py'), 'genQC.scheduler.scheduler.Scheduler.params_config': ( 'scheduler/scheduler.html#scheduler.params_config', @@ -565,6 +1070,8 @@ 'genQC/scheduler/scheduler.py'), 'genQC.scheduler.scheduler.Scheduler.step': ( 'scheduler/scheduler.html#scheduler.step', 'genQC/scheduler/scheduler.py'), + 'genQC.scheduler.scheduler.Scheduler.to': ( 'scheduler/scheduler.html#scheduler.to', + 'genQC/scheduler/scheduler.py'), 'genQC.scheduler.scheduler.Scheduler.unsqueeze_vector_to_shape': ( 'scheduler/scheduler.html#scheduler.unsqueeze_vector_to_shape', 'genQC/scheduler/scheduler.py')}, 'genQC.scheduler.scheduler_ddim': { 'genQC.scheduler.scheduler_ddim.DDIMScheduler': ( 'scheduler/scheduler_ddim.html#ddimscheduler', @@ -581,36 +1088,102 @@ 'genQC/scheduler/scheduler_ddim.py')}, 'genQC.scheduler.scheduler_ddpm': { 'genQC.scheduler.scheduler_ddpm.DDPMScheduler': ( 'scheduler/scheduler_ddpm.html#ddpmscheduler', 'genQC/scheduler/scheduler_ddpm.py'), + 'genQC.scheduler.scheduler_ddpm.DDPMScheduler.SNR': ( 'scheduler/scheduler_ddpm.html#ddpmscheduler.snr', + 'genQC/scheduler/scheduler_ddpm.py'), 
'genQC.scheduler.scheduler_ddpm.DDPMScheduler.__init__': ( 'scheduler/scheduler_ddpm.html#ddpmscheduler.__init__', 'genQC/scheduler/scheduler_ddpm.py'), 'genQC.scheduler.scheduler_ddpm.DDPMScheduler.add_noise': ( 'scheduler/scheduler_ddpm.html#ddpmscheduler.add_noise', 'genQC/scheduler/scheduler_ddpm.py'), - 'genQC.scheduler.scheduler_ddpm.DDPMScheduler.add_noise_LEdit': ( 'scheduler/scheduler_ddpm.html#ddpmscheduler.add_noise_ledit', - 'genQC/scheduler/scheduler_ddpm.py'), + 'genQC.scheduler.scheduler_ddpm.DDPMScheduler.enforce_zero_terminal_snr': ( 'scheduler/scheduler_ddpm.html#ddpmscheduler.enforce_zero_terminal_snr', + 'genQC/scheduler/scheduler_ddpm.py'), 'genQC.scheduler.scheduler_ddpm.DDPMScheduler.params_config': ( 'scheduler/scheduler_ddpm.html#ddpmscheduler.params_config', 'genQC/scheduler/scheduler_ddpm.py'), 'genQC.scheduler.scheduler_ddpm.DDPMScheduler.set_timesteps': ( 'scheduler/scheduler_ddpm.html#ddpmscheduler.set_timesteps', 'genQC/scheduler/scheduler_ddpm.py'), 'genQC.scheduler.scheduler_ddpm.DDPMScheduler.step': ( 'scheduler/scheduler_ddpm.html#ddpmscheduler.step', 'genQC/scheduler/scheduler_ddpm.py'), - 'genQC.scheduler.scheduler_ddpm.DDPMScheduler.to_device': ( 'scheduler/scheduler_ddpm.html#ddpmscheduler.to_device', - 'genQC/scheduler/scheduler_ddpm.py'), + 'genQC.scheduler.scheduler_ddpm.DDPMScheduler.to': ( 'scheduler/scheduler_ddpm.html#ddpmscheduler.to', + 'genQC/scheduler/scheduler_ddpm.py'), 'genQC.scheduler.scheduler_ddpm.DDPMSchedulerOutput': ( 'scheduler/scheduler_ddpm.html#ddpmscheduleroutput', 'genQC/scheduler/scheduler_ddpm.py')}, - 'genQC.util': { 'genQC.util.DataLoaders': ('util.html#dataloaders', 'genQC/util.py'), - 'genQC.util.DataLoaders.__init__': ('util.html#dataloaders.__init__', 'genQC/util.py'), - 'genQC.util.MemoryCleaner': ('util.html#memorycleaner', 'genQC/util.py'), - 'genQC.util.MemoryCleaner._clean_ipython_hist': ( 'util.html#memorycleaner._clean_ipython_hist', - 'genQC/util.py'), - 
'genQC.util.MemoryCleaner._clean_tb': ('util.html#memorycleaner._clean_tb', 'genQC/util.py'), - 'genQC.util.MemoryCleaner.purge_mem': ('util.html#memorycleaner.purge_mem', 'genQC/util.py'), - 'genQC.util.infer_torch_device': ('util.html#infer_torch_device', 'genQC/util.py'), - 'genQC.util.latents_to_pil': ('util.html#latents_to_pil', 'genQC/util.py'), - 'genQC.util.normalize_tensor': ('util.html#normalize_tensor', 'genQC/util.py'), - 'genQC.util.number_of_paramters': ('util.html#number_of_paramters', 'genQC/util.py'), - 'genQC.util.plot_image_grid': ('util.html#plot_image_grid', 'genQC/util.py'), - 'genQC.util.savePdf': ('util.html#savepdf', 'genQC/util.py'), - 'genQC.util.savePng': ('util.html#savepng', 'genQC/util.py'), - 'genQC.util.saveSvg': ('util.html#savesvg', 'genQC/util.py'), - 'genQC.util.scale_tensor': ('util.html#scale_tensor', 'genQC/util.py'), - 'genQC.util.virtual': ('util.html#virtual', 'genQC/util.py')}}} + 'genQC.scheduler.scheduler_dpm': { 'genQC.scheduler.scheduler_dpm.DPMScheduler': ( 'scheduler/scheduler_dpm.html#dpmscheduler', + 'genQC/scheduler/scheduler_dpm.py'), + 'genQC.scheduler.scheduler_dpm.DPMScheduler.__init__': ( 'scheduler/scheduler_dpm.html#dpmscheduler.__init__', + 'genQC/scheduler/scheduler_dpm.py'), + 'genQC.scheduler.scheduler_dpm.DPMScheduler.params_config': ( 'scheduler/scheduler_dpm.html#dpmscheduler.params_config', + 'genQC/scheduler/scheduler_dpm.py'), + 'genQC.scheduler.scheduler_dpm.DPMScheduler.step': ( 'scheduler/scheduler_dpm.html#dpmscheduler.step', + 'genQC/scheduler/scheduler_dpm.py'), + 'genQC.scheduler.scheduler_dpm.DPMSchedulerOutput': ( 'scheduler/scheduler_dpm.html#dpmscheduleroutput', + 'genQC/scheduler/scheduler_dpm.py')}, + 'genQC.utils.async_fn': { 'genQC.utils.async_fn.MemoryMappedArray': ( 'utils/async_fn.html#memorymappedarray', + 'genQC/utils/async_fn.py'), + 'genQC.utils.async_fn.MemoryMappedArray.__init__': ( 'utils/async_fn.html#memorymappedarray.__init__', + 'genQC/utils/async_fn.py'), + 
'genQC.utils.async_fn.MemoryMappedArray.clean': ( 'utils/async_fn.html#memorymappedarray.clean', + 'genQC/utils/async_fn.py'), + 'genQC.utils.async_fn.MemoryMappedArray.get_obj': ( 'utils/async_fn.html#memorymappedarray.get_obj', + 'genQC/utils/async_fn.py'), + 'genQC.utils.async_fn.run_parallel_jobs': ( 'utils/async_fn.html#run_parallel_jobs', + 'genQC/utils/async_fn.py')}, + 'genQC.utils.config_loader': { 'genQC.utils.config_loader.class_to_str': ( 'utils/config_loader.html#class_to_str', + 'genQC/utils/config_loader.py'), + 'genQC.utils.config_loader.config_to_dict': ( 'utils/config_loader.html#config_to_dict', + 'genQC/utils/config_loader.py'), + 'genQC.utils.config_loader.get_obj_from_str': ( 'utils/config_loader.html#get_obj_from_str', + 'genQC/utils/config_loader.py'), + 'genQC.utils.config_loader.instantiate_from_config': ( 'utils/config_loader.html#instantiate_from_config', + 'genQC/utils/config_loader.py'), + 'genQC.utils.config_loader.load_config': ( 'utils/config_loader.html#load_config', + 'genQC/utils/config_loader.py'), + 'genQC.utils.config_loader.load_model_state_dict': ( 'utils/config_loader.html#load_model_state_dict', + 'genQC/utils/config_loader.py'), + 'genQC.utils.config_loader.load_tensor': ( 'utils/config_loader.html#load_tensor', + 'genQC/utils/config_loader.py'), + 'genQC.utils.config_loader.save_dataclass_yaml': ( 'utils/config_loader.html#save_dataclass_yaml', + 'genQC/utils/config_loader.py'), + 'genQC.utils.config_loader.save_dict_yaml': ( 'utils/config_loader.html#save_dict_yaml', + 'genQC/utils/config_loader.py'), + 'genQC.utils.config_loader.store_model_state_dict': ( 'utils/config_loader.html#store_model_state_dict', + 'genQC/utils/config_loader.py'), + 'genQC.utils.config_loader.store_tensor': ( 'utils/config_loader.html#store_tensor', + 'genQC/utils/config_loader.py')}, + 'genQC.utils.math': { 'genQC.utils.math.gram_schmidt': ('utils/math.html#gram_schmidt', 'genQC/utils/math.py'), + 'genQC.utils.math.matrix_power': 
('utils/math.html#matrix_power', 'genQC/utils/math.py')}, + 'genQC.utils.misc_utils': { 'genQC.utils.misc_utils.DataLoaders': ( 'utils/misc_utils.html#dataloaders', + 'genQC/utils/misc_utils.py'), + 'genQC.utils.misc_utils.DataLoaders.__init__': ( 'utils/misc_utils.html#dataloaders.__init__', + 'genQC/utils/misc_utils.py'), + 'genQC.utils.misc_utils.MemoryCleaner': ( 'utils/misc_utils.html#memorycleaner', + 'genQC/utils/misc_utils.py'), + 'genQC.utils.misc_utils.MemoryCleaner._clean_ipython_hist': ( 'utils/misc_utils.html#memorycleaner._clean_ipython_hist', + 'genQC/utils/misc_utils.py'), + 'genQC.utils.misc_utils.MemoryCleaner._clean_tb': ( 'utils/misc_utils.html#memorycleaner._clean_tb', + 'genQC/utils/misc_utils.py'), + 'genQC.utils.misc_utils.MemoryCleaner.free_memory': ( 'utils/misc_utils.html#memorycleaner.free_memory', + 'genQC/utils/misc_utils.py'), + 'genQC.utils.misc_utils.MemoryCleaner.purge_mem': ( 'utils/misc_utils.html#memorycleaner.purge_mem', + 'genQC/utils/misc_utils.py'), + 'genQC.utils.misc_utils.cache_data': ( 'utils/misc_utils.html#cache_data', + 'genQC/utils/misc_utils.py'), + 'genQC.utils.misc_utils.get_element_matching_indices': ( 'utils/misc_utils.html#get_element_matching_indices', + 'genQC/utils/misc_utils.py'), + 'genQC.utils.misc_utils.get_entanglement_bins': ( 'utils/misc_utils.html#get_entanglement_bins', + 'genQC/utils/misc_utils.py'), + 'genQC.utils.misc_utils.infer_torch_device': ( 'utils/misc_utils.html#infer_torch_device', + 'genQC/utils/misc_utils.py'), + 'genQC.utils.misc_utils.latents_to_pil': ( 'utils/misc_utils.html#latents_to_pil', + 'genQC/utils/misc_utils.py'), + 'genQC.utils.misc_utils.normalize_tensor': ( 'utils/misc_utils.html#normalize_tensor', + 'genQC/utils/misc_utils.py'), + 'genQC.utils.misc_utils.number_of_paramters': ( 'utils/misc_utils.html#number_of_paramters', + 'genQC/utils/misc_utils.py'), + 'genQC.utils.misc_utils.plot_image_grid': ( 'utils/misc_utils.html#plot_image_grid', + 'genQC/utils/misc_utils.py'), 
+ 'genQC.utils.misc_utils.savePdf': ('utils/misc_utils.html#savepdf', 'genQC/utils/misc_utils.py'), + 'genQC.utils.misc_utils.savePng': ('utils/misc_utils.html#savepng', 'genQC/utils/misc_utils.py'), + 'genQC.utils.misc_utils.saveSvg': ('utils/misc_utils.html#savesvg', 'genQC/utils/misc_utils.py'), + 'genQC.utils.misc_utils.scale_tensor': ( 'utils/misc_utils.html#scale_tensor', + 'genQC/utils/misc_utils.py'), + 'genQC.utils.misc_utils.set_seed': ('utils/misc_utils.html#set_seed', 'genQC/utils/misc_utils.py'), + 'genQC.utils.misc_utils.virtual': ('utils/misc_utils.html#virtual', 'genQC/utils/misc_utils.py')}}} diff --git a/genQC/platform/simulation/__init__.py b/genQC/benchmark/__init__.py similarity index 100% rename from genQC/platform/simulation/__init__.py rename to genQC/benchmark/__init__.py diff --git a/genQC/benchmark/bench_compilation.py b/genQC/benchmark/bench_compilation.py new file mode 100644 index 0000000..5654230 --- /dev/null +++ b/genQC/benchmark/bench_compilation.py @@ -0,0 +1,192 @@ +"""Functions to test and benchmark unitary compilation.""" + +# AUTOGENERATED! DO NOT EDIT! File to edit: ../../src/benchmark/bench_compilation.ipynb. 
+ +# %% auto 0 +__all__ = ['sigma_x', 'sigma_y', 'sigma_z', 'SpecialUnitaries', 'qubit_tensor_product', 'BaseHamiltonian', 'IsingHamiltonian', + 'XXZHamiltonian'] + +# %% ../../src/benchmark/bench_compilation.ipynb 2 +from ..imports import * + +# %% ../../src/benchmark/bench_compilation.ipynb 5 +class SpecialUnitaries: + """Special unitary matrices to benchmark compilation.""" + + @staticmethod + def QFT(num_qubits: int) -> torch.Tensor: + """The Quantum Fourier transform (QFT) unitary for `num_qubits`-qubits.""" + + N = 2**num_qubits + wN = np.exp(2.0j*np.pi/N) + + U = torch.zeros((N, N), dtype=torch.complex128) + for x in range(N): + U[:, x] = torch.tensor([np.power(wN, x*k, dtype=complex) for k in range(N)]) + + U *= 1.0/np.sqrt(N) + return U + +# %% ../../src/benchmark/bench_compilation.ipynb 9 +sigma_x = torch.tensor([[0, 1], + [1, 0]], + dtype=torch.complex128) + +sigma_y = torch.tensor([[ 0, -1j], + [1j, 0]], + dtype=torch.complex128) + +sigma_z = torch.tensor([[1, 0], + [0, -1]], + dtype=torch.complex128) + +# %% ../../src/benchmark/bench_compilation.ipynb 11 +def qubit_tensor_product(num_qubits: int, *ops: torch.Tensor, pos: int | Sequence[int]) -> torch.Tensor: + """ + Make tensor product with identities, assumes `ops` placed at `pos` in the tensor product ordering. 
+ """ + + _ops = [torch.eye(2) for i in range(num_qubits)] + + if isinstance(pos, int): + pos = [pos] + elif isinstance(pos, Sequence): + assert len(pos) == len(ops) + else: + raise NotImplementedError() + + for pos_i, ops_i in zip(pos, ops): + _ops[pos_i] = ops_i + + mat = _ops[0] + for op in _ops[1:]: + mat = torch.kron(mat, op) + + return mat + +# %% ../../src/benchmark/bench_compilation.ipynb 19 +class BaseHamiltonian(abc.ABC): + """Base implementation of a Hamiltonian.""" + + def __init__(self, device: Optional[str | torch.device] = None) -> None: + self.device = default(device, "cpu") + self._generate_matrix() + + if not torch.allclose(self.data.adjoint(), self.data): + raise RuntimeError("Generated Hamiltonian matrix is not self-adjoint!") + + @abc.abstractmethod + def _generate_matrix(self) -> torch.Tensor: + """Generates the Hamiltonian matrix into `self.data`.""" + raise NotImplementedError() + + def get_evolution(self, t: float | torch.Tensor, split_complex_channel: bool = False, dtype: Optional[torch.dtype] = None) -> torch.Tensor: + """ + Assuming `h_bar=1`. Returns the unitary evolution in marix form. + """ + U = torch.linalg.matrix_exp(-1j * t * self.data) + + if split_complex_channel: + U = torch.stack([torch.real(U), torch.imag(U)]) + + if exists(dtype): + U = U.to(dtype) + + return U + +# %% ../../src/benchmark/bench_compilation.ipynb 21 +class IsingHamiltonian(BaseHamiltonian): + """Implementation of the Ising Hamiltonian on a qubit chain.""" + + def __init__(self, + h: float, + J: float, + num_qubits: int, + periodic_boundary: bool = True, + device: Optional[str | torch.device] = None) -> None: + """ + h: Magnetic field + J: Coupling constant + """ + self.h = h + self.J = J + self.num_qubits = num_qubits + self.periodic_boundary = periodic_boundary + super().__init__(device) + + def _generate_matrix(self) -> torch.Tensor: + """ + Note: We take big endian convention in placing the `i,j`-sigmas in tensor product ordering. 
+ For little endian we need to use `pos = self.num_qubits-i`. + """ + + N = 2**self.num_qubits + ham = torch.zeros((N, N), dtype=torch.complex128) + + pairs = [(i, i+1) for i in range(self.num_qubits-1)] + + if self.periodic_boundary: + pairs.append((self.num_qubits-1, 0)) + + for (i, j) in pairs: + Z_term = qubit_tensor_product(self.num_qubits, sigma_z, sigma_z, pos=[i, j]) + + # Coupling + Perturbation + ham += -self.J * Z_term + + # Magnetic + for i in range(self.num_qubits): + ham += -self.h * qubit_tensor_product(self.num_qubits, sigma_x, pos=i) + + self.data = ham.to(self.device) + +# %% ../../src/benchmark/bench_compilation.ipynb 29 +class XXZHamiltonian(BaseHamiltonian): + """Implementation of the XXZ Hamiltonian on a qubit chain.""" + + def __init__(self, + h: float, + J: float, + delta: float, + num_qubits: int, + periodic_boundary: bool = True, + device: Optional[str | torch.device] = None) -> None: + """ + h: Magnetic field + J: Coupling constant + delta: Perturbation + """ + self.h = h + self.J = J + self.delta = delta + self.num_qubits = num_qubits + self.periodic_boundary = periodic_boundary + super().__init__(device) + + def _generate_matrix(self) -> torch.Tensor: + """ + Note: We take big endian convention in placing the `i,j`-sigmas in tensor product ordering. + For little endian we need to use `pos = self.num_qubits-i`. 
+ """ + + N = 2**self.num_qubits + ham = torch.zeros((N, N), dtype=torch.complex128) + + pairs = [(i, i+1) for i in range(self.num_qubits-1)] + + if self.periodic_boundary: + pairs.append((self.num_qubits-1, 0)) + + for (i, j) in pairs: + X_term = qubit_tensor_product(self.num_qubits, sigma_x, sigma_x, pos=[i, j]) + Y_term = qubit_tensor_product(self.num_qubits, sigma_y, sigma_y, pos=[i, j]) + Z_term = qubit_tensor_product(self.num_qubits, sigma_z, sigma_z, pos=[i, j]) + + # Coupling + Perturbation + ham += -self.J * (X_term + Y_term + self.delta * Z_term) + + # Magnetic + for i in range(self.num_qubits): + ham += -self.h * qubit_tensor_product(self.num_qubits, sigma_x, pos=i) + + self.data = ham.to(self.device) diff --git a/genQC/config_loader.py b/genQC/config_loader.py deleted file mode 100644 index 8db4012..0000000 --- a/genQC/config_loader.py +++ /dev/null @@ -1,60 +0,0 @@ -# AUTOGENERATED! DO NOT EDIT! File to edit: ../src/config_loader.ipynb. - -# %% auto 0 -__all__ = ['class_to_str', 'load_config', 'config_to_dict', 'save_dataclass_yaml', 'save_dict_yaml', 'get_obj_from_str', - 'instantiate_from_config', 'load_model_from_config'] - -# %% ../src/config_loader.ipynb 3 -from .imports import * -from omegaconf import OmegaConf - -# %% ../src/config_loader.ipynb 5 -def class_to_str(cls): - return str(cls)[8:-2] - -# %% ../src/config_loader.ipynb 6 -def load_config(file_path): - return OmegaConf.load(f"{file_path}") - -# %% ../src/config_loader.ipynb 7 -def config_to_dict(config): - return OmegaConf.to_container(config) - -# %% ../src/config_loader.ipynb 8 -def save_dataclass_yaml(data_obj, file_path): - conf = OmegaConf.structured(data_obj) - with open(file_path, 'w') as f: - OmegaConf.save(config=conf, f=f) - -# %% ../src/config_loader.ipynb 9 -def save_dict_yaml(dict_obj, file_path): - conf = OmegaConf.create(dict_obj) - with open(file_path, 'w') as f: - OmegaConf.save(config=conf, f=f) - -# %% ../src/config_loader.ipynb 14 -def get_obj_from_str(string, 
reload=False): - module, cls = string.rsplit(".", 1) - if reload: - module_imp = importlib.import_module(module) - importlib.reload(module_imp) - return getattr(importlib.import_module(module, package=None), cls) - -# %% ../src/config_loader.ipynb 15 -def instantiate_from_config(config): - if not "target" in config: raise KeyError("Expected key `target` to instantiate.") - if not "params" in config: print(f"[WARNING] Expected key `params` to instantiate.") - return get_obj_from_str(config["target"])(**config.get("params", dict())) - -# %% ../src/config_loader.ipynb 16 -def load_model_from_config(config, ckpt, device): - - print(f"Loading model from {ckpt}") - pl_sd = torch.load(ckpt, map_location=torch.device(device).type, weights_only=True) - - model = instantiate_from_config(config.model) - - sd = pl_sd["state_dict"] - m, u = model.load_state_dict(sd, strict=True) - - return model.to(device) diff --git a/genQC/dataset/balancing.py b/genQC/dataset/balancing.py new file mode 100644 index 0000000..3669711 --- /dev/null +++ b/genQC/dataset/balancing.py @@ -0,0 +1,72 @@ +"""Helper functions used to balance a dataset.""" + +# AUTOGENERATED! DO NOT EDIT! File to edit: ../../src/dataset/balancing.ipynb. + +# %% auto 0 +__all__ = ['get_tensor_gate_length', 'add_balance_fn_quantile_qc_length'] + +# %% ../../src/dataset/balancing.ipynb 2 +from ..imports import * +import genQC.dataset.dataset_helper as dahe + +# %% ../../src/dataset/balancing.ipynb 4 +def get_tensor_gate_length(clr_tensor: torch.Tensor, padding_token: int = 0) -> torch.Tensor: + """ + Returns the gate count of a tokenized circuit. + Make sure you use use the correct `padding_token`. 
+ + """ + assert clr_tensor.dim() == 3, "[b, s, t]" + + red_clr_tensor = (clr_tensor != padding_token).any(dim=1) # [b, t] + return torch.count_nonzero(red_clr_tensor, dim=1) # [b] + +# %% ../../src/dataset/balancing.ipynb 5 +def add_balance_fn_quantile_qc_length(indices: Union[np.ndarray, torch.Tensor], + x: Union[np.ndarray, torch.Tensor], + y: Union[np.ndarray, torch.Tensor], + *z, + padding_token: int = 0, + balance_quantile: float = 0.5, + device: torch.device = torch.device("cpu"), + quantile_length_weights: Optional[Callable[[torch.Tensor, torch.Tensor], torch.Tensor]] = None) -> torch.Tensor: + """Balances according to gate length.""" + + xb = x[indices].to(device) + l = get_tensor_gate_length(xb, padding_token=padding_token).to(device) + + l_uniques, l_uniques_cnt = torch.unique(l, dim=0, return_counts=True) + + #----------------------------------- + # samples = torch.min(l_uniques_cnt) + # samples = torch.median(l_uniques_cnt) + samples = torch.quantile(l_uniques_cnt.float(), balance_quantile, interpolation='nearest', dim=0).to(l_uniques_cnt.dtype) + samples = max(samples, 2) + + #----------------------------------- + sub_ind = list() + for l_unique in l_uniques.to(device): + comp = (l==l_unique) + ind = comp.nonzero().squeeze().cpu() + + if ind.dim() > 0: + if exists(quantile_length_weights): + _samples = int(quantile_length_weights(l_unique, samples)) + else: + _samples = samples + + ind = dahe.shuffle_tensor_dataset(ind) + ind = ind[:_samples] + else: + ind = ind[None] + + sub_ind.append(ind) + + sub_ind = torch.cat(sub_ind, dim=0) + + indices = indices[sub_ind] + + if indices.ndim < 1: + indices = indices[None] + + return indices diff --git a/genQC/dataset/cached_qc_dataset.py b/genQC/dataset/cached_dataset.py similarity index 61% rename from genQC/dataset/cached_qc_dataset.py rename to genQC/dataset/cached_dataset.py index 90e8cf9..88711ae 100644 --- a/genQC/dataset/cached_qc_dataset.py +++ b/genQC/dataset/cached_dataset.py @@ -1,20 +1,32 @@ -# 
AUTOGENERATED! DO NOT EDIT! File to edit: ../../src/dataset/cached_qc_dataset.ipynb. +"""Classes to create a dataset with cached labels.""" + +# AUTOGENERATED! DO NOT EDIT! File to edit: ../../src/dataset/cached_dataset.ipynb. # %% auto 0 -__all__ = ['Cached_OpenClip_Dataset'] +__all__ = ['CachedOpenCLIPDatasetConfig', 'CachedOpenCLIPDataset'] -# %% ../../src/dataset/cached_qc_dataset.ipynb 3 +# %% ../../src/dataset/cached_dataset.ipynb 2 from ..imports import * -from .qc_dataset import Qc_Config_Dataset -from .config_dataset import Config_Dataset -from ..config_loader import * +from .config_dataset import ConfigDataset, ConfigDatasetConfig +from ..utils.config_loader import * + +# %% ../../src/dataset/cached_dataset.ipynb 3 +@dataclass +class CachedOpenCLIPDatasetConfig(ConfigDatasetConfig): + pass + +# %% ../../src/dataset/cached_dataset.ipynb 4 +class CachedOpenCLIPDataset(ConfigDataset): + """ + Adds `.caching` to the `ConfigDataset` class. + + Cached dataset that caches the label `y` prompts using the CLIP `text_encoder`. This speeds up training significantly. 
+ """ -# %% ../../src/dataset/cached_qc_dataset.ipynb 4 -class Cached_OpenClip_Dataset(Qc_Config_Dataset): - """Adds `.caching` to the `Quantum circuit dataset` class.""" + #----------------------------------- - def x_y_preprocess(self, balance_max, max_samples=None): - x_proc, y_proc, *z = super().x_y_preprocess(balance_max=balance_max, max_samples=max_samples) + def x_y_preprocess(self, balance_max, shuffle=False, max_samples=None, make_unique=True): + x_proc, y_proc, *z = super().x_y_preprocess(balance_max=balance_max, shuffle=shuffle, max_samples=max_samples, make_unique=make_unique) y_proc = self.caching(y_proc) return x_proc, y_proc, *z @@ -39,29 +51,21 @@ def caching(self, y_proc, y_on_cpu=False): if y_on_cpu: y_tok = y_tok.cpu() - #now for using cache we need the uniques and the corresponding indices of the uniques - y_uniques, y_ptrs = torch.unique(torch.cat([self.text_encoder.empty_token.to(y_tok.device), y_tok]), dim=0, return_inverse=True) + # Now for using cache we need the uniques and the corresponding indices of the uniques + y_uniques, y_ptrs = torch.unique(torch.cat([self.text_encoder.empty_token.to(y_tok.device), y_tok], dim=0), dim=0, return_inverse=True) cached_empty_token_index = y_ptrs[0] #store what index the empty token has y_ptrs = y_ptrs[1:] #remove the cat empty token - #use cache + # Use cache print(" - generate_cache") self.text_encoder.generate_cache(tokens=y_uniques, cached_empty_token_index=cached_empty_token_index, y_on_cpu=y_on_cpu) - print("[INFO]: Generated cache") - return y_ptrs + print(f"[INFO]: Generated cache, {y_ptrs.shape=}") + return y_ptrs.clone() #------------------------------------------- def get_dataloaders(self, batch_size, text_encoder, p_valid=0.1, balance_max=None, max_samples=None): self.text_encoder = text_encoder - return super().get_dataloaders(batch_size, p_valid, balance_max, max_samples) - - #------------------------------------------- - - @staticmethod - def from_config_file(config_path, device: 
torch.device, save_path: str=None): - config = load_config(config_path) - config["target"] = class_to_str(Cached_OpenClip_Dataset) - return Config_Dataset.from_config(config, device, save_path) + return super().get_dataloaders(batch_size, p_valid, balance_max, max_samples) diff --git a/genQC/dataset/circuits_dataset.py b/genQC/dataset/circuits_dataset.py new file mode 100644 index 0000000..429071d --- /dev/null +++ b/genQC/dataset/circuits_dataset.py @@ -0,0 +1,297 @@ +"""Dataset for quantum circuits.""" + +# AUTOGENERATED! DO NOT EDIT! File to edit: ../../src/dataset/circuits_dataset.ipynb. + +# %% auto 0 +__all__ = ['CircuitsConfigDatasetConfig', 'CircuitsConfigDataset', 'MixedCircuitsConfigDatasetConfig', + 'MixedCircuitsConfigDataset'] + +# %% ../../src/dataset/circuits_dataset.ipynb 2 +from ..imports import * +from .cached_dataset import CachedOpenCLIPDataset, CachedOpenCLIPDatasetConfig +from .mixed_cached_dataset import MixedCachedOpenCLIPDataset, MixedCachedOpenCLIPDatasetConfig +from ..utils.config_loader import * +from .config_dataset import ConfigDataset +from .dataset_helper import shuffle_tensor_dataset +from ..utils.misc_utils import MemoryCleaner + +# %% ../../src/dataset/circuits_dataset.ipynb 4 +@dataclass +class CircuitsConfigDatasetConfig(CachedOpenCLIPDatasetConfig): + optimized: bool + random_samples: int + num_of_qubits: int + min_gates: int + max_gates: int + max_params: int + gate_pool: list[str] + +# %% ../../src/dataset/circuits_dataset.ipynb 5 +class CircuitsConfigDataset(CachedOpenCLIPDataset): + """Dataset for quantum circuits, access `gate_pool` directly and all other paras with `.params_config`""" + + req_params = [f.name for f in dataclasses.fields(CircuitsConfigDatasetConfig)] + + #----------------------------------- + def __init__(self, device: torch.device=torch.device("cpu"), **parameters) -> None: + super().__init__(device, **parameters) + + + if isinstance(list(parameters["gate_pool"])[0], str): + self.gate_pool = 
list(parameters["gate_pool"]) + + else: + try: + self.gate_pool = [get_obj_from_str(node) for node in parameters["gate_pool"]] + except Exception as er: + print(f"[WARNING]: error => {er}") + print(f"[WARNING]: gate_pool is passed as str") + self.gate_pool = [str(node) for node in parameters["gate_pool"]] + + @property + def params_config(self): + params_config = super().params_config + + if type(self) == CircuitsConfigDataset: + params_config = CircuitsConfigDatasetConfig(**params_config) + return params_config + +# %% ../../src/dataset/circuits_dataset.ipynb 8 +@dataclass +class MixedCircuitsConfigDatasetConfig(CircuitsConfigDatasetConfig, MixedCachedOpenCLIPDatasetConfig): + pass + +# %% ../../src/dataset/circuits_dataset.ipynb 9 +class MixedCircuitsConfigDataset(CircuitsConfigDataset, MixedCachedOpenCLIPDataset): + """ + Dataset that uses multiple cached dataset and combines them with padding, either i) Bucket or ii) Max. + Also provides a corresponding `collate_fn` for training. + """ + + req_params = [f.name for f in dataclasses.fields(MixedCircuitsConfigDatasetConfig)] + + #----------------------------------- + + @property + def params_config(self): + params_config = super().params_config + if type(self) == MixedCircuitsConfigDataset: + params_config = MixedCircuitsConfigDatasetConfig(**params_config) + return params_config + + #----------------------------------- + + def _get_cut_sizes(self, z): + z_0 = torch.max(z[:, 0]) # space + z_1 = torch.max(z[:, 1]) # time + z_1 = (torch.ceil(z_1 / self.model_scale_factor) * self.model_scale_factor).to(torch.int32) + return z_0, z_1 + + def _cut(self, x, y, z): + z_0, z_1 = self._get_cut_sizes(z) + + x = x[:, :z_0, :z_1] # cut down to max [b, bits, time] of batch + return x, y + + def _cut_compilation_params(self, x, y, p, U, z): + z_0, z_1 = self._get_cut_sizes(z) + bit_exp = 2**z_0 + + x = x[:, :z_0, :z_1] # cut down to max [b, bits, time] of batch + p = p[:, :, :z_1] # cut down to max [b, nP , time] of batch + U = 
U[:, :, :bit_exp, :bit_exp] # [b, Re/Im, 2^n, 2^n] + return x, y, p, U + + #----------------------------------- + # BUCKET PADDING, all x,y are already passed as batch + + def cut_padding_Bucket_collate_fn(self, b): + """this function is called for training for every batch, order in b is store dict""" + + x, y, z = b[0] + x, y = self._cut(x, y, z) + return x, y + + + def cut_padding_Bucket_collate_fn_compilation(self, b): + """this function is called for training for every batch""" + raise NotImplementedError() + + + def cut_padding_Bucket_collate_fn_compilation_params(self, b): + """this function is called for training for every batch, order in b is store dict""" + + b = b[0] # {'x': 'tensor', 'y': 'numpy', 'params': 'tensor', 'U': 'tensor', 'z': 'tensor'} + + x = b[0] + y = b[1] + p = b[2] + U = b[3] + z = b[4] + + #--------------- + + x, y, p, U = self._cut_compilation_params(x, y, p, U, z) + + return x, y, p, U + + #----------------------------------- + # MAX PADDING, x are passes as sampled list (batch), std collate them + + def cut_padding_collate_fn(self, b): + """this function is called for training for every batch""" + x, y, z = torch.utils.data.default_collate(b) + x, y = self._cut(x, y, z) + return x, y + + def cut_padding_collate_fn_compilation(self, b): + """this function is called for training for every batch""" + raise NotImplementedError() + + def cut_padding_collate_fn_compilation_params(self, b): + """this function is called for training for every batch, order in b is store dict""" + # {'x': 'tensor', 'y': 'numpy', 'params': 'tensor', 'U': 'tensor', 'z': 'tensor'} + x, y, p, U, z = torch.utils.data.default_collate(b) + x, y, p, U = self._cut_compilation_params(x, y, p, U, z) + return x, y, p, U + + #----------------------------------- + + @staticmethod + def _preprocess_dataset(dataset, device, balance_max, max_samples, i, shuffle, make_unique, pad_constant, + model_scale_factor, parameters, max_gates, max_qubits): + + dataset = dataset.to(device) 
+ + existing_z_type = dataset.store_dict.pop("z", None) # remove z, as it would mess up `ConfigDataset.x_y_preprocess`, it would be put in `*c`. + if exists(existing_z_type): + assert existing_z_type == "tensor" + z = dataset.z + else: + z = None + + x, y, *c = ConfigDataset.x_y_preprocess(dataset, balance_max=balance_max, max_samples=max_samples[i], shuffle=shuffle, make_unique=make_unique) + x = x.to(device) # [b, s, t] + + print(f" - dataset size after balancing {x.shape[0]}") + + #------- + # store original size + if not_exists(z): + z = torch.zeros((x.shape[0], 2), device=device, dtype=torch.int32) + z[:, 0] = max(dataset.params_config.num_of_qubits, 1) + + red_x = torch.sum(x.abs(), dim=1) # [b, t] .. collaps the zeros to get circuit length + z[:, 1] = torch.count_nonzero(red_x, dim=1) # [b] + z[z[:, 1]==0, 1] = 1 + + # Create masks for space and time padding + space_mask = torch.arange(x.shape[1], device=x.device).unsqueeze(0) >= z[:, 0].unsqueeze(1) + time_mask = torch.arange(x.shape[2], device=x.device).unsqueeze(0) >= z[:, 1].unsqueeze(1) + + # Apply masks to pad_constant to handle both dimensions + x = torch.where(space_mask.unsqueeze(2), pad_constant, x) + x = torch.where( time_mask.unsqueeze(1), pad_constant, x) + + z[:, 1] = (torch.ceil(z[:, 1] / model_scale_factor) * model_scale_factor).to(torch.int32) #for cut needs multiple + + #------- + + # now pad x, padding is defined from last dim forward! 
+ pad = (0, max_gates-dataset.params_config.max_gates, 0, max_qubits-dataset.params_config.num_of_qubits) + x = F.pad(x, pad, "constant", pad_constant) + + #------- + + c = MixedCachedOpenCLIPDataset._add_missing_conditions(parameters, dataset, c, x.shape[0], "cpu") + + dataset = dataset.to("cpu") #helps with gpu mem overflowing + del dataset + + return x.cpu(), y, z.cpu(), *[ic.cpu() for ic in c] + + @staticmethod + def from_datasets(datasets: list[CircuitsConfigDataset], balance_maxes: list, pad_constant, device: torch.device=torch.device("cpu"), bucket_batch_size=None, + max_samples=None, shuffle=True, make_unique=True, test_split=0.05, pad_with_memmap=False, **parameters): + if pad_constant == 0: + print("[WARNING]: >pad_constant == 0<; This could be an error!") + + model_scale_factor = parameters["model_scale_factor"] + + max_qubits = max(dataset.params_config.num_of_qubits for dataset in datasets) + max_gates = max(dataset.params_config.max_gates for dataset in datasets) + max_gates = int(np.ceil(max_gates /model_scale_factor) * model_scale_factor) + max_params = max(dataset.params_config.max_params for dataset in datasets) + + parameters["num_of_qubits"] = max_qubits + parameters["max_gates"] = max_gates + parameters["max_params"] = max_params + parameters["random_samples"] = sum([dataset.params_config.random_samples for dataset in datasets]) + parameters["min_gates"] = min([dataset.params_config.min_gates for dataset in datasets]) + parameters["comment"] = f"Generated with 'from_datasets' with {len(datasets)} datasets. Qubits: {[dataset.params_config.num_of_qubits for dataset in datasets]}." 
+ parameters["pad_constant"] = pad_constant + parameters["bucket_batch_size"] = bucket_batch_size + + parameters["store_dict"] = {} + for dataset in datasets: + parameters["store_dict"] |= dataset.params_config.store_dict #needs python 3.9 for union of dict + parameters["store_dict"]["z"] = "tensor" #add special item + + #----------------- + + xs, ys, zs, cs = MixedCircuitsConfigDataset._preprocess_datasets(datasets, device, balance_maxes, max_samples, shuffle, make_unique, pad_constant, + model_scale_factor, parameters, max_gates=max_gates, max_qubits=max_qubits) + #----------------- + + has_U = "U" in parameters["store_dict"] + has_p = "params" in parameters["store_dict"] + + if bucket_batch_size > 0: + collate_fn_name = MixedCircuitsConfigDataset.cut_padding_Bucket_collate_fn.__name__ + if has_U: + collate_fn_name = MixedCircuitsConfigDataset.cut_padding_Bucket_collate_fn_compilation.__name__ + if has_p: + collate_fn_name = MixedCircuitsConfigDataset.cut_padding_Bucket_collate_fn_compilation_params.__name__ + + else: + collate_fn_name = MixedCircuitsConfigDataset.cut_padding_collate_fn.__name__ + if has_U: + collate_fn_name = MixedCircuitsConfigDataset.cut_padding_collate_fn_compilation.__name__ + if has_p: + collate_fn_name = MixedCircuitsConfigDataset.cut_padding_collate_fn_compilation_params.__name__ + + parameters["collate_fn"] = collate_fn_name + + #----------------- + if bucket_batch_size > 0: + xs, ys, zs, cs = MixedCachedOpenCLIPDataset._reorder_to_buckets(parameters, bucket_batch_size, xs, ys, zs, cs) + + x = torch.cat(xs) + y = ys # torch.cat(ys) is wrong, y is list of numpy or str!! 
not a tensor + + if isinstance(y, list): + match parameters["store_dict"]["y"]: + case "numpy": y = np.concatenate(y, axis=0) + case "tensor": y = torch.cat(y, dim=0) + case _: raise NotImplementedError() + + z = torch.cat(zs) + c = cs + + #----------------- + + params_pad = (max_params, max_gates) + unitary_pad = 2**max_qubits + + ci_list, ci_k_list, memmap_cleans = MixedCachedOpenCLIPDataset._pad_conditions(parameters, bucket_batch_size, c, unitary_pad=unitary_pad, params_pad=params_pad, pad_with_memmap=pad_with_memmap) + + #----------------- + + mixed_CircuitsConfigDataset, mixed_CircuitsConfigDataset_test = \ + MixedCircuitsConfigDataset._create_train_valid_datasets(device, parameters, test_split, x, y, z, ci_list, ci_k_list, shuffle=shuffle) + + if pad_with_memmap: + mixed_CircuitsConfigDataset.memmap_cleans = memmap_cleans + mixed_CircuitsConfigDataset_test.memmap_cleans = memmap_cleans + + return mixed_CircuitsConfigDataset, mixed_CircuitsConfigDataset_test diff --git a/genQC/dataset/config_dataset.py b/genQC/dataset/config_dataset.py index f32864f..9ab8295 100644 --- a/genQC/dataset/config_dataset.py +++ b/genQC/dataset/config_dataset.py @@ -1,28 +1,35 @@ +"""Base class for managing, loading and saving.""" + # AUTOGENERATED! DO NOT EDIT! File to edit: ../../src/dataset/config_dataset.ipynb. 
# %% auto 0 -__all__ = ['Config_Dataset_config', 'Config_Dataset'] +__all__ = ['ConfigDatasetConfig', 'ConfigDataset'] # %% ../../src/dataset/config_dataset.ipynb 2 from ..imports import * -from ..config_loader import * +from ..utils.config_loader import * +from .dataset_helper import * from huggingface_hub import snapshot_download # %% ../../src/dataset/config_dataset.ipynb 3 @dataclass -class Config_Dataset_config: +class ConfigDatasetConfig: """Config `dataclass` used for storage.""" store_dict: dict + dataset_to_gpu: bool # %% ../../src/dataset/config_dataset.ipynb 4 -class Config_Dataset(): +class ConfigDataset(): """Base class for datasets, manages loading and saving.""" - req_params = [f.name for f in dataclasses.fields(Config_Dataset_config)] - comment = "" + req_params = [f.name for f in dataclasses.fields(ConfigDatasetConfig)] + comment = "" + add_balance_fn = None - def __init__(self, device: torch.device=torch.device("cpu"), **parameters): + def __init__(self, device: torch.device=torch.device("cpu"), save_type=None, **parameters) -> None: + self.save_type = default(save_type, "safetensors") + req_params = self.req_params for p in req_params: if p not in parameters: raise RuntimeError(f"Missing parameter `{p}` in argument `**parameters: dict`") @@ -54,6 +61,145 @@ def to(self, device: torch.device, excepts=[], **kwargs): setattr(self, str(k), x) return self + + def memory_summary(self) -> None: + print("##################### Dataset memory summary #####################") + print("Name || Type || Memory || Device || Shape") + print("---------------------------------------------------------------") + + total_mem = 0.0 + byte_to_giga = 1 / (1024**3) + + for k,v in self.store_dict.items(): + mem = 0.0 + dev = "None" + shape = "None" + dtype = "None" + + x = getattr(self, str(k)) + + if v == "tensor": + mem += float(x.dtype.itemsize) * np.prod([s for s in x.shape], dtype=np.double) * byte_to_giga + dev = x.device + shape = x.shape + dtype = x.dtype + + elif 
v == "tensor_list": + dev = [] + for x_i in x: + mem += float(x_i.dtype.itemsize) * np.prod([s for s in x_i.shape], dtype=np.double) * byte_to_giga + dev.append(x_i.device) + shape = (len(x), x[0].shape) + dtype = x[0].dtype + + elif v == "list": + shape = (len(x)) + dtype = "python" + + elif v == "numpy": + shape = x.shape + dtype = x.dtype + + + print(f" - [{str(k):>8}] ({str(dtype):>15} {str(v):>8}): {mem:3.4f} GB ({str(dev):6}) | {shape}") + total_mem += mem + + print("--------------------------------------") + print(f" Total memory used: {total_mem:3.4f} GB ") + print("---------------------------------------------------------------") + + #---------------------------- + + def x_y_preprocess(self, balance_max=None, shuffle=False, max_samples=None, make_unique=True): + z_proc = [] + for k,v in self.store_dict.items(): + if k != "x" and k != "y": + z_proc.append(getattr(self, k)) + + x_proc, y_proc = self.x, self.y + + #--------------------- + if shuffle: + x_proc, y_proc, *z_proc = shuffle_tensor_dataset(x_proc, y_proc, *z_proc) + + if exists(max_samples): + x_proc = x_proc[:max_samples] + y_proc = y_proc[:max_samples] + z_proc = (iz[:max_samples] for iz in z_proc) + + #--------------------- + t = self.store_dict["y"] + if exists(balance_max): + if t == "tensor" or t == "numpy": x_proc, y_proc, *z_proc = balance_tensor_dataset(x_proc, y_proc, *z_proc, make_unique=make_unique, shuffle_lables=shuffle, + samples=balance_max, add_balance_fn=self.add_balance_fn, njobs=1) + else: print(f"[WARNING]: Unsupported y type: `{t}`. Not balancing dataset!") + else: print(f"[INFO]: Not balancing dataset! {balance_max=}") + + #--------------------- + if shuffle: + x_proc, y_proc, *z_proc = shuffle_tensor_dataset(x_proc, y_proc, *z_proc) + + return x_proc, y_proc, *z_proc + + def valid_split(self, x, y, *z, p_valid=0.1, y_type=None, split_sequential=False): + """ + split_sequential ... 
if true split data ordered (valid-train order), else split randomly (the same as shuffle and then seq. split) + """ + + if split_sequential: ind = torch.arange(x.shape[0]) + else: ind = torch.randperm(x.shape[0]) + + splits = max(int(x.shape[0] * p_valid), 1) + ind, ind_valid = ind[splits:], ind[:splits] + + #### Note: advanced indexing always creates copy not view. So we can skip the .clone() + x, x_valid = x[ind], x[ind_valid] + + t = y_type if exists(y_type) else self.store_dict["y"] + if t == "tensor" : y, y_valid = y[ind], y[ind_valid] + elif t == "numpy": y, y_valid = y[ind], y[ind_valid] + + z = list(z) + z_valid = [None] * len(z) + for i, iz in enumerate(z): + # assert tensors for now + z[i], z_valid[i] = iz[ind], iz[ind_valid] + + z, z_valid = tuple(z), tuple(z_valid) + + return x, x_valid, y, y_valid, (z, z_valid) + + def get_dataloaders(self, batch_size, p_valid=0.1, balance_max=None, max_samples=None, y_on_cpu=False, shuffle=True): + #------------------------- + # valid split and to device + + x_proc, y_proc, *z_proc = self.x_y_preprocess(balance_max=balance_max, max_samples=max_samples, shuffle=shuffle) + x, x_valid, y, y_valid, (z, z_valid) = self.valid_split(x_proc, y_proc, *z_proc, p_valid=p_valid) + + + if self.params_config.dataset_to_gpu: + x, x_valid = x.to("cuda"), x_valid.to("cuda") + z, z_valid = list(iz.to("cuda") for iz in z), list(iz_valid.to("cuda") for iz_valid in z_valid) + + if not y_on_cpu: + y, y_valid = y.to("cuda"), y_valid.to("cuda") + + #------------------------- + # create dataloaders + + ds = TensorDataset(x, y, *z) + ds_valid = TensorDataset(x_valid, y_valid, *z_valid) + + if self.params_config.dataset_to_gpu: + train_loader = DataLoader(dataset=ds , batch_size=batch_size, shuffle=True) + valid_loader = DataLoader(dataset=ds_valid, batch_size=batch_size, shuffle=True) + + else: + train_loader = DataLoader(dataset=ds , batch_size=batch_size, shuffle=True, pin_memory=True, num_workers=12) + valid_loader = 
DataLoader(dataset=ds_valid, batch_size=batch_size, shuffle=True, pin_memory=True, num_workers=12) + + self.dataloaders = DataLoaders(train_loader, valid_loader) + return self.dataloaders #---------------------------- @@ -61,16 +207,22 @@ def to(self, device: torch.device, excepts=[], **kwargs): def params_config(self): params_config = {} for p in self.req_params: params_config[p] = getattr(self, p) + + if type(self) == ConfigDataset: + params_config = ConfigDatasetConfig(**params_config) return params_config - + + #---------------------------- + def get_config(self, save_path=None, without_metadata=False): if not without_metadata: config = {} config["target"] = class_to_str(type(self)) config["device"] = str(self.device) config["comment"] = self.comment - config["save_path"] = self.save_path if hasattr(self, "save_path") else save_path + config["save_path"] = self.save_path if hasattr(self, "save_path") and not exists(save_path) else save_path config["save_datetime"] = datetime.now().strftime("%m/%d/%Y %H:%M:%S") + config["save_type"] = self.save_type config["params"] = self.params_config else: config = self.params_config @@ -79,22 +231,41 @@ def get_config(self, save_path=None, without_metadata=False): return config def save_dataset(self, config_path: str, save_path: str): + if exists(config_path): os.makedirs(config_path[:config_path.rfind("/")] + "/", exist_ok=True) + if exists(save_path): os.makedirs(save_path[:save_path.rfind("/")] + "/", exist_ok=True) + config = self.get_config(save_path, without_metadata=False) save_dict_yaml(config, config_path) self.store_x_y(save_path) #---------------------------- + + def check_save_type(self, save_path): + if exists(self.save_type) and exists(save_path): + if not save_path.endswith(f".{self.save_type}"): + save_path += f".{self.save_type}" + return save_path def store_x_y(self, path_str): for k,v in self.store_dict.items(): x = getattr(self, str(k)) - torch.save(x, path_str + f"_{k}.pt") - - def load_x_y(self, 
path_str): + + # torch.save(x, path_str + f"_{k}.pt") + store_tensor({"0": x}, self.check_save_type(path_str + f"_{k}"), type=v) + + def load_x_y(self, path_str, device: Optional[torch.device] = None, make_contiguous: bool = True): self.save_path = path_str - for k,v in self.store_dict.items(): - x = torch.load(path_str + f"_{k}.pt", weights_only=False) + for k,v in self.store_dict.items(): + # x = torch.load(path_str + f"_{k}.pt", map_location=device) + x = load_tensor(self.check_save_type(path_str + f"_{k}"), device, type=v) + + if isinstance(x, dict): + x = x["0"] + + if v == "tensor" and make_contiguous: + x = x.contiguous() #load memmap into memory + setattr(self, str(k), x) #---------------------------- @@ -115,7 +286,7 @@ def from_config(config, device: torch.device, save_path: Optional[str] = None, m if "save_path" in config: save_path = config["save_path"] else: print("[INFO]: Found no key `save_path` path in config and no `save_path` arg provided.") - if exists(save_path): config_dataset.load_x_y(save_path) + if exists(save_path): config_dataset.load_x_y(save_path, device=device, make_contiguous=make_contiguous) else: print("[INFO]: No save_path` provided. Nothing loaded.") #-------------------------------- @@ -132,7 +303,7 @@ def from_config_file(cls, config_path, device: torch.device, save_path: Optiona If this method is called with `ConfigDataset.from_config_file` we use the given `target`, else use the caller class. 
""" config = load_config(config_path) - if cls is not Config_Dataset: + if cls is not ConfigDataset: config["target"] = class_to_str(cls) return cls.from_config(config, device, save_path, make_contiguous) @@ -140,5 +311,11 @@ def from_config_file(cls, config_path, device: torch.device, save_path: Optiona def from_huggingface(cls, repo_id: str, device: torch.device, **kwargs): """Load a dataset directly from Huggingface.""" dataset_path = snapshot_download(repo_id=repo_id, repo_type="dataset", allow_patterns=["*.pt", "*.yaml", "*.safetensors"], **kwargs) - dataset = cls.from_config_file(config_path=dataset_path+"/config.yaml", device=device, save_path=dataset_path+"/dataset") + + try: + name = repo_id.split("/")[-1] + dataset = cls.from_config_file(config_path=dataset_path+f"/{name}.yaml", device=device, save_path=dataset_path+f"/{name}") + except Exception as e: + dataset = cls.from_config_file(config_path=dataset_path+"/config.yaml", device=device, save_path=dataset_path+"/dataset") + return dataset diff --git a/genQC/dataset/dataset_helper.py b/genQC/dataset/dataset_helper.py index 4affe36..14b2fde 100644 --- a/genQC/dataset/dataset_helper.py +++ b/genQC/dataset/dataset_helper.py @@ -1,15 +1,17 @@ +"""Some comonly used functions for datasets.""" + # AUTOGENERATED! DO NOT EDIT! File to edit: ../../src/dataset/dataset_helper.ipynb. 
# %% auto 0 -__all__ = ['check_duplicate_in_dataset', 'check_duplicates_in_dataset_python', 'check_duplicates_in_dataset', - 'shuffle_tensor_dataset', 'get_unique_elements_indices', 'uniquify_tensor_dataset', 'balance_tensor_dataset', - 'map_old_tensor_to_new'] +__all__ = ['check_duplicate_in_dataset', 'check_duplicates_in_dataset', 'shuffle_tensor_dataset', 'get_unique_elements_indices', + 'uniquify_tensor_dataset', 'balance_tensor_dataset'] -# %% ../../src/dataset/dataset_helper.ipynb 3 +# %% ../../src/dataset/dataset_helper.ipynb 2 from ..imports import * -from ..config_loader import * +from ..utils.config_loader import * +from ..utils.async_fn import run_parallel_jobs -# %% ../../src/dataset/dataset_helper.ipynb 5 +# %% ../../src/dataset/dataset_helper.ipynb 4 def check_duplicate_in_dataset(x, dataset): """Check if 'x' is in 'dataset'""" # x ... [ *] @@ -18,32 +20,9 @@ def check_duplicate_in_dataset(x, dataset): comp = (dataset==x) comp = torch.reshape(comp, [comp.shape[0], -1]) comp = torch.all(comp, dim=1) - - num = comp.nonzero().squeeze().numel() - return bool(num) + return comp.any().item() -# %% ../../src/dataset/dataset_helper.ipynb 6 -def check_duplicates_in_dataset_python(xs, dataset): - cnt = 0 - - raise NotImplementedError("") - - # f = lambda x: int(check_duplicate_in_dataset(x, dataset)) - # res = async_loop_consumer(f, xs) - # cnt = sum(res) - - comp = [] - - for i,x in enumerate(xs): - if check_duplicate_in_dataset(x, dataset): - #print(f"[INFO] Duplicate in dataset at index={i}") - comp.append(i) - cnt += 1 - # print(f"[INFO] Found {cnt}/{xs.shape[0]} duplicates in dataset of {dataset.shape[0]}.") - - return cnt, comp - -# %% ../../src/dataset/dataset_helper.ipynb 7 +# %% ../../src/dataset/dataset_helper.ipynb 5 def check_duplicates_in_dataset(xs, dataset, return_ind=False, invert=False): ''' Checks if `xs` is are `dataset`. Boolean `invert` changes if we count duplicates (False) or ones that are not in dataset (True). 
@@ -71,39 +50,56 @@ def get_comp(x, dataset): comp = comp.nonzero() num = comp.shape[0] - # except Exception as er: - # print("[WARNING] check_duplicates_in_dataset:", er) - # print("We will use python instead.") - # raise NotImplementedError("") - # # cnt, comp = check_duplicates_in_dataset_python(xs, dataset) - if return_ind: return num, comp.squeeze() #comp is [i_xs, i_dataset] pairs return num -# %% ../../src/dataset/dataset_helper.ipynb 11 -def shuffle_tensor_dataset(x, y=None, *z): +# %% ../../src/dataset/dataset_helper.ipynb 9 +def shuffle_tensor_dataset(x, y=None, *z, cpu_copy=True): '''Assumes numpy or tensor objects with same length.''' rand_indx = torch.randperm(x.shape[0]) if exists(y): assert x.shape[0] == y.shape[0] - for iz in z: assert x.shape[0] == iz.shape[0] - return x[rand_indx], y[rand_indx], *(iz[rand_indx] for iz in z) + for iz in z: assert x.shape[0] == iz.shape[0] + + + if cpu_copy: + + def _cpu_array_index(var): + if type(var) == np.ndarray: + var = var[rand_indx] + else: + device = var.device + var = var.to("cpu") + var = var[rand_indx] + var[:] = var.to(device) + return var + + x = _cpu_array_index(x) + y = _cpu_array_index(y) + z = (_cpu_array_index(iz) for iz in z) + + return x, y, *z + + else: + return x[rand_indx], y[rand_indx], *(iz[rand_indx] for iz in z) return x[rand_indx] -# %% ../../src/dataset/dataset_helper.ipynb 12 +# %% ../../src/dataset/dataset_helper.ipynb 10 def get_unique_elements_indices(tensor): '''Returns indices of unique_elements in `tensor`.''' tensor_unique, ptrs, cnt = torch.unique(tensor, dim=0, return_inverse=True, return_counts=True) - _, ind_sorted = torch.sort(ptrs, stable=True) #e.g. gets the index that points to zero at pos [0] + _, ind_sorted = torch.sort(ptrs, dim=0, stable=True) #e.g. 
gets the index that points to zero at pos [0] - cum_sum = cnt.cumsum(0) - cum_sum = torch.cat((torch.tensor([0], device=tensor.device), cum_sum[:-1])) - - return tensor_unique, ind_sorted[cum_sum] + cum_sum = cnt.cumsum(dim=0) + cum_sum = torch.cat([torch.tensor([0], device=tensor.device), cum_sum[:-1]], dim=0) + + idx = ind_sorted[cum_sum].cpu() + + return tensor[idx], idx -# %% ../../src/dataset/dataset_helper.ipynb 13 +# %% ../../src/dataset/dataset_helper.ipynb 11 def uniquify_tensor_dataset(x, y=None, *z): '''`x` has to be tensor, assumes numpy or tensor obj for `y` and `z`''' x, x_idx = get_unique_elements_indices(x) @@ -116,48 +112,66 @@ def uniquify_tensor_dataset(x, y=None, *z): return x -# %% ../../src/dataset/dataset_helper.ipynb 14 -def balance_tensor_dataset(x, y, *z, samples: int=None, make_unique: bool=True, y_uniques=None, shuffle_lables: bool=True, add_balance_fn: callable=None): +# %% ../../src/dataset/dataset_helper.ipynb 12 +def balance_tensor_dataset(x, y, *z, samples: int=None, make_unique: bool=True, y_uniques=None, shuffle_lables: bool=True, add_balance_fn: callable=None, njobs=1): '''Assumes `x` is tensor and `y` is tensor or numpy.''' y_type = type(y) assert y_type in [np.ndarray, torch.Tensor] + + print(f" - balance_tensor_dataset, {njobs=}, number of samples={x.shape[0]}") #------------------------------ if make_unique: x, y, *z = uniquify_tensor_dataset(x, y, *z) assert x.shape[0] == y.shape[0] + + print(f" - uniquify_tensor_dataset, number of samples now {x.shape[0]}") #bcs unique sorts, we need to shuffle the dataset before picking the first 'samples' entries x, y, *z = shuffle_tensor_dataset(x, y, *z) #------------------------------ + + search_y = y_uniques if exists(y_uniques) else y - if y_type == np.ndarray: y_uniques_temp, y_uniques_cnt = np.unique(y, return_counts=True, axis=0) - else: y_uniques_temp, y_uniques_cnt = torch.unique(y, return_counts=True, dim=0) + if y_type == np.ndarray: _, y_ptrs, y_uniques_cnt = 
np.unique(search_y, return_counts=True, return_inverse=True, axis=0) + else: _, y_ptrs, y_uniques_cnt = torch.unique(search_y, return_counts=True, return_inverse=True, dim=0) - if y_uniques is None: y_uniques = y_uniques_temp - if samples is None: + if not exists(samples): if y_type == np.ndarray: samples = np.min(y_uniques_cnt) # the actual balancing count else: samples = torch.min(y_uniques_cnt) + print(f" - balancing") + # ToDo: make parallel + ind = list() - for y_unique in y_uniques: + # for y_unique in tqdm(y_uniques, total=y_uniques.shape[0]): + for y_ptr_index in tqdm(range(y_uniques_cnt.shape[0]), total=y_uniques_cnt.shape[0]): if y_type == np.ndarray: - comp = (y==y_unique) + comp = (y_ptrs==y_ptr_index) indices = np.squeeze(np.nonzero(comp)) indices = indices if indices.ndim > 0 else indices[None] - else: - comp = torch.all(y==y_unique, dim=1) - indices = comp.nonzero().squeeze().cpu() + else: + comp = (y_ptrs==y_ptr_index) + + indices = comp.nonzero().squeeze() #.cpu() indices = indices if indices.dim() > 0 else indices[None] - + #special add balncing, e.g., for circuit length if add_balance_fn is not None: indices = add_balance_fn(indices, x, y, *z) - + + if not y_type == np.ndarray: indices = indices.cpu() + + indices = shuffle_tensor_dataset(indices) + + #fixes bug: shuffle_tensor_dataset removes dim if numpy array only has 1 element! + if y_type == np.ndarray: indices = indices if indices.ndim > 0 else indices[None] + else: indices = indices if indices.dim() > 0 else indices[None] + ind.append(indices[:samples]) #limit samples if y_type == np.ndarray: ind = np.concatenate(ind, axis=0) @@ -171,23 +185,3 @@ def balance_tensor_dataset(x, y, *z, samples: int=None, make_unique: bool=True, if shuffle_lables: xb, yb, *zb = shuffle_tensor_dataset(xb, yb, *zb) return xb, yb, *zb - -# %% ../../src/dataset/dataset_helper.ipynb 16 -def map_old_tensor_to_new(x): - raise DeprecationWarning("[WARNING] There really should be no more old tensors arround .... 
delete them") - print("[WARNING] There really should be no more old tensors arround .... delete them") - - b, gc, bits, t = x.shape - - x = x.reshape((b, gc//3, 3, bits, t)) # [b, g-c, bits, t] -> [b, g, c, bits, t] - x = torch.argmax(x, dim=2) # [b, g, c, bits, t]-> [b, g, bits, t] - - gate = torch.concat([torch.zeros_like(x[:,:1]), x], dim=1) # add zeros for empty token - gate = torch.argmax(gate, dim=1) - - control_target = torch.sum(x, dim=1) - mapped_tensor = torch.zeros_like(control_target) - mapped_tensor[control_target==1] = -1 - mapped_tensor[control_target==2] = 1 - - return gate * mapped_tensor # is now [b, space, time] with elements +-gate_number diff --git a/genQC/dataset/mixed_cached_dataset.py b/genQC/dataset/mixed_cached_dataset.py new file mode 100644 index 0000000..0309126 --- /dev/null +++ b/genQC/dataset/mixed_cached_dataset.py @@ -0,0 +1,314 @@ +"""Dataset that combines and handles multiple cached datasets.""" + +# AUTOGENERATED! DO NOT EDIT! File to edit: ../../src/dataset/mixed_cached_dataset.ipynb. 
+ +# %% auto 0 +__all__ = ['MixedCachedOpenCLIPDatasetConfig', 'MixedCachedOpenCLIPDataset'] + +# %% ../../src/dataset/mixed_cached_dataset.ipynb 3 +from ..imports import * +from .cached_dataset import CachedOpenCLIPDataset, CachedOpenCLIPDatasetConfig, ConfigDataset +from .dataset_helper import * +from ..utils.misc_utils import DataLoaders, MemoryCleaner +from tensordict import TensorDict + +# %% ../../src/dataset/mixed_cached_dataset.ipynb 4 +@dataclass +class MixedCachedOpenCLIPDatasetConfig(CachedOpenCLIPDatasetConfig): + pad_constant: int + collate_fn: str + bucket_batch_size: int + model_scale_factor: int + +# %% ../../src/dataset/mixed_cached_dataset.ipynb 5 +class MixedCachedOpenCLIPDataset(CachedOpenCLIPDataset): + """Dataset that uses multiple cached dataset and combines them with padding, either i) Bucket or ii) Max.""" + + req_params = [f.name for f in dataclasses.fields(MixedCachedOpenCLIPDatasetConfig)] + + #----------------------------------- + @property + def params_config(self): + params_config = super().params_config + if type(self) == MixedCachedOpenCLIPDataset: + params_config = MixedCachedOpenCLIPDatasetConfig(**params_config) + return params_config + + #----------------------------------- + # functions to combine multiple datasets together + + @classmethod + def _preprocess_datasets(dataset_cls, datasets, device, balance_maxes, max_samples, shuffle, + make_unique, pad_constant, model_scale_factor, parameters, **kwargs): + xs = [] + ys = [] + zs = [] + cs = [] + + if isinstance(max_samples, int): + max_samples = [max_samples] * len(datasets) + else: + assert isinstance(max_samples, (list, np.ndarray)) + + if isinstance(balance_maxes, int): + balance_maxes = [balance_maxes] * len(datasets) + else: + assert isinstance(balance_maxes, (list, np.ndarray)) + + for i, (dataset, balance_max) in tqdm(enumerate(zip(datasets, balance_maxes)), total=len(datasets)): + + x, y, z, *c = dataset_cls._preprocess_dataset(dataset, device, balance_max, max_samples, 
i, shuffle, make_unique, pad_constant, model_scale_factor, parameters, **kwargs) + MemoryCleaner.purge_mem() + + #combine datasets + xs.append(x.cpu()) + ys.append(y) + zs.append(z.cpu()) + cs.append([ic.cpu() for ic in c]) + + del x + del y + del z + del c + + for k in datasets[i].store_dict.keys(): + setattr(datasets[i], str(k), None) + del dataset + + MemoryCleaner.purge_mem() + + return xs, ys, zs, cs + + @staticmethod + def _add_missing_conditions(parameters, dataset, c, batch_size, device): + # if c is missing something of the union we set it to a zero tensor, e.g. used for combining SRV with compilation + c_temp = [] + c_temp_index = 0 + + for k,v in parameters["store_dict"].items(): + if k != "x" and k != "y" and k != "z": + if k not in dataset.params_config.store_dict: + empty_tensor = torch.zeros((1,), device=device) + + if k == "U": #scetchy hardcoded for compilation + empty_tensor = torch.zeros((batch_size, 2, 1, 1), device=device) # unitary is [b, Re/Im, 2^n, 2^n] + + c_temp.append(empty_tensor) + + else: # done to conserve the ordering of c args!!! 
+ c_temp.append(c[c_temp_index]) + c_temp_index += 1 + + return c_temp + + @staticmethod + def _reorder_to_buckets(parameters, bucket_batch_size, xs, ys, zs, cs): + for i, (xi,yi,zi, ci) in enumerate(zip(xs, ys, zs, cs)): #cut rest of batch + b_mult = int(np.floor(xi.shape[0] / bucket_batch_size) * bucket_batch_size) + + xs[i] = xi[None, :b_mult].reshape((b_mult//bucket_batch_size, bucket_batch_size, *xi.shape[1:])) + zs[i] = zi[None, :b_mult].reshape((b_mult//bucket_batch_size, bucket_batch_size, *zi.shape[1:])) + + v = parameters["store_dict"]["y"] + if v == "tensor" or v == "numpy": + ys[i] = yi[None, :b_mult].reshape((b_mult//bucket_batch_size, bucket_batch_size, *yi.shape[1:])) + else: raise NotImplementedError("") + + #---- + #For U, etc + add_ind = 0 + for k,v in parameters["store_dict"].items(): + if k != "x" and k != "y" and k != "z": + if v == "tensor" or v == "numpy": + cs[i][add_ind] = ci[add_ind][None, :b_mult].reshape((b_mult//bucket_batch_size, bucket_batch_size, *ci[add_ind].shape[1:])) + else: raise NotImplementedError("") + add_ind += 1 + + return xs, ys, zs, cs + + @staticmethod + def _pad_conditions(parameters, bucket_batch_size, c, unitary_pad=None, params_pad=None, pad_with_memmap=False): + ci_list = [] + ci_k_list = [] + + memmap_cleans = [] #TensorDicts and paths we need to delete later + + def _alloc_mem(shape, k, c0_add_ind): + # allocating zeros is better memory wise than torch.cat(ci_s) and F.pad(ci, pad, "constant", 0) + mem = np.prod(shape) * c0_add_ind.element_size() / (1024*1024*1024) + print(f"[INFO]: allocate memory for {k} {shape} on {c0_add_ind.device} approx. 
{mem:.3f} GB") + + if pad_with_memmap: + prefix_path = f"tmp_DELETE_pad_conditions_MixedCachedOpenCLIPDataset_{k}" + print(f"[INFO]: (MixedCachedOpenCLIPDataset._pad_conditions): {pad_with_memmap=} allocating TensorDict using memmap_like at {prefix_path}") + + b, *_ = shape + tensor_dict = TensorDict({"ci_s": torch.empty(shape, dtype=c0_add_ind.dtype), + }, batch_size=[b]) + tensor_dict = tensor_dict.memmap_like(prefix=prefix_path) + + ci_s = tensor_dict["ci_s"] + memmap_cleans.append((tensor_dict, prefix_path)) + else: + ci_s = torch.zeros(shape, device=c0_add_ind.device, dtype=c0_add_ind.dtype) + + return ci_s + + add_ind = 0 + for k,v in parameters["store_dict"].items(): + if k != "x" and k != "y" and k != "z": + + if v == "tensor" and k == "U": # hardcoded U padding !! + assert exists(unitary_pad) and isinstance(unitary_pad, int) + + n = sum([ci[add_ind].shape[0] for ci in c]) + if bucket_batch_size > 0: shape = (n, bucket_batch_size, 2, unitary_pad, unitary_pad) + else: shape = (n, 2, unitary_pad, unitary_pad) + + ci_s = _alloc_mem(shape, k, c[0][add_ind]) + + #tensor product pad, else was zero pad + if 1: + run_i = 0 + for i,ci in enumerate(c): + ci = ci[add_ind] + + assert ci.shape[-2]==ci.shape[-1] + U_side = ci.shape[-2] + for jj in range(unitary_pad//U_side): + ci_s[run_i:run_i+ci.shape[0], ..., U_side*jj:U_side*(jj+1), U_side*jj:U_side*(jj+1)] = ci.to(ci_s.device) + + run_i += ci.shape[0] + + ci_list.append(ci_s) + ci_k_list.append(k) + + add_ind += 1 + continue + + elif v == "tensor" and k == "params": # hardcoded paramter padding !! 
+ assert exists(params_pad) #and len(list(params_pad))==2 + + n = sum(ci[add_ind].shape[0] for ci in c) + if bucket_batch_size > 0: shape = (n, bucket_batch_size, *params_pad) + else: shape = (n, *params_pad) + + ci_s = _alloc_mem(shape, k, c[0][add_ind]) + + elif v == "numpy": raise NotImplementedError("") + else: raise NotImplementedError("") + + + run_i = 0 + for i,ci in enumerate(c): + ci = ci[add_ind] + ci_s[run_i:run_i+ci.shape[0], ..., :ci.shape[-2], :ci.shape[-1]] = ci + run_i += ci.shape[0] + + ci_list.append(ci_s) + ci_k_list.append(k) + + add_ind += 1 + + return ci_list, ci_k_list, memmap_cleans + + @classmethod + def _create_train_valid_datasets(dataset_cls, device, parameters, test_split, x, y, z, ci_list, ci_k_list, shuffle: bool = True): + splits = max(int(x.shape[0] * test_split), 1) + + if shuffle: + x, y, z, *ci_list = shuffle_tensor_dataset(x, y, z, *ci_list) + + x, x_test = x[splits:], x[:splits] + y, y_test = y[splits:], y[:splits] + z, z_test = z[splits:], z[:splits] + + print(f"Split: Train {x.shape[0]} - Test {x_test.shape[0]} \n") + + dataset = dataset_cls(device, **parameters) + dataset.x = x + dataset.y = y + dataset.z = z + + dataset_test = dataset_cls(device, **parameters) + dataset_test.x = x_test + dataset_test.y = y_test + dataset_test.z = z_test + + for ci, k in zip(ci_list, ci_k_list): + ci, ci_test = ci[splits:], ci[:splits] + + setattr(dataset , str(k), ci) + setattr(dataset_test, str(k), ci_test) + + return dataset, dataset_test + + #----------------------------------- + + def get_dataloaders(self, batch_size, text_encoder, p_valid=0.1, y_on_cpu=False, return_tensor_datasets=False, shuffle=True, shuffle_cpu_copy=True, caching=True): + #------------------------- + # caching + + self.text_encoder = text_encoder + + print("[DEBUG]: run get_dataloaders.x_y_preprocess", flush=True) + x_proc, y_proc, *z_proc = ConfigDataset.x_y_preprocess(self, + balance_max=None, + shuffle=False, + max_samples=None, + make_unique=False) # ... 
z_proc is `'z' and all other 'c' + if caching: + if self.bucket_batch_size <= 0: + y_proc = self.caching(y_proc, y_on_cpu=y_on_cpu) + + else: + y_proc = self.caching([yi.reshape((-1)) for yi in y_proc], y_on_cpu=y_on_cpu) + y_proc = y_proc.reshape((-1, self.bucket_batch_size)) + + #------------------------- + # valid split and to device + + print("[DEBUG]: run get_dataloaders.valid_split", flush=True) + x, x_valid, y, y_valid, (z, z_valid) = self.valid_split(x_proc, y_proc, *z_proc, p_valid=p_valid, y_type="tensor", split_sequential=False) + + if self.params_config.dataset_to_gpu: + x, x_valid = x.to("cuda"), x_valid.to("cuda") + z, z_valid = list(iz.to("cuda") for iz in z), list(iz_valid.to("cuda") for iz_valid in z_valid) + + if not y_on_cpu: + y, y_valid = y.to("cuda"), y_valid.to("cuda") + + #------------------------- + # create dataloaders + + ds = TensorDataset(x, y, *z) + ds_valid = TensorDataset(x_valid, y_valid, *z_valid) + + if return_tensor_datasets: + return ds, ds_valid + + if isinstance(self.collate_fn, str): + collate_fn = getattr(self, self.collate_fn, None) + else: + collate_fn = self.collate_fn + + if not exists(collate_fn): + print("[WARNING]: self.collate_fn does not exist, using torch.utils.data.default_collate.") + collate_fn = torch.utils.data.default_collate + + if self.params_config.dataset_to_gpu: + train_loader = DataLoader(dataset=ds , batch_size=batch_size, shuffle=True, collate_fn=collate_fn) + valid_loader = DataLoader(dataset=ds_valid, batch_size=batch_size, shuffle=True, collate_fn=collate_fn) + + else: + train_loader = DataLoader(dataset=ds , batch_size=batch_size, shuffle=True, pin_memory=True, num_workers=4, collate_fn=collate_fn) + valid_loader = DataLoader(dataset=ds_valid, batch_size=batch_size, shuffle=True, pin_memory=True, num_workers=4, collate_fn=collate_fn) + + self.dataloaders = DataLoaders(train_loader, valid_loader) + return self.dataloaders + + #----------------------------------- + + @staticmethod + def 
from_datasets(*args, **kwargs): + raise NotImplementedError() diff --git a/genQC/dataset/mixed_cached_qc_dataset.py b/genQC/dataset/mixed_cached_qc_dataset.py deleted file mode 100644 index 754f2b3..0000000 --- a/genQC/dataset/mixed_cached_qc_dataset.py +++ /dev/null @@ -1,556 +0,0 @@ -# AUTOGENERATED! DO NOT EDIT! File to edit: ../../src/dataset/mixed_cached_qc_dataset.ipynb. - -# %% auto 0 -__all__ = ['Mixed_Cached_OpenClip_Dataset_config', 'Mixed_Cached_OpenClip_Dataset'] - -# %% ../../src/dataset/mixed_cached_qc_dataset.ipynb 3 -from ..imports import * -from .qc_dataset import Qc_Config_Dataset_config, Qc_Config_Dataset -from .config_dataset import Config_Dataset -from .cached_qc_dataset import Cached_OpenClip_Dataset -from ..config_loader import * -from .dataset_helper import * -from ..util import DataLoaders -import dataclasses - -# %% ../../src/dataset/mixed_cached_qc_dataset.ipynb 4 -@dataclass -class Mixed_Cached_OpenClip_Dataset_config(Qc_Config_Dataset_config): - pad_constant: int - collate_fn: str - bucket_batch_size: int - num_down_scales: int # for flex pad attn mask - -# %% ../../src/dataset/mixed_cached_qc_dataset.ipynb 5 -class Mixed_Cached_OpenClip_Dataset(Cached_OpenClip_Dataset): - """Dataset that uses multiple cached dataset and combines them with padding, either i) Bucket or ii) Max. Also provides a corresponding `collate_fn` for training.""" - - req_params = [f.name for f in dataclasses.fields(Mixed_Cached_OpenClip_Dataset_config)] - - cut_multiple = 4 #needed for proper downscaling! 
- - @property - def params_config(self): - params_config = {} - for p in self.req_params: params_config[p] = getattr(self, p) - params_config["gate_pool"] = [class_to_str(gate) for gate in params_config["gate_pool"]] - params_config = Mixed_Cached_OpenClip_Dataset_config(**params_config) - return params_config - - #----------------------------------- - # CAUSAL ATTENTION PADDING - - def flexPadAttn_padding_collate_fn(self, b): - """this function is called for training for every batch""" - z_0 = max(x[2][0] for x in b) # space - z_1 = max(x[2][1] for x in b) # time - - #round time to next multiple of 8 for conv layers! - z_1 = (torch.ceil(z_1 / self.cut_multiple) * self.cut_multiple).to(torch.int32) - - #--------------- - # key_padding_mask ... [N, S] -inf where we want no attention - # we will create here [N, s, t] and then reshaping is easy - # note this is key pad mask not directly attention mask! we need this for loss masking - # Nb: add rnd to the padding, so we train with pad and on smaller systems - - #we need 3 different ones for the different unet layers - key_padding_mask = torch.zeros((len(b), z_0, z_1), device=self.device) - - padd_rnds = torch.randint(low=0, high=2, size=(len(b),2), dtype=torch.int32) #roll 50/50 if we allow padding - - xs=[] - ys=[] - for i,((x,y,z), padd_rnd) in enumerate(zip(b, padd_rnds)): - # for i,(x,y,z) in enumerate(b): - x = x[:z_0, :z_1] # cut down to max [bits, time] of batch - - #------------------- - space, time = z[0], z[1] - - if space < z_0 and padd_rnd[0]: space = torch.randint(low=space, high=z_0+1, size=(1,), dtype=torch.int32) - if time < z_1 and padd_rnd[1]: time = torch.randint(low=time , high=z_1+1, size=(1,), dtype=torch.int32) - - time = (torch.ceil(time / self.cut_multiple) * self.cut_multiple).to(torch.int32) - - key_padding_mask[i, space:, :] = float('-inf') - key_padding_mask[i, :, time:] = float('-inf') - - #------------------- - - xs.append(x) - ys.append(y) - - key_padding_mask_list = [key_padding_mask] - 
for j in range(1, self.num_down_scales): - key_padding_mask_list.append(F.max_pool1d(key_padding_mask_list[j-1], kernel_size=2)) - - xs=torch.stack(xs) - ys=torch.stack(ys) - return xs, ys, key_padding_mask_list - - def flexPadAttn_TimeOnly_padding_collate_fn(self, b): - """this function is called for training for every batch""" - z_0 = max(x[2][0] for x in b) # space - z_1 = max(x[2][1] for x in b) # time - - #round time to next multiple of 8 for conv layers! - z_1 = (torch.ceil(z_1 / self.cut_multiple) * self.cut_multiple).to(torch.int32) - - #--------------- - # key_padding_mask ... [N, S] -inf where we want no attention - # we will create here [N, s, t] and then reshaping is easy - # note this is key pad mask not directly attention mask! we need this for loss masking - # Nb: add rnd to the padding, so we train with pad and on smaller systems - - #we need 3 different ones for the different unet layers - key_padding_mask = torch.zeros((len(b), z_0, z_1), device=self.device) - - padd_rnds = torch.randint(low=0, high=2, size=(len(b)), dtype=torch.int32) #roll 50/50 if we allow padding - - xs=[] - ys=[] - for i,((x,y,z), padd_rnd) in enumerate(zip(b, padd_rnds)): - # for i,(x,y,z) in enumerate(b): - x = x[:z_0, :z_1] # cut down to max [bits, time] of batch - - #------------------- - time = z[1] - - if time < z_1 and padd_rnd: time = torch.randint(low=time , high=z_1+1, size=(1,), dtype=torch.int32) - time = (torch.ceil(time / self.cut_multiple) * self.cut_multiple).to(torch.int32) - key_padding_mask[i, :, time:] = float('-inf') - - #------------------- - - xs.append(x) - ys.append(y) - - key_padding_mask_list = [key_padding_mask] - for j in range(1, self.num_down_scales): - key_padding_mask_list.append(F.max_pool1d(key_padding_mask_list[j-1], kernel_size=2)) - - xs=torch.stack(xs) - ys=torch.stack(ys) - return xs, ys, key_padding_mask_list - - #----------------------------------- - # BUCKET PADDING, all x,y are already passed as batch - - def 
cut_padding_Bucket_collate_fn(self, b): - """this function is called for training for every batch""" - - b = b[0] - - x = b[0] - y = b[1] - z = b[2] - - #--------------- - - z_0 = torch.max(z[:, 0]) # space - z_1 = torch.max(z[:, 1]) # time - - #round time to next multiple of cut_multiple for conv layers! - z_1 = (torch.ceil(z_1 / self.cut_multiple) * self.cut_multiple).to(torch.int32) - - #--------------- - - x = x[:, :z_0, :z_1] # cut down to max [b, bits, time] of batch - - return x, y - - def cut_padding_Bucket_collate_fn_compilation(self, b): - """this function is called for training for every batch""" - - b = b[0] - - x = b[0] - y = b[1] - U = b[2] - z = b[3] - - #--------------- - - z_0 = torch.max(z[:, 0]) # space - z_1 = torch.max(z[:, 1]) # time - - #round time to next multiple of cut_multiple for conv layers! - z_1 = (torch.ceil(z_1 / self.cut_multiple) * self.cut_multiple).to(torch.int32) - - #--------------- - - x = x[:, :z_0, :z_1] # cut down to max [b, bits, time] of batch - - bit_exp = 2**z_0 - U = U[:, :, :bit_exp, :bit_exp] # [b, Re/Im, 2^n, 2^n] - - return x, y, U - - def cut_padding_Bucket_collate_fn_compilation_params(self, b): - """this function is called for training for every batch, order in b is store dict""" - - b = b[0] # {'x': 'tensor', 'y': 'numpy', 'params': 'tensor', 'U': 'tensor', 'z': 'tensor'} - - x = b[0] - y = b[1] - p = b[2] - U = b[3] - z = b[4] - - #--------------- - - z_0 = torch.max(z[:, 0]) # space - z_1 = torch.max(z[:, 1]) # time - - #round time to next multiple of cut_multiple for conv layers! 
- z_1 = (torch.ceil(z_1 / self.cut_multiple) * self.cut_multiple).to(torch.int32) - - #--------------- - - x = x[:, :z_0, :z_1] # cut down to max [b, bits, time] of batch - - p = p[:, :, :z_1] - - bit_exp = 2**z_0 - U = U[:, :, :bit_exp, :bit_exp] # [b, Re/Im, 2^n, 2^n] - - return x, y, p, U - - #----------------------------------- - # MAX PADDING, x are passes as sampled list (batch), std collate them - - def cut_padding_collate_fn(self, b): - """this function is called for training for every batch""" - z_0 = max(x[2][0] for x in b) # space - z_1 = max(x[2][1] for x in b) # time - - #round time to next multiple of cut_multiple for conv layers! - z_1 = (torch.ceil(z_1 / self.cut_multiple) * self.cut_multiple).to(torch.int32) - - #--------------- - - x_sample = b[0][0] - xs = torch.zeros((len(b), z_0, z_1), dtype=x_sample.dtype, device=x_sample.device) - - # xs=[] - ys=[] - for i,(x,y,z) in enumerate(b): - #x = x[:z_0, :z_1] # cut down to max [bits, time] of batch - xs[i] = x[:z_0, :z_1] - - #xs.append(x) - ys.append(y) - - #xs=torch.stack(xs) - ys=torch.stack(ys) - - return xs, ys - - def cut_padding_collate_fn_compilation(self, b): - """this function is called for training for every batch""" - z_0 = max(x[3][0] for x in b) # space - z_1 = max(x[3][1] for x in b) # time - - #round time to next multiple of cut_multiple for conv layers! 
- z_1 = (torch.ceil(z_1 / self.cut_multiple) * self.cut_multiple).to(torch.int32) - - bit_exp = 2**z_0 - - #--------------- - - x_sample = b[0][0] - xs = torch.zeros((len(b), z_0, z_1), dtype=x_sample.dtype, device=x_sample.device) - - y_sample = b[0][1] - ys = torch.zeros((len(b), *y_sample.shape), dtype=y_sample.dtype, device=y_sample.device) - - U_sample = b[0][2] - Us = torch.zeros((len(b), 2, bit_exp, bit_exp), dtype=U_sample.dtype, device=U_sample.device) - - for i,(x,y,U,z) in enumerate(b): - xs[i] = x[:z_0, :z_1] - ys[i] = y - Us[i] = U[:, :bit_exp, :bit_exp] - - return xs, ys, Us - - def cut_padding_collate_fn_compilation_params(self, b): - """this function is called for training for every batch, order in b is store dict""" - # {'x': 'tensor', 'y': 'numpy', 'params': 'tensor', 'U': 'tensor', 'z': 'tensor'} - - z_0 = max(x[4][0] for x in b) # space - z_1 = max(x[4][1] for x in b) # time - - #round time to next multiple of cut_multiple for conv layers! - z_1 = (torch.ceil(z_1 / self.cut_multiple) * self.cut_multiple).to(torch.int32) - - bit_exp = 2**z_0 - - #--------------- - - x_sample = b[0][0] - xs = torch.zeros((len(b), z_0, z_1), dtype=x_sample.dtype, device=x_sample.device) - - y_sample = b[0][1] - ys = torch.zeros((len(b), *y_sample.shape), dtype=y_sample.dtype, device=y_sample.device) - - p_sample = b[0][2] - ps = torch.zeros((len(b), p_sample.shape[-2], z_1), dtype=p_sample.dtype, device=p_sample.device) - - U_sample = b[0][3] - Us = torch.zeros((len(b), 2, bit_exp, bit_exp), dtype=U_sample.dtype, device=U_sample.device) - - for i,(x,y,p,U,z) in enumerate(b): - xs[i] = x[:z_0, :z_1] - ys[i] = y - ps[i] = p[:, :z_1] - Us[i] = U[:, :bit_exp, :bit_exp] - - return xs, ys, ps, Us - - #----------------------------------- - - def get_dataloaders(self, batch_size, text_encoder, p_valid=0.1, y_on_cpu=False): - self.text_encoder = text_encoder - - excepts = [] - if y_on_cpu: excepts.append("y") - if self.params_config.dataset_to_gpu: self.to("cuda", 
excepts=excepts) - - x_proc, y_proc, *z_proc = Qc_Config_Dataset.x_y_preprocess(self, balance_max=None, shuffle=False) # ... z_proc is `'z' and all other 'c' - - if self.bucket_batch_size <= 0: - y_proc = self.caching(y_proc, y_on_cpu=y_on_cpu) - - else: - y_proc = self.caching([yi.reshape((-1)) for yi in y_proc], y_on_cpu=y_on_cpu) - y_proc = y_proc.reshape((-1, self.bucket_batch_size)) - - x_proc, y_proc, *z_proc = shuffle_tensor_dataset(x_proc, y_proc, *z_proc) #only possible after str y is cached as tensor - x, x_valid, y, y_valid, (z, z_valid) = self.valid_split(x_proc, y_proc, *z_proc, p_valid=p_valid) - - ds = TensorDataset(x, y, *z) - ds_valid = TensorDataset(x_valid, y_valid, *z_valid) - - collate_fn = getattr(self, self.collate_fn) - - if self.params_config.dataset_to_gpu: - train_loader = DataLoader(dataset=ds , batch_size=batch_size, shuffle=True, collate_fn=collate_fn) - valid_loader = DataLoader(dataset=ds_valid, batch_size=batch_size, shuffle=True, collate_fn=collate_fn) - - else: - train_loader = DataLoader(dataset=ds , batch_size=batch_size, shuffle=True, pin_memory=True, num_workers=12, collate_fn=collate_fn) - valid_loader = DataLoader(dataset=ds_valid, batch_size=batch_size, shuffle=True, pin_memory=True, num_workers=12, collate_fn=collate_fn) - - self.dataloaders = DataLoaders(train_loader, valid_loader) - return self.dataloaders - - #----------------------------------- - - @staticmethod - def from_datasets(datasets: list[Qc_Config_Dataset], balance_maxes: list, pad_constant, device: torch.device=torch.device("cpu"), bucket_batch_size=None, max_samples=None, **parameters): - assert pad_constant != 0, "can NOT be 0! and not any other gate!" 
- - xs = [] - ys = [] - zs = [] - cs = [] - - cut_multiple = Mixed_Cached_OpenClip_Dataset.cut_multiple - - max_qubits = max(dataset.params_config.num_of_qubits for dataset in datasets) - max_gates = max(dataset.params_config.max_gates for dataset in datasets) - max_gates = int(np.ceil(max_gates /cut_multiple) * cut_multiple) - - parameters["num_of_qubits"] = max_qubits - parameters["max_gates"] = max_gates - parameters["random_samples"] = sum([dataset.params_config.random_samples for dataset in datasets]) - parameters["min_gates"] = min([dataset.params_config.min_gates for dataset in datasets]) - parameters["comment"] = f"Generated with 'from_datasets' with {len(datasets)} datasets. Qubits: {[dataset.params_config.num_of_qubits for dataset in datasets]}." - parameters["pad_constant"] = pad_constant - parameters["bucket_batch_size"] = bucket_batch_size - - parameters["store_dict"] = {} - for dataset in datasets: - parameters["store_dict"] |= dataset.params_config.store_dict #needs python 3.9 for union of dict - parameters["store_dict"]["z"] = "tensor" #add special item - - if isinstance(max_samples, int): - max_samples = [max_samples] * len(datasets) - else: - assert isinstance(max_samples, (list, np.ndarray)) - max_samples = np.array(max_samples, dtype=int) - - if isinstance(balance_maxes, int): - balance_maxes = [balance_maxes] * len(datasets) - else: - assert isinstance(balance_maxes, (list, np.ndarray)) - balance_maxes = np.array(balance_maxes, dtype=int) - - for i, (dataset, balance_max) in tqdm(enumerate(zip(datasets,balance_maxes)), total=len(datasets)): - # do x_y_preprocess now, we can't balance all together with mixed conditions - - dataset = dataset.to(device) - - x, y, *c = dataset.x_y_preprocess(balance_max=balance_max, max_samples=max_samples[i], shuffle=True) - x = x.to(device) # [b, s, t] - - print(f" - dataset size after balancing {x.shape[0]}") - - #------- - # store original size - z = torch.zeros((x.shape[0], 2), device=device, 
dtype=torch.int32) - z[:, 0] = max(dataset.params_config.num_of_qubits, 1) - - red_x = torch.sum(x.abs(), dim=1) # [b, t] .. collaps the zeros to get circuit length - z[:, 1] = torch.count_nonzero(red_x, dim=1) # [b] - z[z[:, 1]==0, 1] = 1 # make sure we don*t have 0, so we cheat and set it to 1 (there's only 1 unique zero gate circuit anyways). Needed for padding attn mask - - for i in range(x.shape[0]): - x[i, z[i, 0]:, :] = pad_constant - x[i, :, z[i, 1]:] = pad_constant - - z[:, 1] = (torch.ceil(z[:, 1] / cut_multiple) * cut_multiple).to(torch.int32) #for cut needs multiple - - #------- - # now pad x, padding is defined from last dim forward! - pad = (0, max_gates-dataset.params_config.max_gates, 0, max_qubits-dataset.params_config.num_of_qubits) - x = F.pad(x, pad, "constant", pad_constant) - - # if c is missing something of the union we set it to a zero tensor - for k,v in parameters["store_dict"].items(): - if k != "x" and k != "y" and k != "z": - - if k not in dataset.params_config.store_dict: - empty_tensor = torch.zeros((1,), device=device) - - if k == "U": #scetchy hardcoded for compilation - empty_tensor = torch.zeros((x.shape[0], 2, 1, 1), device=device) # unitary is [b, Re/Im, 2^n, 2^n] - - assert len(c) == 0 - c.append(empty_tensor) #scetchy bcs if c is not empty we could break ordering!!! 
- - #combine datasets - xs.append(x.cpu()) - ys.append(y) - zs.append(z) - cs.append([*c]) - - dataset = dataset.to("cpu") #helps with gpu mem overflowing - #----------------- - - has_U = "U" in parameters["store_dict"] - has_p = "params" in parameters["store_dict"] - - if bucket_batch_size > 0: - collate_fn_name = Mixed_Cached_OpenClip_Dataset.cut_padding_Bucket_collate_fn.__name__ - if has_U: - collate_fn_name = Mixed_Cached_OpenClip_Dataset.cut_padding_Bucket_collate_fn_compilation.__name__ - if has_p: - collate_fn_name = Mixed_Cached_OpenClip_Dataset.cut_padding_Bucket_collate_fn_compilation_params.__name__ - - else: - collate_fn_name = Mixed_Cached_OpenClip_Dataset.cut_padding_collate_fn.__name__ - if has_U: - collate_fn_name = Mixed_Cached_OpenClip_Dataset.cut_padding_collate_fn_compilation.__name__ - if has_p: - collate_fn_name = Mixed_Cached_OpenClip_Dataset.cut_padding_collate_fn_compilation_params.__name__ - - parameters["collate_fn"] = collate_fn_name - - #----------------- - if bucket_batch_size > 0: - for i, (xi,yi,zi, ci) in enumerate(zip(xs, ys, zs, cs)): #cut rest of batch - b_mult = int(np.floor(xi.shape[0] / bucket_batch_size) * bucket_batch_size) - - xs[i] = xi[None, :b_mult].reshape((b_mult//bucket_batch_size, bucket_batch_size, *xi.shape[1:])) - zs[i] = zi[None, :b_mult].reshape((b_mult//bucket_batch_size, bucket_batch_size, *zi.shape[1:])) - - t = parameters["store_dict"]["y"] - if v == "tensor" or v == "numpy": - ys[i] = yi[None, :b_mult].reshape((b_mult//bucket_batch_size, bucket_batch_size, *yi.shape[1:])) - else: raise NotImplementedError("") - - #---- - #For U, etc - add_ind = 0 - for k,v in parameters["store_dict"].items(): - if k != "x" and k != "y" and k != "z": - if v == "tensor" or v == "numpy": - cs[i][add_ind] = ci[add_ind][None, :b_mult].reshape((b_mult//bucket_batch_size, bucket_batch_size, *ci[add_ind].shape[1:])) - else: raise NotImplementedError("") - add_ind += 1 - - x = torch.cat(xs) - y = ys # torch.cat(ys) is wrong, y is 
list of numpy or str!! not a tensor - z = torch.cat(zs) - c = cs - - #----------------- - - mixed_Cached_OpenClip_Dataset = Mixed_Cached_OpenClip_Dataset(device, **parameters) - mixed_Cached_OpenClip_Dataset.x = x - mixed_Cached_OpenClip_Dataset.y = y - mixed_Cached_OpenClip_Dataset.z = z - - add_ind = 0 - for k,v in parameters["store_dict"].items(): - if k != "x" and k != "y" and k != "z": - - if v == "tensor" and k == "U": # hardcoded U padding !! - - n = sum([ci[add_ind].shape[0] for ci in c]) - if bucket_batch_size > 0: shape = (n, bucket_batch_size, 2, 2**max_qubits, 2**max_qubits) - else: shape = (n, 2, 2**max_qubits, 2**max_qubits) - - # allocating zeros is better memory wise than torch.cat(ci_s) and F.pad(ci, pad, "constant", 0) - mem = np.prod(shape) * c[0][add_ind].element_size() * 1e-9 - print(f"[INFO]: allocate memory for {k} {shape} on {c[0][add_ind].device} approx. {mem:.3f} GB") - ci_s = torch.zeros(shape, device=c[0][add_ind].device) - - run_i = 0 - for i,ci in enumerate(c): - ci = ci[add_ind] - if bucket_batch_size > 0: ci_s[run_i:run_i+ci.shape[0], :, :, :ci.shape[-2], :ci.shape[-1]] = ci - else: ci_s[run_i:run_i+ci.shape[0], :, :ci.shape[-2], :ci.shape[-1]] = ci - run_i += ci.shape[0] - - elif v == "tensor" and k == "params": # hardcoded paramter padding !! - - max_params = max(ci[add_ind].shape[-2] for ci in c) - - n = sum(ci[add_ind].shape[0] for ci in c) - if bucket_batch_size > 0: shape = (n, bucket_batch_size, max_params, max_gates) - else: shape = (n, max_params, max_gates) - - # allocating zeros is better memory wise than torch.cat(ci_s) and F.pad(ci, pad, "constant", 0) - mem = np.prod(shape) * c[0][add_ind].element_size() * 1e-9 - print(f"[INFO]: allocate memory for {k} {shape} on {c[0][add_ind].device} approx. 
{mem:.3f} GB") - ci_s = torch.zeros(shape, device=c[0][add_ind].device) - - run_i = 0 - for i,ci in enumerate(c): - ci = ci[add_ind] - if bucket_batch_size > 0: ci_s[run_i:run_i+ci.shape[0], :, :ci.shape[-2], :ci.shape[-1]] = ci - else: ci_s[run_i:run_i+ci.shape[0], :ci.shape[-2], :ci.shape[-1]] = ci - run_i += ci.shape[0] - - elif v == "numpy": raise NotImplementedError("") - else: raise NotImplementedError("") - - setattr(mixed_Cached_OpenClip_Dataset, str(k), ci_s) - add_ind += 1 - - return mixed_Cached_OpenClip_Dataset - - #------------------------------------ - - # def plot_example(self): print("plot_example not implemented for Mixed_Cached_OpenClip_Dataset") - # def plot_distribution(self): print("plot_distribution not implemented for Mixed_Cached_OpenClip_Dataset") - - @staticmethod - def from_config_file(config_path, device: torch.device, save_path: str=None): - config = load_config(config_path) - config["target"] = class_to_str(Mixed_Cached_OpenClip_Dataset) - return Config_Dataset.from_config(config, device, save_path) diff --git a/genQC/dataset/qc_dataset.py b/genQC/dataset/qc_dataset.py deleted file mode 100644 index 077004d..0000000 --- a/genQC/dataset/qc_dataset.py +++ /dev/null @@ -1,170 +0,0 @@ -# AUTOGENERATED! DO NOT EDIT! File to edit: ../../src/dataset/qc_dataset.ipynb. 
- -# %% auto 0 -__all__ = ['Qc_Config_Dataset_config', 'Qc_Config_Dataset'] - -# %% ../../src/dataset/qc_dataset.ipynb 3 -from ..imports import * -from .config_dataset import Config_Dataset, Config_Dataset_config -from ..config_loader import * -from .dataset_helper import * -from ..platform.qcircuit_dataset_construction import decode_circuit -from ..platform.simulation.qcircuit_sim import schmidt_rank_vector, instruction_name_to_qiskit_gate -import qiskit.quantum_info as qi - -# %% ../../src/dataset/qc_dataset.ipynb 4 -@dataclass -class Qc_Config_Dataset_config(Config_Dataset_config): - optimized: bool - dataset_to_gpu: bool - random_samples: int - num_of_qubits: int - min_gates: int - max_gates: int - gate_pool: list[str] - -# %% ../../src/dataset/qc_dataset.ipynb 5 -class Qc_Config_Dataset(Config_Dataset): - """Dataset for quantum circuits, access `gate_pool` directly and all other paras with `.params_config`""" - - req_params = [f.name for f in dataclasses.fields(Qc_Config_Dataset_config)] - add_balance_fn = None - - def __init__(self, device: torch.device=torch.device("cpu"), **parameters): - super().__init__(device, **parameters) - self.gate_pool = parameters["gate_pool"] #[get_obj_from_str(gate) for gate in parameters["gate_pool"]] - - @property - def params_config(self): - params_config = super().params_config - #params_config["gate_pool"] = [class_to_str(gate) for gate in params_config["gate_pool"]] - params_config = Qc_Config_Dataset_config(**params_config) - return params_config - - #---------------------------- - - def x_y_preprocess(self, balance_max=None, shuffle=False, max_samples=None): - #params_config = self.params_config - #if params_config.dataset_to_gpu: self.to("cuda") - - z_proc = [] - for k,v in self.store_dict.items(): - if k != "x" and k != "y": - z_proc.append(getattr(self, k)) - - x_proc, y_proc = self.x, self.y - - #--------------------- - if shuffle: - x_proc, y_proc, *z_proc = shuffle_tensor_dataset(x_proc, y_proc, *z_proc) - - if 
exists(max_samples): - x_proc = x_proc[:max_samples] - y_proc = y_proc[:max_samples] - z_proc = (iz[:max_samples] for iz in z_proc) - - #--------------------- - t = self.store_dict["y"] - if exists(balance_max): - if t == "tensor" or t == "numpy": x_proc, y_proc, *z_proc = balance_tensor_dataset(x_proc, y_proc, *z_proc, make_unique=True, - samples=balance_max, add_balance_fn=self.add_balance_fn) - else: print(f"[WARNING]: Unsupported y type: `{t}`. Not balancing dataset!") - else: print(f"[INFO]: Not balancing dataset! {balance_max=}") - - #--------------------- - if shuffle: - x_proc, y_proc, *z_proc = shuffle_tensor_dataset(x_proc, y_proc, *z_proc) - - return x_proc, y_proc, *z_proc - - def valid_split(self, x, y, *z, p_valid=0.1): - splits = max(int(x.shape[0] * p_valid), 1) - x, x_valid = x[splits:].clone(), x[:splits].clone() - - t = self.store_dict["y"] - if t == "tensor" : y, y_valid = y[splits:].clone(), y[:splits].clone() - elif t == "numpy": y, y_valid = y[splits:] , y[:splits] - - else: raise NotImplementedError("Not implemented") - - try: - z = list(iz[splits:].clone() for iz in z) - z_valid = list(iz[:splits].clone() for iz in z) - except: - z = list(iz[splits:] for iz in z) - z_valid = list(iz[:splits] for iz in z) - - return x, x_valid, y, y_valid, (z, z_valid) - - def get_dataloaders(self, batch_size, p_valid=0.1, balance_max=None, max_samples=None, y_on_cpu=False): - - excepts = [] - if y_on_cpu: excepts.append("y") - if self.params_config.dataset_to_gpu: self.to("cuda", excepts=excepts) - - x_proc, y_proc, *z_proc = self.x_y_preprocess(balance_max=balance_max, max_samples=max_samples) - x, x_valid, y, y_valid, (z, z_valid) = self.valid_split(x_proc, y_proc, *z_proc, p_valid=p_valid) - - ds = TensorDataset(x, y, *z) - ds_valid = TensorDataset(x_valid, y_valid, *z_valid) - - if self.params_config.dataset_to_gpu: - train_loader = DataLoader(dataset=ds , batch_size=batch_size, shuffle=True) - valid_loader = DataLoader(dataset=ds_valid, 
batch_size=batch_size, shuffle=True) - - else: - train_loader = DataLoader(dataset=ds , batch_size=batch_size, shuffle=True, pin_memory=True, num_workers=12) - valid_loader = DataLoader(dataset=ds_valid, batch_size=batch_size, shuffle=True, pin_memory=True, num_workers=12) - - self.dataloaders = DataLoaders(train_loader, valid_loader) - return self.dataloaders - - #---------------------------- - - def plot_example(self): - params_config = self.params_config - enc_tensor = self.x[0] - - while enc_tensor.dim()>2: enc_tensor=enc_tensor[0] - - params = None - if hasattr(self, "params"): params=self.params[0] - - if isinstance(self.gate_pool[0], str): - gate_pool = [instruction_name_to_qiskit_gate(gate) for gate in self.gate_pool] - else: - gate_pool = self.gate_pool - - qc = decode_circuit(enc_tensor, gate_pool, params_tensor=params) - - t = self.store_dict["y"] - if t == "tensor" : label = self.y[0].cpu().tolist() - elif t == "tensor_list": - print("Not implemented") - return - else : - label = self.y[0]#.tolist() - while len(label.shape)>0: label=label[0] - - print(f"Label: ``{label}`` SRV is: {schmidt_rank_vector(qi.DensityMatrix(qc))}") - display(qc.draw("mpl", plot_barriers=False)) - - def plot_distribution(self): - if hasattr(self, "dataloaders"): x, y, *z = self.dataloaders.train.dataset.tensors - else: x, y = self.x, self.y - - t = self.store_dict["y"] - if t == "tensor" : data={"svr":[iy for iy in y.cpu().tolist()]} - elif t == "numpy": data={"svr":[iy for iy in y.tolist()]} - else: # list tensor_list - print("Not implemented") - return - - print("Train dataset (x, y):", x.shape, y.shape) - print("Train uniques x :", torch.unique(x, dim=0).shape) - - #real data distribution - df = pd.DataFrame(data) - cnts = df['svr'].value_counts(normalize=True) - for n,v in zip(cnts.index, cnts.values): print(f"{n}: {v*100:.1f}%") - ax = df['svr'].value_counts().plot(kind='bar') diff --git a/genQC/imports.py b/genQC/imports.py index 4d01c4f..a5dc530 100644 --- 
a/genQC/imports.py +++ b/genQC/imports.py @@ -4,14 +4,19 @@ #------------------------------------ # Python -import math, itertools, functools, copy, asyncio, time, importlib, datetime, importlib, os, dataclasses, platform +import math, itertools, functools, copy, asyncio, time, importlib, datetime, importlib, \ + os, dataclasses, platform, sys, subprocess, pathlib, ast, weakref, enum, abc, \ + typing, random + from datetime import datetime from PIL import Image from dataclasses import dataclass, asdict, is_dataclass -from typing import Union, Optional, TypeVar, Callable, Any +from typing import Union, Optional, TypeVar, Callable, Any, List, Tuple, Iterable, Sequence + import numpy as np -import pandas as pd +np.set_printoptions(edgeitems=40, linewidth=200, formatter=dict(float=lambda x: "%.3g" % x)) + import scipy import matplotlib.pyplot as plt @@ -21,14 +26,12 @@ import torch import torch.nn as nn import torch.nn.functional as F -import torchvision -from torchvision import datasets from torch.utils.data import DataLoader, TensorDataset torch.set_printoptions(linewidth=200) #------------------------------------ -# runtime +# Runtime def in_colab(): "Check if the code is running in Google Colaboratory" @@ -52,16 +55,31 @@ def in_notebook(): IN_NOTEBOOK = in_notebook() -if IN_NOTEBOOK: from tqdm.notebook import trange, tqdm -else: from tqdm import trange, tqdm +if IN_NOTEBOOK: + from tqdm.notebook import trange, tqdm +else: + from tqdm import trange, tqdm #------------------------------------ -# python commons +# Python commons + +from inspect import isfunction, ismethod -def exists(val): return val is not None -def default(val, d): - if exists(val): return val - return d() if isfunction(d) else d +def exists(val): + return val is not None + +def not_exists(val): + return val is None + +def default(val, default_value): + if exists(val): + return val + return default_value() if isfunction(default_value) else default_value #------------------------------------ -# .... 
+# Fail-safe + +if not IN_NOTEBOOK: + def display(*args, **kwargs): + pass + diff --git a/genQC/inference/eval_metrics.py b/genQC/inference/eval_metrics.py new file mode 100644 index 0000000..8d2d001 --- /dev/null +++ b/genQC/inference/eval_metrics.py @@ -0,0 +1,59 @@ +"""Different metrics used for evaluation.""" + +# AUTOGENERATED! DO NOT EDIT! File to edit: ../../src/inference/eval_metrics.ipynb. + +# %% auto 0 +__all__ = ['BaseNorm', 'UnitaryFrobeniusNorm', 'UnitaryInfidelityNorm'] + +# %% ../../src/inference/eval_metrics.ipynb 2 +from ..imports import * +from scipy.stats import unitary_group + +# %% ../../src/inference/eval_metrics.ipynb 4 +class BaseNorm(abc.ABC): + """Base class for norms.""" + + @staticmethod + @abc.abstractmethod + def distance(approx_U: torch.Tensor, target_U: torch.Tensor) -> torch.Tensor: raise NotImplementedError() + + @staticmethod + @abc.abstractmethod + def name() -> str: raise NotImplementedError() + +# %% ../../src/inference/eval_metrics.ipynb 6 +class UnitaryFrobeniusNorm(BaseNorm): + """ + The Frobenius-Norm for unitaries: defined in https://arxiv.org/pdf/2106.05649.pdf. + """ + + def __call__(self, approx_U: torch.Tensor, target_U: torch.Tensor) -> torch.Tensor: + return Unitary_FrobeniusNorm.distance(approx_U, target_U) + + @staticmethod + def distance(approx_U: torch.Tensor, target_U: torch.Tensor) -> torch.Tensor: + d = 0.5 * torch.linalg.matrix_norm((approx_U-target_U), ord="fro")**2 + return d + + @staticmethod + def name() -> str: return "Frobenius-Norm" + +# %% ../../src/inference/eval_metrics.ipynb 7 +class UnitaryInfidelityNorm(BaseNorm): + """ + The Infidelity-Norm for unitaries: defined in https://link.aps.org/accepted/10.1103/PhysRevA.95.042318, TABLE I: 1. 
+ """ + + def __call__(self, approx_U: torch.Tensor, target_U: torch.Tensor) -> torch.Tensor: + return Unitary_infidelity.distance(approx_U, target_U) + + @staticmethod + def distance(approx_U: torch.Tensor, target_U: torch.Tensor) -> torch.Tensor: + """Supports batched intputs, can be used as loss. Input shapes [b, n, n] or [n, n].""" + d = torch.matmul(torch.transpose(target_U, -2, -1).conj(), approx_U) # out [b, n, n] or [n, n] + d = torch.diagonal(d, offset=0, dim1=-2, dim2=-1).sum(-1) # do partial (batched) trace, out [b, n] or [n] + d = 1.0 - (d / target_U.shape[-1]).abs().square() + return d + + @staticmethod + def name() -> str: return "Unitary-Infidelity" diff --git a/genQC/inference/evaluation_helper.py b/genQC/inference/evaluation_helper.py new file mode 100644 index 0000000..6c6d6b5 --- /dev/null +++ b/genQC/inference/evaluation_helper.py @@ -0,0 +1,27 @@ +"""Handy helper functions for model evaluations.""" + +# AUTOGENERATED! DO NOT EDIT! File to edit: ../../src/inference/evaluation_helper.ipynb. 
+
+# %% auto 0
+__all__ = ['get_srvs', 'get_unitaries']
+
+# %% ../../src/inference/evaluation_helper.ipynb 2
+from ..imports import *
+from ..utils.async_fn import run_parallel_jobs
+from ..platform.simulation import Simulator
+
+# %% ../../src/inference/evaluation_helper.ipynb 4
+def get_srvs(simulator: Simulator, backend_obj_list: Sequence, n_jobs: int = 1, **kwargs):
+    """Returns SRVs of a given list of backend objects `backend_obj_list`."""
+    def _f(backend_obj):
+        return simulator.backend.schmidt_rank_vector(backend_obj, **kwargs)
+
+    return run_parallel_jobs(_f, backend_obj_list, n_jobs)
+
+# %% ../../src/inference/evaluation_helper.ipynb 6
+def get_unitaries(simulator: Simulator, backend_obj_list: Sequence, n_jobs: int = 1, **kwargs):
+    """Returns unitaries of a given list of backend objects `backend_obj_list`."""
+    def _f(backend_obj):
+        return simulator.backend.get_unitary(backend_obj, **kwargs)
+
+    return run_parallel_jobs(_f, backend_obj_list, n_jobs)
diff --git a/genQC/inference/infer_compilation.py b/genQC/inference/infer_compilation.py
deleted file mode 100644
index 23fae46..0000000
--- a/genQC/inference/infer_compilation.py
+++ /dev/null
@@ -1,369 +0,0 @@
-# AUTOGENERATED! DO NOT EDIT! File to edit: ../../src/inference/infer_compilation.ipynb.
- -# %% auto 0 -__all__ = ['split_U_to_tensor', 'get_new_unitary_indices', 'get_new_unitary_indices_batch', 'generate_comp_tensors', - 'check_correct_gates', 'check_correct_unitary_exact', 'check_correct_unitary_distance', 'get_gate_and_U_acc', - 'test_comp_acc', 'test_comp_acc_on_testset', 'test_comp_acc_on_rnd_samples', 'plot_hist_overview'] - -# %% ../../src/inference/infer_compilation.ipynb 2 -from ..imports import * -from ..util import * -from .infer_misc import * -from .infer_gate_hist import get_tensor_gate_length -import genQC.platform.qcircuit_dataset_construction as data_con -from ..dataset.dataset_helper import check_duplicates_in_dataset, uniquify_tensor_dataset, shuffle_tensor_dataset -from ..platform.simulation.qcircuit_sim import instruction_name_to_qiskit_gate - -from joblib import Parallel, delayed -import qiskit.quantum_info as qi - -# %% ../../src/inference/infer_compilation.ipynb 4 -def split_U_to_tensor(U: np.ndarray): - U_r, U_i = torch.Tensor(np.real(U)), torch.Tensor(np.imag(U)) - U = torch.stack([U_r, U_i], dim=0) - return U - -# %% ../../src/inference/infer_compilation.ipynb 5 -def get_new_unitary_indices(Us, dataset, silent=False): - if type(Us) == list: - Us = torch.stack([split_U_to_tensor(U) for U in Us]) #numpy to torch - - if not silent: - print(f"- Checking {Us.shape[0]} unitaries for duplicates in dataset, {torch.unique(Us, dim=0).shape[0]} given unitaries are unique.") - - # need to check uniques only - Us_dataset = torch.unique(dataset.U, dim=0) - - # to vecs - Us = torch.reshape(Us , [Us.shape[0] , -1]).to(Us_dataset.device) - Us_dataset = torch.reshape(Us_dataset, [Us_dataset.shape[0], -1]) - - #--------------- - - #check - comp = ( Us_dataset.unsqueeze(dim=0) == Us.unsqueeze(dim=1) ) # gives [num of Us, num of dataset, ch] - comp = torch.all(comp, dim=-1) # gives [num of Us, num of dataset] - - #reduce - comp = torch.all(comp==False, dim=1) # gives indices that ARE NOT in datset - # comp = torch.any(comp, dim=1) # gives 
indices that ARE in datset - - #get indices - comp = comp.nonzero().squeeze(dim=1) - - if not silent: - print(f"- Checked {Us.shape[0]} given unitaries with dataset. Returned indices of {comp.shape[0]} not in dataset unitaries.") - - return comp.cpu() - -# %% ../../src/inference/infer_compilation.ipynb 6 -def get_new_unitary_indices_batch(Us, dataset, auto_batch_size=32, silent=False, n_jobs=1): - if type(Us) == list: - Us = torch.stack([split_U_to_tensor(U) for U in Us]) #numpy to torch - - if not silent: - print(f"- Checking {Us.shape[0]} unitaries for duplicates in dataset, {torch.unique(Us, dim=0).shape[0]} given unitaries are unique.") - - #---------------------------------------- - samples = Us.shape[0] - num_batches = int(np.ceil(samples/auto_batch_size)) - - Us_chunks = Us.chunk(num_batches) - - indices = [] - - if n_jobs > 1: - f = lambda Us_chunk: get_new_unitary_indices(Us_chunk, dataset, silent=True) - indices = Parallel(n_jobs=n_jobs)(delayed(f)(Us_chunk) for Us_chunk in Us_chunks) - - else: - for Us_chunk in Us_chunks: - comp = get_new_unitary_indices(Us_chunk, dataset, silent=True) - indices.append(comp) - - indices = torch.cat(indices) - - if not silent: - print(f"- Checked {samples} given unitaries with dataset. 
Returned indices of {indices.shape[0]} not in dataset unitaries.") - - return indices - -# %% ../../src/inference/infer_compilation.ipynb 8 -def generate_comp_tensors(pipeline, prompt, U, samples, system_size, num_of_qubits, max_gates, g, no_bar=True, unique=False, auto_batch_size=512): - #---------------------- - #prepare condtions - - prompt = str(prompt) - c = pipeline.text_encoder.tokenize_and_push_to_device(prompt) - - U = U.unsqueeze(0).to(pipeline.device) - if system_size > num_of_qubits: - n = 2**system_size - pad = (0, n-U.shape[-1], 0, n-U.shape[-2]) - U = F.pad(U, pad, "constant", 0) - - #---------------------- - #sample and post process to tensor encodings - - batch_samples = [auto_batch_size] * int(np.floor(samples/auto_batch_size)) - if samples % auto_batch_size > 0: batch_samples.append(samples % auto_batch_size) - if len(batch_samples) == 0: batch_samples.append(samples) - - out_tensor_list = [] - for batch_sample in batch_samples: - - c_batch = c.repeat(batch_sample, *[1]*(c.dim()-1)) - U_batch = U.repeat(batch_sample, *[1]*(U.dim()-1)) - - latents = torch.randn((c_batch.shape[0], pipeline.model.clr_dim, system_size, max_gates)) - out_tensor = pipeline(latents=latents, c=c_batch, U=U_batch, g=g, no_bar=no_bar) - out_tensor_list.append(out_tensor) - - out_tensor = torch.cat(out_tensor_list) - # out_tensor = pipeline(latents=latents, c=c, U=U, g=g, no_bar=no_bar) - - out_tensor = pipeline.model.invert_clr(out_tensor) - out_tensor = out_tensor[:, :num_of_qubits] - - if unique: out_tensor = torch.unique(out_tensor, dim=0) - - if not no_bar: print(f"[INFO]: (generate_comp_tensors) Generated {'unique_cnt ' if unique else ''}{out_tensor.shape[0]} tensors") - - return out_tensor - -# %% ../../src/inference/infer_compilation.ipynb 10 -def check_correct_gates(qc, num_of_qubits, gate_pool, max_gates, allowed_gate_clrs): - tensor = data_con.encode_circuit(qc, num_of_qubits, data_con.gate_pool_to_gate_classes(gate_pool), max_gates) - gen_gate_clrs = 
torch.unique(tensor.abs()).tolist() - gate_corr = set(gen_gate_clrs).issubset(set(allowed_gate_clrs)) # are gates correct? - return gate_corr - -# %% ../../src/inference/infer_compilation.ipynb 11 -def check_correct_unitary_exact(qc, U): - is_U = qi.Operator(qc).to_matrix() - is_U = split_U_to_tensor(is_U) - - u_corr = torch.allclose(is_U, U) # is U correct? - return u_corr - -# %% ../../src/inference/infer_compilation.ipynb 12 -def check_correct_unitary_distance(qc, target_U, norms): - is_U = qi.Operator(qc).to_matrix() - is_U = torch.complex(torch.Tensor(np.real(is_U)), torch.Tensor(np.imag(is_U))) - - target_U = torch.complex(target_U[0], target_U[1]) - - d = [] - for norm in norms: - u_dist = norm.distance(is_U, target_U).item() - d.append(u_dist) - - return d - -# %% ../../src/inference/infer_compilation.ipynb 13 -def get_gate_and_U_acc(out_tensor, allowed_gate_clrs, U, gate_pool, num_of_qubits, max_gates, norms=[], no_bar=True): - - if isinstance(gate_pool[0], str): - gate_pool = [instruction_name_to_qiskit_gate(gate) for gate in gate_pool] - - #------------------------- - #decode - qc_list, error_cnt = convert_tensors_to_circuits(out_tensor, gate_pool) - if not no_bar: print(f"Error circuits: {error_cnt}") - - #------------------------- - acc = [] # combinded acc - gate_acc = [] # only gates acc - u_acc = [] # only U acc - u_norms = [] # list of tuple(norms) for every qc - - comb_corr_qc = [] - gate_corr_qc = [] - u_corr_qc = [] - - #only check circuits that are non-error! 
- for qc in qc_list: - - #--------------- - # check if in out_tensor only color that correspond to the condtion gate_pool - gate_corr = check_correct_gates(qc, num_of_qubits, gate_pool, max_gates, allowed_gate_clrs) - - #--------------- - # check unitary - u_corr = check_correct_unitary_exact(qc, U) # true or false - u_norm = check_correct_unitary_distance(qc, U, norms) # metrics values list - - #--------------- - acc.append(gate_corr and u_corr) - gate_acc.append(gate_corr) - u_acc.append(u_corr) - u_norms.append(u_norm) - - if gate_corr and u_corr: comb_corr_qc.append(qc) - if gate_corr: gate_corr_qc.append(qc) - if u_corr: u_corr_qc.append(qc) - - #average accuracy over sample - acc = np.mean(acc).item() - gate_acc = np.mean(gate_acc).item() - u_acc = np.mean(u_acc).item() - - return acc, gate_acc, u_acc, np.array(u_norms), error_cnt, comb_corr_qc, gate_corr_qc, u_corr_qc, qc_list - -# %% ../../src/inference/infer_compilation.ipynb 15 -def test_comp_acc(pipeline, samples, system_size, gate_pool, num_of_qubits, max_gates, g, str_cond_to_gate_indices: callable, Us, ys, train_dataset=None, norms=[]): - - if exists(train_dataset): - not_dups_ind = get_new_unitary_indices_batch(Us, train_dataset) - Us = [Us[i] for i in not_dups_ind] - ys = [ys[i] for i in not_dups_ind] - - #-------------------- - acc_s = [] - gate_acc_s = [] - u_acc_s = [] - u_norms_s = [] - uniques_cnt_s = [] - error_cnt_s = [] - - num_found_distinct_circuits_s = [] - - for U,y in tqdm(zip(Us,ys), total=len(Us)): - - allowed_gate_clrs = str_cond_to_gate_indices(y) - if isinstance(U, np.ndarray): - U = split_U_to_tensor(U) - - out_tensor = generate_comp_tensors(pipeline, y, U, samples, system_size, num_of_qubits, max_gates, g, unique=False) - outs = get_gate_and_U_acc(out_tensor, allowed_gate_clrs, U, gate_pool, num_of_qubits, max_gates, norms) - - acc, gate_acc, u_acc, u_norms, error_cnt, comb_corr_qc, gate_corr_qc, u_corr_qc, qc_list = outs - - if len(qc_list) > 0: - uniques_cnt = 
torch.stack([data_con.encode_circuit(qc, num_of_qubits, data_con.gate_pool_to_gate_classes(gate_pool), max_gates) for qc in qc_list]).unique(dim=0).shape[0] #how many uniques in sample (not counting erroro circuits) - # uniques_cnt = out_tensor.shape[0] - error_cnt #was with unique acc definition - else: - uniques_cnt = 0 - - if len(comb_corr_qc) > 0: - num_found_distinct_circuits = torch.stack([data_con.encode_circuit(qc, num_of_qubits, data_con.gate_pool_to_gate_classes(gate_pool), max_gates) for qc in comb_corr_qc]).unique(dim=0).shape[0] #how many distinct exact solutions we have - else: - num_found_distinct_circuits = 0 - - #-------------------- - acc_s.append(acc) - gate_acc_s.append(gate_acc) - u_acc_s.append(u_acc) - u_norms_s.append(u_norms) - uniques_cnt_s.append(uniques_cnt) - error_cnt_s.append(error_cnt) - num_found_distinct_circuits_s.append(num_found_distinct_circuits) - - solved_tasks = np.count_nonzero(num_found_distinct_circuits_s) - print(f"Solved {solved_tasks} correctly (at least one qc) that is {100*solved_tasks/len(num_found_distinct_circuits_s):0.2f}%") - - return acc_s, gate_acc_s, u_acc_s, u_norms_s, uniques_cnt_s, error_cnt_s, num_found_distinct_circuits_s - -# %% ../../src/inference/infer_compilation.ipynb 16 -def test_comp_acc_on_testset(pipeline, samples, num_of_U, system_size, gate_pool, num_of_qubits, max_gates, g, str_cond_to_gate_indices: callable, - prompt_mod: callable, test_dataset, train_dataset=None, norms=[], fix_y=None): - '''returns: acc_s, gate_acc_s, u_acc_s, uniques_cnt_s, error_cnt_s, num_found_circuits_s, task_qc_len_s''' - - if hasattr(test_dataset, "z"): # mixed dataset has padding but a z record! 
- Us, ys, zs = uniquify_tensor_dataset(test_dataset.U, test_dataset.y, test_dataset.z) - Us, ys, zs = shuffle_tensor_dataset(Us, ys, zs) - Us, ys, zs = Us[:num_of_U], ys[:num_of_U], zs[:num_of_U] - task_qc_len_s = zs[:, 1] - - else: # not mixed dataset has no padding - Us, ys, xs = uniquify_tensor_dataset(test_dataset.U, test_dataset.y, test_dataset.x) - Us, ys, xs = shuffle_tensor_dataset(Us, ys, xs) - Us, ys, xs = Us[:num_of_U], ys[:num_of_U], xs[:num_of_U] - task_qc_len_s = get_tensor_gate_length(xs) - - if exists(fix_y): ys = [fix_y for y in ys] - else: ys = [prompt_mod(y) for y in ys] - - - print(f"Picked {Us.shape[0]} unitaries from test set") - print(f"Sample task: {ys[0]}") - print(Us[0]) - print(xs[0]) - - out = test_comp_acc(pipeline, samples, system_size, gate_pool, num_of_qubits, max_gates, g, str_cond_to_gate_indices, Us.cpu(), ys, train_dataset, norms) - return *out, task_qc_len_s - -# %% ../../src/inference/infer_compilation.ipynb 17 -def test_comp_acc_on_rnd_samples(pipeline, samples, num_of_U, system_size, gate_pool, num_of_qubits, max_gates, g, str_cond_to_gate_indices: callable, - prompt_mod: callable, # takes a single prompt and returns it modified - rnd_min_gates, rnd_max_gates, - train_dataset=None, norms=[], fix_y=None): - '''returns: acc_s, gate_acc_s, u_acc_s, uniques_cnt_s, error_cnt_s, num_found_circuits_s, task_qc_len_s''' - - enc_t, ys, Us = data_con.gen_compilation_rndGates_dataset(num_of_U, num_of_qubits, rnd_min_gates, rnd_max_gates, gate_pool) - task_qc_len_s = get_tensor_gate_length(enc_t) #should give a complexity meassure, longer circuits have a more complex unitary to compile? 
- - if exists(fix_y): ys = [fix_y for y in ys] - else: ys = [prompt_mod(y) for y in ys] - - print(f"Sample task: {ys[0]}") - print(split_U_to_tensor(Us[0])) - print(enc_t[0]) - - out = test_comp_acc(pipeline, samples, system_size, gate_pool, num_of_qubits, max_gates, g, str_cond_to_gate_indices, Us, ys, train_dataset, norms) - return *out, task_qc_len_s - -# %% ../../src/inference/infer_compilation.ipynb 19 -def plot_hist_overview(out_tuple, num_of_samples_per_U, rnd_min_gates, rnd_max_gates, max_gates, num_of_qubits): - acc_s, gate_acc_s, u_acc_s, u_norms_s, uniques_cnt_s, error_cnt_s, num_found_circuits_s, task_qc_len_s = out_tuple - - if not exists(rnd_min_gates): rnd_min_gates = "" - if not exists(rnd_max_gates): rnd_max_gates = "" - - fig, axs = plt.subplots(2, 3, figsize=(13, 6.4), squeeze=False, constrained_layout=True) - fig.suptitle(f"Histogram of compilation accuracies (Unitary cnt={len(acc_s)}, samples_per_U={num_of_samples_per_U} {rnd_min_gates=} {rnd_max_gates=} {max_gates=} qubits={num_of_qubits})") - - n = 20 - density = False - bins = np.linspace(0,1, n+1) - - #----------------- - plt.sca(axs[0, 0]) - plt.title("Combined accuracy") - plt.xlabel(r"Accuracy") - plt.ylabel(r"Bin population" if density==False else "Accuracy distribution") - plt.hist(acc_s, density=density, bins=n*4) - - #----------------- - plt.sca(axs[0, 1]) - plt.title("Unitary accuracy") - plt.xlabel(r"Accuracy") - plt.hist(u_acc_s, density=density, bins=bins) - - #----------------- - plt.sca(axs[0, 2]) - plt.title("Gate accuracy") - plt.xlabel(r"Accuracy") - plt.hist(gate_acc_s, density=density, bins=bins) - - #----------------- - plt.sca(axs[1, 0]) - plt.title("Generated unique circuits") - plt.ylabel(r"Bin population" if density==False else "Number distribution") - plt.xlabel(r"Number of unique circuits") - plt.hist(uniques_cnt_s, density=density, bins=n) - - #----------------- - plt.sca(axs[1, 1]) - plt.title("Generated error circuits") - plt.xlabel(r"Number of error circuits") - 
plt.hist(error_cnt_s, density=density, bins=n) - - #----------------- - plt.sca(axs[1, 2]) - plt.title("Absolute number of distinct correct circuits") - plt.xlabel(r"Number of found circuits") - plt.hist(num_found_circuits_s, density=density, bins=n*4) - - #----------------- - plt.show() diff --git a/genQC/inference/infer_gate_hist.py b/genQC/inference/infer_gate_hist.py deleted file mode 100644 index 8c6b06b..0000000 --- a/genQC/inference/infer_gate_hist.py +++ /dev/null @@ -1,24 +0,0 @@ -# AUTOGENERATED! DO NOT EDIT! File to edit: ../../src/inference/infer_gate_hist.ipynb. - -# %% auto 0 -__all__ = ['get_tensor_gate_length', 'get_circuit_gate_length'] - -# %% ../../src/inference/infer_gate_hist.ipynb 2 -from ..imports import * - -# %% ../../src/inference/infer_gate_hist.ipynb 4 -def get_tensor_gate_length(clr_tensor, padding_token=0): - '''Careful with padding tokens!''' - assert clr_tensor.dim() == 3 #[b, s, t] - - collabsed_clr_tensor = (clr_tensor != padding_token).to(torch.int8) - red_clr_tensor = torch.sum(collabsed_clr_tensor, dim=1) # [b, t] - return torch.count_nonzero(red_clr_tensor, dim=1) # [b] - -# %% ../../src/inference/infer_gate_hist.ipynb 5 -def get_circuit_gate_length(qcs): - lengths = torch.zeros(len(qcs), dtype=int) - for i,qc in enumerate(qcs): - if hasattr(qc, "data"): - lengths[i] = len(qc.data) - return lengths diff --git a/genQC/inference/infer_misc.py b/genQC/inference/infer_misc.py deleted file mode 100644 index d6caf85..0000000 --- a/genQC/inference/infer_misc.py +++ /dev/null @@ -1,48 +0,0 @@ -# AUTOGENERATED! DO NOT EDIT! File to edit: ../../src/inference/infer_misc.ipynb. 
- -# %% auto 0 -__all__ = ['get_rnd_gatepool_subset', 'convert_tensors_to_circuits'] - -# %% ../../src/inference/infer_misc.ipynb 2 -from ..imports import * -from ..platform.qcircuit_dataset_construction import decode_circuit, gate_pool_to_gate_classes -from ..platform.simulation.qcircuit_sim import instruction_name_to_qiskit_gate - -# %% ../../src/inference/infer_misc.ipynb 4 -def get_rnd_gatepool_subset(gate_pool, min_sub_gate_pool_cnt=2): - rng = np.random.default_rng() - - n = len(gate_pool) + 1 - c_range = np.arange(n-1) - - sub_gate_pool_cnt = rng.integers(min_sub_gate_pool_cnt, n) - sub_gate_pool_ind = rng.choice(c_range, size=sub_gate_pool_cnt, replace=False) - sub_gate_pool = [gate_pool[ind] for ind in sub_gate_pool_ind] # pick random subeset of gates - - return sub_gate_pool - -# %% ../../src/inference/infer_misc.ipynb 6 -def convert_tensors_to_circuits(out_tensor, gate_pool, params_tensor=None, place_barrier=False): - if isinstance(gate_pool[0], str): - gate_pool = [instruction_name_to_qiskit_gate(gate) for gate in gate_pool] - - error_cnt = 0 - qc_list = [] - - if not exists(params_tensor): - params_tensor = [None]*out_tensor.shape[0] - - #TODO: para this loop - - for i,(enc_tensor,p) in enumerate(zip(out_tensor, params_tensor)): - try: - qc = decode_circuit(enc_tensor=enc_tensor, gate_pool=gate_pool, place_barrier=place_barrier, params_tensor=p) - - except Exception as e: - error_cnt += 1 - # print(e) - continue - - qc_list.append(qc) - - return qc_list, error_cnt diff --git a/genQC/inference/infer_srv.py b/genQC/inference/infer_srv.py deleted file mode 100644 index 5d9cc0f..0000000 --- a/genQC/inference/infer_srv.py +++ /dev/null @@ -1,512 +0,0 @@ -# AUTOGENERATED! DO NOT EDIT! File to edit: ../../src/inference/infer_srv.ipynb. 
- -# %% auto 0 -__all__ = ['get_all_srvs', 'generate_srv_tensors', 'convert_tensors_to_srvs', 'get_srv_accuracy', 'true_sample_bin_dist', - 'test_srv_clr_distribution_bin_samples', 'test_srv_clr_distribution', 'test_guidance_dep', - 'test_srv_acc_vs_length', 'test_srv_acc_vs_maxLength', 'test_srv_length_distribution', - 'plot_srv_clr_distribution_hist', 'plot_srv_clr_distribution_bin_accuracy', 'plot_guidance_dep', - 'plot_srv_acc_vs_length', 'plot_srv_acc_vs_maxLength'] - -# %% ../../src/inference/infer_srv.ipynb 2 -from ..imports import * -from .infer_misc import * -from .infer_gate_hist import get_circuit_gate_length -from ..platform.qcircuit_util import get_entanglement_bins -from ..platform.simulation.qcircuit_sim import schmidt_rank_vector -from .infer_compilation import generate_comp_tensors - -from joblib import Parallel, delayed -import qiskit.quantum_info as qi - -# %% ../../src/inference/infer_srv.ipynb 4 -def get_all_srvs(num_of_qubits): - srvs = [x for x in itertools.product(*([[1,2]]*num_of_qubits))] - srvs = np.array(srvs, dtype=int)[np.sum(srvs, axis=1)!=num_of_qubits+1].tolist() - srvs = sorted(srvs, key=lambda x: sum(x)) - return srvs - -# %% ../../src/inference/infer_srv.ipynb 5 -def generate_srv_tensors(pipeline, prompt, samples, system_size, num_of_qubits, max_gates, g, no_bar=True, unique=False, auto_batch_size=512): - if samples==0: - out_tensor = torch.zeros((0, system_size, max_gates)) - return out_tensor - - #---------------------- - #prepare condtions - - prompt = str(prompt) - c = pipeline.text_encoder.tokenize_and_push_to_device(prompt) - - #---------------------- - #sample and post process to tensor encodings - - batch_samples = [auto_batch_size] * int(np.floor(samples/auto_batch_size)) - if samples % auto_batch_size > 0: batch_samples.append(samples % auto_batch_size) - if len(batch_samples) == 0: batch_samples.append(samples) - - out_tensor_list = [] - for batch_sample in batch_samples: - - c_batch = c.repeat(batch_sample, 
*[1]*(c.dim()-1)) - - latents = torch.randn((c_batch.shape[0], pipeline.model.clr_dim, system_size, max_gates)) - out_tensor = pipeline(latents=latents, c=c_batch, g=g, no_bar=no_bar, enable_guidance=True) - out_tensor_list.append(out_tensor) - - out_tensor = torch.cat(out_tensor_list) - out_tensor = pipeline.model.invert_clr(out_tensor) - out_tensor = out_tensor[:, :num_of_qubits] - - if unique: out_tensor = torch.unique(out_tensor, dim=0) - - if not no_bar: print(f"[INFO]: (generate_srv_tensors) Generated {'unique_cnt ' if unique else ''}{out_tensor.shape[0]} tensors") - - return out_tensor - -# %% ../../src/inference/infer_srv.ipynb 7 -def convert_tensors_to_srvs(out_tensor, gate_pool, sort_srv=False, place_barrier=False, n_jobs=1): - qc_list, error_cnt = convert_tensors_to_circuits(out_tensor, gate_pool=gate_pool, place_barrier=place_barrier) - - srv_list = [] - - #--------------------------------------------- - # This is a bottle-neck for more qubits, speed up with async - - if n_jobs > 1: - assert sort_srv == False - - f = lambda qc: schmidt_rank_vector(qi.DensityMatrix(qc)) - # srv_list = Parallel(n_jobs=n_jobs, prefer="threads")(delayed(f)(qc) for qc in qc_list) #prefer="threads" - srv_list = Parallel(n_jobs=n_jobs)(delayed(f)(qc) for qc in qc_list) - - else: - for qc in qc_list: - srv = schmidt_rank_vector(qi.DensityMatrix(qc)) - - if sort_srv: srv = sorted(srv) - srv_list.append(srv) - - return qc_list, error_cnt, srv_list - -# %% ../../src/inference/infer_srv.ipynb 9 -def get_srv_accuracy(srv_list, target_srv): - if not isinstance(srv_list , (torch.Tensor, torch.IntTensor, torch.FloatTensor, torch.LongTensor)): srv_list = torch.tensor(srv_list) - if not isinstance(target_srv, (torch.Tensor, torch.IntTensor, torch.FloatTensor, torch.LongTensor)): target_srv = torch.tensor(target_srv, device=srv_list.device) - - srv_uniques, srv_uniques_cnt = torch.unique(srv_list, dim=0, return_counts=True) - - if srv_uniques.numel() == 0: return 0 - - comp = 
torch.all(target_srv==srv_uniques, dim=1) - index = comp.nonzero().squeeze() - - if index.dim() == 0: correct_srv_percentage = srv_uniques_cnt[index]/srv_uniques_cnt.sum() - else: correct_srv_percentage = 0 - - return correct_srv_percentage - -# %% ../../src/inference/infer_srv.ipynb 11 -def true_sample_bin_dist(samples_per_bin, bin_size): - true_samples = [max(samples_per_bin//bin_size, 1) for i in range(bin_size)] - - if samples_per_bin-sum(true_samples) > 0: - true_samples[0] += (samples_per_bin-sum(true_samples)) - - # assert sum(true_samples)==samples_per_bin - # assert len(true_samples)==bin_size - - # print(f"{true_samples=}") - - return true_samples - -# %% ../../src/inference/infer_srv.ipynb 12 -def test_srv_clr_distribution_bin_samples(pipeline, samples_per_bin, system_size, num_of_qubits, max_gates, g, gate_pool, silent=False, device="cpu", U=None, prompt_mod: callable=lambda c: c, - only_diag=False, n_jobs=1): - dist_srvs = get_all_srvs(num_of_qubits) - cond_srvs = dist_srvs - - values = torch.zeros((len(cond_srvs), len(dist_srvs)), device=device) - - #--------------------- - - ent_bins, ent_labels = get_entanglement_bins(num_of_qubits) - - i = 0 - - for ent_bin in tqdm(ent_bins, total=len(ent_bins)): - - true_samples = true_sample_bin_dist(samples_per_bin, len(ent_bin)) - - for ind,srv in tqdm(enumerate(ent_bin), total=len(ent_bin)): - if exists(U): out_tensor = generate_comp_tensors(pipeline, prompt_mod(srv), U, true_samples[ind], system_size, num_of_qubits, max_gates, g=g, unique=False) - else: out_tensor = generate_srv_tensors( pipeline, prompt_mod(srv), true_samples[ind], system_size, num_of_qubits, max_gates, g=g, unique=False) - - qc_list, error_cnt, svr_list = convert_tensors_to_srvs(out_tensor, gate_pool, n_jobs=n_jobs) - - if only_diag: - values[i, i] = get_srv_accuracy(svr_list, srv) - else: - for j, dist_srv in enumerate(dist_srvs): - values[i, j] = get_srv_accuracy(svr_list, dist_srv) - - if not silent: - print(f"{cond_srv}: unique_cnt 
{out_tensor.unique(dim=0).shape[0]} error_cnt {error_cnt} acc {values[i, i]:.2f}") - - i += 1 - - return values - -# %% ../../src/inference/infer_srv.ipynb 13 -def test_srv_clr_distribution(pipeline, samples_per_srv, system_size, num_of_qubits, max_gates, g, gate_pool, silent=False, device="cpu", U=None, prompt_mod: callable=lambda c: c, - dist_srvs=None, cond_srvs=None, only_diag=False, n_jobs=1): - if not exists(dist_srvs): - dist_srvs = get_all_srvs(num_of_qubits) - - if not exists(cond_srvs): - cond_srvs = dist_srvs - - values = torch.zeros((len(cond_srvs), len(dist_srvs)), device=device) - - #--------------------- - - for i, cond_srv in tqdm(enumerate(cond_srvs), total=len(cond_srvs)): - - if exists(U): out_tensor = generate_comp_tensors(pipeline, prompt_mod(cond_srv), U, samples_per_srv, system_size, num_of_qubits, max_gates, g=g, unique=False) - else: out_tensor = generate_srv_tensors( pipeline, prompt_mod(cond_srv), samples_per_srv, system_size, num_of_qubits, max_gates, g=g, unique=False) - - qc_list, error_cnt, svr_list = convert_tensors_to_srvs(out_tensor, gate_pool, n_jobs=n_jobs) - - if only_diag: - values[i, i] = get_srv_accuracy(svr_list, srv) - else: - for j, dist_srv in enumerate(dist_srvs): - values[i, j] = get_srv_accuracy(svr_list, dist_srv) - - if not silent: - print(f"{cond_srv}: unique_cnt {out_tensor.unique(dim=0).shape[0]} error_cnt {error_cnt} acc {values[i, i]:.2f}") - - return values - -# %% ../../src/inference/infer_srv.ipynb 14 -def test_guidance_dep(pipeline, srvs, samples, system_size, num_of_qubits, max_gates, gs, gate_pool, prompt_mod: callable=lambda c: c, U=None, n_jobs=1): - guidance_dep_out = [] - - for srv in srvs: - unique_percentage_list = [] - error_cnt_percentage_list = [] - correct_srv_percentage_list = [] - - for g in tqdm(gs): - if exists(U): out_tensor = generate_comp_tensors(pipeline, prompt_mod(srv), U, samples, system_size, num_of_qubits, max_gates, g=g, unique=False) - else: out_tensor = generate_srv_tensors( 
pipeline, prompt_mod(srv), samples, system_size, num_of_qubits, max_gates, g=g, unique=False) - - #--------------------------------- - #calculate the copy percentage, dataset and sample? - - unique_percentage = out_tensor.unique(dim=0).shape[0]/out_tensor.shape[0] - unique_percentage_list.append(unique_percentage) - - #--------------------------------- - #decode tensors, get srv - - qc_list, error_cnt, svr_list = convert_tensors_to_srvs(out_tensor, gate_pool, n_jobs=n_jobs) - error_cnt_percentage_list.append(error_cnt/out_tensor.shape[0]) - - #--------------------------------- - #record the correct number - - correct_srv_percentage = get_srv_accuracy(svr_list, srv) - correct_srv_percentage_list.append(correct_srv_percentage) - - guidance_dep_out.append((unique_percentage_list, error_cnt_percentage_list, correct_srv_percentage_list)) - - return guidance_dep_out - -# %% ../../src/inference/infer_srv.ipynb 15 -def test_srv_acc_vs_length(pipeline, samples, system_size, num_of_qubits, max_gates, g, gate_pool, prompt_mod: callable=lambda c: c, U=None, n_jobs=1): - ent_bins, ent_labels = get_entanglement_bins(num_of_qubits) - - ent_ls = [] - ent_accs = [] - ent_cnts = [] - - for ent_bin in tqdm(ent_bins, total=len(ent_bins)): - ls_acc = dict() #keep track over bins - ls_cnt = dict() - - true_samples = true_sample_bin_dist(samples_per_bin, len(ent_bin)) - - for ind,srv in enumerate(ent_bin): - if exists(U): out_tensor = generate_comp_tensors(pipeline, prompt_mod(srv), U, true_samples[ind], system_size, num_of_qubits, max_gates, g=g, unique=False) - else: out_tensor = generate_srv_tensors( pipeline, prompt_mod(srv), true_samples[ind], system_size, num_of_qubits, max_gates, g=g, unique=False) - - qc_list, error_cnt, svr_list = convert_tensors_to_srvs(out_tensor, gate_pool, n_jobs=n_jobs) - - lengths = get_circuit_gate_length(qc_list) #work in qc space to check only non errors - - if lengths.numel() < 1: continue - - for l in lengths.unique(): #range(lengths.min(), 
lengths.max()): - indices = (lengths==l).nonzero().squeeze() - - if indices.numel() > 0: - srvs = torch.tensor(svr_list)[indices] - if indices.dim() == 0: srvs = srvs.unsqueeze(0) - - acc = get_srv_accuracy(srvs, srv) - - #---------- - t = ls_acc.pop(l, []) - t.append(acc) - ls_acc[l] = t - - t = ls_cnt.pop(l, 0) - t += srvs.shape[0] - ls_cnt[l] = t - - ls = sorted(ls_acc) # sorted keys (l) - accs = [np.mean(ls_acc[l]) for l in ls] # average acc per l - cnts = [np.sum(ls_cnt[l]) for l in ls] - - ent_ls.append(ls) - ent_accs.append(accs) - ent_cnts.append(cnts) - - return ent_ls, ent_accs, ent_cnts, ent_labels - -# %% ../../src/inference/infer_srv.ipynb 16 -def test_srv_acc_vs_maxLength(pipeline, samples_per_bin, system_size, num_of_qubits, max_gates_list, g, gate_pool, prompt_mod: callable=lambda c: c, U=None, n_jobs=1): - ent_bins, ent_labels = get_entanglement_bins(num_of_qubits) - - ent_accs = [] - for ent_bin in tqdm(ent_bins, total=len(ent_bins)): - - true_samples = true_sample_bin_dist(samples_per_bin, len(ent_bin)) - - bin_accs = [] - for max_gates in max_gates_list: - - accs = [] - for ind,srv in enumerate(ent_bin): - if exists(U): out_tensor = generate_comp_tensors(pipeline, prompt_mod(srv), U, true_samples[ind], system_size, num_of_qubits, max_gates, g=g, unique=False) - else: out_tensor = generate_srv_tensors( pipeline, prompt_mod(srv), true_samples[ind], system_size, num_of_qubits, max_gates, g=g, unique=False) - - qc_list, error_cnt, svr_list = convert_tensors_to_srvs(out_tensor, gate_pool, n_jobs=n_jobs) - - acc = get_srv_accuracy(svr_list, srv) - - accs.append(acc) - bin_accs.append(np.mean(accs)) - ent_accs.append(bin_accs) - - return ent_accs, ent_labels - -# %% ../../src/inference/infer_srv.ipynb 17 -def test_srv_length_distribution(pipeline, samples_per_bin, system_size, num_of_qubits, max_gates, g, gate_pool, silent=False, U=None, prompt_mod: callable=lambda c: c, n_jobs=1): - ent_bins, ent_labels = get_entanglement_bins(num_of_qubits) - - ls = 
[] - - for ent_bin in tqdm(ent_bins, total=len(ent_bins)): - - true_samples = true_sample_bin_dist(samples_per_bin, len(ent_bin)) - - bin_ls = [] - - for ind,srv in tqdm(enumerate(ent_bin), total=len(ent_bin)): - if exists(U): out_tensor = generate_comp_tensors(pipeline, prompt_mod(srv), U, true_samples[ind], system_size, num_of_qubits, max_gates, g=g, unique=False) - else: out_tensor = generate_srv_tensors( pipeline, prompt_mod(srv), true_samples[ind], system_size, num_of_qubits, max_gates, g=g, unique=False) - - qc_list, error_cnt, svr_list = convert_tensors_to_srvs(out_tensor, gate_pool, n_jobs=n_jobs) - - qc_ls = get_circuit_gate_length(qc_list) #tensor [qcs] - bin_ls.append(qc_ls) - - ls.append(torch.cat(bin_ls)) - - return ls #[ent_bins, num_of_non_err_samples] - -# %% ../../src/inference/infer_srv.ipynb 19 -def plot_srv_clr_distribution_hist(values, samples, num_of_qubits, save=False, dist_srvs=None, cond_srvs=None): - if not exists(dist_srvs): - dist_srvs = get_all_srvs(num_of_qubits) - - if not exists(cond_srvs): - cond_srvs = dist_srvs - - n = len(dist_srvs) - values = values.cpu() - - fig = plt.figure(figsize=(12,12))#, constrained_layout=True) - plt.title(f"Generated samples per condition: {samples}") - plt.ylabel(r"Condition") - plt.xlabel(r"Generated distribution") - - #-------------------------------------------- - if num_of_qubits < 6 or 0: - plt.yticks(range(len(cond_srvs)), [str(b) for b in cond_srvs]) - plt.xticks(range(n), [str(b) for b in dist_srvs], rotation=90 if n>3 else 0) - else: - plt.yticks([]) - plt.xticks([]) - - #-------------------------------------------- - plt.imshow(values, vmin=0, vmax=1) - # plt.imshow(values.cpu(), norm="log") - plt.colorbar() - - #-------------------------------------------- - #print acc - x_shift = 1*40 if num_of_qubits==5 else 0 - if num_of_qubits < 4: - for i in range(n): - plt.text(x_shift+i, i, f"{values[i, i]:0.2f}", color='black', ha='center', va='center', fontsize="large") - - 
#-------------------------------------------- - #draw rects - off = 0.5 - for i in range(2, num_of_qubits): - w = scipy.special.comb(num_of_qubits, i, exact=True) - plt.gca().add_patch(plt.Rectangle((off, off), w, w, ls="-", ec="white", fc="none")) #, transform=plt.gca().transAxes)) - off += w - - #-------------------------------------------- - #print average acc for rects - off = 0 - for i in [0]+list(range(2, num_of_qubits+1)): - w = scipy.special.comb(num_of_qubits, i, exact=True) - d1 = off - d2 = d1 + w - mean_acc = values[d1:d2, d1:d2].diag().mean() - plt.text(off+2*w/3, off+w/7, f"{mean_acc:0.2f}", color='red', ha='center', va='center', fontsize="x-large") - off += w - - #-------------------------------------------- - if save: - plt.savefig('plot_srv_clr_distribution_hist.svg', bbox_inches='tight') - - plt.show() - -# %% ../../src/inference/infer_srv.ipynb 20 -def plot_srv_clr_distribution_bin_accuracy(values, samples, num_of_qubits, save=False, plot_percentages=False, trainSet_srv=None): - values = values.cpu().diag() - ent_bins, ent_labels = get_entanglement_bins(num_of_qubits) - - n = sum(len(srvs) for srvs in ent_bins) - x = np.arange(n) # the label locations - width = 0.8 - - #------------------------ - fig = plt.figure(figsize=(6.6, 4), constrained_layout=True) - # plt.title(f"Generated samples per condition: {samples}", fontsize=14) - plt.ylabel(r"Accuracy", fontsize=25) - plt.yticks(fontsize=14) - plt.xticks([]) - - i = 0 - for j,(label, srvs) in enumerate(zip(ent_labels, ent_bins)): - label = f"{sum(srvs[0])-num_of_qubits}" - incre = len(srvs) - rects = plt.bar(x[i:i+incre], values[i:i+incre], width, label=label) - i += incre - if plot_percentages: plt.gca().bar_label(rects, padding=3, fmt="%0.2f") - - ncols = len(ent_labels)//2+1 if len(ent_labels) > 5 else len(ent_labels) - leg1 = plt.legend(loc="lower center", fontsize=14, ncols=ncols, title="# of entangled qubits:", title_fontsize=14,bbox_to_anchor=(0.5, 1.01)) - ax = fig.add_artist(leg1) - - if 
exists(trainSet_srv): - if trainSet_srv.shape[-1]==num_of_qubits: - srvs = [] - for s in ent_bins: srvs.extend(s) - - dataset_percentages = [get_srv_accuracy(trainSet_srv, srv).cpu() for srv in srvs] - xmin = x - width*0.55 - xmax = x + width*0.55 - ag = plt.hlines(dataset_percentages, xmin, xmax, label="Random sampling" , color="black", linestyle="-", linewidths=2.3) - - plt.legend(handles=[ag], fontsize=14, frameon=False) - - ymin, ymax = plt.ylim() - plt.ylim(ymin, ymax+0.04) - - if save: - plt.savefig(f"plot_srv_clr_distribution_bin_accuracy.svg", bbox_inches='tight', transparent=True) - - plt.show() - -# %% ../../src/inference/infer_srv.ipynb 21 -def plot_guidance_dep(srvs, gs, guidance_dep_out, samples, save=False): - assert len(srvs) == len(guidance_dep_out) - - n = len(srvs) - fig, axs = plt.subplots(1, n, figsize=(12, 5), squeeze=False, constrained_layout=True) - fig.suptitle(fr"Generated {samples} samples per $g$ and SRV") - - for i,srv in enumerate(srvs): - unique_percentage_list, error_cnt_percentage_list, correct_srv_percentage_list = guidance_dep_out[i] - - #--------------------------------- - #plot now gs vs the numbers - - plt.sca(axs[0, i]) - plt.xlabel(r"Guidance scale $g$") - plt.title(f"SRV = {srv}") - plt.plot(gs, unique_percentage_list , label="Unique tensors percentage") - plt.plot(gs, error_cnt_percentage_list , label="Error circuits percentage") - plt.plot(gs, correct_srv_percentage_list, label="Correct SRV percentage") - - if i == (n-1): plt.legend() - - if save: - plt.savefig("plot_guidance_dep.svg", bbox_inches='tight', transparent=True) - - plt.show() - -# %% ../../src/inference/infer_srv.ipynb 22 -def plot_srv_acc_vs_length(ent_ls, ent_accs, ent_cnts, ent_labels, samples, plot_dist=True, save=False): - fig, axs = plt.subplots(2 if plot_dist else 1, 1, figsize=(12, 7), squeeze=False, constrained_layout=True) - - #------------------- - plt.sca(axs[0,0]) - plt.title(f"Generated samples per entanglement: {samples}") - 
plt.ylabel("Accuracy") - plt.xlabel("Gate number") - for i,ent_label in enumerate(ent_labels): - plt.plot(ent_ls[i], ent_accs[i], label=f"{ent_label}") - plt.legend() - - #------------------- - if plot_dist: - plt.sca(axs[1,0]) - plt.title(f"Used samples per l to calculate accuracy, should match gate distribution") - plt.ylabel("Used samples") - plt.xlabel("Gate number") - for i,ent_label in enumerate(ent_labels): - plt.plot(ent_ls[i], ent_cnts[i], label=f"{ent_label}") - plt.legend() - - #------------------- - if save: - plt.savefig('plot_srv_acc_vs_length.svg', bbox_inches='tight') - - plt.show() - -# %% ../../src/inference/infer_srv.ipynb 23 -def plot_srv_acc_vs_maxLength(ent_accs, ent_labels, max_gates_list, samples, plot_dist=True, save=False): - fig = plt.figure(figsize=(12, 4), constrained_layout=True) - - plt.title(f"Generated samples per maxGates per entanglement: {samples}") - plt.ylabel("Accuracy") - plt.xlabel("Max number of gates / tensor size") - plt.xticks(max_gates_list) - - for ent_acc,ent_label in zip(ent_accs, ent_labels): - plt.plot(max_gates_list, ent_acc, label=f"{ent_label}") - - plt.legend() - - if save: - plt.savefig('plot_srv_acc_vs_length.svg', bbox_inches='tight') - - plt.show() diff --git a/genQC/inference/sampling.py b/genQC/inference/sampling.py new file mode 100644 index 0000000..497e81b --- /dev/null +++ b/genQC/inference/sampling.py @@ -0,0 +1,340 @@ +"""Sampling functions for model inference.""" + +# AUTOGENERATED! DO NOT EDIT! File to edit: ../../src/inference/sampling.ipynb. 
+ +# %% auto 0 +__all__ = ['get_batch_samples', 'batched_sampling', 'prepare_prompts', 'generate_tensors', 'generate_compilation_tensors', + 'decode_tensors_to_backend'] + +# %% ../../src/inference/sampling.ipynb 2 +from ..imports import * +from ..utils.async_fn import run_parallel_jobs +from ..platform.simulation import Simulator +from ..platform.tokenizer.base_tokenizer import BaseTokenizer +from ..pipeline.pipeline import Pipeline + +# %% ../../src/inference/sampling.ipynb 4 +def get_batch_samples(samples: int, auto_batch_size: int = 512) -> list[int]: + batch_samples = [auto_batch_size] * int(np.floor(samples/auto_batch_size)) + + if samples % auto_batch_size > 0: + batch_samples.append(samples % auto_batch_size) + + if len(batch_samples) == 0: + batch_samples.append(samples) + + assert sum(batch_samples) == samples + return batch_samples + +# %% ../../src/inference/sampling.ipynb 5 +def batched_sampling(pipeline: Pipeline, + cond_kwargs: dict[str, torch.Tensor], + samples: int, + system_size: int, + num_of_qubits: int, + max_gates: int, + g: float = 1.0, + init_latents: Optional[torch.Tensor] = None, + no_bar: bool = True, + unique: bool = False, + auto_batch_size: int = 512, + enable_params: bool = True, + reduce_spatial: bool = True, + return_predicted_x0: bool = False): + + """ e.g. 
cond_kwargs.keys = {"c", "micro_cond", "negative_c", "U"} """ + + assert "c" in cond_kwargs + + c_in = cond_kwargs["c"].shape[0] + if c_in == 1: + # Same conditions for all samples + for cond in cond_kwargs.values(): + assert cond.shape[0] == 1 + + cond_kwargs = {kw : val.repeat(auto_batch_size, *[1]*(val.dim()-1)) + for kw, val in cond_kwargs.items()} + + else: + # Different conditions for all samples + for cond in cond_kwargs.values(): + assert cond.shape[0] == samples + + cond_kwargs = {kw:val.to(pipeline.device) + for kw, val in cond_kwargs.items()} + + #---------------------------------------- + if exists(init_latents): + assert init_latents.shape[0] == samples + init_latents = init_latents.to(pipeline.device) + + #---------------------------------------- + + # Sample and post process to tensor encodings + batch_samples = get_batch_samples(samples=samples, auto_batch_size=auto_batch_size) + + #---------------------------------------- + + off = 0 + out_tensor_list = [] + predicted_x0_list = [] + + for batch_sample in batch_samples: + #------------ + if c_in == 1: + # Same conditions for all samples + _cond_kwargs = {kw:val[:batch_sample] + for kw, val in cond_kwargs.items()} + else: + # Different conditions for all samples + _cond_kwargs = {kw:val[off:off+batch_sample] + for kw, val in cond_kwargs.items()} + + #------------ + if exists(init_latents): + latents = init_latents[off:off+batch_sample] + + else: + if pipeline.embedder.channel_last: + latents = torch.randn((batch_sample, system_size, max_gates, pipeline.model.params_config.clr_dim)) + else: + latents = torch.randn((batch_sample, pipeline.model.params_config.clr_dim, system_size, max_gates)) + + off += batch_sample + + #------------ + out_tensor = pipeline.denoising(latents=latents, + g=g, + no_bar=no_bar, + # enable_guidance=True, + return_predicted_x0=return_predicted_x0, + **_cond_kwargs) + + if return_predicted_x0: + out_tensor, predicted_x0 = out_tensor + + out_tensor_list.append(out_tensor) + + 
if return_predicted_x0: + # predicted_x0 ... [timesteps, *out_tensor.shape] + predicted_x0_list.append(predicted_x0) + + #---------------------------------------- + + out_tensor_raw = torch.cat(out_tensor_list).to(pipeline.device) + + if return_predicted_x0: + predicted_x0_raw = torch.cat(predicted_x0_list, dim=1).to(pipeline.device) + + if enable_params: out_tensor, params = pipeline.embedder.invert(out_tensor_raw, reduce_spatial=reduce_spatial) + else: out_tensor = pipeline.embedder.invert(out_tensor_raw) + + #---------------------------------------- + + out_tensor = out_tensor[:, :num_of_qubits] + + if unique: + if enable_params: + raise NotImplementedError("We have unique and enable_params enabled, how should we handle that?") + out_tensor = torch.unique(out_tensor, dim=0) + + if not no_bar: print(f"[INFO]: (generate_comp_tensors) Generated {'unique_cnt ' if unique else ''}{out_tensor.shape[0]} tensors") + + if enable_params: + if return_predicted_x0: + return out_tensor, params, predicted_x0_raw + return out_tensor, params + + elif return_predicted_x0: + return out_tensor, predicted_x0_raw + + return out_tensor + +# %% ../../src/inference/sampling.ipynb 6 +def prepare_prompts(pipeline: Pipeline, + prompt: str | Sequence[str], + negative_prompt: Optional[str | Sequence[str]] = None): + + # Prepare conditions + c = pipeline.text_encoder.tokenize_and_push_to_device(prompt) + + if exists(negative_prompt): + negative_c = pipeline.text_encoder.tokenize_and_push_to_device(negative_prompt) + assert negative_c.shape[0] == 1 + else: + negative_c = None + + return c, negative_c + +# %% ../../src/inference/sampling.ipynb 8 +def generate_tensors(pipeline: Pipeline, + prompt: str | Sequence[str], + samples: int, + system_size: int, + num_of_qubits: int, + max_gates: int, + g: float = 1.0, + init_latents: Optional[torch.Tensor] = None, + no_bar: bool = True, + unique: bool = False, + auto_batch_size: int = 512, + enable_params: bool = False, + reduce_spatial: bool = True, + 
return_predicted_x0: bool = False, + negative_prompt: Optional[str | Sequence[str]] = None, + micro_cond: Optional[torch.Tensor] = None) -> torch.Tensor: + + if exists(micro_cond): + raise NotImplementedError() + + # Prepare conditions + c, negative_c = prepare_prompts(pipeline, prompt, negative_prompt) + + cond_kwargs = {"c":c} + if exists(negative_c): cond_kwargs["negative_c"] = negative_c + if exists(micro_cond): cond_kwargs["micro_cond"] = micro_cond + + # Perform sampling + out = batched_sampling(pipeline=pipeline, + cond_kwargs=cond_kwargs, + samples=samples, + system_size=system_size, + num_of_qubits=num_of_qubits, + max_gates=max_gates, + g=g, + init_latents=init_latents, + no_bar=no_bar, + unique=unique, + auto_batch_size=auto_batch_size, + enable_params=enable_params, + reduce_spatial=reduce_spatial, + return_predicted_x0=return_predicted_x0) + return out + +# %% ../../src/inference/sampling.ipynb 9 +def generate_compilation_tensors(pipeline: Pipeline, + prompt: str | Sequence[str], + U: torch.Tensor, + samples: int, + system_size: int, + num_of_qubits: int, + max_gates: int, + g: float = 1.0, + tensor_prod_pad: bool = True, + init_latents: Optional[torch.Tensor] = None, + no_bar: bool = True, + unique: bool = False, + auto_batch_size: int = 512, + enable_params: bool = True, + reduce_spatial: bool = True, + return_predicted_x0: bool = False, + negative_prompt: Optional[str | Sequence[str]] = None, + negative_u: Optional[torch.Tensor] = None, + micro_cond: Optional[torch.Tensor] = None) -> torch.Tensor: + """ + Samples tensor encodings from the DM for the given sample parameters. + + What kind of unitary padding we have depends on what we used for model training, so it depends on the concrete model weights. 
+ """ + + if torch.is_complex(U): + U = torch.stack([U.real, U.imag], dim=-3) + + if exists(micro_cond): + raise NotImplementedError() + + # Prepare conditions + c, negative_c = prepare_prompts(pipeline, prompt, negative_prompt) + + cond_kwargs = {"c":c} + if exists(negative_c): cond_kwargs["negative_c"] = negative_c + if exists(micro_cond): cond_kwargs["micro_cond"] = micro_cond + + def tensor_pad(U): + # Prepare unitary condition + assert U.dim() in [3, 4] + if U.dim() == 3: + # [2, N, N] to [1, 2, N, N] + U = U.unsqueeze(0) + + if system_size > num_of_qubits: + N = 2**system_size + + if tensor_prod_pad: + # Pad with identity tensor product, assume Big Endian + + U_pad = torch.zeros((U.shape[0], 2, N, N), device=U.device, dtype=U.dtype) + + U_side = U.shape[-1] + for jj in range(N//U_side): + _slice = slice(U_side * jj, U_side * (jj+1)) + U_pad[..., _slice, _slice] = U + + U = U_pad + + else: + # zero pad + pad = (0, N-U.shape[-1], 0, N-U.shape[-2]) + U = F.pad(U, pad, "constant", 0) + return U + + cond_kwargs["U"] = tensor_pad(U) + if exists(negative_u): + cond_kwargs["negative_u"] = tensor_pad(negative_u) + + # Perform sampling + out = batched_sampling(pipeline=pipeline, + cond_kwargs=cond_kwargs, + samples=samples, + system_size=system_size, + num_of_qubits=num_of_qubits, + max_gates=max_gates, + g=g, + init_latents=init_latents, + no_bar=no_bar, + unique=unique, + auto_batch_size=auto_batch_size, + enable_params=enable_params, + reduce_spatial=reduce_spatial, + return_predicted_x0=return_predicted_x0) + return out + +# %% ../../src/inference/sampling.ipynb 11 +def decode_tensors_to_backend(simulator: Simulator, + tokenizer: BaseTokenizer, + tensors: torch.Tensor, + params: Optional[torch.Tensor] = None, + silent: bool = True, + n_jobs: int = 1, + filter_errs: bool = True) -> tuple[Sequence[any], int]: + tensors = tensors.cpu() + + if exists(params): + params = params.cpu() + iter_pack = zip(tensors, params) + _decode = lambda x, p: tokenizer.decode(x, p) + + 
else: + iter_pack = zip(tensors, ) + _decode = lambda x: tokenizer.decode(x) + + def _f(iter_vars): + try: + instructions = _decode(*iter_vars) + backend_obj = simulator.backend.genqc_to_backend(instructions, place_barriers=False) + return backend_obj + except Exception as err: + if silent: return None + raise err + + pot_qcs = run_parallel_jobs(_f, iter_pack, n_jobs) + + if filter_errs: + backend_obj_list = [pot_qc for pot_qc in pot_qcs if exists(pot_qc)] + err_cnt = sum(1 for pot_qc in pot_qcs if not_exists(pot_qc)) + assert len(backend_obj_list) + err_cnt == len(pot_qcs) + else: + backend_obj_list = pot_qcs + err_cnt = None + + return backend_obj_list, err_cnt diff --git a/genQC/models/clip/__init__.py b/genQC/models/clip/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/genQC/models/clip/frozen_open_clip.py b/genQC/models/clip/frozen_open_clip.py new file mode 100644 index 0000000..941d78a --- /dev/null +++ b/genQC/models/clip/frozen_open_clip.py @@ -0,0 +1,239 @@ +"""Interface to the [OpenCLIP](https://github.com/mlfoundations/open_clip) library.""" + +# AUTOGENERATED! DO NOT EDIT! File to edit: ../../../src/models/clip/frozen_open_clip.ipynb. 
+ +# %% auto 0 +__all__ = ['FrozenOpenCLIPEmbedderConfig', 'FrozenOpenCLIPEmbedder', 'CachedFrozenOpenCLIPEmbedderConfig', + 'CachedFrozenOpenCLIPEmbedder'] + +# %% ../../../src/models/clip/frozen_open_clip.ipynb 2 +from ...imports import * +from ..config_model import ConfigModel +from ...utils.async_fn import run_parallel_jobs +from ...utils.misc_utils import infer_torch_device +import open_clip + +# %% ../../../src/models/clip/frozen_open_clip.ipynb 5 +@dataclass +class FrozenOpenCLIPEmbedderConfig: + arch: str + version: str + #device: str + max_length: int + freeze: bool + layer: str + +# %% ../../../src/models/clip/frozen_open_clip.ipynb 6 +class FrozenOpenCLIPEmbedder(ConfigModel): + """Loads and freezes the [OpenCLIP](https://github.com/mlfoundations/open_clip) transformer encoder for text prompts.""" + + LAYERS = [ + # "pooled", + "last", + "penultimate" + ] + + njobs = 1 + + def __init__(self, arch="ViT-B-32", version="datacomp_xl_s13b_b90k", max_length=77, freeze=True, layer="penultimate", **kwargs): + super().__init__(**kwargs) + + assert layer in self.LAYERS + self.params_config = FrozenOpenCLIPEmbedderConfig(arch, version, max_length, freeze, layer) + + model, _, _ = open_clip.create_model_and_transforms(arch, device="cpu", pretrained=version) + self.device = "cpu" + + del model.visual + self.model = model + # self.to(device) + + self.tokenizer = open_clip.get_tokenizer(arch) + assert torch.numel(self.tokenizer("test")) + + assert max_length <= 77 # max set by the clip + self.max_length = max_length + + if freeze: self.freeze() + + self.layer = layer + if self.layer == "last": self.layer_idx = 0 + elif self.layer == "penultimate": self.layer_idx = 1 + else: raise NotImplementedError() + + #create empty token, can also be, e.g., A nice picture + self.empty_token = self.tokenize_and_push_to_device("") + + def freeze(self, freeze: bool = True): + super().freeze(freeze=freeze) + + for param in self.model.parameters(): + param.requires_grad = not freeze + + 
def to(self, device): + self.model = self.model.to(device) + self.device = device + return self + + @torch.inference_mode() + def tokenize_and_push_to_device(self, text, to_device=True): + if self.njobs > 1: + + tokens_list = run_parallel_jobs(self.tokenizer, np.array_split(text, self.njobs), self.njobs) + tokens = torch.cat(tokens_list, dim=0) + + else: + # tokens = open_clip.tokenize(text) + tokens = self.tokenizer(text) + + if to_device: + tokens = tokens.to(self.device) + return tokens + + @torch.inference_mode() + def forward(self, c, **kwargs): + return self.encode_with_transformer(c) + + @torch.inference_mode() + def encode_with_transformer(self, text): + cast_dtype = self.model.transformer.get_cast_dtype() + + x = self.model.token_embedding(text).to(cast_dtype) # [batch_size, n_ctx, d_model] + x = x + self.model.positional_embedding[None, :x.shape[1]].to(cast_dtype) + + if not self.model.transformer.batch_first: + x = x.permute(1, 0, 2) # NLD -> LND + + x = self.text_transformer_forward(x, attn_mask=self.model.attn_mask) + + if not self.model.transformer.batch_first: + x = x.permute(1, 0, 2) # LND -> NLD + + x = self.model.ln_final(x) # [batch_size, n_ctx, transformer.width] + + return x + + @torch.inference_mode() + def text_transformer_forward(self, x: torch.Tensor, attn_mask=None): + for i, r in enumerate(self.model.transformer.resblocks): + if i == len(self.model.transformer.resblocks) - self.layer_idx: + break + #if self.model.transformer.grad_checkpointing and not torch.jit.is_scripting(): + #x = checkpoint(r, x, attn_mask) + #else: + + x = r(x, attn_mask=attn_mask) + + return x + + #-------------------------------------------------------------- + + def get_config(self, save_path=None, without_metadata=False): + return super().get_config(save_path=None, without_metadata=without_metadata) + + def store_model(self, config_path: str, save_path: str=None, without_metadata=False): + super().store_model(config_path, save_path=None, 
without_metadata=without_metadata) + + @staticmethod + def from_config(config, device: torch.device, save_path: str=None): + config["save_path"] = None + return ConfigModel.from_config(config, device, save_path=None) + +# %% ../../../src/models/clip/frozen_open_clip.ipynb 17 +@dataclass +class CachedFrozenOpenCLIPEmbedderConfig(FrozenOpenCLIPEmbedderConfig): + enable_cache_token_limit: bool + +# %% ../../../src/models/clip/frozen_open_clip.ipynb 18 +class CachedFrozenOpenCLIPEmbedder(FrozenOpenCLIPEmbedder): + """Adds caching support to `FrozenOpenCLIPEmbedder`.""" + + def __init__(self, arch="ViT-B-32", version="datacomp_xl_s13b_b90k", max_length=77, freeze=True, layer="penultimate", enable_cache_token_limit: bool = True, **kwargs): + super().__init__(arch=arch, version=version, max_length=max_length, freeze=freeze, layer=layer, **kwargs) + self.enable_cache_token_limit = enable_cache_token_limit + + self.params_config = CachedFrozenOpenCLIPEmbedderConfig(arch, version, max_length, freeze, layer, enable_cache_token_limit) + + def get_token_count(self, tokens, padding_token=0): + # tokens .. [b, seq] + collabsed_tokens = (tokens != padding_token).to(torch.int32) + return torch.count_nonzero(collabsed_tokens, dim=-1) # [b] + + @torch.inference_mode() + def generate_cache(self, str_list: list=None, tokens=None, cached_empty_token_index=None, b_size=2048, y_on_cpu=False): + self.cached_empty_token_index = cached_empty_token_index + if exists(str_list): self.cached_tokens = self.tokenize_and_push_to_device(str_list) + elif exists(tokens): self.cached_tokens = tokens + else: raise RuntimeError("please provide str_list or tokens") + + # note: we need to split the tokens in batches for forward pass, n gets large + # cached_tokens [n, 77] ... int + # cached_embeddings [n, 77, 512] ... 
float + + if self.enable_cache_token_limit: + self.max_length = self.get_token_count(self.cached_tokens).max().item() + self.params_config.max_length = self.max_length + self.params_config.enable_cache_token_limit = self.enable_cache_token_limit + print(f"[INFO]: - `generate_cache` infered a TOKEN limit of {self.max_length}") + + #self.cached_tokens = self.cached_tokens[:, :self.max_length] + + n = self.cached_tokens.shape[0] + + n_chunks = int(np.ceil(n / b_size)) + + in_device = self.cached_tokens.device + + last_ind = 0 + for i, cached_tokens in tqdm(enumerate(self.cached_tokens.chunk(n_chunks)), total=n_chunks): + + x = super().forward(cached_tokens.to(self.device)) # ... [b, seq, ch] + + if i == 0: + mem = n * x.shape[1] * x.shape[2] * x.element_size() * 1e-9 + print(f"[INFO]: caching trying to allocate memory {(n, x.shape[1], x.shape[2])} on {'cpu' if y_on_cpu else self.device}, approx. {mem:.3f} GB") + self.cached_embeddings = torch.zeros((n, x.shape[1], x.shape[2]), device="cpu" if y_on_cpu else self.device, dtype=x.dtype) # alloc huge memory !! 
+ + self.cached_embeddings[last_ind:last_ind+x.shape[0]] = x.to(self.cached_embeddings.device) + + last_ind += x.shape[0] + + if self.enable_cache_token_limit: + self.cached_embeddings = self.cached_embeddings[:, :self.max_length] + + if not y_on_cpu: + self.cached_embeddings = self.cached_embeddings.to(in_device) + + @torch.inference_mode() + def look_up_cos_sim_cached_index(self, str_list: list=None, tokens=None): + if exists(str_list): tokens = self.tokenize_and_push_to_device(str_list) + else: raise RuntimeError("please provide str_list or tokens") + + emb = super().forward(tokens.to(self.device)) + c_emb = self.cached_embeddings + #----------------- + # do cos sim search + + emb = emb.flatten(start_dim=1) # [m, seq*ch] + c_emb = c_emb.flatten(start_dim=1) # [n, seq*ch] + + norm_emb = emb / torch.linalg.vector_norm( emb, dim=1, keepdim=True) + norm_c_emb = c_emb / torch.linalg.vector_norm(c_emb, dim=1, keepdim=True) + + sim = torch.matmul(norm_c_emb, norm_emb.T) # matmul out is [n, m] + max_idx = torch.argmax(sim, dim=0) # reduce the c_emb dim, [m] + + return max_idx + + # @torch.inference_mode() + def forward(self, c, **kwargs): + in_device = c.device + + if c.dim() == 1: c_emb = self.cached_embeddings[c.to(self.cached_embeddings.device)].to(in_device) #list of ints + elif c.dim() == 2: c_emb = super().forward(c.to(self.device)) #tokenized input + else: raise NotImplementedError("") + + if self.enable_cache_token_limit: + c_emb = c_emb[:, :self.max_length] + + return c_emb diff --git a/genQC/models/clip/unitary_clip.py b/genQC/models/clip/unitary_clip.py new file mode 100644 index 0000000..fca32cd --- /dev/null +++ b/genQC/models/clip/unitary_clip.py @@ -0,0 +1,686 @@ +"""Contrastive pre-training of an unitary encoder""" + +# AUTOGENERATED! DO NOT EDIT! File to edit: ../../../src/models/clip/unitary_clip.ipynb. 
+ +# %% auto 0 +__all__ = ['RotaryMultiheadAttention', 'FeedForwardBlock', 'UnitaryEncoderAttnBlock', 'UnitaryTextEncoderConfig', + 'UnitaryTextEncoder', 'SelfAttnBlock', 'PackingTransformer', 'CoreTransformer', 'CircuitEncoderConfig', + 'CircuitEncoder', 'UnitaryCLIPConfig', 'UnitaryCLIP'] + +# %% ../../../src/models/clip/unitary_clip.ipynb 2 +from ...imports import * +from ..config_model import ConfigModel +import genQC.models.transformers.attention as attn +import genQC.models.layers as layers +from ..position_encoding import LearnedPositionalEmbedding, RotaryPositionalEmbedding, RotaryPositionalEmbedding2D + +# %% ../../../src/models/clip/unitary_clip.ipynb 4 +class RotaryMultiheadAttention(nn.Module): + """ + MultiheadAttention described in the paper: Attention Is All You Need (https://arxiv.org/abs/1706.03762). + We add a rotary position encoding (RoPE). + + The attention core is `F.scaled_dot_attention` from pytorch. + Could be switched to `https://github.com/Dao-AILab/flash-attention` or `xFormers`. 
+ """ + + def __init__(self, + in_dim: int, + embed_dim: int, + num_heads: int, + bias: bool = True, + p_rope: float = 1.0, + max_seq_len: int = 4096, + base_rope: float = 10_000, + enable_qk_norm: bool = False) -> None: + + super().__init__() + + self.num_heads = num_heads + self.bias = bias + self.head_dim = embed_dim // num_heads + + self.q_proj = nn.Linear(in_dim, embed_dim, bias=bias) + self.k_proj = nn.Linear(in_dim, embed_dim, bias=bias) + self.v_proj = nn.Linear(in_dim, embed_dim, bias=bias) + + self.out_proj = nn.Linear(embed_dim, embed_dim, bias=bias) + + self.enable_qk_norm = enable_qk_norm + if self.enable_qk_norm: + self.q_norm = nn.RMSNorm(self.head_dim) + self.k_norm = nn.RMSNorm(self.head_dim) + + self.rope = RotaryPositionalEmbedding(head_dim=self.head_dim, p=p_rope, max_seq_len=max_seq_len, base=base_rope) + + self._init_weights() + + def _init_weights(self) -> None: + nn.init.xavier_normal_(self.q_proj.weight) + nn.init.xavier_normal_(self.k_proj.weight) + nn.init.xavier_normal_(self.v_proj.weight) + nn.init.xavier_normal_(self.out_proj.weight) + + if self.bias: + nn.init.zeros_(self.q_proj.bias) + nn.init.zeros_(self.k_proj.bias) + nn.init.zeros_(self.v_proj.bias) + nn.init.zeros_(self.out_proj.bias) + + + def forward(self, query: torch.Tensor, key: torch.Tensor, value: torch.Tensor, pos_idx: Optional[torch.Tensor] = None) -> torch.Tensor: + """ + Assumes batch first. When `pos_idx` is provided we use RoPE, else NOT! + + Shapes: + query ... [b, n1, c] + key/value ... 
[b, n2, c] + """ + + assert key.shape == value.shape + + b, n1, _ = query.shape + _, n2, _ = key.shape + + q = self.q_proj(query) + k = self.k_proj(key) + v = self.v_proj(value) + + q = q.view(b, n1, self.num_heads, self.head_dim) + k = k.view(b, n2, self.num_heads, self.head_dim) + v = v.view(b, n2, self.num_heads, self.head_dim) + + if self.enable_qk_norm: + q = self.q_norm(q) + k = self.k_norm(k) + + if exists(pos_idx): + q = self.rope(q, pos_idx=pos_idx) + k = self.rope(k, pos_idx=pos_idx) + + # scaled_dot_product_attention takes [b, num_heads, seq, head_dim] + q = q.permute((0, 2, 1, 3)) + k = k.permute((0, 2, 1, 3)) + v = v.permute((0, 2, 1, 3)) + + # see https://pytorch.org/docs/stable/generated/torch.nn.functional.scaled_dot_product_attention.html + attn = F.scaled_dot_product_attention(query=q, + key=k, + value=v, + attn_mask=None, + dropout_p=0.0, + is_causal=False, + scale=None, + #enable_gqa=False + ) + + # back to [b, seq, num_heads, head_dim] + attn = attn.permute((0, 2, 1, 3)) + + # pack heads together + attn = attn.reshape(b, n1, self.num_heads * self.head_dim) + attn = self.out_proj(attn) + return attn + +# %% ../../../src/models/clip/unitary_clip.ipynb 5 +class FeedForwardBlock(nn.Module): + """ + A small dense feed-forward network as used in `transformers`. Assumes channel last. + Inspired by https://arxiv.org/pdf/2401.11605 and added + from https://arxiv.org/pdf/2002.05202 a modification to SiGLU structure. 
+ """ + + def __init__(self, in_dim: int, hidden_dim: int, dropout: float = 0.0) -> None: + super().__init__() + self.hidden_dim = hidden_dim + self.proj_in = nn.Linear(in_dim, 2*hidden_dim) # factor two for GLU part split + self.proj_out = nn.Linear(hidden_dim, in_dim) + self.act = nn.SiLU() + self.drop = nn.Dropout(dropout) + + self._init_weights() + + def _init_weights(self) -> None: + nn.init.zeros_(self.proj_out.bias) + # nn.init.xavier_normal_(self.proj_out.weight) + + def siglu(self, x: torch.Tensor) -> torch.Tensor: + x = self.proj_in(x) + return x[..., :self.hidden_dim] * self.act(x[..., self.hidden_dim:]) + + #@torch.compile + def forward(self, x: torch.Tensor) -> torch.Tensor: + x = self.siglu(x) + x = self.drop(x) + x = self.proj_out(x) + return x + +# %% ../../../src/models/clip/unitary_clip.ipynb 7 +class UnitaryEncoderAttnBlock(nn.Module): + """A self-attention block with 2d-RoPE.""" + + def __init__(self, + ch: int, + y_emb_size: int, + num_heads: int, + dropout: float = 0.0, + p_rope: float = 1.0, + base_rope: float = 10_000) -> None: + super().__init__() + + self.self_att = RotaryMultiheadAttention(in_dim=ch+y_emb_size, embed_dim=ch, num_heads=num_heads, p_rope=p_rope, base_rope=base_rope) + self.self_att.rope = RotaryPositionalEmbedding2D(head_dim=self.self_att.head_dim, p=p_rope, base=base_rope) + + self.ff = FeedForwardBlock(in_dim=ch, hidden_dim=2*ch) + self.norm_self = nn.RMSNorm(ch) + self.norm_ff = nn.RMSNorm(ch) + self.drop = nn.Dropout(dropout) + + self._init_weights() + + def _init_weights(self) -> None: + + # note a bonus of res-pos-norm is that we can init as identity! + nn.init.zeros_(self.norm_self.weight) + nn.init.zeros_(self.norm_ff.weight) + + def forward(self, x: torch.Tensor, y_emb: torch.Tensor, pos_idx: torch.Tensor) -> torch.Tensor: + """ + Assumes batch first. + + Shapes: + x ... [b, n, ch1] + y_emb ... [b, n, ch2] + pos_idx ... 
[b, n, 2] or [n, 2] + """ + + # Self-attention part + self_out = torch.cat([x, y_emb], dim=-1) + self_out = self.self_att(query=self_out, key=self_out, value=self_out, pos_idx=pos_idx) + self_out = self.norm_self(self_out) + self_out = self.drop(self_out) + x + + # Feed-Forward part + feed_out = self.ff(self_out) + feed_out = self.norm_ff(feed_out) + feed_out = self.drop(feed_out) + self_out + return feed_out + +# %% ../../../src/models/clip/unitary_clip.ipynb 8 +@dataclass +class UnitaryTextEncoderConfig: + text_embed_ch: int + text_encoding_ch: int + text_attn_num_heads: int + text_attn_depth: int + + unitary_encoding_ch: int + unitary_downscale_factor: int + + main_num_heads: int + main_depth: int + + use_rope: bool + p_rope: float + base_rope: float + dropout: float + +# %% ../../../src/models/clip/unitary_clip.ipynb 9 +class UnitaryTextEncoder(ConfigModel): + def __init__(self, + text_embed_ch: int, + text_encoding_ch: int, + text_attn_num_heads: int, + text_attn_depth: int, + unitary_encoding_ch: int, + unitary_downscale_factor: int, + main_num_heads: int, + main_depth: int, + use_rope: bool, + p_rope: float, + base_rope: float, + dropout: float) -> None: + """ + text_embed_ch ... number of channels of the input text encodings `y_emb` + + The text channels `text_encoding_ch` are concatenated with the unitary channels `unitary_encoding_ch`. 
+ """ + super().__init__() + + self.params_config = UnitaryTextEncoderConfig(text_embed_ch=text_embed_ch, + text_encoding_ch=text_encoding_ch, + text_attn_num_heads=text_attn_num_heads, + text_attn_depth=text_attn_depth, + unitary_encoding_ch=unitary_encoding_ch, + unitary_downscale_factor=unitary_downscale_factor, + main_num_heads=main_num_heads, + main_depth=main_depth, + use_rope=use_rope, + p_rope=p_rope, + base_rope=base_rope, + dropout=dropout) + + # Text pre-process + self.text_proj = nn.Linear(text_embed_ch, text_encoding_ch) + self.text_norm = nn.RMSNorm(text_encoding_ch) + + self.text_attn_blocks = nn.ModuleList([attn.BasisSelfAttnBlock(ch=text_encoding_ch, + num_heads=text_attn_num_heads, + dropout=dropout, + batch_first=True) + for d in range(text_attn_depth) + ]) + + # Unitary pre-process + self.unitary_proj = nn.Conv2d(2, unitary_encoding_ch, kernel_size=1, stride=1, padding="same") + self.unitary_downscale = nn.PixelUnshuffle(unitary_downscale_factor) + self.unitary_downscale_factor = unitary_downscale_factor + + self.use_rope = use_rope + if not self.use_rope: + self.unitary_pos_enc = layers.PositionalEncoding2D(d_model=unitary_encoding_ch, freq_factor=1_000) + + # Main transformer + self.encoding_ch = unitary_encoding_ch * (unitary_downscale_factor**2) + + self.transformer_blocks = nn.ModuleList([UnitaryEncoderAttnBlock(ch=self.encoding_ch, + y_emb_size=text_encoding_ch, + num_heads=main_num_heads, + dropout=dropout, + p_rope=p_rope, + base_rope=base_rope) + for d in range(main_depth) + ]) + + self.norm_final = nn.RMSNorm(self.encoding_ch) + + print(f"[INFO]: Creating `UnitaryTextEncoder` with `{unitary_downscale_factor=}` and `encoding_ch={self.encoding_ch}`.") + self._init_weights() + + def _init_weights(self) -> None: + # nn.init.xavier_normal_(self.text_proj.weight) + # nn.init.xavier_normal_(self.unitary_proj.weight) + + nn.init.zeros_(self.text_proj.bias) + nn.init.zeros_(self.unitary_proj.bias) + + def preproc_text(self, y_emb): + y_emb = 
self.text_proj(y_emb) # ... [batch, seq_y, text_encoding_ch] + + for text_attn_block in self.text_attn_blocks: + y_emb = text_attn_block(y_emb) + + return y_emb + + def preproc_unitary(self, U): + u_emb = self.unitary_proj(U) # ... [batch, unitary_encoding_ch, N, N] + if not self.use_rope: + u_emb = self.unitary_pos_enc(u_emb) + u_emb = self.unitary_downscale(u_emb) # ... [batch, unitary_encoding_ch * r^2, N/r, N/r] + + # Reshape and permute from image to sentence shape + b, ch, *_ = u_emb.shape + u_emb = torch.reshape(u_emb, (b, ch, -1)) # to [batch, unitary_encoding_ch * r^2, (N/r)^2] + u_emb = torch.permute(u_emb, (0, 2, 1)) # to [batch, (N/r)^2, unitary_encoding_ch * r^2] + + return u_emb + + def forward(self, y_emb: torch.Tensor, U: torch.Tensor, pool: bool = False, penultimate: bool = False) -> torch.Tensor: + """ + penultimate_output = False ... take all attn layers + penultimate_output = True ... skip the last attn layers + + Shapes: + y_emb ... [b, seq, text_embed_ch] + U ... [b, 2, N, N] + """ + + # Pre-process multimodial inputs + x = self.preproc_unitary(U) # ... [batch, seq_u, unitary_encoding_ch * r^2] + y_emb = self.preproc_text(y_emb) # ... [batch, seq_y, text_encoding_ch] + + y_emb = y_emb.mean(dim=1, keepdim=True) # ... [batch, 1, text_encoding_ch] + y_emb = self.text_norm(y_emb) + y_emb = y_emb.expand(x.shape[0], x.shape[1], -1) # ... [batch, seq_u, text_encoding_ch] + + # Main transformer pass + if self.use_rope: + N = U.shape[-1] // self.unitary_downscale_factor + pos = torch.arange(N).expand(N, -1) + pos_idx = torch.stack([pos.T, pos], dim=-1).reshape(-1, 2) # ... 
[seq_u, 2] + else: + pos_idx = None + + if not penultimate: + for transformer_block in self.transformer_blocks: + x = transformer_block(x, y_emb=y_emb, pos_idx=pos_idx) + + else: + for transformer_block in self.transformer_blocks[:-1]: + x = transformer_block(x, y_emb=y_emb, pos_idx=pos_idx) + + if pool: + x = torch.mean(x, dim=1) # [batch, ch] + + x = self.norm_final(x) + return x + +# %% ../../../src/models/clip/unitary_clip.ipynb 11 +class SelfAttnBlock(nn.Module): + """A self-attention block with RoPE.""" + + def __init__(self, ch: int, num_heads: int, dropout: float = 0.0, p_rope: float = 1.0, base_rope: float = 10_000) -> None: + super().__init__() + + self.self_att = RotaryMultiheadAttention(in_dim=ch, embed_dim=ch, num_heads=num_heads, p_rope=p_rope, base_rope=base_rope) + + self.ff = FeedForwardBlock(in_dim=ch, hidden_dim=2*ch, dropout=dropout) + self.norm_self = nn.RMSNorm(ch) + self.norm_ff = nn.RMSNorm(ch) + self.drop = nn.Dropout(dropout) + + self._init_weights() + + def _init_weights(self) -> None: + + # note a bonus of res-pos-norm is that we can init as identity! + nn.init.zeros_(self.norm_self.weight) + nn.init.zeros_(self.norm_ff.weight) + + def forward(self, x: torch.Tensor, pos_idx: torch.Tensor) -> torch.Tensor: + """ + Assumes batch first. + + Shapes: + x ... [b, n, ch] + pos_idx ... [b, n] + """ + + # Self-attention part + self_out = x + self_out = self.self_att(query=self_out, key=self_out, value=self_out, pos_idx=pos_idx) + self_out = self.norm_self(self_out) + self_out = self.drop(self_out) + x + + # Feed-Forward part + feed_out = self.ff(self_out) + feed_out = self.norm_ff(feed_out) + feed_out = self.drop(feed_out) + self_out + return feed_out + +# %% ../../../src/models/clip/unitary_clip.ipynb 12 +class PackingTransformer(ConfigModel): + """ + The first stage packing/unpacking transformers of the CirDiT model. + Applies a RoPE for time dimension only, not on spatial dimension. 
+ """ + + def __init__(self, + ch: int, + depth: int, + num_heads: int, + dropout: float = 0.0, + p_rope: float = 1.0, + base_rope: float = 10_000) -> None: + super().__init__() + + self.blocks = nn.ModuleList([ + SelfAttnBlock(ch=ch, + num_heads=num_heads, + dropout=dropout, + p_rope=p_rope, + base_rope=base_rope) + for d in range(depth) + ]) + + def forward(self, x: torch.Tensor) -> torch.Tensor: + """ + Shapes: + x ... [b, s, t, ch] + """ + + b, s, t, ch = x.shape + + # create pos_idx such that they only depend on the time position + pos_idx = torch.arange(t, device=x.device, dtype=torch.int32).expand(b, s, -1) + pos_idx = pos_idx.reshape(b, -1) + + # flatten spatial and time into seq + x = x.reshape(b, s*t, ch) + + for block in self.blocks: + x = block(x=x, pos_idx=pos_idx) + + # undo flatten + x = x.reshape(b, s, t, ch) + + return x + +# %% ../../../src/models/clip/unitary_clip.ipynb 13 +class CoreTransformer(nn.Module): + """ + The main transformer of the `CirDiT` model. + Applies a RoPE for time dimension. + """ + + def __init__(self, + ch: int, + depth: int, + num_heads: int, + dropout: float = 0.0, + p_rope: float = 1.0, + base_rope: float = 10_000) -> None: + super().__init__() + + self.blocks = nn.ModuleList([ + SelfAttnBlock(ch=ch, + num_heads=num_heads, + dropout=dropout, + p_rope=p_rope, + base_rope=base_rope) + for d in range(depth) + ]) + + def forward(self, x: torch.Tensor) -> torch.Tensor: + """ + Shapes: + x ... 
[b, t, ch] + """ + + pos_idx = torch.arange(x.shape[1], device=x.device, dtype=torch.int32) + + for block in self.blocks: + x = block(x=x, pos_idx=pos_idx) + + return x + +# %% ../../../src/models/clip/unitary_clip.ipynb 14 +@dataclass +class CircuitEncoderConfig: + embedder_config: dict + + ch_packing: int + ch_core: int + + depth_packing: int + depth_core: int + + num_heads_packing: int + num_heads_core: int + + dropout: float + p_rope: float + base_rope: float + +# %% ../../../src/models/clip/unitary_clip.ipynb 15 +class CircuitEncoder(ConfigModel): + def __init__(self, + embedder_config: Optional[dict], + ch_packing: int, + ch_core: int, + depth_packing: int, + depth_core: int, + num_heads_packing: int, + num_heads_core: int, + dropout: float = 0.0, + p_rope: float = 1.0, + base_rope: float = 10_000, + embedder: Optional[nn.Module] = None) -> None: + super().__init__() + + if exists(embedder): + self.embedder = embedder + embedder_config = self.embedder.get_config(None) + else: + assert exists(embedder_config) + + self.params_config = CircuitEncoderConfig(embedder_config=embedder_config, + ch_packing=ch_packing, + ch_core=ch_core, + depth_packing=depth_packing, + depth_core=depth_core, + num_heads_packing=num_heads_packing, + num_heads_core=num_heads_core, + dropout=dropout, + p_rope=p_rope, + base_rope=base_rope) + + if not_exists(embedder): + self.embedder = ConfigModel.from_config(embedder_config, device=None, silent=True) + + self.packing = PackingTransformer(ch=ch_packing, + depth=depth_packing, + num_heads=num_heads_packing, + dropout=dropout, + p_rope=p_rope, + base_rope=base_rope) + + self.core = CoreTransformer(ch=ch_core, + depth=depth_core, + num_heads=num_heads_core, + dropout=dropout, + p_rope=p_rope, + base_rope=base_rope) + + self.encoding_ch = ch_core + + self.proj_in = nn.Linear(self.embedder.embedding_dim, ch_packing) + self.core_proj = nn.Linear(ch_packing, ch_core) + + self.norm_packing = nn.RMSNorm(ch_packing) + self.norm_core = 
nn.RMSNorm(ch_core) + self.norm_final = nn.RMSNorm(ch_core) + + self.qubit_pos_enc = LearnedPositionalEmbedding(dim=ch_packing, max_seq_len=64) #here max number of qubits + + self._init_weights() + + def _init_weights(self) -> None: + nn.init.orthogonal_(self.core_proj.weight) + nn.init.zeros_(self.core_proj.bias) + + def forward(self, tokens: torch.Tensor, params: torch.Tensor, pool: bool = False) -> torch.Tensor: + # Embed the circuits + x = self.embedder(h=tokens, w=params) + + # Pre-process circuit and add pos-encoding + b, s, t, _ = x.shape + + x = self.proj_in(x) + x = self.qubit_pos_enc(x) + + # Pack spatial into tokens + x = self.norm_packing(x) + x = self.packing(x=x) + + # Downsample, reduce spatial, ... [b, t, ch_core] + x_main = x.mean(dim=1) + x_main = self.core_proj(x_main) + + # Core transformer + x_main = self.norm_core(x_main) + x_main = self.core(x=x_main) + + if pool: + x_main = torch.mean(x_main, dim=1) # [b, ch] + + x_main = self.norm_final(x_main) + return x_main + +# %% ../../../src/models/clip/unitary_clip.ipynb 17 +@dataclass +class UnitaryCLIPConfig: + text_encoder_config: dict + clip_embed_size: int + +# %% ../../../src/models/clip/unitary_clip.ipynb 18 +class UnitaryCLIP(ConfigModel): + + def __init__(self, + text_encoder_config: Optional[dict], + unitary_text_encoder: UnitaryTextEncoder, + circuit_encoder: CircuitEncoder, + clip_embed_size: int, + text_encoder: Optional[nn.Module] = None) -> None: + super().__init__() + + if exists(text_encoder): + self.text_encoder = text_encoder + text_encoder_config = self.text_encoder.get_config(None) + else: + assert exists(text_encoder_config) + + self.params_config = UnitaryCLIPConfig(text_encoder_config=text_encoder_config, + clip_embed_size=clip_embed_size) + + if not_exists(text_encoder): + if "device" in text_encoder_config: + device = text_encoder_config["device"] + else: + device = "cpu" + + self.text_encoder = ConfigModel.from_config(text_encoder_config, device=device, silent=True) + + 
self.unitary_text_encoder = unitary_text_encoder + self.circuit_encoder = circuit_encoder + + self.unitary_text_proj = nn.Linear(self.unitary_text_encoder.encoding_ch, clip_embed_size) + self.circuit_proj = nn.Linear(self.circuit_encoder.encoding_ch , clip_embed_size) + self.temperature = torch.nn.Parameter(torch.zeros(1)) + + self._init_weights() + + def _init_weights(self) -> None: + initrange = 0.1 + self.unitary_text_proj.bias.data.zero_() + self.unitary_text_proj.weight.data.uniform_(-initrange, initrange) + self.circuit_proj.bias.data.zero_() + self.circuit_proj.weight.data.uniform_(-initrange, initrange) + + def forward(self, tokens: torch.Tensor, params: torch.Tensor, y: torch.Tensor, U: torch.Tensor) -> torch.Tensor: + + y_emb = self.text_encoder(y, pool=False) + + ut_enc = self.unitary_text_encoder(y_emb=y_emb, U=U, pool=True) + ut_enc = self.unitary_text_proj(ut_enc) # out [b, embed_size] + ut_enc = F.normalize(ut_enc, dim=-1) + + #------------ + + qc_enc = self.circuit_encoder(tokens=tokens, params=params, pool=True) + qc_enc = self.circuit_proj(qc_enc) # out [b, embed_size] + qc_enc = F.normalize(qc_enc, dim=-1) + + #------------ + + scores = torch.matmul(ut_enc, qc_enc.T) * torch.exp(self.temperature) #[b, b] + + #scores is: I=unitary_text T=circuit + #-------------------------------- + #| I1*T1 I1*T2 I1*T3 ... + #| I2*T1 + #| I3*T1 + # ... + #-------------------------------- + + return scores diff --git a/genQC/models/config_model.py b/genQC/models/config_model.py index 6b39be8..e300091 100644 --- a/genQC/models/config_model.py +++ b/genQC/models/config_model.py @@ -1,27 +1,51 @@ +"""Model base class that handles loading and storing from/to config files.""" + # AUTOGENERATED! DO NOT EDIT! File to edit: ../../src/models/config_model.ipynb. 
# %% auto 0 -__all__ = ['Config_Model'] +__all__ = ['ConfigModel'] -# %% ../../src/models/config_model.ipynb 3 +# %% ../../src/models/config_model.ipynb 2 from ..imports import * -from ..config_loader import * -from ..util import * +from ..utils.config_loader import * +from ..utils.misc_utils import * from datetime import datetime -# %% ../../src/models/config_model.ipynb 5 -class Config_Model(nn.Module): +# %% ../../src/models/config_model.ipynb 4 +class ConfigModel(nn.Module): """A basic `nn.Module` with IO functionality.""" - def __init__(self): super().__init__() - #--------------------- + def __init__(self, save_type=None): + super().__init__() + self.save_type = default(save_type, "safetensors") + def freeze(self, freeze: bool = True): + if freeze: self.eval() + else: self.train() + + for param in self.parameters(): + param.requires_grad = not freeze + + #Todo: add a debose/debug log here + + def unfreeze(self): + self.freeze(False) + + #--------------------- + + def check_save_type(self, save_path): + if exists(self.save_type) and exists(save_path): + if not save_path.endswith(f".{self.save_type}"): + save_path += f".{self.save_type}" + return save_path + def get_config(self, save_path=None, without_metadata=False): if not without_metadata: config = {} config["target"] = class_to_str(type(self)) - config["save_path"] = save_path + config["save_path"] = self.check_save_type(self.save_path) if hasattr(self, "save_path") and not exists(save_path) else self.check_save_type(save_path) config["save_datetime"] = datetime.now().strftime("%m/%d/%Y %H:%M:%S") + config["save_type"] = self.save_type config["params"] = self.params_config else: config = self.params_config @@ -30,41 +54,82 @@ def get_config(self, save_path=None, without_metadata=False): return config def store_model(self, config_path: str=None, save_path: str=None, without_metadata=False): - + config = self.get_config(save_path, without_metadata) if exists(config_path): if without_metadata: 
save_dataclass_yaml(config, config_path) else : save_dict_yaml(config, config_path) - + if exists(save_path): - torch.save(self.state_dict(), save_path) + store_model_state_dict(self.state_dict(), self.check_save_type(save_path)) #--------------------- @staticmethod - def from_config(config, device: torch.device, save_path: str=None): + def from_config(config, device: torch.device, save_path: str=None, verbose=True, silent=False, freeze: Optional[bool] = None): """Use this if we have a loaded config. Maybe within other classes (e.g. pipeline and nested models)""" + + _config = copy.deepcopy(config) - model = instantiate_from_config(config) - model = model.to(device) - print(f"[INFO]: `{class_to_str(type(model))}` instantiated from given config on {device}.") + if exists(device): _config["device"] = device # for loading sub-models + else: device = _config.pop("device", "cpu") + + if exists(freeze): + _freeze = freeze + + else: + if "is_frozen" in _config: + _freeze = _config.pop("is_frozen", None) + if not_exists(_freeze): + raise RuntimeError(f"The `is_frozen` flag in `config` is invalid. Please provide a boolean. `is_frozen` is: {freeze}") + else: + _freeze = True + #print(f"[INFO]: `{class_to_str(type(model))}`. No valid `is_frozen` flag in `config`. 
Model is frozen by default.") + #-------------------------------- + # instantiate model + model = instantiate_from_config(_config) + model = model.to(device) + if not silent: print(f"[INFO]: `{class_to_str(type(model))}` instantiated from given `config` on {device}.") + #-------------------------------- - if not exists(save_path): - if "save_path" in config: - save_path = config["save_path"] + # load pretrained weights + + model.save_type = _config.pop("save_type", None) + + if exists(model.save_type): + if not exists(save_path): + if "save_path" in _config: + save_path = model.check_save_type(_config["save_path"]) + + + if exists(save_path): + state_dict = load_model_state_dict(model.check_save_type(save_path), device) + + m, u = model.load_state_dict(state_dict, strict=False) + + if len(m) + len(u) > 0 and verbose: + print(f"[WARNING]: missing keys: {m}") + print(f"[WARNING]: unexpected keys: {u}") + else: - print("[INFO]: Found no key `save_path` path in config.") - - if exists(save_path): - model.load_state_dict(torch.load(save_path, map_location=torch.device(device).type, weights_only=True), strict=True) + if not silent: print(f"[INFO]: `{class_to_str(type(model))}`. No `save_path` provided. Found no key `save_path` in `config`. No state dict loaded.") + else: + if not silent: print(f"[INFO]: `{class_to_str(type(model))}`. Found no key `save_type` in `config`. No state dict loaded.") + + #-------------------------------- + # freeze + + if exists(_freeze): + model.freeze(_freeze) + if not silent: print(f"[INFO]: `{class_to_str(type(model))}`. Freeze model: {_freeze}") else: - print(f"[INFO]: `{class_to_str(type(model))}`. No save_path` provided. No state dict loaded.") + if not silent: print(f"[INFO]: `{class_to_str(type(model))}`. No valid `is_frozen` flag in `config`. 
Model is frozen by default.") return model @staticmethod def from_config_file(config_path, device: torch.device, save_path: str=None): config = load_config(config_path) - return Config_Model.from_config(config, device, save_path) + return ConfigModel.from_config(config, device, save_path) diff --git a/genQC/models/embedding/__init__.py b/genQC/models/embedding/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/genQC/models/embedding/base_embedder.py b/genQC/models/embedding/base_embedder.py new file mode 100644 index 0000000..8e22ad7 --- /dev/null +++ b/genQC/models/embedding/base_embedder.py @@ -0,0 +1,31 @@ +"""Class for base embedder.""" + +# AUTOGENERATED! DO NOT EDIT! File to edit: ../../../src/models/embedding/base_embedder.ipynb. + +# %% auto 0 +__all__ = ['BaseEmbedder'] + +# %% ../../../src/models/embedding/base_embedder.ipynb 2 +from ...imports import * +from ..config_model import ConfigModel + +# %% ../../../src/models/embedding/base_embedder.ipynb 4 +class BaseEmbedder(ConfigModel, abc.ABC): + def __init__(self) -> None: + super().__init__() + + # Note: While using DDP with huggingface-Accelerate we noticed + # the fixed weights didn't get synced if there is no parameter + # that requires a gradient. So we add a dummy to make sure + # all model instances/nodes have the same embedder! + self.dummy_parameter = torch.tensor(0.0) + self.dummy_parameter = nn.Parameter(self.dummy_parameter) + + def forward(self, *args, **kwargs): + return self.embed(*args, **kwargs) + + @abc.abstractmethod + def embed(self, x): pass + + @abc.abstractmethod + def invert(self, x): pass diff --git a/genQC/models/embedding/rotational_preset_embedder.py b/genQC/models/embedding/rotational_preset_embedder.py new file mode 100644 index 0000000..daeb6ed --- /dev/null +++ b/genQC/models/embedding/rotational_preset_embedder.py @@ -0,0 +1,700 @@ +"""Class for a rotational preset embedder.""" + +# AUTOGENERATED! DO NOT EDIT! 
File to edit: ../../../src/models/embedding/rotational_preset_embedder.ipynb. + +# %% auto 0 +__all__ = ['MultimodialEmbedder', 'MultimodialPresetEmbedderConfig', 'MultimodialPresetEmbedder', + 'RotationalMultimodialPresetEmbedder', 'RotationalMultimodialPresetEmbedderTiny'] + +# %% ../../../src/models/embedding/rotational_preset_embedder.ipynb 2 +from ...imports import * +from ...utils.math import gram_schmidt +from .base_embedder import BaseEmbedder + +# %% ../../../src/models/embedding/rotational_preset_embedder.ipynb 4 +class MultimodialEmbedder(BaseEmbedder): + + def __init__(self, zero_sum_space: bool) -> None: + super().__init__() + + self.zero_sum_space = zero_sum_space + + h_mean, h_std = torch.tensor(0.0), torch.tensor(1.0) + w_mean, w_std = torch.tensor(0.0), torch.tensor(1.0) + + self.register_buffer('h_mean', h_mean) + self.register_buffer('h_std', h_std) + + self.register_buffer('w_mean', w_mean) + self.register_buffer('w_std', w_std) + + def set_scaling(self, h: torch.Tensor, w: torch.Tensor) -> None: + self.h_mean, self.h_std = torch.tensor(0.0), torch.tensor(1.0) + self.w_mean, self.w_std = torch.tensor(0.0), torch.tensor(1.0) + + return #disbled; not needed for new emb initialization + + x = self.embed(h, w) + + if not self.channel_last: + x_h = x[:, :self.clr_dim] + x_w = x[:, self.clr_dim:] + else: + x_h = x[..., :self.clr_dim] + x_w = x[..., self.clr_dim:] + + self.h_mean, self.h_std = x_h.mean(), x_h.std() + self.w_mean, self.w_std = x_w.mean(), x_w.std() + + def scale_emb(self, x_emb: torch.Tensor) -> torch.Tensor: + # x_emb .. 
[b, ch, s, t] + + # mean + if not self.zero_sum_space: + if not self.channel_last: + x_emb[:, :self.clr_dim] -= self.h_mean + x_emb[:, self.clr_dim:] -= self.w_mean + else: + x_emb[..., :self.clr_dim] -= self.h_mean + x_emb[..., self.clr_dim:] -= self.w_mean + + # variance + if not self.channel_last: + x_emb[:, :self.clr_dim] /= self.h_std + x_emb[:, self.clr_dim:] /= self.w_std + else: + x_emb[..., :self.clr_dim] /= self.h_std + x_emb[..., self.clr_dim:] /= self.w_std + + return x_emb + + def invert_scale_emb(self, x_emb: torch.Tensor) -> torch.Tensor: + # x_emb .. [b, ch, s, t] + + # variance + if not self.channel_last: + x_emb[:, :self.clr_dim] *= self.h_std + x_emb[:, self.clr_dim:] *= self.w_std + else: + x_emb[..., :self.clr_dim] *= self.h_std + x_emb[..., self.clr_dim:] *= self.w_std + + # mean + if not self.zero_sum_space: + if not self.channel_last: + x_emb[:, :self.clr_dim] += self.h_mean + x_emb[:, self.clr_dim:] += self.w_mean + else: + x_emb[..., :self.clr_dim] += self.h_mean + x_emb[..., self.clr_dim:] += self.w_mean + + return x_emb + +# %% ../../../src/models/embedding/rotational_preset_embedder.ipynb 6 +@dataclass +class MultimodialPresetEmbedderConfig: + clr_dim: int + num_clrs: int + params_dim: int + num_params_per_clr: int + zero_sum_space: bool + explicit_node_type_embeddings: bool + channel_last: bool + parametrized_tokens: Optional[list[int]] = None + unique_class_values: Optional[list[int]] = None + +# %% ../../../src/models/embedding/rotational_preset_embedder.ipynb 7 +class MultimodialPresetEmbedder(MultimodialEmbedder): + """ + Embedder class for multimodial discrete and continuous data, e.g. parametrized gates/actions. + Embeddings are fixed and not trained. 
+ """ + + def __init__(self, + clr_dim: int, + num_clrs: int, + params_dim: int, + num_params_per_clr: int, + zero_sum_space: bool, + explicit_node_type_embeddings: bool = True, + channel_last: bool = True, + parametrized_tokens: Optional[list[int]] = None, + unique_class_values: Optional[list[int]] = None) -> None: + """ + Note `explicit_node_type_embeddings` means we convert the `+-k` to all postive, but there are often unsused connection types. For instance, `1=H` the minus node is never used. + + To improve this and reduce the `clr_dim`, we can provide `unique_values` which are the only tokens that actually appear. + """ + super().__init__(zero_sum_space=zero_sum_space) + + + if exists(unique_class_values): + assert isinstance(unique_class_values, list) + self.unique_class_values_tensor = torch.tensor(unique_class_values) + + explicit_node_type_embeddings = False + + print(f"[INFO]: provided `unique_class_values` ({unique_class_values}), enforcing `num_clrs=len(unique_class_values)={len(unique_class_values)}`.") + num_clrs = len(unique_class_values) + + self.explicit_node_type_embeddings = explicit_node_type_embeddings + self.channel_last = channel_last + self.parametrized_tokens = parametrized_tokens + self.unique_class_values = unique_class_values + + if (num_params_per_clr*num_clrs) > params_dim and num_params_per_clr > 0: + print(f"[WARNING]: For `num_params_per_clr` larger 0, we need at least a `params_dim` (is {params_dim}) of" + f" `num_params_per_clr*num_clrs` (is {num_params_per_clr*num_clrs})," + f" automatically setting `params_dim` to {num_params_per_clr*num_clrs} to inforce this!") + + params_dim = num_params_per_clr*num_clrs + + if self.zero_sum_space and ((num_params_per_clr*num_clrs) + 1) > params_dim and num_params_per_clr > 0: + print(f"[WARNING]: `params_dim` is set to the minimum `num_params_per_clr*num_clrs`={num_params_per_clr*num_clrs}," + f" but for `{zero_sum_space=}` we need one more dimension, automatically setting it to" + f" 
`num_params_per_clr*num_clrs+1` {num_params_per_clr*num_clrs+1}.") + + params_dim = num_params_per_clr*num_clrs + 1 + + if self.zero_sum_space: + if self.explicit_node_type_embeddings and ((num_clrs*2 - 2) + 1) > clr_dim: + print(f"[WARNING]: `clr_dim` is set to {clr_dim} and `{explicit_node_type_embeddings=}`," + f" but for `{zero_sum_space=}` we need one more dimension than the number of tokens `(num_clrs*2 - 2)` (is {(num_clrs*2 - 2)})," + f" automatically setting it to `clr_dim=(num_clrs*2 - 2) + 1` {(num_clrs*2 - 2) + 1}.") + + # has empty and padd tokens, these only have the plus branch (so -2)! + clr_dim = (num_clrs*2 - 2) + 1 + + elif (num_clrs + 1) > clr_dim: + print(f"[WARNING]: `clr_dim` is set to {clr_dim} and `{explicit_node_type_embeddings=}`," + f" but for `{zero_sum_space=}` we need one more dimension than the number of tokens `num_clrs` (is {num_clrs})," + f" automatically setting it to `clr_dim=num_clrs+1` {num_clrs+1}.") + + clr_dim = num_clrs + 1 + + self.clr_dim = clr_dim + self.num_clrs = num_clrs + self.params_dim = params_dim + self.num_params_per_clr = num_params_per_clr + + self._num_discrete_embeddings = self.num_clrs + self._num_param_embeddings = self.num_params_per_clr * self.num_clrs + self.embedding_dim = self.clr_dim + self.params_dim + + if self.explicit_node_type_embeddings: + # use distinct embeddings for +-k and not just +-v + # has empty and padd tokens, these only have the plus branch (so -2)! 
+ self._num_discrete_embeddings = self.num_clrs*2 - 2 + + self.num_embeddings = self._num_discrete_embeddings + self._num_param_embeddings + self.emb_clr = nn.Embedding(num_embeddings=self.num_embeddings, embedding_dim=self.embedding_dim) + print(f"[INFO]: Created `nn.Embedding` with a total of {self.num_embeddings} vectors in a {self.embedding_dim} dimensional space.") + + self.params_config = MultimodialPresetEmbedderConfig(clr_dim=self.clr_dim, + num_clrs=self.num_clrs, + params_dim=self.params_dim, + num_params_per_clr=self.num_params_per_clr, + zero_sum_space=self.zero_sum_space, + explicit_node_type_embeddings=self.explicit_node_type_embeddings, + channel_last=self.channel_last, + parametrized_tokens=self.parametrized_tokens) + + self._init_weights(zero_sum_space=self.zero_sum_space) + + def _init_weights(self, zero_sum_space) -> None: + self.emb_clr.weight.requires_grad = False + + _dtype = self.emb_clr.weight.dtype + self.emb_clr = self.emb_clr.to(torch.float64) + + # keep spaces ortho with clr + self.emb_clr.weight.data.zero_() + nn.init.orthogonal_(self.emb_clr.weight.data[:self._num_discrete_embeddings, :self.clr_dim]) + nn.init.orthogonal_(self.emb_clr.weight.data[self._num_discrete_embeddings:, self.clr_dim:]) + + if zero_sum_space: + assert self._num_discrete_embeddings < self.clr_dim, f"{self._num_discrete_embeddings} < {self.clr_dim}" + if self._num_param_embeddings > 0: + assert self._num_param_embeddings < self.params_dim, f"{self._num_param_embeddings} < {self.params_dim}" + + # Convert to zero-sum space + self.emb_clr.weight.data[:self._num_discrete_embeddings, :self.clr_dim] -= torch.mean(self.emb_clr.weight.data[:self._num_discrete_embeddings, :self.clr_dim], dim=-1, keepdim=True) + if self._num_param_embeddings > 0: + self.emb_clr.weight.data[self._num_discrete_embeddings:, self.clr_dim:] -= torch.mean(self.emb_clr.weight.data[self._num_discrete_embeddings:, self.clr_dim:], dim=-1, keepdim=True) + + # Orthonormalization that conserves 
zero-sum space + self.emb_clr.weight.data[:self._num_discrete_embeddings, :self.clr_dim] = gram_schmidt(self.emb_clr.weight.data[:self._num_discrete_embeddings, :self.clr_dim]) + if self._num_param_embeddings > 0: + self.emb_clr.weight.data[self._num_discrete_embeddings:, self.clr_dim:] = gram_schmidt(self.emb_clr.weight.data[self._num_discrete_embeddings:, self.clr_dim:]) + + self.emb_clr.weight.data[:self._num_discrete_embeddings, :self.clr_dim] /= torch.std(self.emb_clr.weight.data[:self._num_discrete_embeddings, :self.clr_dim], dim=-1, keepdim=True, correction=0) + if self._num_param_embeddings > 0: + self.emb_clr.weight.data[self._num_discrete_embeddings:, self.clr_dim:] /= torch.std(self.emb_clr.weight.data[self._num_discrete_embeddings:, self.clr_dim:], dim=-1, keepdim=True, correction=0) + + self.emb_clr = self.emb_clr.to(_dtype) + + def print_emb_matrix(self) -> None: + print(self.emb_clr.weight.data) + + #----------------------------------------------- + + def tokens_to_unique_class_values(self, x: torch.Tensor) -> torch.Tensor: + if exists(self.unique_class_values): + self.unique_class_values_tensor = self.unique_class_values_tensor.to(x.device) + return torch.searchsorted(self.unique_class_values_tensor, x) + return x + + def unique_class_values_to_tokens(self, x: torch.Tensor) -> torch.Tensor: + if exists(self.unique_class_values): + self.unique_class_values_tensor = self.unique_class_values_tensor.to(x.device) + return self.unique_class_values_tensor[x] + return x + + #----------------------------------------------- + + def embed_discrete(self, h: torch.Tensor) -> torch.Tensor: + + if self.unique_class_values: + # tokens are already correct + tokens = h + x_emb = self.emb_clr(tokens) + + elif self.explicit_node_type_embeddings: + # e.g. 
num_clrs=4: [-2, -1, zero, 1, 2, padd] to all positive [0, 1, 2 (zero), 3, 4, 5 (padd)] + tokens = h + x_emb = self.emb_clr(tokens + (self.num_clrs-2)) + + else: + sign = torch.sign(h + 0.1) #trick: add 0.1 so that the sign of 0 is +1, else the 0 token would be all 0s. + tokens = torch.abs(h) + + x_emb = self.emb_clr(tokens) + x_emb = x_emb * sign.unsqueeze(-1) # [b, s, t, ch] + + return x_emb, tokens + + + def embed(self, h: torch.Tensor, w: torch.Tensor) -> torch.Tensor: + """ + sample from p(x0|h, w) + h discrete + w cont + """ + + x_emb, tokens = self.embed_discrete(h) + + v_p = self.embed_continuous(w, tokens) + x_emb += v_p + + if not self.channel_last: + # contiguous important for multi-node cluster + x_emb = torch.permute(x_emb, (0, 3, 1, 2)).contiguous() # to [b, ch, s, t] + + return x_emb + + #----------------------------------------------- + + def get_discrete_sim(self, x: torch.Tensor) -> torch.Tensor: + #collaps clr to gate ... use cos sim + + clrs = self.emb_clr.weight.detach()[:self._num_discrete_embeddings] # is [clr_num, clr_dim] + + model_device = clrs.device + x = x.to(model_device) + + # to shape [b*space*time, clr_dim] + x_flat = x.reshape(-1, x.shape[-1]) + + #normalize for cos sim + norm_clr = F.normalize( clrs[:, :self.clr_dim], dim=1) #clrs / torch.linalg.vector_norm( clrs, dim=1, keepdim=True) #torch.linalg.vector_norm( clrs[:, :self.clr_dim], dim=1, keepdim=True) + norm_x_flat = F.normalize(x_flat[:, :self.clr_dim], dim=1) #x_flat / torch.linalg.vector_norm(x_flat, dim=1, keepdim=True) #torch.linalg.vector_norm(x_flat[:, :self.clr_dim], dim=1, keepdim=True) + + #matmul out is [clr_num, b*space*time] = [clr_num, clr_dim] x [b*space*time, clr_dim].T + sim = torch.matmul(norm_clr, norm_x_flat.T) + + return sim + + @torch.inference_mode() + def invert_discrete(self, x: torch.Tensor, return_sim: bool = False, finite_temperature: bool = False) -> torch.Tensor: + #collaps clr to gate ... 
use cos sim + + input_device = x.device + + if not self.channel_last: + x = x.permute(0, 2, 3, 1) + + #sim out is [clr_num, b*space*time] + sim = self.get_discrete_sim(x) + + if self.explicit_node_type_embeddings or self.unique_class_values: + #get highest similarity + if finite_temperature: + _cat = torch.distributions.categorical.Categorical(logits=sim.transpose(-1, -2)) + scores_flat = _cat.sample() + else: + scores_flat = torch.argmax(sim, dim=0) #reduce the clr_num dim + + if self.explicit_node_type_embeddings: + scores_flat = scores_flat - (self.num_clrs-2) + + else: + #get highest abs(similarity) and sign of it + abs_sim = sim.abs() + + if finite_temperature: + _cat = torch.distributions.categorical.Categorical(logits=abs_sim.transpose(-1, -2)) + max_idx = _cat.sample() + else: + max_idx = torch.argmax(abs_sim, dim=0) #reduce the clr_num dim + + sign = torch.sign(sim[max_idx, torch.arange(x_flat.shape[0])]) + scores_flat = max_idx * sign + + # back to [b, space, time] + scores = scores_flat.reshape(x.shape[0], x.shape[1], x.shape[2]).to(torch.int64) + scores = scores.to(input_device) + + if return_sim: + return scores, sim + return scores + + @torch.inference_mode() + def invert(self, x: torch.Tensor, reduce_spatial: bool = True) -> torch.Tensor: + """sample from p(h, w|x0)""" + + pred_tokens = self.invert_discrete(x) + pred_params = self.invert_continuous(x, pred_tokens, reduce_spatial=reduce_spatial) + + pred_tokens = self.unique_class_values_to_tokens(pred_tokens) + + return pred_tokens, pred_params + + #----------------------------------------------- + + def _prepare_params(self, tokens: torch.Tensor, w: torch.Tensor) -> torch.Tensor: + tokens = tokens.abs() + + # w ... [b, nP, s or 1, t] + + if self.parametrized_tokens: + # Force all non parameterized embeddings to all zero or random lambdas ! 
+ pmask = self.get_parametrized_mask(tokens).unsqueeze(1) # [b, 1, s, t] + rnd_w = torch.zeros((w.shape[0], w.shape[1], pmask.shape[2], w.shape[3]), device=w.device) + w_m = torch.where(pmask, w, rnd_w) + + else: + # this does not include padding tokens! + pmask = (tokens > 0).unsqueeze(1) + w_m = torch.where(pmask, w, 0.0) # ... [b, nP, s, t] + + return w_m + + def _reduce_params_spatial(self, tokens: torch.Tensor, params: torch.Tensor) -> torch.Tensor: + tokens = tokens.abs() + + if self.parametrized_tokens: + #check if not param gate + mask = self.get_parametrized_mask(tokens).unsqueeze(1).float() # ... [b, 1, s, t] + else: + #check if not empty token + mask = (tokens > 0).unsqueeze(1).float() # ... [b, 1, s, t] + + # to catch all zero tokens at t, compute how many we have per timestep + red_mask = mask.sum(-2) # ... [b, 1, t] + red_mask = torch.where(red_mask > 0.0, red_mask, 1.0) + + params = (params*mask).sum(-2) / red_mask # ... [b, nP, s, t] to [b, nP, t] average over s, ignore masked positions + return params + + def get_parametrized_mask(self, tokens: torch.Tensor) -> torch.Tensor: + + parametrized_tokens = torch.tensor(self.parametrized_tokens, device=tokens.device) + + if exists(self.unique_class_values): + parametrized_tokens = self.tokens_to_unique_class_values(parametrized_tokens) + + pmask = torch.isin(tokens.abs(), parametrized_tokens) + + return pmask + +# %% ../../../src/models/embedding/rotational_preset_embedder.ipynb 9 +class RotationalMultimodialPresetEmbedder(MultimodialPresetEmbedder): + + def __init__(self, + clr_dim: int, + num_clrs: int, + params_dim: int, + num_params_per_clr: int, + zero_sum_space: bool, + explicit_node_type_embeddings: bool = True, + channel_last: bool = True, + parametrized_tokens: Optional[list[int]] = None, + unique_class_values: Optional[list[int]] = None + ) -> None: + + self.channel_last = channel_last + self.parametrized_tokens = parametrized_tokens + + if (2*num_params_per_clr*num_clrs) > params_dim and 
num_params_per_clr > 0: + print(f"[WARNING]: We need at least a `params_dim` (is {params_dim}) of `2*num_params_per_clr*num_clrs` (is {2*num_params_per_clr*num_clrs})," + f" automatically setting `params_dim` to {2*num_params_per_clr*num_clrs} to inforce this!") + + params_dim = 2*num_params_per_clr*num_clrs + + if zero_sum_space and (2*num_params_per_clr*num_clrs+1) > params_dim and num_params_per_clr > 0: + print(f"[WARNING]: `params_dim` is set to the minimum `2*num_params_per_clr*num_clrs`={2*num_params_per_clr*num_clrs}," + f" but for `{zero_sum_space=}` we need one more dimension, automatically setting it to" + f" `2*num_params_per_clr*num_clrs+1` {2*num_params_per_clr*num_clrs+1}.") + + params_dim = 2*num_params_per_clr*num_clrs + 1 + + super().__init__(clr_dim=clr_dim, + num_clrs=num_clrs, + params_dim=params_dim, + num_params_per_clr=2*num_params_per_clr, # pass factor 2 to create more embeddings for cos-sin encoding + zero_sum_space=zero_sum_space, + explicit_node_type_embeddings=explicit_node_type_embeddings, + channel_last=channel_last, + parametrized_tokens=parametrized_tokens, + unique_class_values=unique_class_values) + + self.num_params_per_clr = num_params_per_clr # remove the factor 2 + self._num_param_embeddings = self.num_params_per_clr * self.num_clrs + self.nP = num_params_per_clr + + self.params_config = MultimodialPresetEmbedderConfig(clr_dim=self.clr_dim, + num_clrs=self.num_clrs, + params_dim=self.params_dim, + num_params_per_clr=self.num_params_per_clr, + zero_sum_space=self.zero_sum_space, + explicit_node_type_embeddings=self.explicit_node_type_embeddings, + channel_last=self.channel_last, + parametrized_tokens=self.parametrized_tokens, + unique_class_values=self.unique_class_values) + + + def embed_continuous(self, w: torch.Tensor, tokens: torch.Tensor) -> torch.Tensor: + # take care that v_empty stays that! not apply params to all bits only to a [s,t] pos + # params ... [b, nP, t] + # w ... 
qc=[b, nP, t] mbqc=[b, nP, s, t] + + tokens = tokens.abs() + + if w.dim() == 3: + w = w.unsqueeze(2) # to [b, nP, 1, t] + + + w_m = self._prepare_params(tokens, w) + + w_m = w_m.unsqueeze(-1) # ... [b, nP, s, t, 1] + w_m = w_m * torch.pi # [-1, 1] to [-pi, pi] + + # first pick starting points of indices + # then add a numerator for all the number of paramters + # then add a numerator for cos-sin vectors + + #Note: .view(-1, 1, 1) introduces some numeric variances in 1e-07 range, but should be faster! + indices = self._num_discrete_embeddings + tokens * self.nP * 2 # ... [b, s, t] + indices = indices.unsqueeze(1) + torch.arange(self.nP, device=indices.device).view(-1, 1, 1) * 2 # ... [b, nP, s, t] + indices = indices.unsqueeze(1) + torch.arange(2, device=indices.device).view(-1, 1, 1, 1) # ... [b, 2, nP, s, t] + p_clrs = self.emb_clr(indices).contiguous() # ... [b, 2, nP, s, t, ch] + + v_p = torch.cos(w_m)*p_clrs[:, 0] + torch.sin(w_m)*p_clrs[:, 1] # ... [b, nP, s, t, ch] + v_p = torch.sum(v_p, dim=1) # ... [b, s, t, ch] + + return v_p + + @torch.inference_mode() + def invert_continuous(self, x: torch.Tensor, tokens: torch.Tensor, reduce_spatial: bool = True) -> torch.Tensor: + """reduce_spatial=True for circuits, False for mbqc""" + + model_device = self.emb_clr.weight.device + input_device = x.device + + if not self.channel_last: + x = x.permute(0, 2, 3, 1) # to [b, s, t, ch] + x = x.unsqueeze(1).unsqueeze(1) # to [b, 1, 1, s, t, ch] + + x = x.to(model_device) + tokens = tokens.to(model_device).abs() + + #----- + # params should [b, nP, max_gates] + # x ... [b, ch, s, t] + # tokens ... [b, , s, t] + + #Note: .view(-1, 1, 1) introduces some numeric variances in 1e-07 range, but should be faster! + indices = self._num_discrete_embeddings + tokens * self.nP * 2 # ... [b, s, t] + indices = indices.unsqueeze(1) + torch.arange(self.nP, device=indices.device).view(-1, 1, 1) * 2 # ... 
[b, nP, s, t] + indices = indices.unsqueeze(1) + torch.arange(2, device=indices.device).view(-1, 1, 1, 1) # ... [b, 2, nP, s, t] + p_clrs = self.emb_clr(indices).contiguous() # ... [b, 2, nP, s, t, ch] + + overlaps = (x * p_clrs).sum(-1) # ... [b, 2, nP, s, t] + params = torch.arctan2(overlaps[:, 1], overlaps[:, 0]) # ... [b, nP, s, t] + params = params / torch.pi # [-pi, pi] to [-1, 1] + + # now reduce spatial s, average over non empty token s + if reduce_spatial: + params = self._reduce_params_spatial(tokens, params) + + return params.to(input_device) + +# %% ../../../src/models/embedding/rotational_preset_embedder.ipynb 12 +class RotationalMultimodialPresetEmbedderTiny(MultimodialPresetEmbedder): + """Mostly the same as `RotationalMultimodialPresetEmbedder`, but the param embedding is not depending on the tokens.""" + + def __init__(self, + clr_dim: int, + num_clrs: int, + params_dim: int, + num_params_per_clr: int, + zero_sum_space: bool, + explicit_node_type_embeddings: bool = True, + channel_last: bool = True, + parametrized_tokens: Optional[list[int]] = None, + unique_class_values: Optional[list[int]] = None + ) -> None: + super(MultimodialPresetEmbedder, self).__init__(zero_sum_space=zero_sum_space) # call grandparent class + + if exists(unique_class_values): + assert isinstance(unique_class_values, list) + self.unique_class_values_tensor = torch.tensor(unique_class_values) + + explicit_node_type_embeddings = False + + print(f"[INFO]: provided `unique_class_values` ({unique_class_values}), enforcing `num_clrs=len(unique_class_values)={len(unique_class_values)}`.") + num_clrs = len(unique_class_values) + + self.zero_sum_space = zero_sum_space + self.explicit_node_type_embeddings = explicit_node_type_embeddings + self.channel_last = channel_last + self.parametrized_tokens = parametrized_tokens + self.unique_class_values = unique_class_values + # assert exists(parametrized_tokens) + + if (2*num_params_per_clr) > params_dim and num_params_per_clr > 0: + 
print(f"[WARNING]: We need at least a `params_dim` (is {params_dim}) of `2*num_params_per_clr` (is {2*num_params_per_clr})," + f" automatically setting `params_dim` to {2*num_params_per_clr} to inforce this!") + + params_dim = 2*num_params_per_clr + + if self.zero_sum_space and (2*num_params_per_clr+1) > params_dim and num_params_per_clr > 0: + print(f"[WARNING]: `params_dim` is set to the minimum `2*num_params_per_clr`={2*num_params_per_clr}," + f" but for `{zero_sum_space=}` we need one more dimension, automatically setting it to" + f" `2*num_params_per_clr+1` {2*num_params_per_clr+1}.") + + params_dim = 2*num_params_per_clr + 1 + + if self.zero_sum_space: + if self.explicit_node_type_embeddings and ((num_clrs*2 - 2) + 1) > clr_dim: + print(f"[WARNING]: `clr_dim` is set to {clr_dim} and `{explicit_node_type_embeddings=}`," + f" but for `{zero_sum_space=}` we need one more dimension than the number of tokens `(num_clrs*2 - 2)` (is {(num_clrs*2 - 2)})," + f" automatically setting it to `clr_dim=(num_clrs*2 - 2) + 1` {(num_clrs*2 - 2) + 1}.") + + # has empty and padd tokens, these only have the plus branch (so -2)! + clr_dim = (num_clrs*2 - 2) + 1 + + elif (num_clrs + 1) > clr_dim: + print(f"[WARNING]: `clr_dim` is set to {clr_dim} and `{explicit_node_type_embeddings=}`," + f" but for `{zero_sum_space=}` we need one more dimension than the number of tokens `num_clrs` (is {num_clrs})," + f" automatically setting it to `clr_dim=num_clrs+1` {num_clrs+1}.") + + clr_dim = num_clrs + 1 + + self.clr_dim = clr_dim + self.num_clrs = num_clrs + self.params_dim = params_dim + self.num_params_per_clr = num_params_per_clr + self.nP = num_params_per_clr + + self._num_discrete_embeddings = self.num_clrs + self._num_param_embeddings = self.num_params_per_clr * 2 + self.embedding_dim = self.clr_dim + self.params_dim + + if self.explicit_node_type_embeddings: + # use distinct embeddings for +-k and not just +-v + # has empty and padd tokens, these only have the plus branch (so -2)! 
+ self._num_discrete_embeddings = self.num_clrs*2 - 2 + + self.num_embeddings = self._num_discrete_embeddings + self._num_param_embeddings + self.emb_clr = nn.Embedding(num_embeddings=self.num_embeddings, embedding_dim=self.embedding_dim) + print(f"[INFO]: Created `nn.Embedding` with a total of {self.num_embeddings} vectors in a {self.embedding_dim} dimensional space.") + + self.params_config = MultimodialPresetEmbedderConfig(clr_dim=self.clr_dim, + num_clrs=self.num_clrs, + params_dim=self.params_dim, + num_params_per_clr=self.num_params_per_clr, + zero_sum_space=self.zero_sum_space, + explicit_node_type_embeddings=self.explicit_node_type_embeddings, + channel_last=self.channel_last, + parametrized_tokens=self.parametrized_tokens, + unique_class_values=self.unique_class_values) + + self._init_weights(zero_sum_space=self.zero_sum_space) + + def embed_continuous(self, w: torch.Tensor, tokens: torch.Tensor) -> torch.Tensor: + # take care that v_empty stays that! not apply params to all bits only to a [s,t] pos + # params ... [b, nP, t] + # w ... qc=[b, nP, t] mbqc=[b, nP, s, t] + + tokens = tokens.abs() + + if w.dim() == 3: + w = w.unsqueeze(2) # to [b, nP, 1, t] + + w_m = self._prepare_params(tokens, w) + + w_m = w_m.unsqueeze(-1) # ... [b, nP, s, t, 1] + w_m = w_m * torch.pi # [-1, 1] to [-pi, pi] + + # first pick starting points of indices + # then add a numerator for all the number of paramters + # then add a numerator for cos-sin vectors + + #Note: .view(-1, 1, 1) introduces some numeric variances in 1e-07 range, but should be faster! + indices = torch.full_like(tokens, self._num_discrete_embeddings) #+ 0 * tokens * self.nP * 2 # ... [b, s, t] + indices = indices.unsqueeze(1) + torch.arange(self.nP, device=indices.device).view(-1, 1, 1) * 2 # ... [b, nP, s, t] + indices = indices.unsqueeze(1) + torch.arange(2, device=indices.device).view(-1, 1, 1, 1) # ... [b, 2, nP, s, t] + p_clrs = self.emb_clr(indices).contiguous() # ... 
[b, 2, nP, s, t, ch] + + # This cos-sin combination conserves mean and variance of the embeddings + v_p = torch.cos(w_m)*p_clrs[:, 0] + torch.sin(w_m)*p_clrs[:, 1] # ... [b, nP, s, t, ch] + v_p = torch.sum(v_p, dim=1) # ... [b, s, t, ch] + + return v_p + + @torch.inference_mode() + def invert_continuous(self, x: torch.Tensor, tokens: torch.Tensor, reduce_spatial: bool = True) -> torch.Tensor: + """reduce_spatial=True for circuits, False for mbqc""" + + model_device = self.emb_clr.weight.device + input_device = x.device + + if not self.channel_last: + x = x.permute(0, 2, 3, 1) # to [b, s, t, ch] + x = x.unsqueeze(1).unsqueeze(1) # to [b, 1, 1, s, t, ch] + + x = x.to(model_device) + tokens = tokens.to(model_device).abs() + + #----- + # params should [b, nP, max_gates] + # x ... [b, ch, s, t] + # tokens ... [b, , s, t] + + #Note: .view(-1, 1, 1) introduces some numeric variances in 1e-07 range, but should be faster! + indices = torch.full_like(tokens, self._num_discrete_embeddings) #+ 0 * tokens * self.nP * 2 # ... [b, s, t] + indices = indices.unsqueeze(1) + torch.arange(self.nP, device=indices.device).view(-1, 1, 1) * 2 # ... [b, nP, s, t] + indices = indices.unsqueeze(1) + torch.arange(2, device=indices.device).view(-1, 1, 1, 1) # ... [b, 2, nP, s, t] + p_clrs = self.emb_clr(indices).contiguous() # ... [b, 2, nP, s, t, ch] + + # Note we dont need to normalize x as this norm cancels in the fraction of arctan2(y/x) + overlaps = (x * p_clrs).sum(-1) # ... [b, 2, nP, s, t] + params = torch.arctan2(overlaps[:, 1], overlaps[:, 0]) # ... 
[b, nP, s, t] + params = params / torch.pi # [-pi, pi] to [-1, 1] + + # now reduce spatial s, average over non empty token s + if reduce_spatial: + params = self._reduce_params_spatial(tokens, params) + + return params.to(input_device) diff --git a/genQC/models/frozen_open_clip.py b/genQC/models/frozen_open_clip.py index 609d75c..81cc6e4 100644 --- a/genQC/models/frozen_open_clip.py +++ b/genQC/models/frozen_open_clip.py @@ -1,25 +1,30 @@ +"""Interface to the [OpenCLIP](https://github.com/mlfoundations/open_clip) library.""" + # AUTOGENERATED! DO NOT EDIT! File to edit: ../../src/models/frozen_open_clip.ipynb. # %% auto 0 -__all__ = ['FrozenOpenCLIPEmbedder_config', 'FrozenOpenCLIPEmbedder', 'CachedFrozenOpenCLIPEmbedder'] +__all__ = ['FrozenOpenCLIPEmbedderConfig', 'FrozenOpenCLIPEmbedder', 'CachedFrozenOpenCLIPEmbedderConfig', + 'CachedFrozenOpenCLIPEmbedder'] # %% ../../src/models/frozen_open_clip.ipynb 2 from ..imports import * -from .config_model import Config_Model +from .config_model import ConfigModel +from ..utils.async_fn import run_parallel_jobs +from ..utils.misc_utils import infer_torch_device import open_clip -# %% ../../src/models/frozen_open_clip.ipynb 4 +# %% ../../src/models/frozen_open_clip.ipynb 5 @dataclass -class FrozenOpenCLIPEmbedder_config: +class FrozenOpenCLIPEmbedderConfig: arch: str version: str - device: str + #device: str max_length: int freeze: bool layer: str -# %% ../../src/models/frozen_open_clip.ipynb 5 -class FrozenOpenCLIPEmbedder(Config_Model): +# %% ../../src/models/frozen_open_clip.ipynb 6 +class FrozenOpenCLIPEmbedder(ConfigModel): """Loads and freezes the [OpenCLIP](https://github.com/mlfoundations/open_clip) transformer encoder for text prompts.""" LAYERS = [ @@ -28,18 +33,23 @@ class FrozenOpenCLIPEmbedder(Config_Model): "penultimate" ] - def __init__(self, arch="ViT-H-14", version="laion2b_s32b_b79k", device="cpu", max_length=77, freeze=True, layer="penultimate"): - super().__init__() + njobs = 1 + + def __init__(self, 
arch="ViT-B-32", version="datacomp_xl_s13b_b90k", max_length=77, freeze=True, layer="penultimate", **kwargs): + super().__init__() + assert layer in self.LAYERS - self.params_config = FrozenOpenCLIPEmbedder_config(arch, version, device, max_length, freeze, layer) + self.params_config = FrozenOpenCLIPEmbedderConfig(arch, version, max_length, freeze, layer) - model, _, _ = open_clip.create_model_and_transforms(arch, device=torch.device(device), pretrained=version) - del model.visual + model, _, _ = open_clip.create_model_and_transforms(arch, device="cpu", pretrained=version) + self.device = "cpu" + del model.visual self.model = model - self.to(device) - + # self.to(device) + self.tokenizer = open_clip.get_tokenizer(arch) + assert torch.numel(self.tokenizer("test")) assert max_length <= 77 # max set by the clip self.max_length = max_length @@ -54,33 +64,37 @@ def __init__(self, arch="ViT-H-14", version="laion2b_s32b_b79k", device="cpu", m #create empty token, can also be, e.g., A nice picture self.empty_token = self.tokenize_and_push_to_device("") - def freeze(self): - self.model = self.model.eval() - - for param in self.parameters(): - param.requires_grad = False - + def freeze(self, freeze: bool = True): + super().freeze(freeze=freeze) + for param in self.model.parameters(): - param.requires_grad = False + param.requires_grad = not freeze def to(self, device): self.model = self.model.to(device) self.device = device return self - @torch.no_grad() + @torch.inference_mode() def tokenize_and_push_to_device(self, text, to_device=True): - # tokens = open_clip.tokenize(text) - tokens = self.tokenizer(text) + if self.njobs > 1: + + tokens_list = run_parallel_jobs(self.tokenizer, np.array_split(text, self.njobs), self.njobs) + tokens = torch.cat(tokens_list, dim=0) + + else: + # tokens = open_clip.tokenize(text) + tokens = self.tokenizer(text) + if to_device: tokens = tokens.to(self.device) return tokens - @torch.no_grad() + @torch.inference_mode() def forward(self, c, 
**kwargs): return self.encode_with_transformer(c) - @torch.no_grad() + @torch.inference_mode() def encode_with_transformer(self, text): cast_dtype = self.model.transformer.get_cast_dtype() @@ -99,13 +113,17 @@ def encode_with_transformer(self, text): return x - @torch.no_grad() + @torch.inference_mode() def text_transformer_forward(self, x: torch.Tensor, attn_mask=None): for i, r in enumerate(self.model.transformer.resblocks): if i == len(self.model.transformer.resblocks) - self.layer_idx: break - - x = r(x, attn_mask=attn_mask) + #if self.model.transformer.grad_checkpointing and not torch.jit.is_scripting(): + #x = checkpoint(r, x, attn_mask) + #else: + + x = r(x, attn_mask=attn_mask) + return x #-------------------------------------------------------------- @@ -119,14 +137,30 @@ def store_model(self, config_path: str, save_path: str=None, without_metadata=Fa @staticmethod def from_config(config, device: torch.device, save_path: str=None): config["save_path"] = None - return Config_Model.from_config(config, device, save_path=None) + return ConfigModel.from_config(config, device, save_path=None) # %% ../../src/models/frozen_open_clip.ipynb 17 +@dataclass +class CachedFrozenOpenCLIPEmbedderConfig(FrozenOpenCLIPEmbedderConfig): + enable_cache_token_limit: bool + +# %% ../../src/models/frozen_open_clip.ipynb 18 class CachedFrozenOpenCLIPEmbedder(FrozenOpenCLIPEmbedder): """Adds caching support to `FrozenOpenCLIPEmbedder`.""" + + def __init__(self, arch="ViT-B-32", version="datacomp_xl_s13b_b90k", max_length=77, freeze=True, layer="penultimate", enable_cache_token_limit: bool = True, **kwargs): + super().__init__(arch=arch, version=version, max_length=max_length, freeze=freeze, layer=layer, **kwargs) + self.enable_cache_token_limit = enable_cache_token_limit + + self.params_config = CachedFrozenOpenCLIPEmbedderConfig(arch, version, max_length, freeze, layer, enable_cache_token_limit) + + def get_token_count(self, tokens, padding_token=0): + # tokens .. 
[b, seq] + collabsed_tokens = (tokens != padding_token).to(torch.int32) + return torch.count_nonzero(collabsed_tokens, dim=-1) # [b] - @torch.no_grad() - def generate_cache(self, str_list: list=None, tokens=None, cached_empty_token_index=0, b_size=2048, y_on_cpu=False): + @torch.inference_mode() + def generate_cache(self, str_list: list=None, tokens=None, cached_empty_token_index=None, b_size=2048, y_on_cpu=False): self.cached_empty_token_index = cached_empty_token_index if exists(str_list): self.cached_tokens = self.tokenize_and_push_to_device(str_list) elif exists(tokens): self.cached_tokens = tokens @@ -136,6 +170,14 @@ def generate_cache(self, str_list: list=None, tokens=None, cached_empty_token_in # cached_tokens [n, 77] ... int # cached_embeddings [n, 77, 512] ... float + if self.enable_cache_token_limit: + self.max_length = self.get_token_count(self.cached_tokens).max().item() + self.params_config.max_length = self.max_length + self.params_config.enable_cache_token_limit = self.enable_cache_token_limit + print(f"[INFO]: - `generate_cache` infered a TOKEN limit of {self.max_length}") + + #self.cached_tokens = self.cached_tokens[:, :self.max_length] + n = self.cached_tokens.shape[0] n_chunks = int(np.ceil(n / b_size)) @@ -145,7 +187,7 @@ def generate_cache(self, str_list: list=None, tokens=None, cached_empty_token_in last_ind = 0 for i, cached_tokens in tqdm(enumerate(self.cached_tokens.chunk(n_chunks)), total=n_chunks): - x = super().forward(cached_tokens.to(self.device)) + x = super().forward(cached_tokens.to(self.device)) # ... 
[b, seq, ch] if i == 0: mem = n * x.shape[1] * x.shape[2] * x.element_size() * 1e-9 @@ -155,11 +197,14 @@ def generate_cache(self, str_list: list=None, tokens=None, cached_empty_token_in self.cached_embeddings[last_ind:last_ind+x.shape[0]] = x.to(self.cached_embeddings.device) last_ind += x.shape[0] - + + if self.enable_cache_token_limit: + self.cached_embeddings = self.cached_embeddings[:, :self.max_length] + if not y_on_cpu: - self.cached_embeddings = self.cached_embeddings.to(in_device) + self.cached_embeddings = self.cached_embeddings.to(in_device) - @torch.no_grad() + @torch.inference_mode() def look_up_cos_sim_cached_index(self, str_list: list=None, tokens=None): if exists(str_list): tokens = self.tokenize_and_push_to_device(str_list) else: raise RuntimeError("please provide str_list or tokens") @@ -180,10 +225,15 @@ def look_up_cos_sim_cached_index(self, str_list: list=None, tokens=None): return max_idx - @torch.no_grad() + # @torch.inference_mode() def forward(self, c, **kwargs): in_device = c.device - if c.dim() == 1: return self.cached_embeddings[c.to(self.cached_embeddings.device)].to(in_device) #list of ints - elif c.dim() == 2: return super().forward(c, **kwargs) #tokenized input + if c.dim() == 1: c_emb = self.cached_embeddings[c.to(self.cached_embeddings.device)].to(in_device) #list of ints + elif c.dim() == 2: c_emb = super().forward(c.to(self.device)) #tokenized input else: raise NotImplementedError("") + + if self.enable_cache_token_limit: + c_emb = c_emb[:, :self.max_length] + + return c_emb diff --git a/genQC/models/layers.py b/genQC/models/layers.py index 2ececf6..5746af7 100644 --- a/genQC/models/layers.py +++ b/genQC/models/layers.py @@ -1,14 +1,16 @@ +"""Common model layers.""" + # AUTOGENERATED! DO NOT EDIT! File to edit: ../../src/models/layers.ipynb. 
# %% auto 0 -__all__ = ['DownBlock2D', 'UpBlock2D', 'ResDownBlock2D', 'ResUpBlock2D', 'ResBlock2D', 'ResBlock2D_Conditional', 'FeedForward', +__all__ = ['DownBlock2D', 'UpBlock2D', 'ResDownBlock2D', 'ResUpBlock2D', 'ResBlock2D', 'ResBlock2DConditional', 'FeedForward', 'PositionalEncoding', 'TimeEmbedding', 'PositionalEncodingTransposed', 'PositionalEncoding2D', 'PositionalEncoding2DSpaceOnly'] -# %% ../../src/models/layers.ipynb 3 +# %% ../../src/models/layers.ipynb 2 from ..imports import * -# %% ../../src/models/layers.ipynb 5 +# %% ../../src/models/layers.ipynb 4 class DownBlock2D(nn.Module): """A 2d down scale block.""" def __init__(self, in_ch, out_ch, kernel_size=2, stride=2, padding=0, use_conv=True): @@ -29,7 +31,7 @@ def forward(self, x): x = self.convId(x) return x -# %% ../../src/models/layers.ipynb 6 +# %% ../../src/models/layers.ipynb 5 class UpBlock2D(nn.Module): """A 2d up scale block.""" def __init__(self, in_ch, out_ch, kernel_size=2, stride=2, padding=0, use_conv=True): @@ -37,7 +39,10 @@ def __init__(self, in_ch, out_ch, kernel_size=2, stride=2, padding=0, use_conv=T self.use_conv = use_conv self.up_sample = nn.Upsample(scale_factor=kernel_size) if self.use_conv: - self.conv1 = nn.Conv2d(in_ch, out_ch, kernel_size=(1,3), stride=1, padding="same") + if kernel_size==(1,2): kernel_size_conv = (1,3) + else: kernel_size_conv = 3 + + self.conv1 = nn.Conv2d(in_ch, out_ch, kernel_size=kernel_size_conv, stride=1, padding="same") else: self.convId = nn.Conv2d(in_ch, out_ch, kernel_size=1, stride=1, padding="same") if in_ch!=out_ch else nn.Identity() @@ -49,7 +54,7 @@ def forward(self, x): x = self.convId(x) return x -# %% ../../src/models/layers.ipynb 7 +# %% ../../src/models/layers.ipynb 6 class ResDownBlock2D(nn.Module): """A 2d residual down scale block.""" def __init__(self, in_ch, out_ch, kernel_size=2, stride=2, padding=0): @@ -69,7 +74,7 @@ def forward(self, x): r2 = self.down(r2) return self.act(r1 + r2) -# %% ../../src/models/layers.ipynb 8 +# %% 
../../src/models/layers.ipynb 7 class ResUpBlock2D(nn.Module): """A 2d residual up scale block.""" def __init__(self, in_ch, out_ch, kernel_size=2, stride=2, padding=0): @@ -89,17 +94,17 @@ def forward(self, x): r2 = self.up(r2) return self.act(r1 + r2) -# %% ../../src/models/layers.ipynb 10 +# %% ../../src/models/layers.ipynb 9 class ResBlock2D(nn.Module): """A 2d residual block.""" - def __init__(self, in_ch, out_ch, kernel_size, skip=True): + def __init__(self, in_ch, out_ch, kernel_size, skip=True, num_groups=32): super().__init__() self.act = nn.SiLU() self.conv1 = nn.Conv2d( in_ch, out_ch, kernel_size=kernel_size, stride=1, padding ="same") self.conv2 = nn.Conv2d(out_ch, out_ch, kernel_size=kernel_size, stride=1, padding ="same") - self.norm1 = torch.nn.GroupNorm(num_groups=32, num_channels=in_ch) #, eps=1e-6, affine=True) - self.norm2 = torch.nn.GroupNorm(num_groups=32, num_channels=out_ch) #, eps=1e-6, affine=True) + self.norm1 = torch.nn.GroupNorm(num_groups=num_groups, num_channels=in_ch) #, eps=1e-6, affine=True) + self.norm2 = torch.nn.GroupNorm(num_groups=num_groups, num_channels=out_ch) #, eps=1e-6, affine=True) self.skip = skip if self.skip: @@ -123,8 +128,8 @@ def forward(self, x): return self.skip_connection(x) + h -# %% ../../src/models/layers.ipynb 11 -class ResBlock2D_Conditional(nn.Module): +# %% ../../src/models/layers.ipynb 10 +class ResBlock2DConditional(nn.Module): """A 2d residual block with input of a time-step $t$ embedding.""" def __init__(self, in_ch, out_ch, t_emb_size, kernel_size, skip=True): super().__init__() @@ -162,7 +167,7 @@ def forward(self, x, t_emb): return self.skip_connection(x) + h -# %% ../../src/models/layers.ipynb 13 +# %% ../../src/models/layers.ipynb 12 class FeedForward(nn.Module): """A small dense feed-forward network as used in `transformers`.""" def __init__(self, in_ch, out_ch, inner_mult=1): @@ -177,18 +182,19 @@ def forward(self, x): x = self.proj2(x) return x -# %% ../../src/models/layers.ipynb 16 +# %% 
../../src/models/layers.ipynb 15 class PositionalEncoding(nn.Module): """An absolute pos encoding layer.""" - def __init__(self, d_model: int, dropout: float = 0.0, max_len: int = 5000): + def __init__(self, d_model: int, dropout: float = 0.0, max_len: int = 5000, freq_factor: float = 10000.0): super().__init__() self.dropout = nn.Dropout(p=dropout) position = torch.arange(max_len).unsqueeze(1) - div_term = torch.exp(torch.arange(0, d_model, 2) * (-math.log(10000.0) / d_model)) + div_term = torch.exp(torch.arange(0, d_model, 2) * (-math.log(freq_factor) / d_model)) pe = torch.zeros(max_len, d_model) pe[:, 0::2] = torch.sin(position * div_term) pe[:, 1::2] = torch.cos(position * div_term) + pe = pe.contiguous() self.register_buffer('pe', pe) def forward(self, x: torch.Tensor) -> torch.Tensor: @@ -199,11 +205,11 @@ def forward(self, x: torch.Tensor) -> torch.Tensor: x = x + self.pe[None, :x.size(1)] return self.dropout(x) -# %% ../../src/models/layers.ipynb 17 +# %% ../../src/models/layers.ipynb 16 class TimeEmbedding(PositionalEncoding): """A time embedding layer""" - def __init__(self, d_model: int, dropout: float = 0.0, max_len: int = 5000): - super().__init__(d_model, dropout, max_len) + def __init__(self, d_model: int, dropout: float = 0.0, max_len: int = 5000, freq_factor: float = 10000.0): + super().__init__(d_model, dropout, max_len, freq_factor) self.ff = FeedForward(d_model, d_model) def forward(self, t: torch.Tensor): @@ -211,11 +217,11 @@ def forward(self, t: torch.Tensor): x = self.ff(x) return self.dropout(x) -# %% ../../src/models/layers.ipynb 18 +# %% ../../src/models/layers.ipynb 17 class PositionalEncodingTransposed(PositionalEncoding): - def __init__(self, d_model: int, dropout: float = 0.0, max_len: int = 5000): - super().__init__(d_model, dropout, max_len) - self.pe = torch.permute(self.pe, (1, 0)) # [max_len, d_model] to [d_model, max_len] + def __init__(self, d_model: int, dropout: float = 0.0, max_len: int = 5000, freq_factor: float = 
10000.0): + super().__init__(d_model, dropout, max_len, freq_factor) + self.pe = torch.permute(self.pe, (1, 0)).contiguous() # [max_len, d_model] to [d_model, max_len] def forward(self, x: torch.Tensor) -> torch.Tensor: """ @@ -225,23 +231,23 @@ def forward(self, x: torch.Tensor) -> torch.Tensor: x = x + self.pe[None, :, :x.size(2)] return self.dropout(x) -# %% ../../src/models/layers.ipynb 19 +# %% ../../src/models/layers.ipynb 18 class PositionalEncoding2D(PositionalEncodingTransposed): """A 2D absolute pos encoding layer.""" - def __init__(self, d_model: int, dropout: float = 0.0, max_len: int = 5000): - super().__init__(d_model=d_model//2, dropout=dropout, max_len=max_len) + def __init__(self, d_model: int, dropout: float = 0.0, max_len: int = 5000, freq_factor: float = 10000.0): + super().__init__(d_model=d_model//2, dropout=dropout, max_len=max_len, freq_factor=freq_factor) self.d_model_half = d_model//2 # self.proj = nn.Conv2d(d_model, d_model, kernel_size=1, stride=1, padding ="same") def forward(self, x: torch.Tensor) -> torch.Tensor: """ Arguments: - x: Tensor, shape ``[batch_size, gate_color, space , time]`` + x: Tensor, shape ``[batch_size, gate_color, space, time]`` """ p1 = self.pe[None, :, :x.size(2), None] #space encoding p2 = self.pe[None, :, None, :x.size(3)] #time encoding - + x[:, :self.d_model_half] = x[:, :self.d_model_half] + p1 x[:, self.d_model_half:] = x[:, self.d_model_half:] + p2 @@ -249,10 +255,10 @@ def forward(self, x: torch.Tensor) -> torch.Tensor: return self.dropout(x) -# %% ../../src/models/layers.ipynb 20 +# %% ../../src/models/layers.ipynb 19 class PositionalEncoding2DSpaceOnly(PositionalEncodingTransposed): - def __init__(self, d_model: int, dropout: float = 0.0, max_len: int = 5000): - super().__init__(d_model=d_model, dropout=dropout, max_len=max_len) + def __init__(self, d_model: int, dropout: float = 0.0, max_len: int = 5000, freq_factor: float = 10000.0): + super().__init__(d_model=d_model, dropout=dropout, 
max_len=max_len, freq_factor=freq_factor) def forward(self, x: torch.Tensor) -> torch.Tensor: """ diff --git a/genQC/models/position_encoding.py b/genQC/models/position_encoding.py new file mode 100644 index 0000000..effcec6 --- /dev/null +++ b/genQC/models/position_encoding.py @@ -0,0 +1,148 @@ +"""Implementation of special position encodings.""" + +# AUTOGENERATED! DO NOT EDIT! File to edit: ../../src/models/position_encoding.ipynb. + +# %% auto 0 +__all__ = ['RotaryPositionalEmbedding', 'RotaryPositionalEmbedding2D', 'LearnedPositionalEmbedding'] + +# %% ../../src/models/position_encoding.ipynb 2 +from ..imports import * + +# %% ../../src/models/position_encoding.ipynb 4 +class RotaryPositionalEmbedding(nn.Module): + """ + This class implements the Rotary Positional Embeddings (RoPE), + proposed in https://arxiv.org/abs/2104.09864. + + Code adjusted from https://github.com/pytorch/torchtune/blob/main/torchtune/modules/position_embeddings.py + > Copyright (c) Meta Platforms, Inc. and affiliates. + > All rights reserved. + + Additionally adds p-RoPE from https://openreview.net/pdf?id=GtvuNrk58a + Note: p=0 coincides with NoPE, while the case p=1 with RoPE + """ + + def __init__(self, head_dim: int, p: float = 1.0, max_seq_len: int = 4096, base: float = 10_000) -> None: + super().__init__() + self.head_dim = head_dim + self.p = p + self.base = base # max_wavelength; the lowest frequencies rotate at roughly 1/base radians per token; i.e. 
we can resolve 2pi*base tokens + self.max_seq_len = max_seq_len + + self.rope_angles = int(self.p * (self.head_dim//2)) #division factor two is for cos-sin split + self.nope_angles = self.head_dim//2 - self.rope_angles + + timescale = self.base ** (2.0 * torch.arange(0, self.head_dim//2, dtype=torch.float32) / self.head_dim) + timescale[self.rope_angles:] = torch.full((self.nope_angles,), fill_value=torch.inf, dtype=torch.float32) + + theta = 1.0 / timescale + + self.register_buffer("theta", theta, persistent=False) + self.rebuild_rope_cache(self.max_seq_len) + + def rebuild_rope_cache(self, max_seq_len: int = 4096) -> None: + + # Create position indexes [0, 1, ..., max_seq_len - 1] + seq_idx = torch.arange(max_seq_len, dtype=self.theta.dtype, device=self.theta.device) + + # Outer product of theta and position index: output shape [max_seq_len, head_dim//2] + idx_theta = torch.einsum("i, j -> ij", seq_idx, self.theta) + + # cache includes both the cos and sin: output shape [max_seq_len, head_dim//2, 2] + rope_cache = torch.stack([torch.cos(idx_theta), torch.sin(idx_theta)], dim=-1) + self.register_buffer("rope_cache", rope_cache, persistent=False) + + def forward(self, x: torch.Tensor, pos_idx: Optional[torch.Tensor] = None) -> torch.Tensor: + """ + Shape: + x ... [b, seq, n_heads, head_dim] + pos_idx ... 
[b, seq] or [seq] + """ + + seq_len = x.size(1) + xshaped = x.type_as(self.rope_cache).reshape(*x.shape[:-1], -1, 2) #split head_dim [b, seq, n_h, head_dim/2, 2] + + # extract the values based on whether pos_idx is set or not, shape [seq, head_dim/2, 2] + if exists(pos_idx): + rope_cache = self.rope_cache[pos_idx] + + # reshape the cache to [b, seq, 1, head_dim/2, 2] + rope_cache = rope_cache.view(-1, seq_len, 1, xshaped.size(3), 2) + + else: + rope_cache = self.rope_cache[:seq_len] + + # reshape the cache to [1, seq, 1, head_dim/2, 2] + rope_cache = rope_cache.view(1, seq_len, 1, xshaped.size(3), 2) + + # out has shape [b, seq, n_h, head_dim/2, 2] + x_out = torch.stack( + [ + xshaped[..., 0] * rope_cache[..., 0] - xshaped[..., 1] * rope_cache[..., 1], # x cos - y sin + xshaped[..., 1] * rope_cache[..., 0] + xshaped[..., 0] * rope_cache[..., 1], # x sin + y cos + ], dim=-1) + + # flatten to shape [b, seq, n_h, head_dim] + x_out = x_out.flatten(3) + return x_out.type_as(x) + +# %% ../../src/models/position_encoding.ipynb 7 +class RotaryPositionalEmbedding2D(nn.Module): + + def __init__(self, head_dim: int, p: float = 1.0, max_seq_len: int = 4096, base: float = 10_000) -> None: + super().__init__() + self.rope = RotaryPositionalEmbedding(head_dim=head_dim//2, p=p, max_seq_len=max_seq_len, base=base) + + def forward(self, x: torch.Tensor, pos_idx: torch.Tensor) -> torch.Tensor: + """ + The tensor `pos_idx` specifies the x and y coordinates of sequence elements of x. + + Shape: + x ... [b, seq, n_heads, head_dim] + pos_idx ... 
[b, seq, 2] or [seq, 2] + """ + + xshaped = x.reshape(*x.shape[:-1], -1, 2) #split head_dim [b, seq, n_h, head_dim/2, 2] + + x_out = torch.cat( + [ + self.rope(xshaped[..., 0], pos_idx=pos_idx[..., 0]), # coord 1 + self.rope(xshaped[..., 1], pos_idx=pos_idx[..., 1]), # coord 2 + ], dim=-1) + + return x_out + +# %% ../../src/models/position_encoding.ipynb 10 +class LearnedPositionalEmbedding(nn.Module): + """ + This class implements a Learned Positional Embedding, e.g. used for spatial circuit dimension. + """ + + def __init__(self, dim: int, max_seq_len: int = 64) -> None: + super().__init__() + + self.dim = dim + self.max_seq_len = max_seq_len + + _pos_encoding = torch.zeros((self.max_seq_len, self.dim), dtype=torch.float32) + self.pos_encoding = nn.Parameter(_pos_encoding) + + self._init_weights() + + def _init_weights(self) -> None: + nn.init.orthogonal_(self.pos_encoding) + + def forward(self, x: torch.Tensor) -> torch.Tensor: + """ + Assumes channel last. + + Shapes: + x ... [b, s, t, ch] + """ + + seq_len = x.shape[1] + + pos_encoding = self.pos_encoding[:seq_len].view(1, seq_len, 1, -1) # to [1, s, 1, ch] + x = x + pos_encoding + + return x diff --git a/genQC/models/transformers/__init__.py b/genQC/models/transformers/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/genQC/models/transformers/attention.py b/genQC/models/transformers/attention.py new file mode 100644 index 0000000..0b723ba --- /dev/null +++ b/genQC/models/transformers/attention.py @@ -0,0 +1,160 @@ +"""Common transformer and attention blocks.""" + +# AUTOGENERATED! DO NOT EDIT! File to edit: ../../../src/models/transformers/attention.ipynb. 
+ +# %% auto 0 +__all__ = ['FeedForwardBlock', 'BasisSelfAttnBlock', 'BasisCrossAttnBlock', 'SpatialTransformerSelfAttn', 'SpatialTransformer'] + +# %% ../../../src/models/transformers/attention.ipynb 2 +from ...imports import * + +# %% ../../../src/models/transformers/attention.ipynb 4 +class FeedForwardBlock(nn.Module): + """ + A small dense feed-forward network as used in `transformers`. Assumes channel last. + Inspired by https://arxiv.org/pdf/2401.11605. + From https://arxiv.org/pdf/2002.05202 a modification to SiGLU + """ + + def __init__(self, in_dim: int, hidden_dim: int, dropout: float = 0.0) -> None: + super().__init__() + self.hidden_dim = hidden_dim + self.proj_in = nn.Linear(in_dim, 2*hidden_dim) # factor two for GLU part split + self.proj_out = nn.Linear(hidden_dim, in_dim) + self.act = nn.SiLU() + self.drop = nn.Dropout(dropout) + + def siglu(self, x: torch.Tensor) -> torch.Tensor: + x = self.proj_in(x) + return x[..., :self.hidden_dim] * self.act(x[..., self.hidden_dim:]) + + #@torch.compile + def forward(self, x: torch.Tensor) -> torch.Tensor: + x = self.siglu(x) + x = self.drop(x) + x = self.proj_out(x) + return x + +# %% ../../../src/models/transformers/attention.ipynb 6 +class BasisSelfAttnBlock(nn.Module): + """A self attention block, i.e. a `transformer` encoder.""" + def __init__(self, ch, num_heads, dropout=0.0, batch_first=False): + super().__init__() + self.self_att = nn.MultiheadAttention(ch, num_heads=num_heads, batch_first=batch_first) #[t, b, c] + self.ff = FeedForwardBlock(ch, 2*ch) + self.norm1 = nn.LayerNorm(ch) + self.norm2 = nn.LayerNorm(ch) + self.drop = nn.Dropout(dropout) + + def forward(self, x, attn_mask=None, key_padding_mask=None, need_weights=False): + #x ... 
[ t, batch, ch] + + self_out = self.norm1(x) + self_out, _ = self.self_att(self_out, key=self_out, value=self_out, attn_mask=attn_mask, key_padding_mask=key_padding_mask, need_weights=need_weights) + self_out = self.drop(self_out) + x + + feed_out = self.norm2(self_out) + feed_out = self.ff(feed_out) + feed_out = self.drop(feed_out) + self_out + + return feed_out + +# %% ../../../src/models/transformers/attention.ipynb 7 +class BasisCrossAttnBlock(nn.Module): + """A cross attention block, i.e. a `transformer` decoder.""" + def __init__(self, ch, num_heads, dropout=0.0, batch_first=False): + super().__init__() + self.self_att = nn.MultiheadAttention(ch, num_heads=num_heads, batch_first=batch_first) #[t, b, c] + self.cross_att = nn.MultiheadAttention(ch, num_heads=num_heads, batch_first=batch_first) + self.ff = FeedForwardBlock(ch, 2*ch) + self.norm1 = nn.LayerNorm(ch) + self.norm2 = nn.LayerNorm(ch) + self.norm3 = nn.LayerNorm(ch) + self.drop = nn.Dropout(dropout) + + def forward(self, x, c_emb, attn_mask=None, key_padding_mask=None, need_weights=False): + #x ... [ t, batch, ch] + #c_emb ... 
[seq, batch, ch] + + self_out = self.norm1(x) + self_out, _ = self.self_att(self_out, key=self_out, value=self_out, attn_mask=attn_mask, key_padding_mask=key_padding_mask, need_weights=need_weights) + self_out = self.drop(self_out) + x + + cross_out = self.norm2(self_out) + cross_out, _ = self.cross_att(cross_out, key=c_emb, value=c_emb, need_weights=need_weights) + cross_out = self.drop(cross_out) + self_out + + feed_out = self.norm3(cross_out) + feed_out = self.ff(feed_out) + feed_out = self.drop(feed_out) + cross_out + + return feed_out + +# %% ../../../src/models/transformers/attention.ipynb 9 +class SpatialTransformerSelfAttn(nn.Module): + """A spatial residual `transformer`, only uses self-attention.""" + def __init__(self, ch, num_heads, depth, dropout=0.0, num_groups=32): + super().__init__() + self.norm = torch.nn.GroupNorm(num_groups=num_groups, num_channels=ch, eps=1e-6, affine=True) + self.transformer_blocks = nn.ModuleList([BasisSelfAttnBlock(ch=ch, num_heads=num_heads, dropout=dropout) for d in range(depth)]) + + def forward(self, x, attn_mask=None, key_padding_mask=None): + #x ... [batch, ch, space, time] + #c_emb ... 
[batch, seq, ch] + b, ch, space, time = x.shape + + x_in = x + + #------------------------- + x = self.norm(x) + + x = torch.reshape(x, (b, ch, space*time)) + x = torch.permute(x, (2, 0, 1))#.contiguous() # to [t, batch, ch] + + #------------------------- + for transformer_block in self.transformer_blocks: + x = transformer_block(x, attn_mask, key_padding_mask) + + #------------------------- + + x = torch.permute(x, (1, 2, 0)) # back to [batch, ch, t] + x = torch.reshape(x, (b, ch, space, time))#.contiguous() + + return x + x_in + +# %% ../../../src/models/transformers/attention.ipynb 10 +class SpatialTransformer(nn.Module): + """A spatial residual `transformer`, uses self- and cross-attention on conditional input.""" + + def __init__(self, ch, cond_emb_size, num_heads, depth, dropout=0.0, num_groups=32): + super().__init__() + self.cat_proj = nn.Linear(cond_emb_size, ch) + self.norm = torch.nn.GroupNorm(num_groups=num_groups, num_channels=ch, eps=1e-6, affine=True) + self.transformer_blocks = nn.ModuleList([BasisCrossAttnBlock(ch=ch, num_heads=num_heads, dropout=dropout) for d in range(depth)]) + + def forward(self, x, c_emb, attn_mask=None, key_padding_mask=None): + #x ... [batch, ch, space, time] + #c_emb ... 
[batch, seq, ch] + b, ch, space, time = x.shape + + x_in = x + + #------------------------- + x = self.norm(x) + + x = torch.reshape(x, (b, ch, space*time)) + x = torch.permute(x, (2, 0, 1))#.contiguous() # to [t, batch, ch] + + c_emb = self.cat_proj(c_emb) + c_emb = torch.permute(c_emb, (1, 0, 2))#.contiguous() # to [seq, batch, ch] + + #------------------------- + for transformer_block in self.transformer_blocks: + x = transformer_block(x, c_emb, attn_mask, key_padding_mask) + + #------------------------- + + x = torch.permute(x, (1, 2, 0)) # back to [batch, ch, t] + x = torch.reshape(x, (b, ch, space, time))#.contiguous() + + return x + x_in diff --git a/genQC/models/transformers/cirdit_multimodal.py b/genQC/models/transformers/cirdit_multimodal.py new file mode 100644 index 0000000..93ffbc8 --- /dev/null +++ b/genQC/models/transformers/cirdit_multimodal.py @@ -0,0 +1,759 @@ +"""The multimodal circuit generation model: *Circuit Diffusion Transformer* (CirDiT).""" + +# AUTOGENERATED! DO NOT EDIT! File to edit: ../../../src/models/transformers/cirdit_multimodal.ipynb. + +# %% auto 0 +__all__ = ['RotaryMultiheadAttention', 'FeedForwardBlock', 'SelfAttnBlock', 'AdaptiveSelfAttnBlock', 'CrossAttnBlock', + 'CoreTransformer', 'PackingTransformer', 'UnpackingTransformer', 'TimeEmbedding', 'CirDiTConfig', 'CirDiT', + 'UnitaryCLIPPartialNoiseCompilationCirDiTConfig', 'UnitaryCLIPPartialNoiseCompilationCirDiT'] + +# %% ../../../src/models/transformers/cirdit_multimodal.ipynb 2 +from ...imports import * +from ..config_model import * +from ..position_encoding import RotaryPositionalEmbedding, LearnedPositionalEmbedding +from ..layers import PositionalEncoding + +# %% ../../../src/models/transformers/cirdit_multimodal.ipynb 4 +class RotaryMultiheadAttention(nn.Module): + """ + MultiheadAttention described in the paper: Attention Is All You Need (https://arxiv.org/abs/1706.03762). + We add a rotary position encoding (RoPE). 
+ + The attention core is `F.scaled_dot_attention` from pytorch. + Could be switched to `https://github.com/Dao-AILab/flash-attention` or `xFormers`. + """ + + def __init__(self, + in_dim: int, + embed_dim: int, + num_heads: int, + bias: bool = True, + p_rope: float = 1.0, + max_seq_len: int = 4096, + base_rope: float = 10_000, + enable_qk_norm: bool = False) -> None: + + super().__init__() + + self.num_heads = num_heads + self.bias = bias + self.head_dim = embed_dim // num_heads + + self.q_proj = nn.Linear(in_dim, embed_dim, bias=bias) + self.k_proj = nn.Linear(in_dim, embed_dim, bias=bias) + self.v_proj = nn.Linear(in_dim, embed_dim, bias=bias) + + self.out_proj = nn.Linear(embed_dim, embed_dim, bias=bias) + + self.enable_qk_norm = enable_qk_norm + if self.enable_qk_norm: + self.q_norm = nn.RMSNorm(self.head_dim) + self.k_norm = nn.RMSNorm(self.head_dim) + + self.rope = RotaryPositionalEmbedding(head_dim=self.head_dim, p=p_rope, max_seq_len=max_seq_len, base=base_rope) + + self._init_weights() + + def _init_weights(self) -> None: + nn.init.xavier_normal_(self.q_proj.weight) + nn.init.xavier_normal_(self.k_proj.weight) + nn.init.xavier_normal_(self.v_proj.weight) + nn.init.xavier_normal_(self.out_proj.weight) + + if self.bias: + nn.init.zeros_(self.q_proj.bias) + nn.init.zeros_(self.k_proj.bias) + nn.init.zeros_(self.v_proj.bias) + nn.init.zeros_(self.out_proj.bias) + + + def forward(self, query: torch.Tensor, key: torch.Tensor, value: torch.Tensor, pos_idx: Optional[torch.Tensor] = None) -> torch.Tensor: + """ + Assumes batch first. When `pos_idx` is provided we use RoPE, else NOT! + + Shapes: + query ... [b, n1, c] + key/value ... 
[b, n2, c] + """ + + assert key.shape == value.shape + + b, n1, _ = query.shape + _, n2, _ = key.shape + + q = self.q_proj(query) + k = self.k_proj(key) + v = self.v_proj(value) + + q = q.view(b, n1, self.num_heads, self.head_dim) + k = k.view(b, n2, self.num_heads, self.head_dim) + v = v.view(b, n2, self.num_heads, self.head_dim) + + if self.enable_qk_norm: + q = self.q_norm(q) + k = self.k_norm(k) + + if exists(pos_idx): + q = self.rope(q, pos_idx=pos_idx) + k = self.rope(k, pos_idx=pos_idx) + + # scaled_dot_product_attention takes [b, num_heads, seq, head_dim] + q = q.permute((0, 2, 1, 3)) + k = k.permute((0, 2, 1, 3)) + v = v.permute((0, 2, 1, 3)) + + # see https://pytorch.org/docs/stable/generated/torch.nn.functional.scaled_dot_product_attention.html + attn = F.scaled_dot_product_attention(query=q, + key=k, + value=v, + attn_mask=None, + dropout_p=0.0, + is_causal=False, + scale=None, + #enable_gqa=False + ) + + # back to [b, seq, num_heads, head_dim] + attn = attn.permute((0, 2, 1, 3)) + + # pack heads together + attn = attn.reshape(b, n1, self.num_heads * self.head_dim) + attn = self.out_proj(attn) + return attn + +# %% ../../../src/models/transformers/cirdit_multimodal.ipynb 6 +class FeedForwardBlock(nn.Module): + """ + A small dense feed-forward network as used in `transformers`. Assumes channel last. + Inspired by https://arxiv.org/pdf/2401.11605 and added + from https://arxiv.org/pdf/2002.05202 a modification to SiGLU structure. 
+ """ + + def __init__(self, + in_dim: int, + hidden_dim: int, + out_dim: Optional[int] = None, + dropout: float = 0.0) -> None: + super().__init__() + out_dim = default(out_dim, in_dim) + + self.hidden_dim = hidden_dim + self.proj_in = nn.Linear(in_dim, 2*hidden_dim) # factor two for GLU part split + self.proj_out = nn.Linear(hidden_dim, out_dim) + self.act = nn.SiLU() + self.drop = nn.Dropout(dropout) + + self._init_weights() + + def _init_weights(self) -> None: + nn.init.zeros_(self.proj_out.bias) + # nn.init.xavier_normal_(self.proj_out.weight) + + def siglu(self, x: torch.Tensor) -> torch.Tensor: + x = self.proj_in(x) + return x[..., :self.hidden_dim] * self.act(x[..., self.hidden_dim:]) + + #@torch.compile + def forward(self, x: torch.Tensor) -> torch.Tensor: + x = self.siglu(x) + x = self.drop(x) + x = self.proj_out(x) + return x + +# %% ../../../src/models/transformers/cirdit_multimodal.ipynb 7 +class SelfAttnBlock(nn.Module): + """A self-attention block which includes the time condition `t_emb`, see https://arxiv.org/pdf/2312.02139.""" + + def __init__(self, ch: int, t_emb_size: int, num_heads: int, dropout: float = 0.0, p_rope: float = 1.0, base_rope: float = 10_000) -> None: + super().__init__() + + self.self_att = RotaryMultiheadAttention(in_dim=ch+t_emb_size, embed_dim=ch, num_heads=num_heads, p_rope=p_rope, base_rope=base_rope) + + self.ff = FeedForwardBlock(in_dim=ch, hidden_dim=2*ch) + self.norm_self = nn.RMSNorm(ch) + self.norm_ff = nn.RMSNorm(ch) + self.drop = nn.Dropout(dropout) + + self._init_weights() + + def _init_weights(self) -> None: + + # note a bonus of res-pos-norm is that we can init as identity! + nn.init.zeros_(self.norm_self.weight) + nn.init.zeros_(self.norm_ff.weight) + + def forward(self, x: torch.Tensor, t_emb: torch.Tensor, pos_idx: torch.Tensor) -> torch.Tensor: + """ + Assumes batch first. + + Shapes: + x ... [b, n, ch] + t_emb ... [b, 1, t_emb_size] + pos_idx ... 
[b, n] or [n] + """ + + t_emb_self = t_emb.expand(x.shape[0], x.shape[1], -1) + + # Self-attention part + self_out = torch.cat([x, t_emb_self], dim=-1) # concat time tokens + self_out = self.self_att(query=self_out, key=self_out, value=self_out, pos_idx=pos_idx) + self_out = self.norm_self(self_out) + self_out = self.drop(self_out) + x + + # Feed-Forward part + feed_out = self.ff(self_out) + feed_out = self.norm_ff(feed_out) + feed_out = self.drop(feed_out) + self_out + return feed_out + +# %% ../../../src/models/transformers/cirdit_multimodal.ipynb 8 +class AdaptiveSelfAttnBlock(nn.Module): + """A self-attention block which includes the time condition `t_emb`, see https://arxiv.org/pdf/2312.02139.""" + + def __init__(self, + ch: int, + mod_ch: int, + t_emb_size: int, + num_heads: int, + dropout: float = 0.0, + p_rope: float = 1.0, + base_rope: float = 10_000) -> None: + super().__init__() + + self.self_att = RotaryMultiheadAttention(in_dim=ch+t_emb_size, embed_dim=ch, num_heads=num_heads, p_rope=p_rope, base_rope=base_rope) + + self.ff = FeedForwardBlock(in_dim=ch, hidden_dim=2*ch) + self.norm_self = nn.RMSNorm(ch) + self.norm_ff = nn.RMSNorm(ch) + self.drop = nn.Dropout(dropout) + + self.adaRMS_modulation = nn.Linear(mod_ch, 6*ch) + + self._init_weights() + + def _init_weights(self) -> None: + nn.init.zeros_(self.adaRMS_modulation.bias) + + def forward(self, x: torch.Tensor, mod: torch.Tensor, t_emb: torch.Tensor, pos_idx: torch.Tensor) -> torch.Tensor: + """ + Assumes batch first. + + Shapes: + x ... [b, n, ch] + mod ... [b, n, mod_ch] + t_emb ... [b, 1, t_emb_size] + pos_idx ... 
[b, n] or [n] + """ + + scale_att, shift_att, gate_attn, scale_mlp, shift_mlp, gate_mlp = self.adaRMS_modulation(mod).chunk(6, dim=-1) + + t_emb_self = t_emb.expand(x.shape[0], x.shape[1], -1) + + # Self-attention part + self_out = x * (1.0 + scale_att) + shift_att + self_out = torch.cat([self_out, t_emb_self], dim=-1) # concat time tokens + self_out = self.self_att(query=self_out, key=self_out, value=self_out, pos_idx=pos_idx) + self_out = self.norm_self(self_out) * gate_attn.tanh() + self_out = self.drop(self_out) + x + + # Feed-Forward part + feed_out = self_out * (1.0 + scale_mlp) + shift_mlp + feed_out = self.ff(feed_out) + feed_out = self.norm_ff(feed_out) * gate_mlp.tanh() + feed_out = self.drop(feed_out) + self_out + return feed_out + +# %% ../../../src/models/transformers/cirdit_multimodal.ipynb 9 +class CrossAttnBlock(nn.Module): + """A cross-attention block which includes the time condition `t_emb`, see https://arxiv.org/pdf/2312.02139""" + + def __init__(self, ch: int, t_emb_size: int, num_heads: int, dropout: float = 0.0, p_rope: float = 1.0, base_rope: float = 10_000) -> None: + super().__init__() + + self.self_att = RotaryMultiheadAttention(in_dim=ch+t_emb_size, embed_dim=ch, num_heads=num_heads, p_rope=p_rope, base_rope=base_rope) + self.multi_att = RotaryMultiheadAttention(in_dim=ch+t_emb_size, embed_dim=ch, num_heads=num_heads, p_rope=p_rope, base_rope=base_rope) + + self.ff = FeedForwardBlock(in_dim=ch, hidden_dim=2*ch) + self.norm_self = nn.RMSNorm(ch) + self.norm_multi = nn.RMSNorm(ch) + self.norm_ff = nn.RMSNorm(ch) + self.drop = nn.Dropout(dropout) + + self._init_weights() + + def _init_weights(self) -> None: + nn.init.zeros_(self.norm_self.weight) + nn.init.zeros_(self.norm_multi.weight) + nn.init.zeros_(self.norm_ff.weight) + + def forward(self, x: torch.Tensor, c_emb: torch.Tensor, t_emb: torch.Tensor, pos_idx: torch.Tensor) -> torch.Tensor: + """ + Assumes batch first. + + Shapes: + x ... [b, n1, ch] + c_emb ... [b, n2, ch] + t_emb ... 
[b, 1, t_emb_size] + pos_idx ... [b, n1] or [n1] + """ + + t_emb_self = t_emb.expand( x.shape[0], x.shape[1] , -1) + t_emb_multi = t_emb.expand(c_emb.shape[0], x.shape[1] + c_emb.shape[1], -1) + + # Self-attention part + self_out = torch.cat([x, t_emb_self], dim=-1) # concat time tokens + self_out = self.self_att(query=self_out, key=self_out, value=self_out, pos_idx=pos_idx) + self_out = self.norm_self(self_out) + self_out = self.drop(self_out) + x + + # Multimodial-attention part + multi_out = torch.cat([self_out, c_emb], dim=1) # concat latents with condition ... [b, n1+n2, ch] + + multi_out = torch.cat([multi_out, t_emb_multi], dim=-1) # concat time tokens + multi_out = self.multi_att(query=multi_out, key=multi_out, value=multi_out, pos_idx=None) + + multi_out, multi_out_gate = multi_out[:, :x.shape[1]], multi_out[:, x.shape[1]:] + multi_out_gate = multi_out_gate.mean(dim=1, keepdim=True) # ... [b, 1, ch] + + multi_out = self.norm_multi(multi_out) * multi_out_gate.tanh() + multi_out = self.drop(multi_out) + self_out + + # Feed-Forward part + feed_out = self.ff(multi_out) + feed_out = self.norm_ff(feed_out) + feed_out = self.drop(feed_out) + multi_out + return feed_out + +# %% ../../../src/models/transformers/cirdit_multimodal.ipynb 11 +class CoreTransformer(nn.Module): + """ + The main transformer of the CirDiT model, intakes time (attn-concat) and condition encodings (cross-attn). + Applies a RoPE for time dimension. 
+ """ + + def __init__(self, + ch: int, + c_emb_size: int, + t_emb_size: int, + depth: int, + num_heads: int, + dropout: float = 0.0, + p_rope: float = 1.0, + base_rope: float = 10_000) -> None: + super().__init__() + + self.norm = nn.RMSNorm(ch) + + self.c_proj = nn.Linear(c_emb_size, ch) + self.blocks = nn.ModuleList([ + CrossAttnBlock(ch=ch, + t_emb_size=t_emb_size, + num_heads=num_heads, + dropout=dropout, + p_rope=p_rope, + base_rope=base_rope) + for d in range(depth) + ]) + + def forward(self, x: torch.Tensor, c_emb: torch.Tensor, t_emb: torch.Tensor) -> torch.Tensor: + """ + Shapes: + x ... [b, t, ch] + c_emb ... [b, seq, c_emb_size] + t_emb ... [b, 1, t_emb_size] + """ + + c_emb = self.c_proj(c_emb) + pos_idx = torch.arange(x.shape[1], device=x.device, dtype=torch.int32) + + x = self.norm(x) + + for block in self.blocks: + x = block(x=x, c_emb=c_emb, t_emb=t_emb, pos_idx=pos_idx) + + return x + +# %% ../../../src/models/transformers/cirdit_multimodal.ipynb 13 +class PackingTransformer(ConfigModel): + """ + The first stage packing/unpacking transformers of the CirDiT model, intakes time (attn-concat). + Applies a RoPE for time dimension only, not on spatial dimension. + """ + + def __init__(self, + ch: int, + t_emb_size: int, + depth: int, + num_heads: int, + dropout: float = 0.0, + p_rope: float = 1.0, + base_rope: float = 10_000) -> None: + super().__init__() + + self.norm = nn.RMSNorm(ch) + self.blocks = nn.ModuleList([ + SelfAttnBlock(ch=ch, + t_emb_size=t_emb_size, + num_heads=num_heads, + dropout=dropout, + p_rope=p_rope, + base_rope=base_rope) + for d in range(depth) + ]) + + def forward(self, x: torch.Tensor, t_emb: torch.Tensor, return_penultimate: bool = False) -> torch.Tensor: + """ + Shapes: + x ... [b, s, t, ch] + t_emb ... 
[b, 1, t_emb_size] + """ + + b, s, t, ch = x.shape + + # create pos_idx such that they only depend on the time position + pos_idx = torch.arange(t, device=x.device, dtype=torch.int32).expand(b, s, -1) + pos_idx = pos_idx.reshape(b, -1) + + # flatten spatial and time into seq + x = x.reshape(b, s*t, ch) + x = self.norm(x) + + if return_penultimate: + for block in self.blocks[:-1]: + x = block(x=x, t_emb=t_emb, pos_idx=pos_idx) + + penultimate = x + x = self.blocks[-1](x=x, t_emb=t_emb, pos_idx=pos_idx) + + else: + for block in self.blocks: + x = block(x=x, t_emb=t_emb, pos_idx=pos_idx) + + # undo flatten + x = x.reshape(b, s, t, ch) + + if return_penultimate: + penultimate = penultimate.reshape(b, s, t, ch) + return x, penultimate + + return x + +# %% ../../../src/models/transformers/cirdit_multimodal.ipynb 14 +class UnpackingTransformer(ConfigModel): + """ + The first stage packing/unpacking transformers of the CirDiT model, intakes time (attn-concat). + Applies a RoPE for time dimension only, not on spatial dimension. + """ + + def __init__(self, + ch: int, + mod_ch: int, + t_emb_size: int, + depth: int, + num_heads: int, + dropout: float = 0.0, + p_rope: float = 1.0, + base_rope: float = 10_000) -> None: + super().__init__() + + self.norm = nn.RMSNorm(ch) + self.blocks = nn.ModuleList([ + AdaptiveSelfAttnBlock(ch=ch, + mod_ch=mod_ch, + t_emb_size=t_emb_size, + num_heads=num_heads, + dropout=dropout, + p_rope=p_rope, + base_rope=base_rope) + for d in range(depth) + ]) + + def forward(self, x: torch.Tensor, mod: torch.Tensor, t_emb: torch.Tensor) -> torch.Tensor: + """ + Shapes: + x ... [b, s, t, ch] + t_emb ... 
[b, 1, t_emb_size] + """ + + b, s, t, ch = x.shape + *_, mod_ch = mod.shape + + # create pos_idx such that they only depend on the time position + pos_idx = torch.arange(t, device=x.device, dtype=torch.int32).expand(b, s, -1) + pos_idx = pos_idx.reshape(b, -1) + + # flatten spatial and time into seq + x = x.reshape(b, s*t, ch) + mod = mod.reshape(b, s*t, mod_ch).contiguous() + + x = self.norm(x) + + for block in self.blocks: + x = block(x=x, mod=mod, t_emb=t_emb, pos_idx=pos_idx) + + # undo flatten + x = x.reshape(b, s, t, ch) + return x + +# %% ../../../src/models/transformers/cirdit_multimodal.ipynb 16 +class TimeEmbedding(PositionalEncoding): + """A time embedding layer.""" + + def __init__(self, d_model: int, + dropout: float = 0.0, + max_len: int = 5000, + freq_factor: float = 10_000.0) -> None: + super().__init__(d_model=d_model, dropout=dropout, max_len=max_len, freq_factor=freq_factor) + + self.ff = FeedForwardBlock(in_dim=d_model, hidden_dim=2*d_model) + + def forward(self, t: torch.Tensor) -> torch.Tensor: + x = self.pe[t] + x = self.ff(x) + return self.dropout(x) + +# %% ../../../src/models/transformers/cirdit_multimodal.ipynb 18 +@dataclass +class CirDiTConfig: + clr_dim: int + ch_packing: int + ch_core: int + c_emb_size: int + t_emb_size: int + depth_packing: int + depth_core: int + num_heads_packing: int + num_heads_core: int + dropout: float + p_rope: float + base_rope: float + +# %% ../../../src/models/transformers/cirdit_multimodal.ipynb 19 +class CirDiT(ConfigModel): + """ + The proposed Circuit Diffusion Transformer (CirDiT). 
+ """ + + def __init__(self, + clr_dim: int, + ch_packing: int, + ch_core: int, + c_emb_size: int, + t_emb_size: int, + depth_packing: int, + depth_core: int, + num_heads_packing: int, + num_heads_core: int, + dropout: float = 0.0, + p_rope: float = 1.0, + base_rope: float = 10_000) -> None: + super().__init__() + + self.ch_packing = ch_packing + self.ch_core = ch_core + + self.params_config = CirDiTConfig(clr_dim=clr_dim, + ch_packing=ch_packing, + ch_core=ch_core, + c_emb_size=c_emb_size, + t_emb_size=t_emb_size, + depth_packing=depth_packing, + depth_core=depth_core, + num_heads_packing=num_heads_packing, + num_heads_core=num_heads_core, + dropout=dropout, + p_rope=p_rope, + base_rope=base_rope) + + self.packing = PackingTransformer(ch=ch_packing, + t_emb_size=t_emb_size, + depth=depth_packing, + num_heads=num_heads_packing, + dropout=dropout, + p_rope=p_rope, + base_rope=base_rope) + + self.unpacking = UnpackingTransformer(ch=ch_packing, + mod_ch=ch_core, + t_emb_size=t_emb_size, + depth=depth_packing, + num_heads=num_heads_packing, + dropout=dropout, + p_rope=p_rope, + base_rope=base_rope) + + self.core = CoreTransformer(ch=ch_core, + c_emb_size=c_emb_size, + t_emb_size=t_emb_size, + depth=depth_core, + num_heads=num_heads_core, + dropout=dropout, + p_rope=p_rope, + base_rope=base_rope) + + self.proj_in = nn.Linear(clr_dim, ch_packing) + self.proj_out = nn.Linear(ch_packing, clr_dim) + self.core_proj = nn.Linear(ch_packing, ch_core) + + self.t_emb = TimeEmbedding(d_model=t_emb_size, max_len=5000) #here max number of timetseps + self.qubit_pos_enc = LearnedPositionalEmbedding(dim=ch_packing, max_seq_len=64) #here max number of qubits + + self._init_weights() + + def _init_weights(self) -> None: + nn.init.orthogonal_(self.core_proj.weight) + nn.init.zeros_(self.core_proj.bias) + nn.init.zeros_(self.proj_out.bias) + + def main_pass(self, x: torch.Tensor, t_emb: torch.Tensor, c_emb: torch.Tensor) -> torch.Tensor: + b, s, t, _ = x.shape + + x = self.proj_in(x) + x 
= self.qubit_pos_enc(x) + + # Pack spatial into tokens + x_main, x = self.packing(x=x, t_emb=t_emb, return_penultimate=True) + + # Downsample, reduce spatial, ... [b, t, ch_core] + x_main = x_main.mean(dim=1) + x_main = self.core_proj(x_main) + + # Core transformer + x_main = self.core(x=x_main, c_emb=c_emb, t_emb=t_emb) - x_main # subtraction such that if core=ident at init we cancel the signal + x_main = x_main.unsqueeze(1).expand(b, s, t, self.ch_core) + + # Unpack tokens into spatial + x = self.unpacking(x=x, mod=x_main, t_emb=t_emb) + x = self.proj_out(x) + + return x + + def forward(self, x: torch.Tensor, t: torch.Tensor, c_emb: torch.Tensor, micro_cond: Optional[torch.Tensor] = None) -> torch.Tensor: + """ + Assumes a `channel_last` embedding of circuits. + + Shapes: + x ... [b, s, t, ch] + t ... [b] + c_emb ... [b, seq, c_emb_size] + micro_cond ... [b] + """ + + t_emb = self.t_emb(t) #.detach() + t_emb = t_emb.unsqueeze(1) # to [b, 1, ch] + + x = self.main_pass(x, t_emb, c_emb) + return x + +# %% ../../../src/models/transformers/cirdit_multimodal.ipynb 21 +@dataclass +class UnitaryCLIPPartialNoiseCompilationCirDiTConfig(CirDiTConfig): + unitary_encoder_config: dict + +# %% ../../../src/models/transformers/cirdit_multimodal.ipynb 22 +class UnitaryCLIPPartialNoiseCompilationCirDiT(CirDiT): + """Extends `CirDiT` to the multimodal unitary compilation model.""" + + def __init__(self, + clr_dim: int, + ch_packing: int, + ch_core: int, + c_emb_size: int, + t_emb_size: int, + depth_packing: int, + depth_core: int, + num_heads_packing: int, + num_heads_core: int, + dropout: float = 0.0, + p_rope: float = 1.0, + base_rope: float = 10_000, + unitary_encoder_config: Optional[dict] = None, + unitary_encoder: Optional[nn.Module] = None) -> None: + + super().__init__(clr_dim=clr_dim, + ch_packing=ch_packing, + ch_core=ch_core, + c_emb_size=c_emb_size, + t_emb_size=t_emb_size, + depth_packing=depth_packing, + depth_core=depth_core, + num_heads_packing=num_heads_packing, + 
num_heads_core=num_heads_core, + dropout=dropout, + p_rope=p_rope, + base_rope=base_rope) + + if exists(unitary_encoder_config): #load a trained encoder + self.unitary_encoder = ConfigModel.from_config(unitary_encoder_config, device=None, silent=True) + + elif exists(unitary_encoder): + self.unitary_encoder = unitary_encoder + unitary_encoder_config = self.unitary_encoder.get_config() + + unitary_encoder_config = {"target": unitary_encoder_config["target"], + "params": unitary_encoder_config["params"]} + + else: + raise RuntimeError("Provide either `unitary_encoder_config` to load a pretrained encoder or a `unitary_encoder` model directly!`") + + self.params_config = UnitaryCLIPPartialNoiseCompilationCirDiTConfig( + clr_dim=clr_dim, + ch_packing=ch_packing, + ch_core=ch_core, + c_emb_size=c_emb_size, + t_emb_size=t_emb_size, + depth_packing=depth_packing, + depth_core=depth_core, + num_heads_packing=num_heads_packing, + num_heads_core=num_heads_core, + dropout=dropout, + p_rope=p_rope, + base_rope=base_rope, + unitary_encoder_config=unitary_encoder_config + ) + + #-------- + + self.empty_cond = nn.Parameter(torch.randn((1, 1, c_emb_size))) + + self.t_emb = TimeEmbedding(d_model=t_emb_size, max_len=5000) #here max number of timetseps + self.t_emb2 = TimeEmbedding(d_model=t_emb_size, max_len=5000) #here max number of timetseps + + def forward(self, + x: torch.Tensor, + t_h: torch.Tensor, + t_w: torch.Tensor, + c_emb: torch.Tensor, + U: torch.Tensor, + rnd: Optional[torch.Tensor] = None) -> torch.Tensor: + """ + Assumes a channel_last embedding of circuits. + + Shapes: + x ... [b, s, t, ch] + t_h ... [b] + t_w ... [b] + c_emb ... [b, seq, c_emb_size] + U ... [b, 2, N, N] + rnd ... 
[b] + """ + + t_emb = self.t_emb(t_h) + self.t_emb2(t_w) + t_emb = t_emb.unsqueeze(1) # to [b, 1, ch] + + #------ + + u_emb = self.unitary_encoder(y_emb=c_emb, U=U, penultimate=True).detach() # [batch, seq1+seq2, ch] + + if not_exists(rnd): + # one means we dont drop, so U is not all zero + rnd = 1-torch.isclose(U, torch.zeros_like(U)).all(dim=(1, 2, 3)).type(torch.int64) + rnd = rnd.view(-1, 1, 1) + + # Note: we ignore text drop and unitary drop, we replace all with a learned uncond token here + u_emb = u_emb * rnd + (1-rnd) * self.empty_cond.expand(u_emb.shape) + + #------ + + x = self.main_pass(x, t_emb, u_emb) + return x diff --git a/genQC/models/transformers.py b/genQC/models/transformers/transformers.py similarity index 93% rename from genQC/models/transformers.py rename to genQC/models/transformers/transformers.py index a27ed32..633e634 100644 --- a/genQC/models/transformers.py +++ b/genQC/models/transformers/transformers.py @@ -1,13 +1,13 @@ -# AUTOGENERATED! DO NOT EDIT! File to edit: ../../src/models/transformers.ipynb. +# AUTOGENERATED! DO NOT EDIT! File to edit: ../../../src/models/transformers/transformers.ipynb. # %% auto 0 __all__ = ['BasisSelfAttnBlock', 'BasisCrossAttnBlock', 'SpatialTransformerSelfAttn', 'SpatialTransformer'] -# %% ../../src/models/transformers.ipynb 2 -from ..imports import * +# %% ../../../src/models/transformers/transformers.ipynb 2 +from ...imports import * import genQC.models.layers as layers -# %% ../../src/models/transformers.ipynb 4 +# %% ../../../src/models/transformers/transformers.ipynb 4 class BasisSelfAttnBlock(nn.Module): """A self attention block, i.e. a `transformer` encoder.""" def __init__(self, ch, num_heads, dropout=0): @@ -32,7 +32,7 @@ def forward(self, x, attn_mask=None, key_padding_mask=None, need_weights=False): return feed_out -# %% ../../src/models/transformers.ipynb 5 +# %% ../../../src/models/transformers/transformers.ipynb 5 class BasisCrossAttnBlock(nn.Module): """A cross attention block, i.e. 
a `transformer` decoder.""" def __init__(self, ch, cond_emb_size, num_heads, dropout=0.0): @@ -63,7 +63,7 @@ def forward(self, x, c_emb, attn_mask=None, key_padding_mask=None, need_weights= return feed_out -# %% ../../src/models/transformers.ipynb 7 +# %% ../../../src/models/transformers/transformers.ipynb 7 class SpatialTransformerSelfAttn(nn.Module): """A spatial residual `transformer`, only uses self-attention.""" def __init__(self, ch, num_heads, depth, dropout=0.0): @@ -98,7 +98,7 @@ def forward(self, x, attn_mask=None, key_padding_mask=None): return x + x_in -# %% ../../src/models/transformers.ipynb 8 +# %% ../../../src/models/transformers/transformers.ipynb 8 class SpatialTransformer(nn.Module): """A spatial residual `transformer`, uses self- and cross-attention on conditional input.""" diff --git a/genQC/models/unet_qc.py b/genQC/models/unet_qc.py index 228f8ce..e81c20f 100644 --- a/genQC/models/unet_qc.py +++ b/genQC/models/unet_qc.py @@ -6,9 +6,9 @@ # %% ../../src/models/unet_qc.ipynb 3 from ..imports import * -from .config_model import Config_Model +from .config_model import ConfigModel import genQC.models.layers as layers -import genQC.models.transformers as transformers +import genQC.models.transformers.transformers as transformers from .unitary_encoder import Unitary_encoder, Unitary_encoder_config # %% ../../src/models/unet_qc.ipynb 5 @@ -19,7 +19,7 @@ def __init__(self, ch_in, ch_out, t_emb_size, cond_emb_size, num_heads=8, num_re self.resBlocks = nn.ModuleList() for i in range(num_res_blocks): - self.resBlocks.append(layers.ResBlock2D_Conditional(ch_in, ch_out, t_emb_size, kernel_size=(1, 3))) + self.resBlocks.append(layers.ResBlock2DConditional(ch_in, ch_out, t_emb_size, kernel_size=(1, 3))) ch_in = ch_out self.transformer_depth = transformer_depth @@ -111,14 +111,16 @@ class QC_Cond_UNet_config: transformer_depths: list[int] # %% ../../src/models/unet_qc.ipynb 10 -class QC_Cond_UNet(Config_Model): +class QC_Cond_UNet(ConfigModel): """Conditional 
U-Net model for quantum circuits. Implemets `embedd_clrs` and `invert_clr` functions to embed and decode color-tensors.""" + + channel_last = False def __init__(self, model_features=[32,32,64], clr_dim=8, num_clrs=8, t_emb_size=128, cond_emb_size=512, num_heads=[8,8,2], num_res_blocks=[2, 2, 4], transformer_depths=[1,2,1]): super().__init__() - + self.clr_dim = clr_dim self.num_clrs = num_clrs @@ -160,7 +162,7 @@ def _init_weights(self): #-------------------------------------------- - def embedd_clrs(self, x): + def embed(self, x): sign = torch.sign(x + 0.1) #trick: add 0.1 so that the sign of 0 is +1, else the 0 token would be all 0s. clr = self.emb_clr(torch.abs(x)) x = clr * sign[:, :, :, None] @@ -168,7 +170,7 @@ def embedd_clrs(self, x): return x @torch.no_grad() - def invert_clr(self, x): + def invert(self, x): #collaps clr to gate ... use cos sim clrs = self.emb_clr.weight.detach() # is [clr_num, clr_dim] @@ -201,7 +203,7 @@ def invert_clr(self, x): #-------------------------------------------- - def forward(self, x, t, c_emb, attn_mask=None, key_padding_mask=None): + def forward(self, x, t, c_emb, attn_mask=None, key_padding_mask=None, **kwargs): if attn_mask is None: attn_mask = [None] * len(self.enc_chs) if key_padding_mask is None: key_padding_mask = [None] * len(self.enc_chs) @@ -235,8 +237,8 @@ def __init__(self, model_features=[32,32,64], clr_dim=8, num_clrs=8, t_emb_size= self.unitary_encoder = Unitary_encoder(**unitary_encoder_config) self.params_config = QC_Compilation_UNet_config(model_features, self.clr_dim, self.num_clrs, self.t_emb_size, self.cond_emb_size, num_heads, num_res_blocks, transformer_depths, self.unitary_encoder.params_config) - def forward(self, x, t, c_emb, U, attn_mask=None, key_padding_mask=None): + def forward(self, x, t, c_emb, U, attn_mask=None, key_padding_mask=None, **kwargs): u_emb = self.unitary_encoder(U) # [batch, seq2, ch] c_emb = torch.cat([c_emb, u_emb], dim=1) # [batch, seq1+seq2, ch] - out = super().forward(x, t, 
c_emb, attn_mask, key_padding_mask) + out = super().forward(x, t, c_emb, attn_mask, key_padding_mask, **kwargs) return out diff --git a/genQC/models/unitary_encoder.py b/genQC/models/unitary_encoder.py index 0e60d62..12600a9 100644 --- a/genQC/models/unitary_encoder.py +++ b/genQC/models/unitary_encoder.py @@ -5,9 +5,9 @@ # %% ../../src/models/unitary_encoder.ipynb 2 from ..imports import * -from .config_model import Config_Model +from .config_model import ConfigModel import genQC.models.layers as layers -import genQC.models.transformers as transformers +import genQC.models.transformers.transformers as transformers # %% ../../src/models/unitary_encoder.ipynb 4 @dataclass @@ -19,7 +19,7 @@ class Unitary_encoder_config: dropout: float # %% ../../src/models/unitary_encoder.ipynb 5 -class Unitary_encoder(Config_Model): +class Unitary_encoder(ConfigModel): """Encoder for unitary conditions.""" def __init__(self, cond_emb_size, model_features=None, num_heads=8, transformer_depths=[4, 4], dropout=0.1): super().__init__() diff --git a/genQC/pipeline/callbacks.py b/genQC/pipeline/callbacks.py new file mode 100644 index 0000000..78776f5 --- /dev/null +++ b/genQC/pipeline/callbacks.py @@ -0,0 +1,23 @@ +# AUTOGENERATED! DO NOT EDIT! File to edit: ../../src/pipeline/callbacks.ipynb. 
+ +# %% auto 0 +__all__ = ['CancelFitException', 'CancelBatchException', 'CancelEpochException', 'Callback', 'run_cbs'] + +# %% ../../src/pipeline/callbacks.ipynb 2 +from ..imports import * +from operator import attrgetter, itemgetter + +# %% ../../src/pipeline/callbacks.ipynb 4 +class CancelFitException(Exception): pass +class CancelBatchException(Exception): pass +class CancelEpochException(Exception): pass + +# %% ../../src/pipeline/callbacks.ipynb 5 +class Callback(): order=0 + +# %% ../../src/pipeline/callbacks.ipynb 6 +def run_cbs(cbs, method_nm, pipeline=None): + if not exists(cbs): return + for cb in sorted(cbs, key=attrgetter('order')): + method = getattr(cb, method_nm, None) + if method: method(pipeline) diff --git a/genQC/pipeline/compilation_diffusion_pipeline.py b/genQC/pipeline/compilation_diffusion_pipeline.py new file mode 100644 index 0000000..7b396e5 --- /dev/null +++ b/genQC/pipeline/compilation_diffusion_pipeline.py @@ -0,0 +1,104 @@ +"""Special extension to `DiffusionPipeline`.""" + +# AUTOGENERATED! DO NOT EDIT! File to edit: ../../src/pipeline/compilation_diffusion_pipeline.ipynb. + +# %% auto 0 +__all__ = ['DiffusionPipeline_Compilation'] + +# %% ../../src/pipeline/compilation_diffusion_pipeline.ipynb 2 +from ..imports import * +from .diffusion_pipeline import DiffusionPipeline + +# %% ../../src/pipeline/compilation_diffusion_pipeline.ipynb 4 +class DiffusionPipeline_Compilation(DiffusionPipeline): + """A special `DiffusionPipeline` that accounts for unitary conditions, i.e. compilation.""" + + #------------------------------------ + + @torch.no_grad() + def __call__(self, latents, c, U, g, negative_c=None, negative_u=None, no_bar=False): + + latents = latents.to(self.device) + c = c.to(self.device) + U = U.to(self.device) + + return self.denoising(latents, c=c, U=U, negative_c=negative_c, negative_u=negative_u, enable_guidance=True, g=g, no_bar=no_bar) + + #------------------------------------ + + def empty_unitary_fn(self, U): + # U ... 
[b , 2, n, n] + + u = torch.zeros_like(U) + return u + + def get_guidance_U(self, U: torch.Tensor, enable_guidance: bool = True, negative_u: Optional[torch.Tensor] = None): + if not exists(U): return U + U = U.to(self.device) + if enable_guidance: + if exists(negative_u): u = negative_u.to(self.device) + else: u = self.empty_unitary_fn(U).to(self.device) + U = torch.cat([u, U]) + return U + + @torch.no_grad() + def denoising(self, latents, c, U, negative_c=None, negative_u=None, enable_guidance=True, g=1.0, t_start_index=0, no_bar=False, return_predicted_x0=False): + U = self.get_guidance_U(U, enable_guidance, negative_u) + return super().denoising(latents, c, negative_c, enable_guidance, g, t_start_index=t_start_index, + no_bar=no_bar, return_predicted_x0=return_predicted_x0, U=U) + + def denoising_step(self, latents: torch.Tensor, ts: Union[int, torch.IntTensor], c_emb: torch.Tensor=None, enable_guidance=False, g=7.5, U: torch.Tensor=None): + if enable_guidance: + x = torch.cat([latents] * 2) #uses batch layer combine here + + if ts.numel() > 1: chunk_ts = torch.cat([ts] * 2) + else: chunk_ts = ts + + eps_u, eps_c = self.model(x, chunk_ts, c_emb, U=U).chunk(2) + + eps = self.CFG(eps_u, eps_c, g) + + else: + eps = self.model(latents, ts, c_emb, U=U) + + x = self.scheduler.step(eps, ts, latents) + return x.prev_sample, x.pred_original_sample + + #------------------------------------ + + def train_step(self, data, train, **kwargs): + latents, y, U = data + b, s, t = latents.shape + + #start async memcpy + latents = latents.to(self.device, non_blocking=self.non_blocking) + latents = self.embedder.embed(latents) + + #do the cond embedding with CLIP + y = y.to(self.device, non_blocking=self.non_blocking) + U = U.to(self.device, non_blocking=self.non_blocking) + + if self.enable_guidance_train and train: + rnd_y, rnd_U = torch.empty((2*b,), device=self.device).bernoulli_(p=1.0-self.guidance_train_p).type(torch.int64).chunk(2, dim=0) + + y = self.cfg_drop(y, 
self.empty_token_fn(y) , rnd_y) + U = self.cfg_drop(U, self.empty_unitary_fn(U), rnd_U) + + + y_emb = self.text_encoder(y, pool=False) + + #sample timesteps + timesteps = torch.randint(low=0, high=self.scheduler.num_train_timesteps, size=(b,), device=self.device, dtype=torch.int64) + + #forward noising + noise = torch.randn(latents.shape, device=self.device) + noisy_latents = self.scheduler.add_noise(latents, noise, timesteps, train=train) + + #predict eps + eps = self.model(noisy_latents, timesteps, y_emb, U=U) + + #comp mse + loss = self.loss_fn(eps, noise) + + #log the loss + return loss diff --git a/genQC/pipeline/diffusion_pipeline.py b/genQC/pipeline/diffusion_pipeline.py index d2fb1c7..68266c4 100644 --- a/genQC/pipeline/diffusion_pipeline.py +++ b/genQC/pipeline/diffusion_pipeline.py @@ -7,10 +7,8 @@ from ..imports import * from ..scheduler.scheduler import Scheduler from .pipeline import Pipeline -from ..config_loader import * -from ..models.config_model import Config_Model - -from huggingface_hub import snapshot_download +from ..utils.config_loader import * +from ..models.config_model import ConfigModel # %% ../../src/pipeline/diffusion_pipeline.ipynb 3 class DiffusionPipeline(Pipeline): @@ -21,6 +19,7 @@ def __init__(self, scheduler: Scheduler, model: nn.Module, text_encoder: nn.Module, + embedder: nn.Module, # clr embeddings or a VAE for latent diffusion device: torch.device, enable_guidance_train = True, guidance_train_p = 0.1, @@ -28,20 +27,24 @@ def __init__(self, ): super().__init__(model, device) self.scheduler = scheduler - self.scheduler.to_device(device) + self.scheduler.to(device) self.text_encoder = text_encoder - self.text_encoder.eval() + # self.text_encoder.eval() + self.trainables.append(self.text_encoder) + + self.embedder = embedder + self.trainables.append(self.embedder) self.enable_guidance_train = enable_guidance_train self.guidance_train_p = guidance_train_p self.cached_text_enc = cached_text_enc self.empty_token = 
self.text_encoder.empty_token - + if cached_text_enc: def cached_empty_token_fn(c): - if c.dim() == 1: return self.text_encoder.cached_empty_token_index # yields then a list of ints + if c.dim() == 1: return self.text_encoder.cached_empty_token_index.expand(c.shape) # yields then a list of ints elif c.dim() == 2: return self.empty_token.expand(c.shape) # tokenized input else: raise NotImplementedError("") @@ -51,15 +54,16 @@ def cached_empty_token_fn(c): self.empty_token_fn = lambda c: self.empty_token.expand(c.shape) # for own clip #------------------------------------ - + add_config = {} def params_config(self, save_path: str): params_config = {} params_config["scheduler"] = self.scheduler.get_config() - params_config["model"] = self.model.get_config(save_path=save_path+"model.pt") - params_config["text_encoder"] = self.text_encoder.get_config(save_path=save_path+"text_encoder.pt") + params_config["model"] = self.model.get_config(save_path=save_path+"model") + params_config["text_encoder"] = self.text_encoder.get_config(save_path=save_path+"text_encoder") + params_config["embedder"] = self.embedder.get_config(save_path=save_path+"embedder") params_config["device"] = str(self.device) params_config["enable_guidance_train"] = self.enable_guidance_train @@ -75,57 +79,72 @@ def store_pipeline(self, config_path: str, save_path: str): save_dict_yaml(config, config_path+"config.yaml") #only store weights of these submodels - self.model.store_model(config_path=None, save_path=save_path+"model.pt") - self.text_encoder.store_model(config_path=None, save_path=save_path+"text_encoder.pt") + self.model.store_model(config_path=None, save_path=save_path+"model") + self.text_encoder.store_model(config_path=None, save_path=save_path+"text_encoder") + self.embedder.store_model(config_path=None, save_path=save_path+"embedder") @staticmethod - def from_config_file(config_path, device: torch.device): + def from_config_file(config_path, device: torch.device, save_path: Optional[str] = 
None): config = load_config(config_path+"config.yaml") config = config_to_dict(config) + def _get_save_path(config_save_path, appendix): + _save_path = default(save_path, config_path) + appendix + if "save_path" in config_save_path: + if exists(config_save_path["save_path"]): + _save_path = config_save_path["save_path"] + else: + config_save_path.pop("save_path") + return _save_path + if exists(device): config["params"]["device"] = device - config["params"]["scheduler"]["params"]["device"] = device - - config["params"]["scheduler"] = instantiate_from_config(config["params"]["scheduler"]) - - model_path = config_path+"model.pt" if config["params"]["model"]["save_path"] is None else config["params"]["model"]["save_path"] - config["params"]["model"] = Config_Model.from_config(config["params"]["model"], device, model_path) - config["params"]["text_encoder"] = Config_Model.from_config(config["params"]["text_encoder"], device, config["params"]["text_encoder"]["save_path"]) + config["params"]["scheduler"] = Scheduler.from_config(config["params"]["scheduler"], device, _get_save_path(config["params"]["scheduler"], "")) + + config["params"]["model"] = ConfigModel.from_config(config["params"]["model"], device, _get_save_path(config["params"]["model"], "model")) + config["params"]["text_encoder"] = ConfigModel.from_config(config["params"]["text_encoder"], device, _get_save_path(config["params"]["text_encoder"], "text_encoder")) + + if "embedder" in config["params"]: + config["params"]["embedder"] = ConfigModel.from_config(config["params"]["embedder"], device, _get_save_path(config["params"]["embedder"], "embedder")) + else: + config["params"]["embedder"] = config["params"]["model"] #for legacy loading model + add_config = config["params"].pop("add_config", None) pipeline = instantiate_from_config(config) if exists(pipeline.add_config): - pipeline.gate_pool = [gate for gate in add_config["dataset"]["params"]["gate_pool"]] pipeline.add_config = add_config - - return pipeline - + 
+ params = add_config["dataset"]["params"] + + if "gate_pool" in params: + # pipeline.gate_pool = [get_obj_from_str(gate) for gate in params["gate_pool"]] + pipeline.gate_pool = [gate for gate in params["gate_pool"]] - @classmethod - def from_pretrained(cls, repo_id: str, device: torch.device, **kwargs): - """Load a model pipeline directly from Huggingface.""" - model_path = snapshot_download(repo_id=repo_id, repo_type="model", allow_patterns=["*.pt", "*.yaml", "*.safetensors"], **kwargs) - pipeline = cls.from_config_file(model_path+"/", device) return pipeline - + #------------------------------------ # Inference functions - - @torch.no_grad() - def __call__(self, latents=None, c=None, seed=None, timesteps=None, no_bar=False, enable_guidance=True, g=7.5): + + # @torch.no_grad() + @torch.inference_mode() + def __call__(self, latents=None, c=None, negative_c=None, seed=None, timesteps=None, no_bar=False, enable_guidance=True, g=7.5, micro_cond=None): if exists(seed): torch.manual_seed(seed) if exists(timesteps): self.scheduler.set_timesteps(self.timesteps) + + self.text_encoder.eval() + self.model.eval() latents = latents.to(self.device) - x0 = self.denoising(latents, c=c, no_bar=no_bar, enable_guidance=enable_guidance, g=g) + x0 = self.denoising(latents, c=c, negative_c=negative_c, no_bar=no_bar, enable_guidance=enable_guidance, g=g, micro_cond=micro_cond) return x0 - @torch.no_grad() - def latent_filling(self, org_latents: torch.Tensor, mask: torch.Tensor, c=None, enable_guidance=True, g=7.5, + # @torch.no_grad() + @torch.inference_mode() + def latent_filling(self, org_latents: torch.Tensor, mask: torch.Tensor, c=None, negative_c=None, enable_guidance=True, g=7.5, t_start_index=0, no_bar=False, return_predicted_x0=False, **kwargs): """mask: area with ones is going to be filled""" if mask.dim() == 4: assert list(org_latents.shape) == list(mask.shape) # diff mask per sample and channel @@ -135,9 +154,9 @@ def latent_filling(self, org_latents: torch.Tensor, mask: 
torch.Tensor, c=None, self.model.eval() self.text_encoder.eval() - self.scheduler.to_device(self.device) + self.scheduler.to(self.device) - c_emb = self.prepare_c_emb(c, enable_guidance, **kwargs) + c_emb = self.prepare_c_emb(c, enable_guidance, negative_c, **kwargs) org_latents = org_latents.to(self.device, non_blocking=self.non_blocking) @@ -186,74 +205,82 @@ def latent_filling(self, org_latents: torch.Tensor, mask: torch.Tensor, c=None, #------------------------------------ # Helper functions - def get_guidance_condition(self, c, enable_guidance): + def get_guidance_condition(self, c: torch.Tensor, enable_guidance: bool = True, negative_c: Optional[torch.Tensor] = None): if not exists(c): return c c = c.to(self.device) if enable_guidance: - u = self.empty_token_fn(c).to(self.device) + if exists(negative_c): u = negative_c.to(self.device) + else: u = self.empty_token_fn(c).to(self.device) c = torch.cat([u, c]) - c = c.type(torch.int64) + c = c.type(torch.int64) #to token dtype return c - def prepare_c_emb(self, c, enable_guidance, **kwargs): - c = self.get_guidance_condition(c, enable_guidance) + def prepare_c_emb(self, c: torch.Tensor, enable_guidance: bool = True, negative_c: Optional[torch.Tensor] = None, **kwargs): + c = self.get_guidance_condition(c, enable_guidance, negative_c) c_emb = self.text_encoder(c, pool=False) return c_emb - @torch.no_grad() - def denoising(self, latents: torch.Tensor, c=None, enable_guidance=True, g=7.5, t_start_index=0, no_bar=False, return_predicted_x0=False, **kwargs): + # @torch.no_grad() + @torch.inference_mode() + def denoising(self, latents: torch.Tensor, c=None, negative_c=None, enable_guidance=True, g=7.5, t_start_index=0, no_bar=False, + return_predicted_x0=False, micro_cond=None, **kwargs): self.model.eval() self.text_encoder.eval() - self.scheduler.to_device(self.device) + self.scheduler.to(self.device) - c_emb = self.prepare_c_emb(c, enable_guidance, **kwargs) + c_emb = self.prepare_c_emb(c, enable_guidance, 
negative_c, **kwargs) latents = latents.to(self.device, non_blocking=self.non_blocking) if return_predicted_x0: predicted_x0 = list() for i, t in enumerate(tqdm(self.scheduler.timesteps[t_start_index:], disable=no_bar)): - timesteps = (torch.ones((1)) * t).type(torch.int64).to(self.device, non_blocking=self.non_blocking) - - latents, x0 = self.denoising_step(latents, timesteps, c_emb=c_emb, enable_guidance=enable_guidance, g=g, **kwargs) - - if return_predicted_x0: predicted_x0.append(x0.cpu()) - - if return_predicted_x0: return latents.cpu(), predicted_x0 - return latents.cpu() - - # @torch.no_grad() - def denoising_step(self, latents: torch.Tensor, ts: Union[int, torch.IntTensor], c_emb: torch.Tensor=None, enable_guidance=True, g=7.5, **kwargs): + timesteps = torch.tensor([t], device=self.device) + + latents, x0 = self.denoising_step(latents, timesteps, c_emb=c_emb, enable_guidance=enable_guidance, g=g, micro_cond=micro_cond, **kwargs) + + if return_predicted_x0: + predicted_x0.append(x0) + + if return_predicted_x0: + predicted_x0 = torch.stack(predicted_x0, dim=0) # [timesteps, *latents.shape] + return latents, predicted_x0 + + return latents + + def denoising_step(self, latents: torch.Tensor, ts: Union[int, torch.IntTensor], c_emb: torch.Tensor=None, enable_guidance=True, g=7.5, micro_cond=None, **kwargs): if enable_guidance: x = torch.cat([latents] * 2) #uses batch layer combine here if ts.numel() > 1: chunk_ts = torch.cat([ts] * 2) else: chunk_ts = ts - - eps_u, eps_c = self.model(x, chunk_ts, c_emb).chunk(2) + + eps_u, eps_c = self.model(x, chunk_ts, c_emb, micro_cond=micro_cond).chunk(2) eps = self.CFG(eps_u, eps_c, g) - + + x = self.scheduler.step(eps, ts, latents, uncond_model_output=eps_u) + else: eps = self.model(latents, ts, c_emb) - - x = self.scheduler.step(eps, ts, latents) + x = self.scheduler.step(eps, ts, latents) + return x.prev_sample, x.pred_original_sample - guidance_sample_mode = "rescaled" # one of: normal, fastai, rescaled + 
guidance_sample_mode = "normal" # one of: normal, fastai, rescaled def CFG(self, eps_u, eps_c, g): """Apply Classifier-free-guidance sampling""" dim = list(range(1, eps_u.dim())) # reduce all but batches - if self.guidance_sample_mode == "normal": # from https://arxiv.org/pdf/2207.12598.pdf, w=g+1 + if self.guidance_sample_mode == "normal": # from https://arxiv.org/pdf/2207.12598.pdf, w=g+1 s=g+1 eps = eps_u + g * (eps_c-eps_u) elif self.guidance_sample_mode == "fastai": # from fastAi less 11 eps = eps_u + g*(eps_c-eps_u) * torch.linalg.vector_norm(eps_u, dim=dim, keepdim=True) / torch.linalg.vector_norm(eps_c-eps_u, dim=dim, keepdim=True) eps = eps * torch.linalg.vector_norm(eps_u, dim=dim, keepdim=True) / torch.linalg.vector_norm(eps, dim=dim, keepdim=True) - elif self.guidance_sample_mode == "rescaled": # from https://arxiv.org/pdf/2305.08891.pd + elif self.guidance_sample_mode == "rescaled": # from https://arxiv.org/pdf/2305.08891.pdf phi = 0.7 eps_cfg = eps_u + g * (eps_c-eps_u) @@ -267,36 +294,55 @@ def CFG(self, eps_u, eps_c, g): #------------------------------------ # Training functions + def sample_timesteps_low_variance(self, b: int, scheduler: Scheduler, shuffle: bool = False, continuous_time: bool = False) -> torch.Tensor: + """Low variance sampling, see https://arxiv.org/abs/2406.07524 and originaly https://arxiv.org/abs/2107.00630.""" + + start = torch.linspace(0, 1.0-1.0/b, b, device=self.device, dtype=torch.float32) + ts = start + torch.rand_like(start) / b + + if continuous_time: + ts = ts.clamp(0., 1.) 
+ else: + ts = (ts * scheduler.num_train_timesteps).floor().clamp(0, scheduler.num_train_timesteps-1).to(torch.int64) + + if shuffle: + return ts[torch.randperm(b)] + return ts + def train_on_epoch(self, data_loader: DataLoader, train=True): - self.scheduler.to_device(self.device, non_blocking=self.non_blocking) + self.scheduler.to(self.device, non_blocking=self.non_blocking) super().train_on_epoch(data_loader, train) - #@torch.autocast(device_type=device.type) - def train_step(self, data, **kwargs): + def cfg_drop(self, y, y_drop, rnd): + """A value of `rnd` one means we take `y`. A value of `rnd` zero means we drop `y` and use `empty_token_fn`.""" + rnd = self.scheduler.unsqueeze_vector_to_shape(rnd, y.shape) # e.g. [b, 1, 1] + y = y * rnd + (1-rnd) * y_drop + return y + + def train_step(self, data, train, **kwargs): latents, y = data b, s, t = latents.shape #start async memcpy latents = latents.to(self.device, non_blocking=self.non_blocking) - latents = self.model.embedd_clrs(latents) #this is only new tensor - + latents = self.embedder.embed(latents) + #do the cond embedding with CLIP y = y.to(self.device, non_blocking=self.non_blocking) + U = U.to(self.device, non_blocking=self.non_blocking) - if self.enable_guidance_train: - rnd = torch.rand((b,), device=self.device) - rnd = (rnd > self.guidance_train_p).type(torch.int64) # todo: change to bernoulli dist fn - rnd = self.scheduler.unsqueeze_vector_to_shape(rnd, y.shape) # e.g. 
[b, 1, 1] - y = y * rnd + (1-rnd) * self.empty_token_fn(y) - + if self.enable_guidance_train and train: + rnd_y = torch.empty((b,), device=self.device).bernoulli_(p=1.0-self.guidance_train_p).type(torch.int64) + y = self.cfg_drop(y, self.empty_token_fn(y), rnd_y) + y_emb = self.text_encoder(y, pool=False) - + #sample timesteps timesteps = torch.randint(low=0, high=self.scheduler.num_train_timesteps, size=(b,), device=self.device, dtype=torch.int64) #forward noising noise = torch.randn(latents.shape, device=self.device) - noisy_latents = self.scheduler.add_noise(latents, noise, timesteps) + noisy_latents = self.scheduler.add_noise(latents, noise, timesteps, train=train) #predict eps eps = self.model(noisy_latents, timesteps, y_emb) diff --git a/genQC/pipeline/diffusion_pipeline_special.py b/genQC/pipeline/diffusion_pipeline_special.py index cbec969..6ef189c 100644 --- a/genQC/pipeline/diffusion_pipeline_special.py +++ b/genQC/pipeline/diffusion_pipeline_special.py @@ -1,60 +1,12 @@ # AUTOGENERATED! DO NOT EDIT! File to edit: ../../src/pipeline/diffusion_pipeline_special.ipynb. 
# %% auto 0 -__all__ = ['DiffusionPipeline_attnPadded', 'DiffusionPipeline_Compilation'] +__all__ = ['DiffusionPipeline_Compilation'] # %% ../../src/pipeline/diffusion_pipeline_special.ipynb 2 from ..imports import * from .diffusion_pipeline import DiffusionPipeline -# %% ../../src/pipeline/diffusion_pipeline_special.ipynb 3 -class DiffusionPipeline_attnPadded(DiffusionPipeline): - """A special `DiffusionPipeline` with attention masking.""" - def train_step(self, data, **kwargs): - latents, y, key_padding_mask_list = data - b, s, t = latents.shape - - #start async memcpy - loss_mask = (key_padding_mask_list[0].to(self.device, non_blocking=self.non_blocking)>-1.0).float().unsqueeze(1) - - shaped_mask = [] - for key_padding_mask in key_padding_mask_list: - key_padding_mask = key_padding_mask.to(self.device, non_blocking=self.non_blocking) - key_padding_mask = key_padding_mask.reshape((b, -1)) #from [b, s, t] to [b, -1] aka [N, L] - shaped_mask.append(key_padding_mask) - - latents = latents.to(self.device, non_blocking=self.non_blocking) - latents = self.model.embedd_clrs(latents) #this is only new tensor - self.scheduler.to_device(self.device, non_blocking=self.non_blocking) - - #do the cond embedding with CLIP - y = y.to(self.device, non_blocking=self.non_blocking) - - if self.enable_guidance_train: - rnd = torch.rand((b,), device=self.device) - rnd = (rnd > self.guidance_train_p).type(torch.int64) # todo: change to bernoulli dist fn - rnd = self.scheduler.unsqueeze_vector_to_shape(rnd, y.shape) # e.g. 
[b, 1, 1] - y = y * rnd + (1-rnd) * self.empty_token_fn(y) - - y_emb = self.text_encoder(y, pool=False) - - #sample timesteps - timesteps = torch.randint(low=0, high=self.scheduler.num_train_timesteps, size=(b,), device=self.device, dtype=torch.int64) - - #forward noising - noise = torch.randn(latents.shape, device=self.device) - noisy_latents = self.scheduler.add_noise(latents, noise, timesteps) - - #predict eps - eps = self.model(noisy_latents, timesteps, y_emb, key_padding_mask=shaped_mask) - - #comp mse - loss = self.loss_fn(eps*loss_mask, noise*loss_mask) - # loss = self.loss_fn(eps, noise) - - #log the loss - return loss - # %% ../../src/pipeline/diffusion_pipeline_special.ipynb 4 class DiffusionPipeline_Compilation(DiffusionPipeline): """A special `DiffusionPipeline` that accounts for unitary conditions, i.e. compilation.""" @@ -82,12 +34,12 @@ def get_guidance_U(self, U, enable_guidance): return U @torch.no_grad() - def denoising(self, latents, c, U, enable_guidance, g, no_bar=False, return_predicted_x0=False): + def denoising(self, latents, c, U, enable_guidance=True, g=0, no_bar=False, return_predicted_x0=False): U = self.get_guidance_U(U, enable_guidance) # self.unitary_encoder.eval() - return super().denoising(latents, c, enable_guidance, g, no_bar=no_bar, return_predicted_x0=return_predicted_x0, U=U) + return super().denoising(latents, c, enable_guidance=enable_guidance, g=g, no_bar=no_bar, return_predicted_x0=return_predicted_x0, U=U) - def denoising_step(self, latents: torch.Tensor, ts: Union[int, torch.IntTensor], c_emb: torch.Tensor=None, enable_guidance=False, g=7.5, U: torch.Tensor=None): + def denoising_step(self, latents: torch.Tensor, ts: Union[int, torch.IntTensor], c_emb: torch.Tensor=None, enable_guidance=False, g=7.5, U: torch.Tensor=None, micro_cond=None): if enable_guidance: x = torch.cat([latents] * 2) #uses batch layer combine here diff --git a/genQC/metrics.py b/genQC/pipeline/metrics.py similarity index 52% rename from 
class Metric(abc.ABC):
    """Abstract base class for training metrics.

    Concrete metrics implement `_eval` (per-batch evaluation) and
    `result` (aggregated value). The `empty` flag tracks whether any
    state has been accumulated since the last reset.
    """

    def __init__(self, name: str, device):
        self.name = name
        self.device = torch.device(device)
        self.reset_state()

    def __repr__(self):
        return f"{self.name}={self.result()}"

    def update_state(self, inp, tar=None):
        # Mark that at least one batch has been accumulated;
        # subclasses extend this with their actual accumulation.
        self.empty = False

    def reset_state(self):
        # Clear the accumulation flag; subclasses also reset buffers here.
        self.empty = True

    @abc.abstractmethod
    def _eval(self, inp, tar):
        """Evaluate the metric on a single batch."""

    @abc.abstractmethod
    def result(self):
        """Return the aggregated metric value."""
device=self.device) - def _eval(self, inp, tar): return inp + + def _eval(self, inp, tar): + return inp + @torch.inference_mode() - def result(self): return (self.weighted_sum/self.weight).cpu() + def result(self): + return (self.weighted_sum/self.weight).cpu() -# %% ../src/metrics.ipynb 6 +# %% ../../src/pipeline/metrics.ipynb 5 class Accuracy(Mean): """Accuracy metric.""" @torch.inference_mode() diff --git a/genQC/pipeline/multimodal_diffusion_pipeline.py b/genQC/pipeline/multimodal_diffusion_pipeline.py new file mode 100644 index 0000000..ea7d27e --- /dev/null +++ b/genQC/pipeline/multimodal_diffusion_pipeline.py @@ -0,0 +1,410 @@ +"""Multimodal extension to `DiffusionPipeline`.""" + +# AUTOGENERATED! DO NOT EDIT! File to edit: ../../src/pipeline/multimodal_diffusion_pipeline.ipynb. + +# %% auto 0 +__all__ = ['MultimodalDiffusionPipeline_ParametrizedCompilation'] + +# %% ../../src/pipeline/multimodal_diffusion_pipeline.ipynb 2 +from ..imports import * +from .compilation_diffusion_pipeline import DiffusionPipeline_Compilation + +from ..scheduler.scheduler import Scheduler +from ..utils.config_loader import * +from ..models.config_model import ConfigModel + +# %% ../../src/pipeline/multimodal_diffusion_pipeline.ipynb 4 +class MultimodalDiffusionPipeline_ParametrizedCompilation(DiffusionPipeline_Compilation): + """A special `DiffusionPipeline_Compilation` that accounts for multimodal parametrized gates.""" + + def __init__(self, *args, scheduler_w, **kwargs): + super().__init__(*args, **kwargs) + self.scheduler_w = scheduler_w + self.scheduler_w.to(self.device) + + def params_config(self, *args, **kwargs): + params_config = super().params_config(*args, **kwargs) + params_config["scheduler_w"] = self.scheduler_w.get_config() + return params_config + + @staticmethod + def from_config_file(config_path, device: torch.device, save_path: Optional[str] = None): + config = load_config(config_path+"config.yaml") + config = config_to_dict(config) + + def 
_get_save_path(config_save_path, appendix): + + _save_path = default(save_path, config_path) + appendix + if "save_path" in config_save_path: + if exists(config_save_path["save_path"]): + _save_path = config_save_path["save_path"] + else: + config_save_path.pop("save_path") + return _save_path + + if exists(device): + config["params"]["device"] = device + config["params"]["scheduler"]["params"]["device"] = device + + config["params"]["scheduler"] = Scheduler.from_config(config["params"]["scheduler"] , device, _get_save_path(config["params"]["scheduler"] , "")) + config["params"]["scheduler_w"] = Scheduler.from_config(config["params"]["scheduler_w"], device, _get_save_path(config["params"]["scheduler_w"], "")) + + config["params"]["model"] = ConfigModel.from_config(config["params"]["model"], device, _get_save_path(config["params"]["model"], "model")) + config["params"]["text_encoder"] = ConfigModel.from_config(config["params"]["text_encoder"], device, _get_save_path(config["params"]["text_encoder"], "text_encoder")) + config["params"]["embedder"] = ConfigModel.from_config(config["params"]["embedder"], device, _get_save_path(config["params"]["embedder"], "embedder")) + + add_config = config["params"].pop("add_config", None) + + pipeline = instantiate_from_config(config) + + if exists(pipeline.add_config): + pipeline.add_config = add_config + + params = add_config["dataset"]["params"] + + if "gate_pool" in params: + # pipeline.gate_pool = [get_obj_from_str(gate) for gate in params["gate_pool"]] + pipeline.gate_pool = [gate for gate in params["gate_pool"]] + + return pipeline + + #------------------------------------ + + # @torch.no_grad() + @torch.inference_mode() + def denoising(self, latents, c, U, negative_c=None, negative_u=None, enable_guidance=True, g=1.0, t_start_index=0, no_bar=False, return_predicted_x0=False): + return super().denoising(latents=latents, c=c, U=U, negative_c=negative_c, negative_u=negative_u, enable_guidance=enable_guidance, g=g, 
t_start_index=t_start_index, + no_bar=no_bar, return_predicted_x0=return_predicted_x0) + + #------------------------------------ + + sample_type = "joint" + + def denoising_step(self, + latents: torch.Tensor, + ts: Union[int, torch.IntTensor], + c_emb: torch.Tensor = None, + enable_guidance = False, + g: float = 7.5, + U: torch.Tensor = None, + **kwargs) -> Tuple[torch.Tensor, torch.Tensor]: + + match self.sample_type: + case "joint": + x_tm1, x0 = self.denoising_step_joint(latents, ts, c_emb, enable_guidance, g, U) + + case "w": + # Here the single mode denoising functions + x_tm1, x0 = self.denoising_step_single_mode_w(latents, ts, c_emb, enable_guidance, g, U) + + case _: + raise NotImplementedError("") + + return x_tm1, x0 + + #------------------------------------ + # Cleaned steps + + def _get_guidance_scales(self, g: float, ts_h: torch.Tensor, ts_w: torch.Tensor): + g_h , g_w = g, g + lambda_h, lambda_w = g, g + + if hasattr(self, "g_h"): + if isinstance(self.g_h, Callable): + assert ts_h.numel() == 1 + g_h = self.g_h(ts_h) + else: + g_h = self.g_h + + if hasattr(self, "g_w"): + if isinstance(self.g_w, Callable): + assert ts_w.numel() == 1 + g_w = self.g_w(ts_w) + else: + g_w = self.g_w + + if hasattr(self, "lambda_h"): + if isinstance(self.lambda_h, Callable): + assert ts_h.numel() == 1 + lambda_h = self.lambda_h(ts_h) + else: + lambda_h = self.lambda_h + + if hasattr(self, "lambda_w"): + if isinstance(self.lambda_w, Callable): + assert ts_w.numel() == 1 + lambda_w = self.lambda_w(ts_w) + else: + lambda_w = self.lambda_w + + return g_h, g_w, lambda_h, lambda_w + + def denoising_step_joint(self, + latents: torch.Tensor, + ts: Union[int, torch.IntTensor], + c_emb: torch.Tensor = None, + enable_guidance = False, + g: float = 7.5, + U: torch.Tensor = None, + ) -> Tuple[torch.Tensor, torch.Tensor]: + + # Prepare variables + g_h, g_w, lambda_h, lambda_w = self._get_guidance_scales(g, ts_h=ts, ts_w=ts) + + # assert enable_guidance + c_emb_u, c_emb_c = 
c_emb.chunk(2) + U_u , U_c = U.chunk(2) + + ts_expanded = ts.expand(latents.shape[0]) + T_h_expanded = torch.ones_like(ts_expanded) * (self.scheduler.num_train_timesteps-1) + T_w_expanded = torch.ones_like(ts_expanded) * (self.scheduler_w.num_train_timesteps-1) + + # Get latents of modes + noisy_latents = torch.randn_like(latents) + latents_h, latents_w = latents[..., :self.embedder.clr_dim], latents[..., self.embedder.clr_dim:] + noisy_latents_h, noisy_latents_w = noisy_latents[..., :self.embedder.clr_dim], noisy_latents[..., self.embedder.clr_dim:] + + # Get all combinations + latents_chunked_h = torch.cat([ + latents_h, # sh_h + latents_h, # sh_hw + latents_h, # sh_hwc + + noisy_latents_h, # sw_w + latents_h, # sw_hw + latents_h, # sw_hwc + ]) + + latents_chunked_w = torch.cat([ + noisy_latents_w, # sh_h + latents_w, # sh_hw + latents_w, # sh_hwc + + latents_w, # sw_w + latents_w, # sw_hw + latents_w, # sw_hwc + ]) + + t_h_chunked = torch.cat([ + ts_expanded, # sh_h + ts_expanded, # sh_hw + ts_expanded, # sh_hwc + + T_h_expanded, # sw_w + ts_expanded, # sw_hw + ts_expanded, # sw_hwc + ]) + + t_w_chunked = torch.cat([ + T_w_expanded, # sh_h + ts_expanded, # sh_hw + ts_expanded, # sh_hwc + + ts_expanded, # sw_w + ts_expanded, # sw_hw + ts_expanded, # sw_hwc + ]) + + c_emb_chunked = torch.cat([ + c_emb_u, # sh_h + c_emb_u, # sh_hw + c_emb_c, # sh_hwc + + c_emb_u, # sw_w + c_emb_u, # sw_hw + c_emb_c, # sw_hwc + ]) + + U_chunked = torch.cat([ + U_u, # sh_h + U_u, # sh_hw + U_c, # sh_hwc + + U_u, # sw_w + U_u, # sw_hw + U_c, # sw_hwc + ]) + + # Make all predictions we need + latents_chunked = torch.cat([latents_chunked_h, latents_chunked_w], dim=-1) + + pred = self.model(latents_chunked, t_h=t_h_chunked, t_w=t_w_chunked, c_emb=c_emb_chunked, U=U_chunked) + pred_h, pred_w = pred[..., :self.embedder.clr_dim], pred[..., self.embedder.clr_dim:] + + sh_h, sh_hw, sh_hwc, _, _, _ = pred_h.chunk(6) + _, _, _, sw_w, sw_hw, sw_hwc = pred_w.chunk(6) + + # Combine into CFG + 
sh_bar = sh_h + g_h * (sh_hw - sh_h) + lambda_h * (sh_hwc - sh_hw) + sw_bar = sw_w + g_w * (sw_hw - sw_w) + lambda_w * (sw_hwc - sw_hw) + + # Do denoise step with CFG++ + x_h = self.scheduler.step(sh_bar, ts, latents_h, uncond_model_output=sh_h) + x_w = self.scheduler_w.step(sw_bar, ts, latents_w, uncond_model_output=sw_w) + + return torch.cat([x_h.prev_sample, x_w.prev_sample], dim=-1), torch.cat([x_h.pred_original_sample, x_w.pred_original_sample], dim=-1) + + #------------------------------------ + + def denoising_step_single_mode_w(self, + latents: torch.Tensor, + ts: Union[int, torch.IntTensor], + c_emb: torch.Tensor = None, + enable_guidance = False, + g: float = 7.5, + U: torch.Tensor = None + ) -> Tuple[torch.Tensor, torch.Tensor]: + + assert enable_guidance # TODO: remove this + + chunk_latents = torch.cat([latents] * 2, dim=0) + + if ts.numel() > 1: chunk_ts = torch.cat([ts] * 2, dim=0) + else: chunk_ts = ts + + T = torch.ones_like(chunk_ts) * (self.scheduler.num_train_timesteps-1) + TZero = torch.zeros_like(chunk_ts) + + #------------------------ + # 1. Get: s(h|w), s(w|h) and s(h|w,c), s(w|h,c) + # Note here we set t_h=0 + + def f1(chunk_latents, chunk_ts): + x = chunk_latents.clone() + + s_hw, s_hwc = self.model(x, t_h=TZero, t_w=chunk_ts, c_emb=c_emb, U=U).chunk(2) + + sw_hw, sw_hwc = s_hw[..., self.embedder.clr_dim:], s_hwc[..., self.embedder.clr_dim:] + + return sw_hw, sw_hwc + + #------------------------ + # 2. 
Get: s(w), s(w|c) + + def f2(chunk_latents, chunk_ts): + x = chunk_latents.clone() + x[..., :self.embedder.clr_dim] = torch.randn_like(x[..., :self.embedder.clr_dim]) #remove h + + s_w, s_wc = self.model(x, t_h=T, t_w=chunk_ts, c_emb=c_emb, U=U).chunk(2) + + sw_w, sw_wc = s_w[..., self.embedder.clr_dim:], s_wc[..., self.embedder.clr_dim:] + + return sw_w, sw_wc + + #------------------------------------------------ + + sw_hw, sw_hwc = f1(chunk_latents, chunk_ts) + sw_w, sw_wc = f2(chunk_latents, chunk_ts) + + g_w = g + + if hasattr(self, "g_w"): + if isinstance(self.g_w, Callable): + assert ts.numel() == 1 + g_w = self.g_w(chunk_ts) + else: + g_w = self.g_w + + gamma_w = g_w #was no/2 + lambda_w = g_w + + if hasattr(self, "lambda_w"): + if isinstance(self.lambda_w, Callable): + assert ts.numel() == 1 + lambda_w = self.lambda_w(chunk_ts) + else: + lambda_w = self.lambda_w + + sw_bar = sw_w + gamma_w * (sw_hw - sw_w) + lambda_w * (sw_hwc - sw_hw) + + latents_h, latents_w = latents[..., :self.embedder.clr_dim], latents[..., self.embedder.clr_dim:] + + #CFG++ + x_h = latents_h + x_w = self.scheduler_w.step(sw_bar, ts, latents_w, uncond_model_output=sw_w) + + return torch.cat([x_h, x_w.prev_sample], dim=-1), torch.cat([x_h, x_w.pred_original_sample], dim=-1) + + #------------------------------------ + + def train_step(self, data, train, **kwargs): + target_tokens, y, params, U = data + b, s, t = target_tokens.shape + + #start async memcpy + target_tokens = target_tokens.to(self.device, non_blocking=self.non_blocking) + params = params.to(self.device, non_blocking=self.non_blocking) + + latents = self.embedder(h=target_tokens, w=params) + + #do the cond embedding with CLIP + U = U.to(torch.float32) + + y = y.to(self.device, non_blocking=self.non_blocking) + U = U.to(self.device, non_blocking=self.non_blocking) + + if self.enable_guidance_train and train: #CFG training + rnd = torch.empty((b,), device=self.device).bernoulli_(p=1.0-self.guidance_train_p).type(torch.int64) + 
+ y_drop = self.cfg_drop(y, self.empty_token_fn(y) , rnd) + U_drop = self.cfg_drop(U, self.empty_unitary_fn(U), rnd) + + else: + rnd = torch.ones((b,), dtype=torch.int64, device=self.device) + y_drop, U_drop = y, U + + y_emb = self.text_encoder(y_drop, pool=False) + + #-------------------- + + shuffle = torch.tensor(0, dtype=bool).bernoulli_(p=0.95) + + timesteps_h = self.sample_timesteps_low_variance(b, self.scheduler) + timesteps_w = self.sample_timesteps_low_variance(b, self.scheduler_w, shuffle=shuffle) + + + noise = torch.randn_like(latents) + noisy_latents_h = self.scheduler.add_noise( latents[..., :self.embedder.clr_dim], noise[..., :self.embedder.clr_dim], timesteps_h, train=train) + noisy_latents_w = self.scheduler_w.add_noise(latents[..., self.embedder.clr_dim:], noise[..., self.embedder.clr_dim:], timesteps_w, train=train) + + noisy_latents = torch.cat([noisy_latents_h, noisy_latents_w], dim=-1) + + #-------------------- + model_output = self.model(x=noisy_latents, t_h=timesteps_h, t_w=timesteps_w, c_emb=y_emb, U=U_drop, rnd=rnd) + + #-------------------- + + if self.scheduler.prediction_type == "epsilon": + pred_target = noise + raise NotImplementedError() + + elif self.scheduler.prediction_type == "v-type": + alphas_cumprod_h = self.scheduler.unsqueeze_vector_to_shape(self.scheduler.alphas_cumprod[timesteps_h], latents.shape) + alphas_cumprod_w = self.scheduler_w.unsqueeze_vector_to_shape(self.scheduler_w.alphas_cumprod[timesteps_w], latents.shape) + + pred_target_h = alphas_cumprod_h.sqrt() * noise[..., :self.embedder.clr_dim] - (1-alphas_cumprod_h).sqrt() * latents[..., :self.embedder.clr_dim] + pred_target_w = alphas_cumprod_w.sqrt() * noise[..., self.embedder.clr_dim:] - (1-alphas_cumprod_w).sqrt() * latents[..., self.embedder.clr_dim:] + + else: + raise NotImplementedError(f"{self.scheduler.prediction_type} does is not implemented for {self.__class__}") + + #-------------------- + + t_h = timesteps_h / (self.scheduler.num_train_timesteps-1) + # 
t_h = torch.sin(t_h*(torch.pi/2))**2 + # t_h = torch.sin(t_h*(torch.pi/2)) + # -> else linear + + t_h = self.scheduler.unsqueeze_vector_to_shape(t_h, latents.shape) + SNR_h = (1.0-t_h) / (t_h+1e-8) + 1e-8 # flip prob to snr + mse_loss_weight_h = (1.0 - alphas_cumprod_h) * F.sigmoid(SNR_h.log()) + + SNR_w = alphas_cumprod_w / (1.0-alphas_cumprod_w+1e-8) + 1e-8 + + #comp mse + mse_flat = lambda out, target: (out-target).square().mean(dim=list(range(1, len(out.shape)))) + loss_h = mse_flat(model_output[..., :self.embedder.clr_dim], pred_target_h.detach()) * mse_loss_weight_h.squeeze().detach() + loss_w = mse_flat(model_output[..., self.embedder.clr_dim:], pred_target_w.detach()) * mse_loss_weight_w.squeeze().detach() + + loss = loss_h.mean() + loss_w.mean() + return loss diff --git a/genQC/pipeline/pipeline.py b/genQC/pipeline/pipeline.py index cab0d87..54c4e0b 100644 --- a/genQC/pipeline/pipeline.py +++ b/genQC/pipeline/pipeline.py @@ -1,20 +1,39 @@ +"""Basic PyTorch pipeline for general training.""" + # AUTOGENERATED! DO NOT EDIT! File to edit: ../../src/pipeline/pipeline.ipynb. 
# %% auto 0 -__all__ = ['Pipeline_IO', 'Pipeline'] +__all__ = ['Loss', 'CheckpointCB', 'PipelineIO', 'Pipeline'] -# %% ../../src/pipeline/pipeline.ipynb 3 +# %% ../../src/pipeline/pipeline.ipynb 2 from ..imports import * -from ..util import virtual, number_of_paramters, DataLoaders -from ..metrics import * -from ..config_loader import * +from ..utils.misc_utils import DataLoaders +from .metrics import * +from ..utils.config_loader import * +from .callbacks import run_cbs, Callback + +from huggingface_hub import snapshot_download + +# %% ../../src/pipeline/pipeline.ipynb 4 +Loss = Callable[[torch.Tensor, torch.Tensor], torch.Tensor] # %% ../../src/pipeline/pipeline.ipynb 5 -nn.Loss = Callable[[torch.Tensor, torch.Tensor], torch.Tensor] +class CheckpointCB(Callback): + def __init__(self, ck_interval=None, ck_path=None): + super().__init__() + self.ck_interval = ck_interval + self.ck_path = ck_path + + def after_epoch(self, pipeline): + if exists(self.ck_interval) and exists(self.ck_path): + if (pipeline.epoch%self.ck_interval) == 0 and pipeline.epoch>0: + store_dir = f"{self.ck_path}ck_{pipeline.epoch}/" + pipeline.store_pipeline(config_path=store_dir, save_path=store_dir) -# %% ../../src/pipeline/pipeline.ipynb 8 -class Pipeline_IO: +# %% ../../src/pipeline/pipeline.ipynb 7 +class PipelineIO(abc.ABC): """A class providing basic IO functionality.""" + def get_config(self, save_path: str, without_metadata=False): params_config = self.params_config(save_path) @@ -40,9 +59,10 @@ def get_config(self, save_path: str, without_metadata=False): self.config = config return config - @virtual + @abc.abstractmethod def params_config(self, save_path: str): return None - + + @abc.abstractmethod def store_pipeline(self, config_path: str, save_path: str): if exists(config_path): os.makedirs(config_path, exist_ok=True) if exists(save_path): @@ -50,42 +70,67 @@ def store_pipeline(self, config_path: str, save_path: str): if hasattr(self, "fit_losses"): np.savetxt(save_path + 
"fit_losses.txt", self.fit_losses) if hasattr(self, "fit_valid_losses"): np.savetxt(save_path + "fit_valid_losses.txt", self.fit_valid_losses) - @virtual @staticmethod + @abc.abstractmethod def from_config_file(config_path, device: torch.device, save_path: str=None): return None -# %% ../../src/pipeline/pipeline.ipynb 9 -class Pipeline(Pipeline_IO): - """A `Pipeline_IO` class providing basic pytorch model training functionality.""" + @classmethod + def from_pretrained(cls, repo_id: str, device: torch.device, use_auth_token: bool = False, **kwargs): + """Load a model pipeline directly from Huggingface.""" + model_path = snapshot_download(repo_id=repo_id, repo_type="model", allow_patterns=["*.pt", "*.yaml", "*.safetensors"], use_auth_token=use_auth_token, **kwargs) + pipeline = cls.from_config_file(model_path+"/", device) + return pipeline + +# %% ../../src/pipeline/pipeline.ipynb 8 +class Pipeline(PipelineIO): + """A `PipelineIO` class providing basic pytorch model training functionality.""" def __init__(self, model: nn.Module, device: torch.device): - self.model = model + self.model = model.to(device) self.device = device - + + self.trainables = [] + self.trainables.append(self.model) + #------------------------------------ - @virtual + @abc.abstractmethod def __call__(self, inp): pass - @virtual + @abc.abstractmethod def train_step(self, data, train=True, **kwargs): pass #------------------------------------ - - def compile(self, optim_fn: type(torch.optim.Optimizer), loss_fn: nn.Loss, metrics: Union[Metric, list[Metric]]=None, lr=None, **kwargs): + + def _get_parameters(self): + parameters = itertools.chain(*[trainable.parameters() for trainable in self.trainables]) + return parameters + + def compile(self, optim_fn: type(torch.optim.Optimizer), loss_fn: Loss, metrics: Union[Metric, list[Metric]]=None, lr=None, cbs=None, compile_model=False, **kwargs): self.loss_fn = loss_fn() self.optim_fn = optim_fn - self.optimizer = optim_fn(self.model.parameters(), lr=lr, 
**kwargs) if lr else None + + if lr: self._reset_opt(lr, **kwargs) + else: self.optimizer = None metrics = {m.name:m for m in metrics} if metrics else {} #metrics |= {f"{m.name}_valid":m for m in metrics.values()} metrics["loss"] = Mean("loss", self.device) metrics["loss_valid"] = Mean("loss_valid", self.device) - self.metrics = metrics - - def _reset_opt(self, lr, **kwargs): self.optimizer = self.optim_fn(self.model.parameters(), lr, **kwargs) + self.metrics = metrics + self.cbs = cbs + + if platform.system() == "Linux" and compile_model: + print("[INFO]: Linux, compile model with torch") + torch._dynamo.reset() + #self.model = torch.compile(self.model) #, fullgraph=True, mode ="max-autotune") + + for model in self.trainables: + model.compile() + + def _reset_opt(self, lr, **kwargs): self.optimizer = self.optim_fn(self._get_parameters(), lr=lr, **kwargs) def _set_opt_param(self, lr, **kwargs): '''at least lr: Does not reset existing optimizer, only changes learn rate.''' @@ -109,21 +154,26 @@ def train_on_batch(self, data, train=True): #backprob loss.backward() + # torch.nn.utils.clip_grad_norm_(self.model.parameters(), 1) + #update weights self.optimizer.step() return loss.detach() def train_on_epoch(self, data_loader: DataLoader, train=True): - self.model.train(train) + # self.model.train(train) + for model in self.trainables: + model.train(train) mode = "" if train else "_valid" - + with self.progress_bar(total=len(data_loader), epoch=self.epoch, unit=" batch") as batch_prgb: - for batch, data in enumerate(data_loader): + for self.batch, data in enumerate(data_loader): + loss = self.train_on_batch(data, train=train) self.metrics["loss"+mode].update_state(loss) - + if train: self.fit_losses.append(loss.item()) if self.lr_sched: self.lr_sched.step() @@ -131,27 +181,33 @@ def train_on_epoch(self, data_loader: DataLoader, train=True): #pack up metrics self.out_metric_dict = {m.name:m.result().tolist() for m in self.metrics.values() if not m.empty} 
self.end_batch_metrics(batch_prgb, **self.out_metric_dict) - + # run_cbs(self.cbs, "after_batch", self) # e.g. if max-number of batches is needed + #run on train and one on valid def fit(self, num_epochs: int, data_loaders: DataLoaders, lr: float=None, lr_sched=None, log_summary=True): if not hasattr(self, "loss_fn"): raise RuntimeError("'compile' has to be called first") - self._set_opt_param(lr=lr) - if lr_sched: self.lr_sched = lr_sched(self.optimizer) - else: self.lr_sched = None - + self._set_opt_param(lr=lr) + if not hasattr(self, "lr_sched"): + if lr_sched: self.lr_sched = lr_sched(self.optimizer) + else: self.lr_sched = None + + self.epoch = 0 self.num_epochs = num_epochs - self.epochs = range(num_epochs) + self.fit_losses = [] self.fit_valid_losses = [] self.batch_size = data_loaders.train.batch_size self.dataset_size_train = len(data_loaders.train) if data_loaders.valid: self.dataset_size_valid = len(data_loaders.valid) + + run_cbs(self.cbs, "before_fit", self) + + self.epochs = range(self.epoch, num_epochs) #after callback so we could resume training on a specific self.epoch - - with self.progress_bar(total=num_epochs, desc="Fit", unit=" epoch") as epoch_prgb: + with self.progress_bar(total=len(self.epochs), desc="Fit", unit=" epoch") as epoch_prgb: for self.epoch in self.epochs: - + #reset all metrics for m in self.metrics.values(): m.reset_state() @@ -165,23 +221,36 @@ def fit(self, num_epochs: int, data_loaders: DataLoaders, lr: float=None, lr_sch self.out_metric_dict["loss_valid"] ]) self.end_epoch_metrics(epoch_prgb, **self.out_metric_dict) - + run_cbs(self.cbs, "after_epoch", self) + self.fit_summary(log_summary=log_summary) + run_cbs(self.cbs, "after_fit", self) #------------------------------------ - def summary(self): print("Number of model parameters:", number_of_paramters(self.model)) + def summary(self): + + cnt_params = lambda parameters: sum([p.numel() for p in parameters]) + + s = "Pipeline stats of explicit trainables" + + for trainable 
in self.trainables: + name = str(trainable.__class__) + all_params = trainable.parameters() + trainable_params = filter(lambda p: p.requires_grad, trainable.parameters()) + s += "\n" + f" - {name}: Total={cnt_params(all_params):0.2e} Trainable={cnt_params(trainable_params):0.2e}" + return s def fit_summary(self, figsize=(12,2), log_summary=True, return_fig=False): - fig = plt.figure(figsize=figsize, constrained_layout=True) - plt.xlabel("Batches") + fig = plt.figure(figsize=figsize, constrained_layout=True, dpi=150) + plt.xlabel("Number of batches / update steps") plt.ylabel("Loss") if log_summary: plt.yscale('log') plt.plot(self.fit_losses, label="train") if len(self.fit_valid_losses) > 0: data = np.array(self.fit_valid_losses) - plt.plot(data[:,0],data[:,1], label="valid", color="tab:orange") - plt.plot(data[:,0],data[:,1], ".", color="tab:orange") + plt.plot(data[:, 0],data[:, 1], label="valid", color="tab:orange") + plt.plot(data[:, 0],data[:, 1], ".", color="tab:orange") plt.legend() if return_fig: return fig plt.show() @@ -223,4 +292,3 @@ def end_epoch_metrics(self, prgb:tqdm, epoch: int=None, **metrics): self.end_pro def end_batch_metrics(self, prgb:tqdm, batch: int=None, **metrics): self.end_progress_bar_iteration(prgb, False, "Batch", batch, **metrics) #------------------------------------ - diff --git a/genQC/pipeline/unitary_clip_pipeline.py b/genQC/pipeline/unitary_clip_pipeline.py new file mode 100644 index 0000000..031f95c --- /dev/null +++ b/genQC/pipeline/unitary_clip_pipeline.py @@ -0,0 +1,128 @@ +"""Pipeline for contrastive pre-training of an unitary encoder""" + +# AUTOGENERATED! DO NOT EDIT! File to edit: ../../src/pipeline/unitary_clip_pipeline.ipynb. 
class UnitaryCLIPPipeline(Pipeline):
    """Pipeline for contrastive (CLIP-style) pre-training of a unitary encoder.

    The wrapped `model` scores circuit/unitary-text pairs as a [b, b] matrix;
    training uses the symmetric cross-entropy with the diagonal as positives.
    """

    # NOTE(review): mutable class-level default shared across instances until
    # `from_config_file` assigns an instance attribute — confirm intended.
    add_config = {}

    def __init__(self,
                 model: nn.Module,
                 device: torch.device) -> None:
        super().__init__(model, device)
        self.loss = nn.CrossEntropyLoss()
        self.device = device

    #------------------------------------

    def params_config(self, save_path: str) -> dict:
        """Collect sub-model configs and metadata for serialization."""
        params_config = {}

        params_config["model"] = self.model.get_config(save_path=save_path+"model")
        params_config["unitary_text_encoder"] = self.model.unitary_text_encoder.get_config(save_path=None)
        params_config["circuit_encoder"] = self.model.circuit_encoder.get_config(save_path=None)

        params_config["device"] = str(self.device)
        params_config["add_config"] = self.add_config

        return params_config

    def store_pipeline(self, config_path: str, save_path: str):
        """Store the pipeline config (yaml) and the model weights to disk."""
        super().store_pipeline(config_path, save_path)
        config = self.get_config(save_path)
        save_dict_yaml(config, config_path+"config.yaml")

        self.model.store_model(config_path=None, save_path=save_path+"model")

    @staticmethod
    def from_config_file(config_path, device: torch.device, save_path: str=None):
        """Instantiate the pipeline (encoders, then model) from `config_path/config.yaml`."""
        config = load_config(config_path+"config.yaml")
        config = config_to_dict(config)

        def _get_save_path(config_save_path, appendix):
            # Default to the config directory unless an explicit save_path is stored.
            _save_path = default(save_path, config_path) + appendix
            if "save_path" in config_save_path:
                _save_path = config_save_path["save_path"]
            return _save_path

        if exists(device):
            config["params"]["device"] = device
            config["params"]["model"]["params"]["text_encoder_config"]["device"] = device

        # Sub-encoders are built first, then injected into the model config.
        unitary_text_encoder = ConfigModel.from_config(config["params"].pop("unitary_text_encoder", None), device, None)
        circuit_encoder      = ConfigModel.from_config(config["params"].pop("circuit_encoder", None), device, None)

        config["params"]["model"]["params"]["unitary_text_encoder"] = unitary_text_encoder
        config["params"]["model"]["params"]["circuit_encoder"] = circuit_encoder
        config["params"]["model"] = ConfigModel.from_config(config["params"]["model"], device, _get_save_path(config["params"]["model"], "model"))

        add_config = config["params"].pop("add_config", None)

        pipeline = instantiate_from_config(config)

        if exists(pipeline.add_config):
            pipeline.add_config = add_config

        return pipeline

    #------------------------------------
    # Inference functions

    @torch.no_grad()
    def __call__(self, tokens: torch.Tensor, params: torch.Tensor, y: torch.Tensor, U: torch.Tensor, softmax=True) -> torch.Tensor:
        """Score circuit-label pairs for classification; returns a [b, b] matrix,
        optionally softmax-normalized over the last dimension."""
        self.model.eval()

        scores = self.model(tokens=tokens, params=params, y=y, U=U)  # [b, b]

        if softmax:
            # FIX: was `F.softmax(scores, dim-1)` — `dim` is undefined, so this
            # raised NameError; the intended call is softmax over the last dim.
            scores = F.softmax(scores, dim=-1)

        return scores

    #------------------------------------
    # Training functions

    def get_loss(self, tokens: torch.Tensor, params: torch.Tensor, y: torch.Tensor, U: torch.Tensor) -> torch.Tensor:
        """Symmetric contrastive cross-entropy over the [b, b] score matrix.

        Rows score unitary-text (I) against circuits (T); the diagonal holds
        the matching pairs, so the target is `arange(b)` for both directions.
        """
        scores = self.model(tokens=tokens, params=params, y=y, U=U)  # [b, b]

        target = torch.arange(scores.shape[0], device=scores.device)

        loss_unitary_text = self.loss(scores,   target)
        loss_circuit      = self.loss(scores.T, target)

        # Symmetric loss over both matching directions.
        loss = (loss_unitary_text + loss_circuit) / 2.0

        return loss

    def train_step(self, data, **kwargs):
        """One training step: move the batch to the device, return the CLIP loss."""
        tokens, y, params, U = data

        tokens = tokens.to(self.device)
        params = params.to(self.device)
        y = y.to(self.device)
        U = U.to(torch.float32).to(self.device)

        loss = self.get_loss(tokens=tokens, params=params, y=y, U=U)

        return loss
class BaseBackend(abc.ABC):
    """Abstract interface that every circuit backend must implement."""

    # Type of the backend's native circuit object; concrete backends narrow this.
    BASIC_BACKEND_TYPE = type[Any]

    @abc.abstractmethod
    def backend_to_genqc(self, *args, **kwargs):
        """Convert a backend-native circuit into the genQC representation."""
        raise NotImplementedError()

    @abc.abstractmethod
    def genqc_to_backend(self, *args, **kwargs):
        """Convert a genQC circuit representation into the backend-native form."""
        raise NotImplementedError()

    @abc.abstractmethod
    def get_unitary(self, *args, **kwargs):
        """Return the unitary matrix implemented by a circuit."""
        raise NotImplementedError()

    @abc.abstractmethod
    def draw(self, *args, **kwargs) -> None:
        """Render a visual representation of a circuit."""
        raise NotImplementedError()
# %% auto 0 -__all__ = ['backend', 'CircuitInstruction', 'CircuitInstructions', 'CircuitsCudaqBackend', 'tensor_to_instructions', - 'genqc_to_cudaq'] +__all__ = ['ParametrizedCudaqKernel', 'CircuitsCudaqBackend'] + +# %% ../../../src/platform/backends/circuits_cudaq.ipynb 2 +from ...imports import * +from .base_backend import BaseBackend +from ..circuits_instructions import CircuitInstructions -# %% ../../src/inference/export_cudaq.ipynb 2 -from ..imports import * -from typing import Sequence, List, Optional import cudaq -# %% ../../src/inference/export_cudaq.ipynb 4 +# %% ../../../src/platform/backends/circuits_cudaq.ipynb 4 @dataclass -class CircuitInstruction(): - name: str - control_nodes: Sequence[int] - target_nodes: Sequence[int] - params: Sequence[float] - -# %% ../../src/inference/export_cudaq.ipynb 5 -class CircuitInstructions(): - def __init__(self, tensor_shape: torch.Size) -> None: - assert len(tensor_shape) == 2 # ... [qubits, time] - self.tensor_shape = tensor_shape - self._instructions = [] - self.instruction_names_set = set() - - def add_instruction(self, - name: str, - control_nodes: Sequence[int], - target_nodes: Sequence[int], - params: Sequence[float]) -> None: - self.instruction_names_set.add(name) - self._instructions.append(CircuitInstruction(name, control_nodes, target_nodes, params)) +class ParametrizedCudaqKernel: + kernel: cudaq.kernel + params: list[float] # currently only support 1 angle per gate - @property - def data(self) -> List[CircuitInstruction]: return self._instructions +# %% ../../../src/platform/backends/circuits_cudaq.ipynb 6 +class CircuitsCudaqBackend(BaseBackend): - @property - def length(self) -> int: return len(self._instructions) - - @property - def num_qubits(self) -> int: return self.tensor_shape[0] + BASIC_BACKEND_TYPE = type[cudaq.kernel] - @property - def max_gates(self) -> int: return self.tensor_shape[1] + def __init__(self, target: str = "qpp-cpu") -> None: + cudaq.reset_target() + cudaq.set_target(target) # 
'nvidia' - def __repr__(self) -> str: return str(self._instructions) + def backend_to_genqc(self): + raise NotImplementedError("Not implemeted cudaq to genQC.") - def print(self) -> None: - for instruction in self.data: - print(instruction) - -# %% ../../src/inference/export_cudaq.ipynb 7 -class CircuitsCudaqBackend(): - - BASIC_BACKEND_TYPE = type[cudaq.kernel] - # Has to match with insides of belows kernel - KERNEL_VOCABULARY = {"h":1, "cx":2, "z":3, "x":4, "y":5, "ccx":6, "swap":7} - + KERNEL_VOCABULARY = {"h":1, + "cx":2, + "z":3, + "x":4, + "y":5, + "ccx":6, + "swap":7, + "rx":8, + "ry":9, + "rz":10, + "cp":11,} + def _construct_kernel(self, gate_list: List[str], target_1_nodes_list: List[int], @@ -79,7 +63,9 @@ def place_gate_kernel(gate: int, target_1: int, target_2: int, control_1: int, - control_2: int): + control_2: int, + theta: float): + if gate == 1: h(qvector[target_1]) elif gate == 2: cx(qvector[control_1], qvector[target_1]) elif gate == 3: z(qvector[target_1]) @@ -87,12 +73,23 @@ def place_gate_kernel(gate: int, elif gate == 5: y(qvector[target_1]) elif gate == 6: x.ctrl(qvector[control_1], qvector[control_2], qvector[target_1]) elif gate == 7: swap(qvector[target_1], qvector[target_2]) + + elif gate == 8: rx(theta, qvector[target_1]) + elif gate == 9: ry(theta, qvector[target_1]) + elif gate == 10: rz(theta, qvector[target_1]) + + elif gate == 11: + # R1 applies the unitary transformation; i.e. 
it is a phase gate + # R1(λ) = | 1 0 | + # | 0 exp(iλ) | + r1.ctrl(theta, qvector[target_1], qvector[target_2]) + @cudaq.kernel - def kernel(input_state: List[complex]): + def kernel(input_state: list[complex], thetas: list[float]): qvector = cudaq.qvector(input_state) for i in range(num_gates): - place_gate_kernel(gate_list[i], qvector, target_1_nodes_list[i], target_2_nodes_list[i], control_1_nodes_list[i], control_2_nodes_list[i]) + place_gate_kernel(gate_list[i], qvector, target_1_nodes_list[i], target_2_nodes_list[i], control_1_nodes_list[i], control_2_nodes_list[i], thetas[i]) return kernel @@ -105,7 +102,7 @@ def check_error_circuit(self, if gate not in self.KERNEL_VOCABULARY: raise NotImplementedError(f"Unknown gate {gate}, not in `self.KERNEL_VOCABULARY`.") - if gate in ["h", "z", "x", "y"]: + if gate in ["h", "z", "x", "y", "rx", "ry", "rz"]: if num_target_nodes != 1 or num_control_nodes !=0: return False elif gate in ["cx"]: @@ -114,18 +111,27 @@ def check_error_circuit(self, elif gate in ["ccx"]: if num_target_nodes != 1 or num_control_nodes !=2: return False - elif gate in ["swap"]: + elif gate in ["swap", "cp"]: if num_target_nodes != 2 or num_control_nodes !=0: return False else: raise NotImplementedError(f"Unknown gate {gate}, implemetation is faulty!") return True - - - def export_cudaq(self, instructions: CircuitInstructions) -> cudaq.kernel: + + def genqc_to_backend(self, instructions: CircuitInstructions) -> cudaq.kernel: """Convert given genQC `CircuitInstructions` to a `cudaq.kernel`.""" + _params = torch.tensor([ + instruction.params if instruction.params else torch.nan + for instruction in instructions.data + ]) # ... 
[seq, nP] + + assert _params.shape[1] == 1 #only support nP=1 for now + _params = _params.squeeze() + + #-------------------- + # num_qubits = instructions.num_qubits num_gates = instructions.length @@ -144,9 +150,6 @@ def export_cudaq(self, instructions: CircuitInstructions) -> cudaq.kernel: gate = instruction.name.lower() control_nodes = instruction.control_nodes target_nodes = instruction.target_nodes - - if len(instruction.params) > 0: - raise NotImplementedError(f"Only support non parametrized gates currently.") num_target_nodes = len(target_nodes) num_control_nodes = len(control_nodes) @@ -167,11 +170,14 @@ def export_cudaq(self, instructions: CircuitInstructions) -> cudaq.kernel: control_2_nodes_list[i] = control_nodes[1] #-------------------- - kernel= self._construct_kernel(gate_list, target_1_nodes_list, target_2_nodes_list, control_1_nodes_list, control_2_nodes_list) - return kernel + _kernel = self._construct_kernel(gate_list, target_1_nodes_list, target_2_nodes_list, control_1_nodes_list, control_2_nodes_list) + + return ParametrizedCudaqKernel(kernel=_kernel, params=_params.tolist()) - def get_unitary(self, kernel: cudaq.kernel, num_qubits: int) -> np.ndarray: + def get_unitary(self, parametrizedCudaqKernel: ParametrizedCudaqKernel, num_qubits: int) -> np.ndarray: """Return the unitary matrix of a `cudaq.kernel`. 
Currently relies on simulation, could change in future releases of cudaq.""" + + kernel, thetas = parametrizedCudaqKernel.kernel, parametrizedCudaqKernel.params N = 2**num_qubits U = np.zeros((N, N), dtype=np.complex128) @@ -180,7 +186,7 @@ def get_unitary(self, kernel: cudaq.kernel, num_qubits: int) -> np.ndarray: state_j = np.zeros((N), dtype=np.complex128) state_j[j] = 1 - U[:, j] = np.array(cudaq.get_state(kernel, state_j), copy=False) + U[:, j] = np.array(cudaq.get_state(kernel, state_j, thetas), copy=False) return U @@ -189,52 +195,3 @@ def draw(self, kernel: cudaq.kernel, num_qubits: int, **kwargs) -> None: c = [0] * (2**num_qubits) c[0] = 1 print(cudaq.draw(kernel, c)) - -# %% ../../src/inference/export_cudaq.ipynb 9 -def tensor_to_instructions(tensor: torch.Tensor, - vocabulary_inverse: dict, - params_tensor: Optional[torch.Tensor] = None, - params_4pi_normalization: bool = True, - sign_labels: dict = {"control_nodes":-1, "target_nodes":+1}) -> CircuitInstructions: - """Convert a given `torch.Tensor` to `CircuitInstructions`.""" - - assert tensor.dim() == 2, f"{tensor.shape=}" - num_of_qubits, time = tensor.shape - - instructions = CircuitInstructions(tensor_shape=tensor.shape) - - for t in range(time): - enc_time_slice = tensor[:, t] # contains all bits at time t - - for gate_index, gate in vocabulary_inverse.items(): - - target_nodes = (enc_time_slice == (sign_labels["target_nodes"] * gate_index)).nonzero(as_tuple=True)[0] - control_nodes = (enc_time_slice == (sign_labels["control_nodes"] * gate_index)).nonzero(as_tuple=True)[0] - - if target_nodes.nelement() > 0: - params = [] - if exists(params_tensor): - params = params_tensor[:, t] - if params_4pi_normalization: - params = (params+1.0) * 2.0*np.pi # [-1, 1] to [0, 4pi] - params = params.tolist() - - instructions.add_instruction(gate, control_nodes.tolist(), target_nodes.tolist(), params) - - break #break on first hit, per def only one gate allowed per t - - elif control_nodes.nelement() > 0: # no 
target but control means error - raise RuntimeError("target_nodes.nelement() <= 0 but control_nodes.nelement() > 0") - - #else we are fine with tensors that have time steps with no action! - - return instructions - -# %% ../../src/inference/export_cudaq.ipynb 10 -backend = CircuitsCudaqBackend() - -def genqc_to_cudaq(tensor: torch.Tensor, vocabulary_inverse: dict) -> cudaq.kernel: - """Convert given `torch.Tensor` to a `cudaq.kernel`.""" - instructions = tensor_to_instructions(tensor, vocabulary_inverse) - kernel = backend.export_cudaq(instructions) - return kernel diff --git a/genQC/platform/backends/circuits_pennylane.py b/genQC/platform/backends/circuits_pennylane.py new file mode 100644 index 0000000..7515d5b --- /dev/null +++ b/genQC/platform/backends/circuits_pennylane.py @@ -0,0 +1,118 @@ +"""[PennyLane](https://pennylane.ai/) based quantum circuit backend.""" + +# AUTOGENERATED! DO NOT EDIT! File to edit: ../../../src/platform/backends/circuits_pennylane.ipynb. + +# %% auto 0 +__all__ = ['instruction_name_to_pennylane_name', 'ParametrizedPennylaneCircuit', 'CircuitsPennylaneBackend'] + +# %% ../../../src/platform/backends/circuits_pennylane.ipynb 2 +from ...imports import * +from .base_backend import BaseBackend +from ..circuits_instructions import CircuitInstructions +from ..tokenizer.base_tokenizer import Vocabulary +from ...utils.config_loader import get_obj_from_str + +import pennylane as qml +import pennylane.ops as pennylane_ops + +# %% ../../../src/platform/backends/circuits_pennylane.ipynb 4 +def instruction_name_to_pennylane_name(name: str) -> str: + """Maps instruction names to PennyLane names.""" + + _maps = { + "ccx": "Toffoli", + "cp": "CPhase", + "cx": "CNOT", + } + + if name in _maps: + name = _maps[name] + else: + name = name.upper() + + return name + +# %% ../../../src/platform/backends/circuits_pennylane.ipynb 5 +@dataclass +class ParametrizedPennylaneCircuit: + circuit: qml.QNode + params: torch.Tensor + +# %% 
../../../src/platform/backends/circuits_pennylane.ipynb 7 +class CircuitsPennylaneBackend(BaseBackend): + """A backend for [PennyLane](https://pennylane.ai/).""" + + def backend_to_genqc(self, qc: ParametrizedPennylaneCircuit, ignore_barriers: bool = True) -> CircuitInstructions: + """Convert a given Pennylane `ParametrizedPennylaneCircuit` to genQC `CircuitInstructions`.""" + raise NotImplementedError() + + def genqc_to_backend(self, + instructions: CircuitInstructions, + flip_qubit_order: bool = True, + place_barriers: bool = False, + ignore_errors: bool = False, + place_error_placeholders: bool = False) -> ParametrizedPennylaneCircuit: + """ + Convert given genQC `CircuitInstructions` to a `ParametrizedPennylaneCircuit`. + - flip_qubit_order ... e.g. needed when using little-endian definition. + """ + + _params = torch.tensor([ + instruction.params if instruction.params else torch.nan + for instruction in instructions.data + ]) # ... [seq, nP] + + assert _params.shape[1] == 1 #only support nP=1 for now + _params = _params.squeeze() # swap so we have batched [1, seq] + + N = instructions.num_qubits + dev = qml.device("default.qubit", wires=N) + + @qml.qnode(dev, interface='torch') + def _circuit(params): + for i, instruction in enumerate(instructions.data): + + _name = instruction_name_to_pennylane_name(instruction.name) + + op = getattr(pennylane_ops, _name) + + # The first wire provided corresponds to the control qubit. + # e.g. 
is qml.H(0) + _wires = (*instruction.control_nodes, *instruction.target_nodes) + + if flip_qubit_order: + _wires = [N-n-1 for n in _wires] + + try: + if op.num_params > 0: + op(params[i], wires=_wires) + else: + op(wires=_wires) + except Exception as err: + if ignore_errors: continue + elif place_error_placeholders: + qml.Identity(wires=_wires) + raise err + + if place_barriers: qml.Barrier(wires=list(range(N))) + + # dummy return, as we only care about the unitary + # return qml.expval(qml.PauliZ(0)) + return qml.state() + + #run once to test for errors + try: + _circuit(_params) + except Exception as err: + raise err + + return ParametrizedPennylaneCircuit(circuit=_circuit, params=_params) + + def get_unitary(self, qc: ParametrizedPennylaneCircuit) -> torch.Tensor: + """Return the unitary matrix of a `ParametrizedPennylaneCircuit`.""" + return qml.matrix(qc.circuit)(qc.params) + + def draw(self, qc: ParametrizedPennylaneCircuit, style:str = "black_white", **kwargs) -> None: + """Draw the given Pennylane `ParametrizedPennylaneCircuit`""" + fig, ax = qml.draw_mpl(qc.circuit, decimals=2, show_all_wires=True, style=style, **kwargs)(qc.params.cpu().numpy()) + return fig diff --git a/genQC/platform/backends/circuits_qiskit.py b/genQC/platform/backends/circuits_qiskit.py new file mode 100644 index 0000000..2c30599 --- /dev/null +++ b/genQC/platform/backends/circuits_qiskit.py @@ -0,0 +1,205 @@ +"""[Qiskit](https://github.com/Qiskit/qiskit) based quantum circuit backend.""" + +# AUTOGENERATED! DO NOT EDIT! File to edit: ../../../src/platform/backends/circuits_qiskit.ipynb. 
+ +# %% auto 0 +__all__ = ['get_number_of_gate_params', 'instruction_name_to_qiskit_gate', 'get_target_control_qubits', 'CircuitsQiskitBackend'] + +# %% ../../../src/platform/backends/circuits_qiskit.ipynb 2 +from ...imports import * +from .base_backend import BaseBackend +from ..circuits_instructions import CircuitInstructions +from ..tokenizer.base_tokenizer import Vocabulary +from ...utils.config_loader import get_obj_from_str + +import qiskit.circuit.library as ql +import qiskit.quantum_info as qi + +from qiskit import QuantumCircuit, transpile +from qiskit.circuit.gate import Gate + +# %% ../../../src/platform/backends/circuits_qiskit.ipynb 4 +def get_number_of_gate_params(gate_cls: type[Gate]) -> int: + # python: gives you the number of any arguments BEFORE *args, minus the ones that have a default, -1 for self parameter of classes + return gate_cls.__init__.__code__.co_argcount - len(gate_cls.__init__.__defaults__) - 1 + +# %% ../../../src/platform/backends/circuits_qiskit.ipynb 6 +def instruction_name_to_qiskit_gate(name: str) -> Gate: + match name: + case "swap": name = "Swap" + case "cp": name = "CPhase" + case _: name = name.upper() + + return get_obj_from_str(f"qiskit.circuit.library.standard_gates.{name}Gate") + +# %% ../../../src/platform/backends/circuits_qiskit.ipynb 7 +def get_target_control_qubits(qc: QuantumCircuit, gate: Gate) -> Tuple[List[int], List[int]]: + """Get the target and control qubits of a Qiskit `Gate` of a `QuantumCircuit`.""" + + acts_on_cnt = gate.operation.num_qubits + acts_on = [qc.find_bit(qubit).index for qubit in gate.qubits] # order: (*control_qubits, *target_qubits) + + assert acts_on_cnt == len(acts_on), "error in: acts_on_cnt == len(acts_on)" + + num_ctrl_qubits = gate.operation.num_ctrl_qubits if hasattr(gate.operation, "num_ctrl_qubits") else 0 + num_targ_qubits = acts_on_cnt - num_ctrl_qubits + + control_qubits, target_qubits = acts_on[:-num_targ_qubits], acts_on[-num_targ_qubits:] + return control_qubits, 
target_qubits + +# %% ../../../src/platform/backends/circuits_qiskit.ipynb 9 +class CircuitsQiskitBackend(BaseBackend): + + BASIC_BACKEND_TYPE = type[QuantumCircuit] + + def backend_to_genqc(self, qc: QuantumCircuit, ignore_barriers: bool = True) -> CircuitInstructions: + """Convert a given Qiskit `QuantumCircuit` to genQC `CircuitInstructions`.""" + + if ignore_barriers: + gates = [] + for gate in qc.data: + if gate.operation.name != "barrier": + gates.append(gate) + else: + gates = qc.data + + instructions = CircuitInstructions(tensor_shape=torch.Size([qc.num_qubits, len(gates)])) + + for gate in gates: + control_qubits, target_qubits = get_target_control_qubits(qc, gate) + + #Correction to qiskit v1.3.1 + #`cp` is symmetric but qiskit uses + #`cp(theta, control_qubit, target_qubit)` target and control + #We make it only target connections like `swap` gates + if gate.operation.name == "cp": + target_qubits.extend(control_qubits) + control_qubits = [] + + instructions.add_instruction(gate.operation.name, control_qubits, target_qubits, gate.operation.params) + + return instructions + + def genqc_to_backend(self, + instructions: CircuitInstructions, + place_barriers: bool = True, + ignore_errors: bool = False, + place_error_placeholders: bool = False) -> QuantumCircuit: + """Convert given genQC `CircuitInstructions` to a Qiskit `QuantumCircuit`.""" + + gate_classes = {name:instruction_name_to_qiskit_gate(name) for name in instructions.instruction_names_set} + qc = QuantumCircuit(instructions.num_qubits) + + for instruction in instructions.data: + gate_cls = gate_classes[instruction.name] + num_of_paramters = get_number_of_gate_params(gate_cls) + + control_qubits, target_qubits = instruction.control_nodes, instruction.target_nodes + params = instruction.params[:num_of_paramters] + + try: + qc.append(gate_cls(*params), [*control_qubits, *target_qubits], []) + except Exception as err: + if ignore_errors: continue + elif place_error_placeholders: + 
qc.append(ql.UnitaryGate(np.eye(2**instructions.num_qubits), label="Err"), range(instructions.num_qubits)) + # qc.append(ql.UnitaryGate(np.eye(2), label="Err"), [0]) + continue + raise err + + if place_barriers: qc.barrier() + + return qc + + def get_unitary(self, qc: QuantumCircuit, remove_global_phase: bool = True) -> np.ndarray: + """Return the unitary matrix of a `QuantumCircuit`.""" + U = qi.Operator(qc).to_matrix().astype(np.complex128) + if remove_global_phase: + U *= np.exp(-1j * qc.global_phase) + return U + + def schmidt_rank_vector(self, qc: Optional[QuantumCircuit] = None, densityMatrix: Optional[qi.DensityMatrix] = None) -> List[int]: + """Return the SRV of a `qi.DensityMatrix`.""" + + if not exists(densityMatrix): + densityMatrix = qi.DensityMatrix(qc) + + systems_cnt = len(densityMatrix.dims()) + total_trace = set(range(systems_cnt)) + rank_vector = [] + + for i in range(systems_cnt): + trace = list(total_trace - {i}) + red_densityMatrix = qi.partial_trace(densityMatrix, trace) + # r = np.count_nonzero(np.linalg.eigvals(red_densityMatrix) > 1e-14) # was slower during testing + r = np.linalg.matrix_rank(red_densityMatrix, hermitian=True).item() + rank_vector.append(r) + + return rank_vector + + def optimize_circuit(self, + qc: QuantumCircuit, + vocabulary: Vocabulary, + optimization_level: int = 1, + silent: bool = True) -> QuantumCircuit: + """Use `qiskit.compiler.transpile` to optimize a circuit.""" + + if optimization_level == 0: + return qc + + while optimization_level > 0: + try: + qc_opt = transpile(qc, optimization_level=optimization_level, basis_gates=vocabulary) + return qc_opt + + except Exception as er: + if not silent: print(er) + pass + + optimization_level -= 1 + + return qc + + def rnd_circuit(self, + num_of_qubits: int, + num_of_gates:int, + gate_pool: Union[Sequence[Gate], Sequence[str]], + rng: np.random.Generator) -> QuantumCircuit: + """Create a random `QuantumCircuit`.""" + + qc = QuantumCircuit(num_of_qubits) + gate_indices = 
rng.choice(len(gate_pool), num_of_gates) + + gate_pool = list(gate_pool) + if isinstance(gate_pool[0], str): + gate_pool = [instruction_name_to_qiskit_gate(gate) for gate in gate_pool] + + for gate_index in gate_indices: + gate_cls = gate_pool[gate_index] + num_of_paramters = get_number_of_gate_params(gate_cls) + params = rng.uniform(low=0, high=4.0*np.pi, size=num_of_paramters) if num_of_paramters > 0 else [] + + gate = gate_cls(*params) + act_qubits = rng.choice(num_of_qubits, gate.num_qubits, replace=False) # order: (*act_qubits)=(*control_qubits, *target_qubits) + qc.append(gate, [*act_qubits], []) + + return qc + + def randomize_params(self, qc: QuantumCircuit, rng: np.random.Generator) -> QuantumCircuit: + """Randomize all parameters of a `QuantumCircuit`. This creates a new `QuantumCircuit` and therefore deletes global phase.""" + + qc_new = QuantumCircuit(qc.num_qubits) + + for gate in qc.data: + gate_cls = instruction_name_to_qiskit_gate(gate.operation.name) + control_qubits, target_qubits = get_target_control_qubits(qc, gate) + params = rng.uniform(low=0, high=4.0*np.pi, size=len(gate.operation.params)) + + qc_new.append(gate_cls(*params), [*control_qubits, *target_qubits], []) + + return qc_new + + def draw(self, qc: QuantumCircuit, **kwargs) -> None: + """Draw the given `QuantumCircuit` using Qiskit.""" + return qc.draw("mpl", **kwargs) + # plt.show() diff --git a/genQC/platform/circuits_generation.py b/genQC/platform/circuits_generation.py new file mode 100644 index 0000000..495f936 --- /dev/null +++ b/genQC/platform/circuits_generation.py @@ -0,0 +1,306 @@ +"""Functions to create a quantum circuit dataset.""" + +# AUTOGENERATED! DO NOT EDIT! File to edit: ../../src/platform/circuits_generation.ipynb. 
+ +# %% auto 0 +__all__ = ['CircuitConditionType', 'get_rnd_encoded_circuit', 'get_rnd_encoded_circuits', 'generate_circuit_dataset'] + +# %% ../../src/platform/circuits_generation.ipynb 2 +from ..imports import * +from .backends.base_backend import BaseBackend +from .tokenizer.circuits_tokenizer import CircuitTokenizer, Vocabulary +from ..dataset.dataset_helper import get_unique_elements_indices +from ..utils.async_fn import MemoryMappedArray, Parallel, delayed + +# %% ../../src/platform/circuits_generation.ipynb 4 +class CircuitConditionType(enum.Enum): + SRV = enum.auto() + UNITARY = enum.auto() + +# %% ../../src/platform/circuits_generation.ipynb 5 +def get_rnd_encoded_circuit(backend: BaseBackend, + tokenizer: CircuitTokenizer, + condition: CircuitConditionType, + num_of_qubits: int, + gate_pool: Optional[Sequence[str]], + min_gates: int, + max_gates: int, + rng: np.random.Generator, + optimized: bool = True, + post_randomize_params: bool = True, + return_params: bool = True) -> Tuple[Any, torch.Tensor, ...]: + """Generate a random circuit with corresponding condition.""" + + gate_pool = default(gate_pool, tokenizer.vocabulary) + + qc = backend.rnd_circuit(num_of_qubits, rng.integers(min_gates, max_gates+1), gate_pool, rng) + + if optimized: + qc = backend.optimize_circuit(qc, gate_pool) + + if post_randomize_params: + qc = backend.randomize_params(qc, rng) + + match condition: + case CircuitConditionType.SRV: + condition = torch.tensor(backend.schmidt_rank_vector(qc)) + + case CircuitConditionType.UNITARY: + U = backend.get_unitary(qc) + U_r, U_i = torch.from_numpy(np.real(U)), torch.from_numpy(np.imag(U)) + condition = torch.stack([U_r, U_i], dim=0) + + case _: raise NotImplementedError(f"Not implemented given condition: {condition}") + + instructions = backend.backend_to_genqc(qc) + enc_tuple = tokenizer.encode(instructions, max_gates, return_params_tensor=return_params) # qc_tensor, params_tensor + + return qc, condition, *enc_tuple + +# %% 
../../src/platform/circuits_generation.ipynb 6 +def get_rnd_encoded_circuits(backend: BaseBackend, + tokenizer: CircuitTokenizer, + condition: CircuitConditionType, + samples: int, + num_of_qubits: int, + min_gates: int, + max_gates: int, + min_sub_gate_pool_cnt: int = 1, + max_sub_gate_pool_cnt: Optional[int] = None, + fixed_sub_gate_pool: Optional[Sequence[str]] = None, + max_num_params: Optional[int] = None, + filter_unique: bool = True, + optimized: bool = True, + post_randomize_params: bool = True, + return_params: bool = True, + silent: bool = False) -> Tuple[torch.Tensor, ...]: + """ + Generate ´samples´ number of random circuits with corresponding condition. + Creates prompts for conditioning. + """ + + if condition not in [CircuitConditionType.SRV, CircuitConditionType.UNITARY]: + raise NotImplementedError(f"Not implemented {condition}") + + sub_gate_pool = fixed_sub_gate_pool + gate_pool = list(tokenizer.vocabulary) + + rng = np.random.default_rng() + n = len(gate_pool) + 1 + c_range = np.arange(n-1) + + if exists(max_sub_gate_pool_cnt): + max_sub_gate_pool_cnt = max(min_sub_gate_pool_cnt, min(max_sub_gate_pool_cnt+1, n)) + else: + max_sub_gate_pool_cnt = n + + #------------------ + # Generate single circuits sequentially + + x = [] + y = [] + + if condition is CircuitConditionType.UNITARY: + u = [] + + if return_params: + p = [] # Note: params is of different size -> keep list + + for i in tqdm(range(samples), disable=silent): + + if not exists(fixed_sub_gate_pool): + sub_gate_pool_cnt = rng.integers(min_sub_gate_pool_cnt, max_sub_gate_pool_cnt) + sub_gate_pool_ind = rng.choice(c_range, size=sub_gate_pool_cnt, replace=False) + + #NOTE: with this we have always the same ordering of the prompt gates!! 
+ sub_gate_pool_ind = np.sort(sub_gate_pool_ind) + + sub_gate_pool = [gate_pool[ind] for ind in sub_gate_pool_ind] + + val = get_rnd_encoded_circuit(backend=backend, + tokenizer=tokenizer, + condition=condition, + num_of_qubits=num_of_qubits, + gate_pool=sub_gate_pool, + min_gates=min_gates, + max_gates=max_gates, + rng=rng, + optimized=optimized, + post_randomize_params=post_randomize_params, + return_params=return_params) + + if return_params: + _, cond, qc_tensor, params_tensor = val + p.append(params_tensor) + else: + _, cond, qc_tensor = val + + x.append(qc_tensor) + + match condition: + case CircuitConditionType.SRV: + label = f"Generate SRV: {cond.tolist()}" + + case CircuitConditionType.UNITARY: + label = f"Compile using: {[str(gate) for gate in sub_gate_pool]}" + u.append(cond) + + case _: raise NotImplementedError(f"Not implemented given condition: {condition}") + + y.append(label) + + #------------------ + # Make tensors unique and combine tensors and arrays + + x = torch.stack(x, dim=0) + y = np.array(y) + + if condition is CircuitConditionType.UNITARY: + u = torch.stack(u, dim=0) + + if filter_unique: + tensor_unique, tensor_indices = get_unique_elements_indices(x) + + x = x[tensor_indices] + y = y[tensor_indices] + if return_params: p = [p[i] for i in tensor_indices.tolist()] + if condition is CircuitConditionType.UNITARY: u = u[tensor_indices] + + if not silent: + print(f"[INFO]: Generated unique circuits: {tensor_unique.shape[0]}.") + + if not exists(max_num_params): + p_max_para = max(pi.shape[0] for pi in p) + p_min_value = min(pi.min() if pi.numel()>0 else 0 for pi in p) + p_max_value = max(pi.max() if pi.numel()>0 else 0 for pi in p) + if not silent: print(f"[INFO]: No max_num_params provided, infered {p_max_para=}, {p_min_value=} and {p_max_value=}.") + else: + if not silent: print(f"[INFO]: Using provided {max_num_params=}.") + p_max_para = max_num_params + + p_t = torch.zeros((len(p), p_max_para, max_gates)) + for i,pi in enumerate(p): + 
p_t[i, :pi.shape[0], :pi.shape[1]] = pi + p = p_t + + if return_params: + return x, y, u, p + return x, y, u + +# %% ../../src/platform/circuits_generation.ipynb 8 +def generate_circuit_dataset(backend: BaseBackend, + tokenizer: CircuitTokenizer, + condition: CircuitConditionType, + total_samples: int, + num_of_qubits: int, + min_gates: int, + max_gates: int, + batch_samples: int = 128, + n_jobs: int = 1, + unitary_dtype: torch.dtype = torch.float16, + min_sub_gate_pool_cnt: int = 1, + max_sub_gate_pool_cnt: Optional[int] = None, + fixed_sub_gate_pool: Optional[Sequence[str]] = None, + max_num_params: Optional[int] = None, + filter_unique: bool = True, + optimized: bool = True, + post_randomize_params: bool = True, + return_params: bool = True) -> Tuple[torch.Tensor, ...]: + """ + Generates ´samples´ number of random circuits with corresponding condition. + Supports large scale dataset with large unitaries. Uses memory mapping and parallelization. + + - ´unitary_dtype´ only relevant for ´condition=CircuitConditionType.UNITARY´ + """ + + if condition not in [CircuitConditionType.UNITARY]: + raise NotImplementedError(f"Not implemented {condition=}") + + if not return_params: + raise NotImplementedError(f"Not implemented {return_params=}") + + total_samples = int(total_samples) + batch_samples = min(int(batch_samples), total_samples) + njobs = max(min(n_jobs, total_samples//batch_samples), 1) + + #------------------ + # Check data sizes + + gen_data = functools.partial(get_rnd_encoded_circuits, + backend=backend, + tokenizer=tokenizer, + condition=condition, + samples=batch_samples, + num_of_qubits=num_of_qubits, + min_gates=min_gates, + max_gates=max_gates, + min_sub_gate_pool_cnt=min_sub_gate_pool_cnt, + max_sub_gate_pool_cnt=max_sub_gate_pool_cnt, + fixed_sub_gate_pool=fixed_sub_gate_pool, + max_num_params=max_num_params, + filter_unique=filter_unique, + optimized=optimized, + post_randomize_params=post_randomize_params, + return_params=return_params, + 
silent=True) + + x, y, u, p = gen_data() + x_global = torch.zeros((total_samples, *x.shape[1:]), dtype=x.dtype) + y_global = np.empty(total_samples, dtype=y.dtype) + u_global = torch.zeros((total_samples, 2, u.shape[-2], u.shape[-1]), dtype=unitary_dtype) + p_global = torch.zeros((total_samples, *p.shape[1:]), dtype=p.dtype) + + #------------------ + # Run memory mapped parallel generation + + def _f(idx, x_map, y_map, u_map, p_map): + x, y, u, p = gen_data() + + off = x.shape[0] + idx *= batch_samples + + x_map[idx:idx+off] = x + y_map[idx:idx+off] = y + u_map[idx:idx+off] = u + p_map[idx:idx+off] = p + + + def _scope(): + x_map = MemoryMappedArray(x_global) + y_map = MemoryMappedArray(y_global, type="numpy") + u_map = MemoryMappedArray(u_global) + p_map = MemoryMappedArray(p_global) + + with Parallel(n_jobs=n_jobs) as parallel: + loop_set = range(int(np.floor(total_samples/batch_samples))) + _ = parallel(delayed(_f)(idx, x_map.obj_memmap, y_map.obj_memmap, u_map.obj_memmap, p_map.obj_memmap) for idx in loop_set) + + return x_map.get_obj(), y_map.get_obj(), u_map.get_obj(), p_map.get_obj() + + (x_global, x_file), (y_global, y_file), (u_global, u_file), (p_global, p_file) = _scope() + + MemoryMappedArray.clean([x_file, y_file, u_file, p_file]) + + #------------------ + # Collect results and remove the holes + + x_global_nonzero = torch.logical_not((x_global==0).all(-1).all(-1)).nonzero(as_tuple=True) + print(f"[INFO]: Generated {x_global_nonzero[0].shape[0]} valid circuits.") + + # complex indexing makes copy not view + x_global = x_global[x_global_nonzero]#.contiguous().clone() + u_global = u_global[x_global_nonzero]#.contiguous().clone() + p_global = p_global[x_global_nonzero]#.contiguous().clone() + y_global = y_global[x_global_nonzero] #np.ascontiguousarray(y_global[x_global_nonzero]) + + if filter_unique: + tensor_unique, tensor_indices = get_unique_elements_indices(x_global) + + x_global = x_global[tensor_indices] + y_global = y_global[tensor_indices] + 
u_global = u_global[tensor_indices] + p_global = p_global[tensor_indices] + + print(f"[INFO]: After filtering unique circuits: {x_global.shape[0]}.") + + return x_global, y_global, u_global, p_global diff --git a/genQC/platform/circuits_instructions.py b/genQC/platform/circuits_instructions.py new file mode 100644 index 0000000..05615f5 --- /dev/null +++ b/genQC/platform/circuits_instructions.py @@ -0,0 +1,54 @@ +"""Classes for quantum circuit instructions.""" + +# AUTOGENERATED! DO NOT EDIT! File to edit: ../../src/platform/circuits_instructions.ipynb. + +# %% auto 0 +__all__ = ['CircuitInstruction', 'CircuitInstructions'] + +# %% ../../src/platform/circuits_instructions.ipynb 2 +from ..imports import * + +# %% ../../src/platform/circuits_instructions.ipynb 4 +@dataclass +class CircuitInstruction(): + """Basic quantum circuit instruction.""" + name: str + control_nodes: Sequence[int] + target_nodes: Sequence[int] + params: Sequence[float] + +# %% ../../src/platform/circuits_instructions.ipynb 5 +class CircuitInstructions(): + """Basic quantum circuit instruction handler.""" + + def __init__(self, tensor_shape: torch.Size) -> None: + assert len(tensor_shape) == 2 # ... 
[qubits, time] + self.tensor_shape = tensor_shape + self._instructions = [] + self.instruction_names_set = set() + + def add_instruction(self, + name: str, + control_nodes: Sequence[int], + target_nodes: Sequence[int], + params: Sequence[float]) -> None: + self.instruction_names_set.add(name) + self._instructions.append(CircuitInstruction(name, control_nodes, target_nodes, params)) + + @property + def data(self) -> List[CircuitInstruction]: return self._instructions + + @property + def length(self) -> int: return len(self._instructions) + + @property + def num_qubits(self) -> int: return self.tensor_shape[0] + + @property + def max_gates(self) -> int: return self.tensor_shape[1] + + def __repr__(self) -> str: return str(self._instructions) + + def print(self) -> None: + for instruction in self.data: + print(instruction) diff --git a/genQC/platform/qcircuit_dataset_construction.py b/genQC/platform/qcircuit_dataset_construction.py deleted file mode 100644 index 429b93a..0000000 --- a/genQC/platform/qcircuit_dataset_construction.py +++ /dev/null @@ -1,223 +0,0 @@ -# AUTOGENERATED! DO NOT EDIT! File to edit: ../../src/platform/qcircuit_dataset_construction.ipynb. 
- -# %% auto 0 -__all__ = ['get_target_control_qubits', 'encode_circuit', 'decode_circuit', 'get_rnd_encoded_circuit', 'get_rnd_encoded_circuits', - 'gen_qc_dataset', 'get_specific_rnd_srv_circuit', 'gen_compilation_rndGates_dataset'] - -# %% ../../src/platform/qcircuit_dataset_construction.ipynb 3 -from ..imports import * -from .simulation.qcircuit_sim import * -import genQC.dataset.dataset_helper as dahe - -import qiskit.quantum_info as qi -from qiskit import QuantumCircuit -from qiskit.circuit.gate import Gate -import qiskit.circuit.library as ql - -# %% ../../src/platform/qcircuit_dataset_construction.ipynb 5 -def get_target_control_qubits(qc: QuantumCircuit, gate: Gate): - acts_on_cnt = gate.operation.num_qubits - acts_on = [qc.find_bit(qubit).index for qubit in gate.qubits] # order: (*control_qubits, *target_qubits) - - assert acts_on_cnt == len(acts_on), "error in: acts_on_cnt == len(acts_on)" - - num_ctrl_qubits = gate.operation.num_ctrl_qubits if hasattr(gate.operation, "num_ctrl_qubits") else 0 - num_targ_qubits = acts_on_cnt - num_ctrl_qubits - - control_qubits, target_qubits = acts_on[:-num_targ_qubits], acts_on[-num_targ_qubits:] - - return control_qubits, target_qubits - -# %% ../../src/platform/qcircuit_dataset_construction.ipynb 6 -def encode_circuit(qc: QuantumCircuit, num_of_qubits, gate_classes: dict, max_gates: int, sign_labels={"control_qubits":-1, "target_qubits":+1}, return_params=False): - # circuit tensor - # [qbits, time] .. 
in +- gate_number - # 0 for empty - - tensor = torch.zeros((num_of_qubits, max_gates), dtype=torch.int32) - params = [] - - for t, gate in enumerate(qc.data): - params.append(gate.operation.params) - - gate_id = gate_classes[gate.operation.name] #for new tensor just use this as the abs(T) and then assign the sign dep on the c/t - - control_qubits, target_qubits = get_target_control_qubits(qc, gate) - - for bit in control_qubits: - tensor[bit, t] = gate_id * sign_labels["control_qubits"] - - for bit in target_qubits: - tensor[bit, t] = gate_id * sign_labels["target_qubits"] - - if return_params: - num_of_max_params = max(len(para) for para in params) - params_tensor = torch.zeros((num_of_max_params, max_gates), dtype=torch.float32) - - for t, para in enumerate(params): - params_tensor[:len(para), t] = torch.tensor(para) - - return tensor, params_tensor - - return tensor - -# %% ../../src/platform/qcircuit_dataset_construction.ipynb 7 -def decode_circuit(enc_tensor: torch.Tensor, gate_pool: list[Gate], place_barrier=True, sign_labels={"control_qubits":-1, "target_qubits":+1}, params_tensor=None): - # should have dim 2, [bits, sequence] - #minus ... control_qubits - #plus ... 
target_qubits - - assert enc_tensor.ndim == 2, f"{enc_tensor.shape=}" - num_of_qubits, time = enc_tensor.shape - - gate_qiskit_classes = {(i+1):x for i,x in enumerate(gate_pool)} - - qc = QuantumCircuit(num_of_qubits) - - for t in range(time): - enc_time_slice = enc_tensor[:, t] # only contains all bits at time t - - for gate_index,gate_qiskit_class in gate_qiskit_classes.items(): - target_qubits = (enc_time_slice == (sign_labels["target_qubits"] *gate_index)).nonzero() - control_qubits = (enc_time_slice == (sign_labels["control_qubits"]*gate_index)).nonzero() - - if target_qubits.nelement() > 0: - num_of_paramters = get_number_of_gate_params(gate_qiskit_class) - if exists(params_tensor) and num_of_paramters > 0 : params = params_tensor[:num_of_paramters, t].tolist() - else: params = [0] * num_of_paramters - - qc.append(gate_qiskit_class(*params), [*control_qubits.tolist(), *target_qubits.tolist()], []) - if place_barrier: qc.barrier() - break #break on first hit, per def only one gate allowed per t - - elif control_qubits.nelement() > 0: #no target but control means error - raise RuntimeError("control_qubits.nelement() > 0") - #else we are fine with tensor that have time steps with no action! 
- - return qc - -# %% ../../src/platform/qcircuit_dataset_construction.ipynb 10 -def get_rnd_encoded_circuit(num_of_qubits, min_gates, max_gates, gate_pool, gate_classes, rng, optimized=True, return_params=False): - qc = rnd_circuit(num_of_qubits, rng.integers(min_gates, max_gates+1), gate_pool, rng) - if optimized: qc = optimize_circuit(qc, gate_pool) - svr = schmidt_rank_vector(qi.DensityMatrix(qc)) - - if return_params: - qc_tensor, params_tensor = encode_circuit(qc, num_of_qubits, gate_classes, max_gates, return_params=return_params) - return qc, qc_tensor, svr, params_tensor - - qc_tensor = encode_circuit(qc, num_of_qubits, gate_classes, max_gates, return_params=return_params) - return qc, qc_tensor, svr - -# %% ../../src/platform/qcircuit_dataset_construction.ipynb 11 -def get_rnd_encoded_circuits(samples, num_of_qubits=3, min_gates=3, max_gates=10, gate_pool=[ql.HGate, ql.CXGate], optimized=True, silent=False, return_params=False): - gate_classes = gate_pool_to_gate_classes(gate_pool) #{x().name:(i+1) for i,x in enumerate(gate_pool)} - - rng = np.random.default_rng() - - data = [] - label = [] - params = [] - - for i in tqdm(range(samples), disable=silent): - if return_params: - qc, qc_tensor, svr, params_tensor = get_rnd_encoded_circuit(num_of_qubits, min_gates, max_gates, gate_pool, gate_classes, rng, optimized, return_params=return_params) - params.append(params_tensor) - - else: - qc, qc_tensor, svr = get_rnd_encoded_circuit(num_of_qubits, min_gates, max_gates, gate_pool, gate_classes, rng, optimized, return_params=return_params) - - data.append(qc_tensor) - label.append(svr) - - if return_params: return data, label, params - return data, label - -# %% ../../src/platform/qcircuit_dataset_construction.ipynb 13 -def gen_qc_dataset(samples, num_of_qubits, min_gates, max_gates, gate_pool, optimized, silent=False): - tensor, srv = get_rnd_encoded_circuits(samples, num_of_qubits, min_gates, max_gates, gate_pool, optimized, silent) - - # make sure we have 
unique circuits - tensor = torch.stack(tensor, dim=0) - tensor_unique, tensor_indices = dahe.get_unique_elements_indices(tensor) - - if not silent: print(f"Generated unique circuits: {tensor_unique.shape[0]}") - - #-------------------------- - #select uniques only - - x = tensor[tensor_indices] - y = torch.Tensor(srv).type(torch.int32)[tensor_indices] #leave as tensor, treat as 2D condition, combine cond into one large (each cat) - - return x,y - -# %% ../../src/platform/qcircuit_dataset_construction.ipynb 15 -def get_specific_rnd_srv_circuit(srv, requested_length, gate_pool, max_i=2000, silent=True, fix_length_after_optimizing=True, requested_length_tolerance=0): - rng = np.random.default_rng() - - num_of_qubits = len(srv) - is_srv = None - - if requested_length < sum(srv)-num_of_qubits: return None #not possible to generate this srv, to few gates - - i = 0 - while is_srv != srv: # brute-force sample a SRV - qc = rnd_circuit(num_of_qubits, requested_length, gate_pool, rng) - qc = optimize_circuit(qc, gate_pool) - - if i > max_i: - if not silent: print(f"Max i reached: {srv=} {requested_length=} {requested_length_tolerance=} {max_i=}") - return None #raise RuntimeError("max i reached") - i += 1 - - #--------------- - if fix_length_after_optimizing and len(qc.data) < requested_length-requested_length_tolerance: - continue - - is_srv = schmidt_rank_vector(qi.DensityMatrix(qc)) - - return qc - -# %% ../../src/platform/qcircuit_dataset_construction.ipynb 17 -def gen_compilation_rndGates_dataset(samples, num_of_qubits, min_gates, max_gates, gate_pool, min_sub_gate_pool_cnt=1, silent=False): - '''Samples rnd circuit with a rnd subset of gates and return qc with gate label and unitary''' - - gate_classes = {x().name:(i+1) for i,x in enumerate(gate_pool)} #+1 for empty! global gate classes so we fix the indices! 
1...H 2...CX so on - - #------------------------------- - - rng = np.random.default_rng() - n = len(gate_pool) + 1 - c_range = np.arange(n-1) - - tensor = [] - label = [] - U = [] - - for i in tqdm(range(samples), disable=silent): - sub_gate_pool_cnt = rng.integers(min_sub_gate_pool_cnt, n) - sub_gate_pool_ind = rng.choice(c_range, size=sub_gate_pool_cnt, replace=False) - sub_gate_pool = [gate_pool[ind] for ind in sub_gate_pool_ind] # pick random subeset of gates - - qc, qc_tensor, svr = get_rnd_encoded_circuit(num_of_qubits, min_gates, max_gates, sub_gate_pool, gate_classes, rng, optimized=True) - - tensor.append(qc_tensor) - label.append(f"Compile using: {[x().name for x in sub_gate_pool]}") - U.append(qi.Operator(qc).to_matrix()) - - #------------------------------- - - # make sure we have unique circuits - tensor = torch.stack(tensor, dim=0) - tensor_unique, tensor_indices = dahe.get_unique_elements_indices(tensor) - - if not silent: print(f"generated unique circuits: {tensor_unique.shape[0]}") - - #-------------------------- - #select uniques only - - x = tensor[tensor_indices] - y = [label[i] for i in tensor_indices.tolist()] - U = [ U[i] for i in tensor_indices.tolist()] - - return x,y,U diff --git a/genQC/platform/qcircuit_evaluation.py b/genQC/platform/qcircuit_evaluation.py deleted file mode 100644 index 4a8882a..0000000 --- a/genQC/platform/qcircuit_evaluation.py +++ /dev/null @@ -1,68 +0,0 @@ -# AUTOGENERATED! DO NOT EDIT! File to edit: ../../src/platform/qcircuit_evaluation.ipynb. 
- -# %% auto 0 -__all__ = ['sort_into_bins', 'extract_gate_number', 'get_gate_stat_from_tensors', 'get_gate_stat_from_circuits'] - -# %% ../../src/platform/qcircuit_evaluation.ipynb 2 -from ..imports import * -from .qcircuit_dataset_construction import * -from .simulation.qcircuit_sim import schmidt_rank_vector, optimize_circuit - -import qiskit.quantum_info as qi -from qiskit import QuantumCircuit - -# %% ../../src/platform/qcircuit_evaluation.ipynb 4 -def sort_into_bins(x, y, y_uniques): - - x_binned = [] - y_binned = [] - - for y_unique in y_uniques: - - comp = torch.all(y==y_unique, dim=-1) - indices = comp.nonzero().squeeze() - - x_binned.append(x[indices]) - y_binned.append(y[indices]) - - y_bins = [y[0] for y in y_binned] - - return x_binned, y_binned, y_bins - -# %% ../../src/platform/qcircuit_evaluation.ipynb 5 -def extract_gate_number(qc: QuantumCircuit, gate_pool, max_gates): - gate_classes = {"empty":0} | {x().name:i+1 for i,x in enumerate(gate_pool)} - - gate_cnt = np.zeros(len(gate_classes), dtype=int) - - if hasattr(qc, "data"): - for t, gate in enumerate(qc.data): - gate_id = gate_classes[gate.operation.name] - gate_cnt[gate_id] += 1 - - gate_cnt[0] = max_gates - sum(gate_cnt[1:]) - - return gate_cnt, gate_classes - -# %% ../../src/platform/qcircuit_evaluation.ipynb 6 -def get_gate_stat_from_tensors(tensors, gate_pool): - for i,tensor in tqdm(enumerate(tensors), total=tensors.shape[0]): - qc = decode_circuit(tensor, gate_pool) - - t_gate_cnts, gate_dict = extract_gate_number(qc, gate_pool, max_gates=tensor.shape[1]) - - if i > 0: gate_cnts = np.vstack([gate_cnts, t_gate_cnts]) - else: gate_cnts = t_gate_cnts - - return gate_cnts, gate_dict - -# %% ../../src/platform/qcircuit_evaluation.ipynb 7 -def get_gate_stat_from_circuits(qcs: list, gate_pool, max_gates): - for i,qc in tqdm(enumerate(qcs), total=len(qcs)): - - t_gate_cnts, gate_dict = extract_gate_number(qc, gate_pool, max_gates) - - if i > 0: gate_cnts = np.vstack([gate_cnts, t_gate_cnts]) - 
else: gate_cnts = t_gate_cnts - - return gate_cnts, gate_dict diff --git a/genQC/platform/qcircuit_metrics.py b/genQC/platform/qcircuit_metrics.py deleted file mode 100644 index 00dd884..0000000 --- a/genQC/platform/qcircuit_metrics.py +++ /dev/null @@ -1,20 +0,0 @@ -# AUTOGENERATED! DO NOT EDIT! File to edit: ../../src/platform/qcircuit_metrics.ipynb. - -# %% auto 0 -__all__ = ['Unitary_FrobeniusNorm'] - -# %% ../../src/platform/qcircuit_metrics.ipynb 3 -from ..imports import * - -# %% ../../src/platform/qcircuit_metrics.ipynb 5 -class Unitary_FrobeniusNorm: - #defined in https://arxiv.org/pdf/2106.05649.pdf - - @staticmethod - def distance(approx_U: torch.tensor, target_U: torch.tensor): - d = 0.5 * torch.linalg.matrix_norm((approx_U-target_U), ord="fro")**2 - return d - - @staticmethod - def name(): - return "Frobenius-Norm" diff --git a/genQC/platform/qcircuit_util.py b/genQC/platform/qcircuit_util.py deleted file mode 100644 index 747f91e..0000000 --- a/genQC/platform/qcircuit_util.py +++ /dev/null @@ -1,39 +0,0 @@ -# AUTOGENERATED! DO NOT EDIT! File to edit: ../../src/platform/qcircuit_util.ipynb. - -# %% auto 0 -__all__ = ['get_element_matching_indices', 'get_entanglement_bins'] - -# %% ../../src/platform/qcircuit_util.ipynb 2 -from ..imports import * - -# %% ../../src/platform/qcircuit_util.ipynb 4 -def get_element_matching_indices(a, b): - """Compares (2d) `a` with `b`. 
Returns the indices of `b`, where a element of `a` matches with `b`.""" - # Expand dimensions of a to match the shape of b for element-wise comparison - expanded_a = a.unsqueeze(0).expand(b.shape[0], *a.shape) # [b0, a0, a1] - expanded_b = b.unsqueeze(1) # [b0, 1, b1] - - # Compare all vector entries of a with all vectors of b - matches = torch.all(expanded_a == expanded_b, dim=-1) - - matching_indices = torch.nonzero(torch.any(matches, dim=1)).squeeze() - - if matching_indices.dim() == 0: matching_indices = torch.tensor([matching_indices]) - - return matching_indices - -# %% ../../src/platform/qcircuit_util.ipynb 5 -def get_entanglement_bins(num_of_qubits): - """Returns all SRV sorted in entangle bins which correspond to a number of entangled qubits.""" - dist_srvs = [x for x in itertools.product(*([[1,2]]*num_of_qubits))] - dist_srvs = np.array(dist_srvs, dtype=int)[np.sum(dist_srvs, axis=1)!=num_of_qubits+1].tolist() - dist_srvs = sorted(dist_srvs, key=lambda x: sum(x)) - dist_srvs = np.array(dist_srvs) - - entangle = [1] + [scipy.special.comb(num_of_qubits, i, exact=True) for i in range(2, num_of_qubits)] - - entanglement_bins = np.split(dist_srvs, np.cumsum(entangle)) - - ent_bits = [f"{sum(n[0])-num_of_qubits} qubit entangled" for n in entanglement_bins] - - return [x.tolist() for x in entanglement_bins], ent_bits diff --git a/genQC/platform/simulation.py b/genQC/platform/simulation.py new file mode 100644 index 0000000..a4be881 --- /dev/null +++ b/genQC/platform/simulation.py @@ -0,0 +1,55 @@ +"""Class to load and run corresponding backends.""" + +# AUTOGENERATED! DO NOT EDIT! File to edit: ../../src/platform/simulation.ipynb. 
+ +# %% auto 0 +__all__ = ['GenericBackendType', 'CircuitBackendType', 'TensorEncodingType', 'is_circuit_type', 'Simulator'] + +# %% ../../src/platform/simulation.ipynb 2 +from ..imports import * +from .backends.base_backend import BaseBackend + +# %% ../../src/platform/simulation.ipynb 4 +class CircuitBackendType(enum.Enum): + QISKIT = enum.auto() + CUDAQ = enum.auto() + PENNYLANE = enum.auto() + +GenericBackendType = Union[CircuitBackendType] + +# %% ../../src/platform/simulation.ipynb 5 +class TensorEncodingType(enum.Enum): + CIRCUIT = enum.auto() + +def is_circuit_type(backend_type): return backend_type in CircuitBackendType + +# %% ../../src/platform/simulation.ipynb 7 +class Simulator(): + """Basic class for handling backend types.""" + + def __init__(self, backend: GenericBackendType, *args, **kwargs) -> BaseBackend: + match backend: + case CircuitBackendType.QISKIT: + from genQC.platform.backends.circuits_qiskit import CircuitsQiskitBackend + backend = CircuitsQiskitBackend(*args, **kwargs) + + case CircuitBackendType.CUDAQ: + from genQC.platform.backends.circuits_cudaq import CircuitsCudaqBackend + backend = CircuitsCudaqBackend(*args, **kwargs) + + case CircuitBackendType.PENNYLANE: + from genQC.platform.backends.circuits_pennylane import CircuitsPennylaneBackend + backend = CircuitsPennylaneBackend(*args, **kwargs) + + case _: + raise NotImplementedError(f"Not implemented given backend: {backend}") + + self.backend = backend + + + def backend_to_genqc(self, *args, **kwargs): + return self.backend.backend_to_genqc(*args, **kwargs) + + + def genqc_to_backend(self, *args, **kwargs): + return self.backend.genqc_to_backend(*args, **kwargs) diff --git a/genQC/platform/simulation/qcircuit_sim.py b/genQC/platform/simulation/qcircuit_sim.py deleted file mode 100644 index 3084670..0000000 --- a/genQC/platform/simulation/qcircuit_sim.py +++ /dev/null @@ -1,102 +0,0 @@ -# AUTOGENERATED! DO NOT EDIT! File to edit: ../../../src/platform/simulation/qcircuit_sim.ipynb. 
- -# %% auto 0 -__all__ = ['get_number_of_gate_params', 'gate_pool_to_gate_classes', 'instruction_name_to_qiskit_gate', 'schmidt_rank_vector', - 'rnd_circuit', 'optimize_circuit', 'plot_svr_stat'] - -# %% ../../../src/platform/simulation/qcircuit_sim.ipynb 2 -from ...imports import * -from ...config_loader import * - -import qiskit.quantum_info as qi -from qiskit import QuantumCircuit, transpile -from qiskit.circuit.gate import Gate -import qiskit.circuit.library as ql - -# %% ../../../src/platform/simulation/qcircuit_sim.ipynb 4 -def get_number_of_gate_params(gate_cls): - return gate_cls.__init__.__code__.co_argcount - len(gate_cls.__init__.__defaults__) - 1 # python: gives you the number of any arguments BEFORE *args, minus ones that have a default, -1 for self parameter of classes - -# %% ../../../src/platform/simulation/qcircuit_sim.ipynb 5 -def gate_pool_to_gate_classes(gate_pool: list[Gate]): - """Creates a vocabulary from a gate pool.""" - classes = {} - - for i,cls in enumerate(gate_pool): - num_of_paramters = get_number_of_gate_params(cls) - name = cls(*[0]*num_of_paramters).name - classes[name] = (i+1) - - return classes - -# %% ../../../src/platform/simulation/qcircuit_sim.ipynb 6 -def instruction_name_to_qiskit_gate(name: str) -> Gate: - match name: - case "swap": name = "Swap" - case "cp": name = "CPhase" - case _: name = name.upper() - - return get_obj_from_str(f"qiskit.circuit.library.standard_gates.{name}Gate") - -# %% ../../../src/platform/simulation/qcircuit_sim.ipynb 7 -def schmidt_rank_vector(densityMatrix: qi.DensityMatrix): - """Return the SRV of a `qi.DensityMatrix`.""" - systems_cnt = len(densityMatrix.dims()) - total_trace = set(range(systems_cnt)) - rank_vector = [] - - for i in range(systems_cnt): - trace = list(total_trace - {i}) - red_densityMatrix = qi.partial_trace(densityMatrix, trace) - # r = np.count_nonzero(np.linalg.eigvals(red_densityMatrix) > 1e-14) # was slower during testing - r = np.linalg.matrix_rank(red_densityMatrix, 
hermitian=True).item() - rank_vector.append(r) - - return rank_vector - -# %% ../../../src/platform/simulation/qcircuit_sim.ipynb 8 -def rnd_circuit(num_of_qubits, num_of_gates, gate_pool: list[Gate], rng): - """Create a random circuit.""" - qc = QuantumCircuit(num_of_qubits) - gate_indices = rng.choice(len(gate_pool), num_of_gates) - - for gate_index in gate_indices: - gate_qiskit_class = gate_pool[gate_index] - - num_of_paramters = get_number_of_gate_params(gate_qiskit_class) - params = rng.uniform(low=0, high=2*np.pi, size=num_of_paramters) if num_of_paramters > 0 else [] # random between 0 and 2pi - - gate = gate_qiskit_class(*params) - act_qubits = rng.choice(num_of_qubits, gate.num_qubits, replace=False) # order: (*act_qubits)=(*control_qubits, *target_qubits) - qc.append(gate, [*act_qubits], []) - - return qc - -# %% ../../../src/platform/simulation/qcircuit_sim.ipynb 9 -def optimize_circuit(qc: QuantumCircuit, gate_pool: list[Gate], optimization_level=2): - """Use qiskit.compiler.transpile to optimize a circuit.""" - basis_gates = gate_pool_to_gate_classes(gate_pool).keys() - - while optimization_level > 0: - try: - qc_opt = transpile(qc, optimization_level=optimization_level, basis_gates=basis_gates) #target=target - return qc_opt - except Exception as er: pass - - optimization_level -= 1 - - return qc - -# %% ../../../src/platform/simulation/qcircuit_sim.ipynb 11 -def plot_svr_stat(num_of_qubits, min_gates, max_gates, gs, samples, sort=False, opt=True, rng=np.random.default_rng()): - svr_list = list() - for i in range(samples): - qc = rnd_circuit(num_of_qubits, rng.integers(min_gates, max_gates+1), gs, rng) - if opt: qc = optimize_circuit(qc, gs) - svr = schmidt_rank_vector(qi.DensityMatrix(qc)) - if sort: svr = sorted(svr) - svr_list.append(svr) - df = pd.DataFrame(data={"svr":svr_list}) - cnts = df['svr'].value_counts(normalize=True) - for n,v in zip(cnts.index, cnts.values): print(f"{n}: {v*100:.1f}%") - df['svr'].value_counts().plot(kind='bar') diff 
--git a/genQC/platform/tokenizer/__init__.py b/genQC/platform/tokenizer/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/genQC/platform/tokenizer/base_tokenizer.py b/genQC/platform/tokenizer/base_tokenizer.py new file mode 100644 index 0000000..09f79c8 --- /dev/null +++ b/genQC/platform/tokenizer/base_tokenizer.py @@ -0,0 +1,36 @@ +"""Base class of corresponding tokenizers.""" + +# AUTOGENERATED! DO NOT EDIT! File to edit: ../../../src/platform/tokenizer/base_tokenizer.ipynb. + +# %% auto 0 +__all__ = ['Vocabulary', 'VocabularyInverse', 'invert_vocabulary', 'BaseTokenizer'] + +# %% ../../../src/platform/tokenizer/base_tokenizer.ipynb 2 +from ...imports import * + +# %% ../../../src/platform/tokenizer/base_tokenizer.ipynb 3 +Vocabulary = dict[str, int] | dict[Any, int] +VocabularyInverse = dict[int, str] | dict[int, Any] + +def invert_vocabulary(vocabulary: Vocabulary) -> VocabularyInverse: + vocabulary_inverse = {token:gate for gate, token in vocabulary.items()} + return vocabulary_inverse + +# %% ../../../src/platform/tokenizer/base_tokenizer.ipynb 4 +class BaseTokenizer(abc.ABC): + + def __init__(self, vocabulary: Vocabulary) -> None: + self.vocabulary = vocabulary + self.vocabulary_inverse = invert_vocabulary(vocabulary) + + @abc.abstractmethod + def tokenize(self, *args, **kwargs): + raise NotImplementedError() + + @abc.abstractmethod + def encode(self, *args, **kwargs): + raise NotImplementedError() + + @abc.abstractmethod + def decode(self, *args, **kwargs): + raise NotImplementedError() diff --git a/genQC/platform/tokenizer/circuits_tokenizer.py b/genQC/platform/tokenizer/circuits_tokenizer.py new file mode 100644 index 0000000..d36de3e --- /dev/null +++ b/genQC/platform/tokenizer/circuits_tokenizer.py @@ -0,0 +1,143 @@ +"""Class to tokenize quantum circuits. Encode and decode quantum circuits into and from tensor representations.""" + +# AUTOGENERATED! DO NOT EDIT! File to edit: ../../../src/platform/tokenizer/circuits_tokenizer.ipynb. 
+ +# %% auto 0 +__all__ = ['CircuitTokenizer'] + +# %% ../../../src/platform/tokenizer/circuits_tokenizer.ipynb 2 +from ...imports import * +from .base_tokenizer import BaseTokenizer, Vocabulary +from ..circuits_instructions import CircuitInstructions + +# %% ../../../src/platform/tokenizer/circuits_tokenizer.ipynb 3 +class CircuitTokenizer(BaseTokenizer): + + def __init__(self, vocabulary: Vocabulary, sign_labels: Optional[dict[str, int]] = None) -> None: + if 0 in vocabulary.values(): + print(f"[WARNING]: The value 0 is reserved for background tokens, i.e. qubit time position which are not effected by gates.") + print(f"[WARNING]: Automatically incrementing all vocabulary values by one ...") + vocabulary = {k:v+1 for k,v in vocabulary.items()} + assert 0 not in vocabulary.values() + + super().__init__(vocabulary) + self.sign_labels = default(sign_labels, {"control_nodes": -1, "target_nodes": +1}) + + def tokenize(self, instructions: CircuitInstructions) -> torch.Tensor | Tuple[torch.Tensor, torch.Tensor]: + """Convert given instructions to a tensor. 
Identical to `CircuitTokenizer.encode`.""" + return self.encode(instructions=instructions) + + def encode(self, + instructions: CircuitInstructions, + max_gates: Optional[int] = None, + return_params_tensor: bool = True, + params_4pi_normalization: bool = True, + randomize_params: bool = False) -> torch.Tensor | Tuple[torch.Tensor, torch.Tensor]: + """Convert given `CircuitInstructions` to a `torch.Tensor`.""" + + assert len(instructions.tensor_shape) == 2 + num_of_qubits, time = instructions.tensor_shape + max_gates = default(max_gates, time) + + tensor = torch.zeros((num_of_qubits, max_gates), dtype=torch.int32) + params = [] + + for t, instruction in zip(range(max_gates), instructions.data): # this way we limit the number of gates even if there are more instructions + + if instruction.name not in self.vocabulary: raise Warning(f"`{instruction.name}` not in vocabulary.") + + params.append(instruction.params) + + gate_id = self.vocabulary[instruction.name] + + control_qubits, target_qubits = instruction.control_nodes, instruction.target_nodes + + for bit in control_qubits: + tensor[bit, t] = gate_id * self.sign_labels["control_nodes"] + + for bit in target_qubits: + tensor[bit, t] = gate_id * self.sign_labels["target_nodes"] + + if return_params_tensor: + num_of_max_params = max([0] + [len(para) for para in params]) + params_tensor = torch.zeros((num_of_max_params, max_gates), dtype=torch.float32) + + for t, para in enumerate(params): + para = torch.tensor(para) + + if randomize_params: + para = 2.0*torch.rand_like(para) - 1.0 # rnd [-1, 1] + + elif params_4pi_normalization: + para = para % (4.0*np.pi) # limit to [0, 4pi] + para = (para-2.0*np.pi) / (2.0*np.pi) # [0, 4pi] to [-1, +1] + + params_tensor[:len(para), t] = para + + return tensor, params_tensor + return tensor + + def decode(self, + tensor: torch.Tensor, + params_tensor: Optional[torch.Tensor] = None, + params_4pi_normalization: bool = True, + ignore_errors: bool = False, + place_error_placeholders: 
bool = False) -> CircuitInstructions: + """Convert a given `torch.Tensor` to `CircuitInstructions`.""" + + assert tensor.dim() == 2, f"{tensor.shape=}" + num_of_qubits, time = tensor.shape + + instructions = CircuitInstructions(tensor_shape=tensor.shape) + + for t in range(time): + enc_time_slice = tensor[:, t] # contains all bits at time t + + _gate_placed = False + + for gate_index, gate in self.vocabulary_inverse.items(): + + target_nodes = (enc_time_slice == (self.sign_labels["target_nodes"] * gate_index)).nonzero(as_tuple=True)[0] + control_nodes = (enc_time_slice == (self.sign_labels["control_nodes"] * gate_index)).nonzero(as_tuple=True)[0] + + _gate_placed = False + + if target_nodes.nelement() > 0: + params = [] + if exists(params_tensor): + params = params_tensor[:, t] + if params_4pi_normalization: + params = (params+1.0) * 2.0*np.pi # [-1, 1] to [0, 4pi] + params = params.tolist() + + instructions.add_instruction(gate, control_nodes.tolist(), target_nodes.tolist(), params) + _gate_placed = True + + break #break on first hit, per def only one gate allowed per t + + elif control_nodes.nelement() > 0: # no target but control means error + if not ignore_errors: + raise RuntimeError("target_nodes.nelement() <= 0 but control_nodes.nelement() > 0") + + if not _gate_placed and place_error_placeholders: + # note we place a h gate with no qubits, so this is always an error + instructions.add_instruction("h", [], [], []) + + #else # we are fine with tensors that have time steps with no action! 
+ + return instructions + + @staticmethod + def get_parametrized_tokens(vocabulary: Vocabulary) -> List[int]: + parametrized_names = "rx ry rz phase cp crx cry crz u u2 u3".split() + non_parametrized_names = "x y z h cx cy cz ch ccx swap s sdg t tdg".split() + + parametrized_tokens = [] + for name, token in vocabulary.items(): + + if name in parametrized_names: + parametrized_tokens.append(token) + elif name not in non_parametrized_names: + raise NotImplementedError(f"Unknown gate {name}! Please add it to the known list.") + + return parametrized_tokens diff --git a/genQC/platform/tokenizer/tensor_tokenizer.py b/genQC/platform/tokenizer/tensor_tokenizer.py new file mode 100644 index 0000000..21497ed --- /dev/null +++ b/genQC/platform/tokenizer/tensor_tokenizer.py @@ -0,0 +1,362 @@ +"""Class to further tokenize tensor representations.""" + +# AUTOGENERATED! DO NOT EDIT! File to edit: ../../../src/platform/tokenizer/tensor_tokenizer.ipynb. + +# %% auto 0 +__all__ = ['GatePairTokenizer', 'sort_config', 'get_topk_depth_unpacked'] + +# %% ../../../src/platform/tokenizer/tensor_tokenizer.ipynb 2 +from ...imports import * +from .base_tokenizer import BaseTokenizer, Vocabulary + +# %% ../../../src/platform/tokenizer/tensor_tokenizer.ipynb 4 +class GatePairTokenizer(BaseTokenizer): + + def __init__(self, unique_class_values, zero_token, padding_token, device): + super().__init__({}) + + self.padding_token = padding_token + self.not_gates_tokens = torch.tensor([zero_token, padding_token]).to(device) + + self.current_tokens = torch.tensor(unique_class_values, device=device) + self._current_depth = 0 + + self.token_lookup = {} #reduced forms, used for gadget extraction + self.token_lookup_raw = {} #the raw form, used for encoding + + self.token_depth = {tok:0 for tok in self.current_tokens.cpu().tolist()} + self.token_cnts = {} + + def learn(self, tensors, max_depth, max_iters): + # loop over get bets and then replace + + current_tensor = tensors + self._current_depth = 0 + + 
for i in tqdm(range(max_iters), total=max_iters): + + overlap_pairs = self.extract_new_gate_overlap_pairs(current_tensor) + overlap_pairs_std_form = self.standardize_overlap_pairs(overlap_pairs) + + top_pairs, topv = self.get_topk_pairs(overlap_pairs_std_form, k=1) + top_pair = top_pairs[0] + + if top_pair.abs().sum() < 1: + print("break: top_pair.abs().sum() < 1") + break + + if topv < 2: + print("break: no more pair with cnt > 1") + break + + current_tensor = self.learn_step(current_tensor, top_pair, topv=topv) + + current_max_depth = max(self.token_depth.values()) + if current_max_depth > max_depth: + print(f"break: max_depth {max_depth} reached") + break + + print("break: max_iters reached") + return current_tensor + + def to(self, device): + self.not_gates_tokens = self.not_gates_tokens.to(device) + self.current_tokens = self.current_tokens.to(device) + + for k, v in self.token_lookup.items(): + self.token_lookup[k] = self.token_lookup[k].to(device) + + for k, v in self.token_lookup_raw.items(): + self.token_lookup_raw[k] = self.token_lookup_raw[k].to(device) + + return self + + def tokenize(self, tensors): + """Identical to `GatePairTokenizer.encode`.""" + return self.encode(tensors=tensors) + + #--------------------------------------- + # Replace pairs with new tokens + + def learn_step(self, current_tensor, top_pair, new_tokens: Optional[torch.Tensor] = None, topv: Optional[torch.Tensor] = None): + + top_pair_reduced = top_pair[top_pair.abs().sum(-1)>0].unique_consecutive(dim=0) + + if not_exists(new_tokens): + new_tokens = self.current_tokens.max() + 1 + torch.arange(top_pair_reduced.shape[0], device=current_tensor.device) + self.current_tokens = torch.cat([self.current_tokens, new_tokens]) + + key = tuple(new_tokens.cpu().tolist()) + self.token_lookup[key] = top_pair_reduced #top_pair[top_pair.sum(-1)>0] + self.token_lookup_raw[key] = top_pair + self.token_cnts[key] = topv + + _current_depth = max(self.token_depth[k] for k in 
top_pair_reduced.flatten().cpu().tolist()) + 1 + + for tok in new_tokens.cpu().tolist(): + self.token_depth[tok] = _current_depth + + if _current_depth > self._current_depth: + self._current_depth = _current_depth + print(f"New depth reached {self._current_depth}") + else: + assert top_pair_reduced.shape[0] == new_tokens.shape[0] + + # 1) Replace one all even pairs + current_overlap_pairs = self.extract_current_gate_overlap_pairs(current_tensor, odd_pairs=False) + current_tensor = self.replace_current_overlap_pairs(current_tensor, current_overlap_pairs, top_pair, top_pair_reduced, new_tokens, odd_pairs=False) + + # 2) Then Replace one all odd pairs + current_overlap_pairs = self.extract_current_gate_overlap_pairs(current_tensor, odd_pairs=True) + current_tensor = self.replace_current_overlap_pairs(current_tensor, current_overlap_pairs, top_pair, top_pair_reduced, new_tokens, odd_pairs=True) + + return current_tensor + + def extract_current_gate_overlap_pairs(self, current_tensor, odd_pairs: bool = True): + # Extract overlap_pairs + # ToDo optimize loops + + seq = current_tensor.shape[-1] + seq_half = seq // 2 + assert seq % 2 == 0 + + overlap_pairs = [] + for current_tensor_i in current_tensor: + _overlap_pairs = [] + + if odd_pairs: + for t in range(seq_half-1): + _overlap_pairs.append(current_tensor_i[:, 1+2*t:1+2*(t+1)]) + else: + for t in range(seq_half): + _overlap_pairs.append(current_tensor_i[:, 2*t:2*(t+1)]) + + overlap_pairs.append(torch.stack(_overlap_pairs)) + + overlap_pairs = torch.stack(overlap_pairs) + return overlap_pairs + + def replace_current_overlap_pairs(self, current_tensor, overlap_pairs, top_pair, top_pair_reduced, new_tokens, odd_pairs): + + overlap_pairs_std = self.standardize_overlap_pairs(overlap_pairs) + is_top_overlap_pair = (overlap_pairs_std==top_pair).all(dim=(-1,-2), keepdim=False) + + new_tensor = torch.full_like(current_tensor, self.padding_token) + + for i in range(is_top_overlap_pair.shape[0]): #ToDo: this loop can be put in 
parallel! is batch dim + t = 1 if odd_pairs else 0 + + for j in range(is_top_overlap_pair.shape[1]): + + if is_top_overlap_pair[i, j]: #replace + + new_col = torch.zeros((current_tensor.shape[1]), dtype=overlap_pairs.dtype, device=overlap_pairs.device) + + for new_token, top_pair_reduced_i in zip(new_tokens, top_pair_reduced): + ind = (overlap_pairs[i, j]==top_pair_reduced_i).all(-1) + new_col = torch.where(ind, new_token, new_col) + + new_col = new_col.unsqueeze(-1) + + tp1 = t + 1 + + else: #just copy old + new_col = overlap_pairs[i, j] + tp1 = t + 2 + + new_tensor[i, :, t:tp1] = new_col + t = tp1 + + if odd_pairs: + # copy first and last col + new_tensor[..., 0] = current_tensor[..., 0] + new_tensor[..., -1] = current_tensor[..., -1] + + return new_tensor + + #--------------------------------------- + # Find new pairs + + def extract_new_gate_overlap_pairs(self, current_tensor): + #current_tensor = current_tensor.abs() + + isgate_token = 1 - torch.isin(current_tensor.abs(), self.not_gates_tokens.to(current_tensor.device)).int() + + # These are postions of the pairs (therefore shape-1) in which we have an overlap + overlaps = isgate_token[..., :-1] + isgate_token[..., 1:] + overlaps = (overlaps>1).int() + + # Number of overlaps two gates have! we can say here only take 2 overlaps, or min 2, or min 1, eg.. -> 0 means parallel!! + overlaps_cnt = torch.count_nonzero(overlaps, dim=1) + overlaps_ind = (overlaps_cnt>0) + + # Extract overlap_pairs + # ToDo optimize loops + + overlap_pairs = [] + for current_tensor_i, overlaps_ind_i in zip(current_tensor, overlaps_ind): + for t in range(current_tensor_i.shape[-1]-1): + if overlaps_ind_i[t]: + overlap_pairs.append(current_tensor_i[:, t:t+2]) + + overlap_pairs = torch.stack(overlap_pairs) + return overlap_pairs + + def standardize_overlap_pairs(self, overlap_pairs): + # Now we convert to std form, where the 1st gate gives the main order and the 2nd the secondory, this should remove all(?) redundant combinations! 
+ + # 1) sort inner SECOND gate such that gate 2 is always on top + inner_sorted_gate2, inner_sorted_gate2_indices = torch.sort(overlap_pairs[..., 1], dim=-1, descending=True, stable=False) + inner_sorted_gate1 = torch.gather(overlap_pairs[..., 0], dim=-1, index=inner_sorted_gate2_indices) + + inner_overlap_pairs = torch.stack((inner_sorted_gate1, inner_sorted_gate2), dim=-1) + + # 2) sort outer FISRT gate such that gate 1 is always on top, NOTE WE NEED STABLE SORT TO CONSERVE INNER ORDER + outer_sorted_gate1, outer_sorted_gate1_indices = torch.sort(inner_overlap_pairs[..., 0], dim=-1, descending=True, stable=True) + outer_sorted_gate2 = torch.gather(inner_overlap_pairs[..., 1], dim=-1, index=outer_sorted_gate1_indices) + + overlap_pairs_std_form = torch.stack((outer_sorted_gate1, outer_sorted_gate2), dim=-1) + + return overlap_pairs_std_form.contiguous() + + def get_topk_pairs(self, overlap_pairs, k): + # Now we can easily count the unique valid pairs! + pot_pairs, pot_pairs_cnts = overlap_pairs.unique(dim=0, return_counts=True) + + # Get topk best pairs + topv, topi = torch.topk(pot_pairs_cnts, k) + top_pairs = pot_pairs[topi] + + return top_pairs, topv + + #--------------------------------------- + # Encoding + + def encode(self, tensors): + # just replay all the pair replacements from learn, i.e. 
the vocab + + s = tensors.shape[1] + current_tensor = tensors + + for new_tokens, top_pair in tqdm(self.token_lookup_raw.items()): + top_pair = self.standardize_vocab_pair(top_pair, s, sort=True) + new_tokens = torch.tensor(new_tokens, device=top_pair.device, dtype=top_pair.dtype) + + current_tensor = self.learn_step(current_tensor, top_pair, new_tokens=new_tokens) + + return current_tensor + + def standardize_vocab_pair(self, vocab_pair, s, sort: bool = True): + + if vocab_pair.shape[0]<2: # repeat for special gadgets which have full symetric sequential connection + vocab_pair = vocab_pair.repeat(2, 1) + + vocab_pair = F.pad(vocab_pair, [0, 0, 0, s-vocab_pair.shape[0]]) # pad to full systemsize to have nice plotting + + if sort: + vocab_pair = self.standardize_overlap_pairs(vocab_pair) + + return vocab_pair.contiguous() + + #--------------------------------------- + # Decoding + + def unpack_col(self, col): + # col is [s, 1] + s, _ = col.shape + + current_tokens = col.unique() + current_tokens = current_tokens[current_tokens!=0] + k = tuple(current_tokens.tolist()) + + if k in self.token_lookup: + + # Unpack one col + unpacked = torch.zeros((s, 2), dtype=col.dtype, device=col.device) + new_config = self.token_lookup[k] + + for current_token, new_config_i in zip(current_tokens, new_config): + ind = (col==current_token) + unpacked = torch.where(ind, new_config_i, unpacked) + + # Repeat unpacking for both new cols + col1, col2 = unpacked.chunk(2, dim=-1) + + unpacked1 = self.unpack_col(col1) + unpacked2 = self.unpack_col(col2) + + unpacked = torch.cat([unpacked1, unpacked2], dim=-1) + return unpacked + + return col + + def decode(self, tensor, cut_padding: bool = False): + # split into cols we unpack, then recursively + # tensor ... 
[s, t] + assert tensor.dim() == 2 + + cols = tensor.chunk(tensor.shape[-1], dim=-1) + unpacked = torch.cat([self.unpack_col(col) for col in cols], dim=-1) + + if cut_padding: + # Cut from right as this was added padding in packing + unpacked = unpacked[..., :tensor.shape[-1]] + + return unpacked + +# %% ../../../src/platform/tokenizer/tensor_tokenizer.ipynb 6 +def sort_config(vocab_config): + """Sort a vocab_config for nicer plotting.""" + + t = vocab_config.shape[-1] + all_inds = set(range(t)) + + # Sort one ind, gather the rest + for i in reversed(range(t)): + gather_inds = all_inds - {i} + + sorted_gates = [None] * t + + sorted_gate_i, sorted_gate_i_indices = torch.sort(vocab_config[..., i], dim=-1, descending=True, stable=True) + sorted_gates[i] = sorted_gate_i + + for gather_ind in gather_inds: + sorted_gates[gather_ind] = torch.gather(vocab_config[..., gather_ind], dim=-1, index=sorted_gate_i_indices) + + vocab_config = torch.stack(sorted_gates, dim=-1) + + return vocab_config + +# %% ../../../src/platform/tokenizer/tensor_tokenizer.ipynb 7 +def get_topk_depth_unpacked(gate_pair_tokenizer, s, use_raw=False, standardize=True): + """Useful for plotting.""" + + # Sort into depths + unpacked_vocab_configs_depths = {} + unpacked_vocab_configs_cnts_depths = {} + + if use_raw: + iters = zip(gate_pair_tokenizer.token_lookup_raw.items(), gate_pair_tokenizer.token_cnts.values()) + else: + iters = zip(gate_pair_tokenizer.token_lookup.items(), gate_pair_tokenizer.token_cnts.values()) + + for (vocab_tokens, vocab_config), vocab_config_cnts in tqdm(iters, total=len(gate_pair_tokenizer.token_cnts)): + + tok = vocab_tokens[0] + token_depth = gate_pair_tokenizer.token_depth[tok] + + if standardize: + vocab_config = gate_pair_tokenizer.standardize_vocab_pair(vocab_config, s, sort=False) + unpacked_vocab_config = gate_pair_tokenizer.decode(vocab_config) + + #-------- + unpacked_vocab_config = sort_config(unpacked_vocab_config) + + if token_depth not in 
unpacked_vocab_configs_depths: + unpacked_vocab_configs_depths[token_depth] = [] + unpacked_vocab_configs_cnts_depths[token_depth] = [] + + unpacked_vocab_configs_depths[token_depth].append(unpacked_vocab_config) + unpacked_vocab_configs_cnts_depths[token_depth].append(vocab_config_cnts) + + return unpacked_vocab_configs_depths, unpacked_vocab_configs_cnts_depths diff --git a/genQC/printing.py b/genQC/printing.py deleted file mode 100644 index 0ba6250..0000000 --- a/genQC/printing.py +++ /dev/null @@ -1,86 +0,0 @@ -# AUTOGENERATED! DO NOT EDIT! File to edit: ../src/printing.ipynb. - -# %% auto 0 -__all__ = ['display_colums', 'ndarray_to_latex', 'tensor_to_latex', 'print_markdown', 'print_table'] - -# %% ../src/printing.ipynb 3 -from .imports import * -from ipywidgets import widgets -if IN_NOTEBOOK: from IPython.display import Markdown - -# %% ../src/printing.ipynb 4 -def display_colums(display_list, num_col=3): - - outputs = [widgets.Output() for i in range(num_col)] - - for i in range(len(display_list)//num_col+1): - - ds = display_list[i*num_col:(i+1)*num_col] - - for d,output in zip(ds,outputs): - with output: - display(d) - - columns = widgets.HBox(outputs) - display(columns) - -# %% ../src/printing.ipynb 8 -def ndarray_to_latex(arr): - """Returns a LaTeX `{pmatrix*}[r]` as a string""" - if len(arr.shape) > 2: raise ValueError('pmatrix can at most display two dimensions') - lines = str(arr).replace('[', '').replace(']', '').splitlines() - rv = [r'\begin{pmatrix*}[r]'] - rv += [' ' + ' & '.join(l.split()) + r'\\' for l in lines] - rv += [r'\end{pmatrix*}'] - return '\n'.join(rv) - -# %% ../src/printing.ipynb 9 -def tensor_to_latex(tensor): - """Returns a `LaTeX {pmatrix*}[r]` as a string """ - if len(tensor.shape) > 2: raise ValueError('pmatrix can at most display two dimensions') - lines = str(tensor.numpy()).replace('[', '').replace(']', '').splitlines() - rv = [r'\begin{pmatrix*}[r]'] - rv += [' ' + ' & '.join(l.split()) + r'\\' for l in lines] - rv += 
[r'\end{pmatrix*}'] - return '\n'.join(rv) - -# %% ../src/printing.ipynb 11 -def print_markdown(text, print_raw=False): - if IN_NOTEBOOK and not print_raw: display(Markdown(text)) - else: print(text) - -# %% ../src/printing.ipynb 14 -def print_table(col_headings: list, data: np.array, row_headings=None, print_raw=False): - assert len(col_headings) == data.shape[1] - if row_headings is not None: assert len(row_headings) == data.shape[0] - - #-------------------------------- - head = "" - if row_headings is not None: head = "| " + head - - for col_heading in col_headings: head += f"|{col_heading}" - head += "|\n" - - #-------------------------------- - seperator = "" - if row_headings is not None: seperator = "|--" - - for col_heading in col_headings: seperator += "|--" - seperator += "|\n" - - #-------------------------------- - body = "" - for i, row in enumerate(data): - body_row = "" - for x in row: - body_row += f"|{x:.2f}" - - if row_headings is not None: - body_row = f"|{row_headings[i]}" + body_row - - body += body_row + "|\n" - - #-------------------------------- - table = head + seperator + body - - print_markdown(table, print_raw) diff --git a/genQC/scheduler/scheduler.py b/genQC/scheduler/scheduler.py index fc8c98a..6269434 100644 --- a/genQC/scheduler/scheduler.py +++ b/genQC/scheduler/scheduler.py @@ -1,30 +1,54 @@ +"""Base class for schedulers.""" + # AUTOGENERATED! DO NOT EDIT! File to edit: ../../src/scheduler/scheduler.ipynb. 
# %% auto 0 __all__ = ['Scheduler'] -# %% ../../src/scheduler/scheduler.ipynb 3 +# %% ../../src/scheduler/scheduler.ipynb 2 from ..imports import * -from ..util import virtual -from ..config_loader import * +from ..utils.config_loader import * -# %% ../../src/scheduler/scheduler.ipynb 4 -class Scheduler: +# %% ../../src/scheduler/scheduler.ipynb 3 +class Scheduler(abc.ABC): """Base class for all diffusion schedulers""" + def __init__(self): pass - @virtual + @abc.abstractmethod def set_timesteps(self): pass - @virtual + @abc.abstractmethod def step(self): pass - @virtual + @abc.abstractmethod def add_noise(self): pass + @abc.abstractmethod + def to(self): pass #--------------------------------------- + @staticmethod + def from_config(config, device: torch.device, save_path: str=None, verbose=True, silent=False): + """Use this if we have a loaded config.""" + + _config = copy.deepcopy(config) + + if exists(device): _config["device"] = device # for loading sub-models + else: device = _config.pop("device", "cpu") + + if "beta_schedule" in _config["params"]: + beta_schedule = _config["params"]["beta_schedule"] + + if "path:" in beta_schedule: + _config["params"]["beta_schedule"] = "path:" + save_path + beta_schedule[len("path:"):] + + scheduler = instantiate_from_config(_config) + return scheduler + + #--------------------------------------- + def get_config(self, without_metadata=False): if not without_metadata: config = {} @@ -37,13 +61,20 @@ def get_config(self, without_metadata=False): return config @property - @virtual + @abc.abstractmethod def params_config(self): return None #--------------------------------------- def unsqueeze_vector_to_shape(self, vec, shape): - vec = vec.flatten() - while len(vec.shape) < len(shape): - vec = vec.unsqueeze(-1) - return vec + return vec.view(*vec.shape, *([1] * (len(shape)-len(vec.shape))) ) + + #--------------------------------------- + + @classmethod + def from_scheduler(cls, scheduler, **kwargs): + _kwargs = 
scheduler.params_config + _kwargs = _kwargs | kwargs + + new_scheduler = cls(**_kwargs) + return new_scheduler diff --git a/genQC/scheduler/scheduler_ddim.py b/genQC/scheduler/scheduler_ddim.py index 5e2742c..9dc654f 100644 --- a/genQC/scheduler/scheduler_ddim.py +++ b/genQC/scheduler/scheduler_ddim.py @@ -1,20 +1,24 @@ +"""Denoising diffusion implicit models [(DDIM)](https://arxiv.org/abs/2010.02502).""" + # AUTOGENERATED! DO NOT EDIT! File to edit: ../../src/scheduler/scheduler_ddim.ipynb. # %% auto 0 __all__ = ['DDIMSchedulerOutput', 'DDIMScheduler'] -# %% ../../src/scheduler/scheduler_ddim.ipynb 3 +# %% ../../src/scheduler/scheduler_ddim.ipynb 2 from ..imports import * from .scheduler_ddpm import DDPMScheduler -# %% ../../src/scheduler/scheduler_ddim.ipynb 4 +# %% ../../src/scheduler/scheduler_ddim.ipynb 3 @dataclass class DDIMSchedulerOutput: prev_sample: torch.FloatTensor pred_original_sample: Optional[torch.FloatTensor] = None - + +# %% ../../src/scheduler/scheduler_ddim.ipynb 4 class DDIMScheduler(DDPMScheduler): """A `Scheduler` implementing [(DDIM)](https://arxiv.org/abs/2010.02502).""" + def __init__(self, device: Union[str, torch.device], num_train_timesteps: int = 1000, @@ -22,9 +26,11 @@ def __init__(self, beta_end: float = 0.02, beta_schedule: str = "linear", input_perturbation = 0.1, + prediction_type = "epsilon", + enable_zero_terminal_snr = True, eta: float = 0 ): - super().__init__(device, num_train_timesteps, beta_start, beta_end, beta_schedule, input_perturbation) + super().__init__(device, num_train_timesteps, beta_start, beta_end, beta_schedule, input_perturbation, prediction_type, enable_zero_terminal_snr) self.eta = eta #for stable diff ddim @@ -43,8 +49,8 @@ def params_config(self): #------------------------------------ # Inference functions - def set_timesteps(self, num_inference_steps: int): - super().set_timesteps(num_inference_steps) + def set_timesteps(self, num_inference_steps: Optional[int] = None, timesteps: Optional[torch.Tensor] 
= None): + super().set_timesteps(num_inference_steps=num_inference_steps, timesteps=timesteps) self.timesteps += self.steps_offset clamp_style = None # one of: None, "static", "dynamic" @@ -52,11 +58,13 @@ def set_timesteps(self, num_inference_steps: int): def step(self, model_output: torch.FloatTensor, timesteps: Union[int, torch.IntTensor], - sample: torch.FloatTensor + sample: torch.FloatTensor, + uncond_model_output: torch.FloatTensor = None # for CFG++ ) -> DDIMSchedulerOutput: """Denoising step""" prev_timesteps = timesteps - self.num_train_timesteps // self.num_inference_steps + # prev_timestep = torch.clamp(prev_timestep, 0, self.num_train_timesteps - 1) # NEW #get variance sched alphas_cumprod = self.unsqueeze_vector_to_shape(self.alphas_cumprod[timesteps], sample.shape) @@ -70,10 +78,29 @@ def step(self, #calc vars betas_cumprod = 1.0 - alphas_cumprod betas_cumprod_tm1 = 1.0 - alphas_cumprod_tm1 + + uncond_model_output = default(uncond_model_output, model_output) + + if self.prediction_type == "epsilon": + #estimate predicted sample + x0 = (sample - betas_cumprod.sqrt() * model_output) / alphas_cumprod.sqrt() + eps = uncond_model_output #model_output - #estimate predicted sample - x0 = (sample - betas_cumprod.sqrt() * model_output) / alphas_cumprod.sqrt() + elif self.prediction_type == "v-type": + a = alphas_cumprod.sqrt() + b = betas_cumprod.sqrt() + + x0 = a * sample - b * model_output + # eps = a * model_output + b * sample + eps = a * uncond_model_output + b * sample + + elif self.prediction_type == "x0": + x0 = model_output + eps = (sample - alphas_cumprod.sqrt() * uncond_model_output) / betas_cumprod.sqrt() + else: + raise NotImplementedError(f"{self.prediction_type} is not implemented for {self.__class__}.step()") + if self.clamp_style == None: pass elif self.clamp_style == "static": x0 = torch.clamp(x0, -1, 1) elif self.clamp_style == "dynamic": raise NotImplementedError("clamp_style == 'dynamic'") @@ -85,14 +112,12 @@ def step(self, std = 
self.eta * variance.sqrt() #direction to xt - dir_xt = ( betas_cumprod_tm1 - std.square() ).sqrt() * model_output + dir_xt = (betas_cumprod_tm1 - std.square()).sqrt() * eps #sample noise - noise = torch.randn(model_output.shape, device=self.device) - + noise = torch.randn_like(x0) + #estimate the prev sample xtm1 = alphas_cumprod_tm1.sqrt() * x0 + dir_xt + std * noise - # print(f"{timesteps=} {prev_timesteps=} ;;; x0: {x0.mean()}+-{x0.std()} xtm1: {xtm1.mean()}+-{xtm1.std()}") - return DDIMSchedulerOutput(prev_sample=xtm1, pred_original_sample=x0) diff --git a/genQC/scheduler/scheduler_ddpm.py b/genQC/scheduler/scheduler_ddpm.py index fd9c4e3..ac3d79d 100644 --- a/genQC/scheduler/scheduler_ddpm.py +++ b/genQC/scheduler/scheduler_ddpm.py @@ -1,18 +1,22 @@ +"""Denoising diffusion probabilistic models [(DDPM)](https://arxiv.org/abs/2006.11239): reverse beta is fixed and diagonal.""" + # AUTOGENERATED! DO NOT EDIT! File to edit: ../../src/scheduler/scheduler_ddpm.ipynb. # %% auto 0 __all__ = ['DDPMSchedulerOutput', 'DDPMScheduler'] -# %% ../../src/scheduler/scheduler_ddpm.ipynb 3 +# %% ../../src/scheduler/scheduler_ddpm.ipynb 2 from ..imports import * from .scheduler import Scheduler +from ..utils.config_loader import load_tensor -# %% ../../src/scheduler/scheduler_ddpm.ipynb 4 +# %% ../../src/scheduler/scheduler_ddpm.ipynb 3 @dataclass class DDPMSchedulerOutput: prev_sample: torch.FloatTensor pred_original_sample: Optional[torch.FloatTensor] = None - + +# %% ../../src/scheduler/scheduler_ddpm.ipynb 4 class DDPMScheduler(Scheduler): """A `Scheduler` implementing [(DDPM)](https://arxiv.org/abs/2006.11239)""" @@ -24,55 +28,82 @@ def __init__(self, beta_start: float = 0.0001, beta_end: float = 0.02, beta_schedule: str = "linear", - input_perturbation = 0.1 + input_perturbation = 0.1, + prediction_type = "epsilon", + enable_zero_terminal_snr = True ): super().__init__() self.device = device self.num_train_timesteps = torch.tensor(num_train_timesteps) 
self.num_inference_steps = torch.tensor(num_train_timesteps) - self.beta_start = beta_start - self.beta_end = beta_end + self.beta_start = beta_start + self.beta_end = beta_end self.beta_schedule = beta_schedule - + self.timesteps = torch.from_numpy(np.arange(0, num_train_timesteps)[::-1].copy().astype(np.int64)) #careful is defined reversed for easy denoising looping - + + self.input_perturbation = input_perturbation # Input Perturbation Reduces Exposure Bias in Diffusion Models, https://arxiv.org/pdf/2301.11706.pdf + self.prediction_type = prediction_type # one of "epsilon", "v-type", "x0", "mu" + + if self.prediction_type not in ["epsilon", "v-type", "x0"]: + raise NotImplementedError(f"{self.prediction_type} does is not implemented for {self.__class__}") + + #----------- + if beta_schedule == "linear": self.betas = torch.linspace(beta_start, beta_end, num_train_timesteps, dtype=torch.float32) - elif beta_schedule == "linear_sqrt": + elif beta_schedule == "linear_sqrt": #LDM self.betas = torch.linspace(beta_start ** 0.5, beta_end ** 0.5, num_train_timesteps, dtype=torch.float32) ** 2 - elif beta_schedule == "cos_alpha": #cosine-based-variance - f = lambda t: np.cos((t/self.num_train_timesteps + 0.008)*np.pi/2.016)**2 + elif beta_schedule == "cos_alpha": #cosine-based-variance + #print("[INFO]: using cos_alpha beta-schedule, ignoring beta_start and beta_end!") + f = lambda t: np.cos((t/self.num_train_timesteps + 0.008)*np.pi/2.016)**2 # is alpha_bar + _betas = [] + for i in range(self.num_train_timesteps): + b = 1.0-(f(i+1.0)/f(i)) + if not enable_zero_terminal_snr: # v-type allows zero terminal SNR + b = min(b, 0.999) # clipping disables zero terminal SNR + _betas.append(b) + self.betas = torch.tensor(_betas, dtype=torch.float32) + + elif beta_schedule == "cos_alpha4": #cosine-based-variance + #print("[INFO]: using cos_alpha4 beta-schedule, ignoring beta_start and beta_end!") + f = lambda t: np.cos((t/self.num_train_timesteps + 0.008)*np.pi/2.016)**4 # is 
alpha_bar _betas = [] - for i in range(self.num_train_timesteps): - _betas.append(min(1.0-(f(i+1.0)/f(i)),0.999)) - self.betas = torch.tensor(_betas, dtype=torch.float32) + for i in range(self.num_train_timesteps): + b = 1.0-(f(i+1.0)/f(i)) + if not enable_zero_terminal_snr: # v-type allows zero terminal SNR + b = min(b, 0.999) # clipping disables zero terminal SNR + _betas.append(b) + self.betas = torch.tensor(_betas, dtype=torch.float32) + + elif "path:" in beta_schedule: + _save_path = beta_schedule[len("path:"):] + self.betas = load_tensor(save_path=_save_path, device=device)["0"] + + print(f"[INFO]: Loaded beta_schedule ({beta_schedule}).") + else: - raise NotImplementedError(f"{beta_schedule} does is not implemented for {self.__class__}") - + raise NotImplementedError(f"{beta_schedule} is not implemented for {self.__class__}") + + #----------- + + if (self.prediction_type in ["v-type", "x0"]) and enable_zero_terminal_snr and (beta_schedule not in ["cos_alpha", "laplace"]): # v-type allows zero terminal SNR + self.betas = self.enforce_zero_terminal_snr(self.betas) + + #----------- + self.sigmas = torch.sqrt(self.betas) - self.alphas = 1.0 - self.betas - self.alphas_cumprod = torch.cumprod(self.alphas, dim=0) - - ## Is this used anymore? 
- self.sqrt_alphas = torch.sqrt(self.alphas) - self.sqrt_alphas_cumprod = torch.sqrt(self.alphas_cumprod) - self.sqrt_one_minus_alphas_cumprod = torch.sqrt(1 - self.alphas_cumprod) - ## - + self.alphas_cumprod = torch.cumprod(self.alphas, dim=0) #only do cumprod witj alphas, as betas will go within precision to zero + #---------- - # Input Perturbation Reduces Exposure Bias in Diffusion Models - # https://arxiv.org/pdf/2301.11706.pdf - - self.input_perturbation = input_perturbation - #---------- + self.to(self.device) + - self.to_device(self.device) - @property def params_config(self): params_config = {} @@ -81,32 +112,68 @@ def params_config(self): params_config["beta_start"] = self.beta_start params_config["beta_end"] = self.beta_end params_config["beta_schedule"] = self.beta_schedule - params_config["input_perturbation"] = self.input_perturbation + params_config["input_perturbation"] = self.input_perturbation + params_config["prediction_type"] = self.prediction_type return params_config - def to_device(self, device: Union[str, torch.device], non_blocking=False): - #non_blocking = self.non_blocking - + def to(self, device: Union[str, torch.device], non_blocking=False): self.device = device self.alphas_cumprod = self.alphas_cumprod.to(device, non_blocking=non_blocking) - self.sqrt_alphas_cumprod = self.sqrt_alphas_cumprod.to(device, non_blocking=non_blocking) - self.sqrt_one_minus_alphas_cumprod = self.sqrt_one_minus_alphas_cumprod.to(device, non_blocking=non_blocking) self.sigmas = self.sigmas.to(device, non_blocking=non_blocking) - self.sqrt_alphas = self.sqrt_alphas.to(device, non_blocking=non_blocking) self.betas = self.betas.to(device, non_blocking=non_blocking) self.num_train_timesteps = self.num_train_timesteps.to(device, non_blocking=non_blocking) self.num_inference_steps = self.num_inference_steps.to(device, non_blocking=non_blocking) - + return self + + #------------------------------------ + + @property + def SNR(self): + alphas_bar = 
self.alphas_cumprod + betas_bar = 1.0 - alphas_bar + return alphas_bar / betas_bar + #------------------------------------ # Inference functions + + def enforce_zero_terminal_snr(self, betas): + # Algorithm 1 in https://arxiv.org/pdf/2305.08891.pdf + + # Convert betas to alphas_bar_sqrt + alphas = 1 - betas + alphas_bar = alphas.cumprod(0) + alphas_bar_sqrt = alphas_bar.sqrt() + + # Store old values. + alphas_bar_sqrt_0 = alphas_bar_sqrt[0].clone() + alphas_bar_sqrt_T = alphas_bar_sqrt[-1].clone() + # Shift so last timestep is zero. + alphas_bar_sqrt -= alphas_bar_sqrt_T + # Scale so first timestep is back to old value. + alphas_bar_sqrt *= alphas_bar_sqrt_0 / (alphas_bar_sqrt_0 - alphas_bar_sqrt_T) + + # Convert alphas_bar_sqrt to betas + alphas_bar = alphas_bar_sqrt ** 2 + alphas = alphas_bar[1:] / alphas_bar[:-1] + alphas = torch.cat([alphas_bar[0:1], alphas]) + betas = 1 - alphas + return betas + + def set_timesteps(self, num_inference_steps: Optional[int] = None, timesteps: Optional[torch.Tensor] = None): + if exists(num_inference_steps): + if num_inference_steps >= self.num_train_timesteps: raise ValueError("num_inference_steps >= self.num_train_timesteps") + self.num_inference_steps = torch.tensor(num_inference_steps) + step_ratio = self.num_train_timesteps // self.num_inference_steps + timesteps = (np.arange(0, num_inference_steps) * step_ratio.item()).round()[::-1].copy().astype(np.int64) + self.timesteps = torch.from_numpy(timesteps) + + elif exists(timesteps): + self.num_inference_steps = torch.tensor(timesteps.shape[0]) + self.timesteps = timesteps.clone() + + else: + raise RuntimeError("provide `num_inference_steps` or `timesteps`") - def set_timesteps(self, num_inference_steps: int): - if num_inference_steps >= self.num_train_timesteps: raise ValueError("num_inference_steps >= self.num_train_timesteps") - self.num_inference_steps = torch.tensor(num_inference_steps) - step_ratio = self.num_train_timesteps // self.num_inference_steps - timesteps = 
(np.arange(0, num_inference_steps) * step_ratio.item()).round()[::-1].copy().astype(np.int64) - self.timesteps = torch.from_numpy(timesteps) - def step(self, model_output: torch.FloatTensor, timesteps: Union[int, torch.IntTensor], @@ -114,46 +181,44 @@ def step(self, ) -> DDPMSchedulerOutput: """Denoising step""" - sqrt_alphas_cumprod = self.unsqueeze_vector_to_shape(self.sqrt_alphas_cumprod[timesteps], sample.shape) + sqrt_alphas_cumprod = self.unsqueeze_vector_to_shape(self.sqrt_alphas_cumprod[timesteps], sample.shape) sqrt_one_minus_alphas_cumprod = self.unsqueeze_vector_to_shape(self.sqrt_one_minus_alphas_cumprod[timesteps], sample.shape) - + sigmas = self.unsqueeze_vector_to_shape(self.sigmas[timesteps], sample.shape) sqrt_alphas = self.unsqueeze_vector_to_shape(self.sqrt_alphas[timesteps], sample.shape) betas = self.unsqueeze_vector_to_shape(self.betas[timesteps], sample.shape) - - non_zero_t = (timesteps!=0).float() - - #estimate the final img - x0 = (sample - sqrt_one_minus_alphas_cumprod * model_output) / sqrt_alphas_cumprod #DDPM eq.15 - + + if self.prediction_type == "epsilon": + #estimate the final img + x0 = (sample - sqrt_one_minus_alphas_cumprod * model_output) / sqrt_alphas_cumprod #DDPM eq.15 + + xt_coeff = betas / sqrt_one_minus_alphas_cumprod + mu_t = (sample - xt_coeff * model_output) / sqrt_alphas + + elif self.prediction_type == "v-type": + x0 = sqrt_alphas_cumprod * sample - sqrt_one_minus_alphas_cumprod * model_output + + prev_timesteps = timesteps - self.num_train_timesteps // self.num_inference_steps + alphas_cumprod_tm1 = self.unsqueeze_vector_to_shape(self.alphas_cumprod[prev_timesteps], sample.shape) + + non_zero_tm1 = (prev_timesteps>=0.0).float() + non_zero_tm1 = self.unsqueeze_vector_to_shape(non_zero_tm1, sample.shape) + alphas_cumprod_tm1 = alphas_cumprod_tm1 * non_zero_tm1 + (1.0 - non_zero_tm1) * self.alphas_cumprod[0] + + mu_t = (betas * alphas_cumprod_tm1.sqrt() * x0 + sqrt_alphas * (1.0-alphas_cumprod_tm1) * sample) / 
sqrt_one_minus_alphas_cumprod + + else: + raise NotImplementedError(f"{self.prediction_type} is not implemented for {self.__class__}.step()") + #less noisy latent + non_zero_t = (timesteps>0).float() noise = torch.randn(sample.shape, device=self.device) noise = noise * non_zero_t.reshape(-1, 1, 1, 1) - - xt_coeff = betas / sqrt_one_minus_alphas_cumprod - xt = (sample - xt_coeff * model_output) / sqrt_alphas + sigmas * noise + + xt = mu_t + sigmas * noise return DDPMSchedulerOutput(prev_sample=xt, pred_original_sample=x0) - def add_noise_LEdit(self, original_samples: torch.FloatTensor): - # LEDITS: Real Image Editing with DDPM Inversion and Semantic Guidance; Note: SEGA (Semantic Guidance) is just multiple negative promts with a pixel based weight - # https://arxiv.org/pdf/2307.00522.pdf - - noisy_latents = [] - noises = [] - - noisy_latent_t = original_samples - - for t in self.timesteps[::-1]: #start from no noise and diffuse in non analytic fashion - noise_t = torch.randn_like(noise) - alpha_t = self.unsqueeze_vector_to_shape(self.alphas[t], original_samples.shape) - noisy_latent_t = torch.sqrt(alpha_t) * noisy_latent_t + torch.sqrt(1.0 - alpha_t) * noise_t - - noises.append(noise_t) - noisy_latents.append(noisy_latent_t) - - return noisy_latents[::-1], noises[::-1] # invert to self.timestep definition - #------------------------------------ # Training functions @@ -161,12 +226,13 @@ def add_noise(self, original_samples: torch.FloatTensor, noise: torch.FloatTensor, timesteps: torch.IntTensor, + train: bool=False ) -> torch.FloatTensor: - alphas_cumprod = self.unsqueeze_vector_to_shape(self.alphas_cumprod[timesteps], original_samples.shape) - noisy_latents = torch.sqrt(alphas_cumprod) * original_samples + torch.sqrt(1.0 - alphas_cumprod) * noise + alphas_cumprod = self.unsqueeze_vector_to_shape(self.alphas_cumprod[timesteps], original_samples.shape) + noisy_latents = torch.sqrt(alphas_cumprod) * original_samples + torch.sqrt(1.0 - alphas_cumprod) * noise # F^2 - 
if self.input_perturbation is not None: + if exists(self.input_perturbation) and train: noisy_latents = noisy_latents + torch.sqrt(1.0 - alphas_cumprod) * torch.randn_like(noise) * self.input_perturbation - return noisy_latents + return noisy_latents diff --git a/genQC/scheduler/scheduler_dpm.py b/genQC/scheduler/scheduler_dpm.py new file mode 100644 index 0000000..31bb173 --- /dev/null +++ b/genQC/scheduler/scheduler_dpm.py @@ -0,0 +1,125 @@ +"""DPM-Solver++: Fast Solver for Guided Sampling of Diffusion Probabilistic Models [(DPM-Solver)](https://arxiv.org/abs/2206.00927) [(DPM-Solver++)](https://arxiv.org/abs/2211.01095).""" + +# AUTOGENERATED! DO NOT EDIT! File to edit: ../../src/scheduler/scheduler_dpm.ipynb. + +# %% auto 0 +__all__ = ['DPMSchedulerOutput', 'DPMScheduler'] + +# %% ../../src/scheduler/scheduler_dpm.ipynb 2 +from ..imports import * +from .scheduler_ddpm import DDPMScheduler + +# %% ../../src/scheduler/scheduler_dpm.ipynb 3 +@dataclass +class DPMSchedulerOutput: + prev_sample: torch.FloatTensor + pred_original_sample: Optional[torch.FloatTensor] = None + +# %% ../../src/scheduler/scheduler_dpm.ipynb 4 +class DPMScheduler(DDPMScheduler): + """A `Scheduler` implementing [(DPM-Solver++)](https://arxiv.org/abs/2211.01095).""" + + def __init__(self, + device: Union[str, torch.device], + num_train_timesteps: int = 1000, + beta_start: float = 0.0001, + beta_end: float = 0.02, + beta_schedule: str = "linear", + input_perturbation = 0.1, + prediction_type = "epsilon", + enable_zero_terminal_snr = True, + solver_order: int = 2, + **kwargs + ) -> None: + super().__init__(device, num_train_timesteps, beta_start, beta_end, beta_schedule, input_perturbation, prediction_type, enable_zero_terminal_snr) + + self.solver_order = solver_order + if self.solver_order != 2: + raise NotImplementedError(f"{self.solver_order=} is not implemented for {self.__class__}") + + @property + def params_config(self): + params_config = super().params_config + 
params_config["solver_order"] = self.solver_order + return params_config + + #------------------------------------ + # Inference functions + + def step(self, + model_output: torch.FloatTensor, + timesteps: torch.IntTensor, + sample: torch.FloatTensor, + uncond_model_output: torch.FloatTensor = None, # for CFG++ + ) -> DPMSchedulerOutput: + """ + Denoising step of DPM-Solver++(2M) (Lu et al., 2022b), + implemeted as CFG++ variant (CFG++, https://arxiv.org/pdf/2406.08070) + """ + + uncond_model_output = default(uncond_model_output, model_output) + + assert timesteps.numel() == 1 + + # note: here we enforce the sampling to be strictly defined by self.timesteps + is_warmup_step = (self.timesteps[0] == timesteps) + # is_last_step = (self.timesteps[-1] == timesteps) + + alphas_cumprod = self.unsqueeze_vector_to_shape(self.alphas_cumprod[timesteps], sample.shape) + betas_cumprod = 1.0 - alphas_cumprod + + prev_timesteps = timesteps - self.num_train_timesteps // self.num_inference_steps + prev_timesteps = prev_timesteps.clamp(0, self.num_train_timesteps-1) + + alphas_cumprod_tm1 = self.unsqueeze_vector_to_shape(self.alphas_cumprod[prev_timesteps], sample.shape) + betas_cumprod_tm1 = 1.0 - alphas_cumprod_tm1 + + # --------- + if self.prediction_type == "v-type": + a = alphas_cumprod.sqrt() + b = betas_cumprod.sqrt() + + x0 = a * sample - b * model_output + x0_uncond = a * sample - b * uncond_model_output + + elif self.prediction_type == "x0": + x0 = model_output + x0_uncond = uncond_model_output + + else: + raise NotImplementedError(f"{self.prediction_type} is not implemented for {self.__class__}.step()") + + # --------- + solver_order = self.solver_order + # mod here for adyptive adjust, if needed + if solver_order == 2: + pass + + else: + raise NotImplementedError(f"{solver_order} is not implemented for {self.__class__}") + + # --------- + + lambda_t = 0.5 * torch.log(alphas_cumprod / betas_cumprod) + lambda_tm1 = 0.5 * torch.log(alphas_cumprod_tm1 / betas_cumprod_tm1) + 
+ h_tm1 = lambda_tm1 - lambda_t + + if is_warmup_step: + x_dir = alphas_cumprod_tm1.sqrt() * (x0 - torch.exp(-h_tm1) * x0_uncond) + + else: + r_tm1 = self.last_h_tm1 / h_tm1 + + sqrt_alphas_cumprod_tm1 = alphas_cumprod_tm1.sqrt() + exp_mhtm1 = torch.exp(-h_tm1) + + x_dir = sqrt_alphas_cumprod_tm1 * x0 - sqrt_alphas_cumprod_tm1 * exp_mhtm1 * x0_uncond + sqrt_alphas_cumprod_tm1 * (0.5/r_tm1) * (x0_uncond-self.last_x0_uncond) * (1.0-exp_mhtm1) + + xtm1 = (betas_cumprod_tm1/betas_cumprod).sqrt() * sample + x_dir + + # is needed for multistesp integration of DPM + self.last_x0_uncond = x0_uncond + self.last_h_tm1 = h_tm1 + + return DPMSchedulerOutput(prev_sample=xtm1, pred_original_sample=x0) diff --git a/genQC/util.py b/genQC/util.py deleted file mode 100644 index bc1dec0..0000000 --- a/genQC/util.py +++ /dev/null @@ -1,128 +0,0 @@ -# AUTOGENERATED! DO NOT EDIT! File to edit: ../src/util.ipynb. - -# %% auto 0 -__all__ = ['MemoryCleaner', 'virtual', 'DataLoaders', 'infer_torch_device', 'number_of_paramters', 'normalize_tensor', - 'scale_tensor', 'savePdf', 'savePng', 'saveSvg', 'plot_image_grid', 'latents_to_pil'] - -# %% ../src/util.ipynb 3 -from .imports import * -import gc, sys, traceback - -# %% ../src/util.ipynb 5 -class MemoryCleaner(): - """CLass with static methods to clean (gpu) memory.""" - - @staticmethod - def _clean_ipython_hist(): - # Code in this function mainly copied from IPython source - if not 'get_ipython' in globals(): return - ip = get_ipython() - user_ns = ip.user_ns - ip.displayhook.flush() - pc = ip.displayhook.prompt_count + 1 - for n in range(1, pc): user_ns.pop('_i'+repr(n),None) - user_ns.update(dict(_i='',_ii='',_iii='')) - hm = ip.history_manager - hm.input_hist_parsed[:] = [''] * pc - hm.input_hist_raw[:] = [''] * pc - hm._i = hm._ii = hm._iii = hm._i00 = '' - - @staticmethod - def _clean_tb(): - if hasattr(sys, 'last_traceback'): - traceback.clear_frames(sys.last_traceback) - delattr(sys, 'last_traceback') - if hasattr(sys, 'last_type'): 
delattr(sys, 'last_type') - if hasattr(sys, 'last_value'): delattr(sys, 'last_value') - - @staticmethod - def purge_mem(): - """Clear all. Purge all memory.""" - MemoryCleaner._clean_tb() - MemoryCleaner._clean_ipython_hist() - gc.collect() - torch.cuda.empty_cache() - -# %% ../src/util.ipynb 7 -def virtual(f: callable) -> callable: - '''Decorator to enfore subclass method implementations and raises error at method calls.''' - @functools.wraps(f) - def inner(self, *args, **kwargs): raise NotImplementedError(f"Virtual method {f.__name__} needs to be implemented by subclass {self.__class__.__name__}.") - return inner - -# %% ../src/util.ipynb 10 -class DataLoaders: - """Combines train and valid `DataLoader`.""" - def __init__(self, *dls: list[DataLoader]): self.train, self.valid = dls[:2] - -# %% ../src/util.ipynb 11 -def infer_torch_device(): - if torch.cuda.is_available(): - torch.backends.cudnn.benchmark = True - - dev_cap = torch.cuda.get_device_capability() - - if dev_cap[0] >= 8: # AMPERE - print(f"[INFO]: Cuda device has a capability of {dev_cap[0]}.{dev_cap[1]} (>= 8), allowing tf32 matmul.") - torch.backends.cuda.matmul.allow_tf32 = True - - return torch.device("cuda") - return torch.device("cpu") - -# %% ../src/util.ipynb 13 -def number_of_paramters(model: nn.Module): return sum([p.flatten().shape[0] for p in model.parameters()]) - -# %% ../src/util.ipynb 14 -def normalize_tensor(t: torch.Tensor): - """[0,1] to [-1,1]""" - return t * 2.0 - 1.0 - -def scale_tensor(t: torch.Tensor): - """[-1,1] to [0,1]""" - return (t / 2.0 + 0.5).clamp(0.0, 1.0) - -# %% ../src/util.ipynb 16 -def savePdf(filename): plt.savefig(filename + '.pdf', bbox_inches='tight') -def savePng(filename): plt.savefig(filename + '.png', bbox_inches='tight') -def saveSvg(filename): plt.savefig(filename + '.svg', bbox_inches='tight') - -# %% ../src/util.ipynb 17 -def plot_image_grid(imgs: Union[list, np.array, torch.Tensor], labels: list=None, labels_fs="medium", - figsize=(16, 4), cols=8, 
cmap="Greys", show_colorbar=False, **imshow_kwargs): - if type(imgs) is list: n = len(imgs) - elif type(imgs) is np.ndarray: n = imgs.shape[0] - elif type(imgs) is torch.Tensor: n = imgs.shape[0] - else: raise NotImplementedError("err type:", type(imgs)) - - if n == 0: return - - cols = min(n, cols) - rows = math.ceil(n/cols) - - fig, axs = plt.subplots(rows, cols, figsize=figsize, squeeze=False, constrained_layout=True) - for i, (r, c) in enumerate(itertools.product(range(rows), range(cols))): - plt.sca(axs[r,c]) - plt.axis('off') - - if i >= n: continue - - if labels is not None: plt.title(labels[i], fontsize=labels_fs) - p = plt.imshow(imgs[i], cmap=cmap, **imshow_kwargs) #cmap ignored for RGB - if show_colorbar: plt.colorbar(p) - - plt.show() - -# %% ../src/util.ipynb 19 -def latents_to_pil(latents:torch.Tensor, channels=None): - if channels is None: - channels = latents.shape[1] if len(latents.shape) > 3 else 1 - - images = scale_tensor(latents) - images = images.detach().cpu().permute(0, 2, 3, 1).numpy() - - if channels == 1: images = images[:, :, :, 0] - - images = (images * 255).round().astype(np.uint8) - - pil_images = [Image.fromarray(image) for image in images] - return pil_images diff --git a/genQC/utils/__init__.py b/genQC/utils/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/genQC/utils/async_fn.py b/genQC/utils/async_fn.py new file mode 100644 index 0000000..57267b6 --- /dev/null +++ b/genQC/utils/async_fn.py @@ -0,0 +1,60 @@ +"""Basic functions for async executions.""" + +# AUTOGENERATED! DO NOT EDIT! File to edit: ../../src/utils/async_fn.ipynb. 
+ +# %% auto 0 +__all__ = ['run_parallel_jobs', 'MemoryMappedArray'] + +# %% ../../src/utils/async_fn.ipynb 2 +from ..imports import * +from joblib import Parallel, delayed + +from tensordict.tensordict import MemoryMappedTensor +import tempfile + +# %% ../../src/utils/async_fn.ipynb 4 +def run_parallel_jobs(f: callable, loop_set, n_jobs: int = 1): + if n_jobs > 1: res = Parallel(n_jobs=n_jobs)(delayed(f)(x) for x in loop_set) + else: res = [f(x) for x in loop_set] + return res + +# %% ../../src/utils/async_fn.ipynb 6 +class MemoryMappedArray(): + def __init__(self, obj, type="tensor"): + self.obj = obj + self.type = type + assert type in ["tensor", "numpy"] + + with tempfile.NamedTemporaryFile(delete=False) as file: + # Note can bes simplified with python 3.12 as we can set delete=true, and delete_on_close=True, so it will be kept and we dont need to delete + # see https://docs.python.org/3.12/library/tempfile.html + + self.temporaryFileName = file.name + file.close() + + if self.type == "numpy": + self.obj_memmap = np.memmap(filename=self.temporaryFileName, dtype=obj.dtype, mode='w+', shape=obj.shape) + self.obj_memmap[:] = self.obj[:] + self.obj_memmap.flush() + + elif self.type == "tensor": + self.obj_memmap = MemoryMappedTensor.from_tensor(self.obj.cpu(), filename=self.temporaryFileName, existsok=True) + + else: + raise NotImplementedError() + + def get_obj(self): + if self.type == "numpy": + self.obj = self.obj_memmap.copy() + + elif self.type == "tensor": + self.obj = self.obj_memmap.contiguous().clone().to(self.obj.device) + + del self.obj_memmap + return self.obj, self.temporaryFileName + + @staticmethod + def clean(temp_files): + for temp_file in temp_files: + try: os.remove(temp_file) + except Exception as e: print(f"[ERROR]: {e}") diff --git a/genQC/utils/config_loader.py b/genQC/utils/config_loader.py new file mode 100644 index 0000000..c0f7f40 --- /dev/null +++ b/genQC/utils/config_loader.py @@ -0,0 +1,120 @@ +"""Functions to load and store models and 
datasets.""" + +# AUTOGENERATED! DO NOT EDIT! File to edit: ../../src/utils/config_loader.ipynb. + +# %% auto 0 +__all__ = ['class_to_str', 'load_config', 'config_to_dict', 'save_dataclass_yaml', 'save_dict_yaml', 'get_obj_from_str', + 'instantiate_from_config', 'store_model_state_dict', 'load_model_state_dict', 'store_tensor', 'load_tensor'] + +# %% ../../src/utils/config_loader.ipynb 3 +from ..imports import * +from omegaconf import OmegaConf + +from safetensors.torch import load_file as load_safetensors +from safetensors.torch import save_file as save_safetensors +from safetensors.numpy import load_file as load_safetensors_np +from safetensors.numpy import save_file as save_safetensors_np +from safetensors import safe_open + +# %% ../../src/utils/config_loader.ipynb 5 +def class_to_str(cls): + return str(cls)[8:-2] + +# %% ../../src/utils/config_loader.ipynb 6 +def load_config(file_path): + return OmegaConf.load(f"{file_path}") + +# %% ../../src/utils/config_loader.ipynb 7 +def config_to_dict(config): + return OmegaConf.to_container(config) + +# %% ../../src/utils/config_loader.ipynb 8 +def save_dataclass_yaml(data_obj, file_path): + conf = OmegaConf.structured(data_obj) + with open(file_path, 'w') as f: + OmegaConf.save(config=conf, f=f) + +# %% ../../src/utils/config_loader.ipynb 9 +def save_dict_yaml(dict_obj, file_path): + conf = OmegaConf.create(dict_obj) + with open(file_path, 'w') as f: + OmegaConf.save(config=conf, f=f) + +# %% ../../src/utils/config_loader.ipynb 14 +def get_obj_from_str(string, reload=False, invalidate_cache=True): + module, cls = string.rsplit(".", 1) + if invalidate_cache: + importlib.invalidate_caches() + if reload: + module_imp = importlib.import_module(module) + importlib.reload(module_imp) + return getattr(importlib.import_module(module, package=None), cls) + +# %% ../../src/utils/config_loader.ipynb 15 +def instantiate_from_config(config): + if not "target" in config: raise KeyError("Expected key `target` to instantiate.") + if 
not "params" in config: print("[WARNING] Expected key `params` to instantiate.") + return get_obj_from_str(config["target"])(**config.get("params", dict())) + +# %% ../../src/utils/config_loader.ipynb 17 +def store_model_state_dict(state_dict, save_path): + print(f"[INFO]: Saving model to `{save_path}`.") + + if save_path.endswith("ckpt") or save_path.endswith("pt"): + torch.save(state_dict, save_path) + + elif save_path.endswith("safetensors"): + save_safetensors(state_dict, save_path) + + else: + raise NotImplementedError(f"unknown filetype: {save_path}") + +# %% ../../src/utils/config_loader.ipynb 18 +def load_model_state_dict(save_path, device): + print(f"[INFO]: Loading model from `{save_path}` onto device: {device}.") + + if save_path.endswith("ckpt") or save_path.endswith("pt"): + state_dict = torch.load(save_path, map_location=torch.device(device).type, weights_only=True) + + elif save_path.endswith("safetensors"): + state_dict = load_safetensors(save_path, device=torch.device(device).type) + + else: + raise NotImplementedError(f"unknown filetype: {save_path}") + + return state_dict + +# %% ../../src/utils/config_loader.ipynb 21 +def store_tensor(tensor, save_path, type="tensor"): + print(f"[INFO]: Saving tensor to `{save_path}`.") + + if type=="numpy" and save_path.endswith("safetensors"): + save_path = save_path.replace(".safetensors", ".pt") + + if save_path.endswith("ckpt") or save_path.endswith("pt") or type=="numpy": + # serializing a string larger than 4 GiB requires pickle protocol 4 or higher; Protocol version 5 was added in Python 3.8. 
+ torch.save(tensor, save_path, pickle_protocol=5) + + elif save_path.endswith("safetensors") and type=="tensor": + save_safetensors(tensor, save_path) + + else: + raise NotImplementedError(f"unknown filetype: {save_path} or unknown type {type}") + +# %% ../../src/utils/config_loader.ipynb 22 +def load_tensor(save_path, device, type="tensor"): + print(f"[INFO]: Loading tensor from `{save_path}` onto device: {device}.") + + if type=="numpy" and save_path.endswith("safetensors"): + save_path = save_path.replace(".safetensors", ".pt") + + if save_path.endswith("ckpt") or save_path.endswith("pt") or type=="numpy": + tensor = torch.load(save_path, map_location=torch.device(device).type, weights_only=False) + + elif save_path.endswith("safetensors") and type=="tensor": + tensor = load_safetensors(save_path, device=torch.device(device).type) + + else: + raise NotImplementedError(f"unknown filetype: {save_path} or unknown type {type}") + + return tensor diff --git a/genQC/utils/math.py b/genQC/utils/math.py new file mode 100644 index 0000000..c2d1563 --- /dev/null +++ b/genQC/utils/math.py @@ -0,0 +1,41 @@ +"""Miscellaneous math and algorithm code""" + +# AUTOGENERATED! DO NOT EDIT! File to edit: ../../src/utils/math.ipynb. + +# %% auto 0 +__all__ = ['matrix_power', 'gram_schmidt'] + +# %% ../../src/utils/math.ipynb 2 +from ..imports import * + +# %% ../../src/utils/math.ipynb 4 +def matrix_power(x: torch.Tensor, p: float) -> torch.Tensor: + """ + Power of a matrix using Eigenspace Decomposition. Assuming decomposition of `x` exists. + """ + + vals, vecs = torch.linalg.eig(x) + vals_pow = torch.pow(vals, p) + matrix_pow = torch.matmul(vecs, torch.matmul(torch.diag(vals_pow), torch.inverse(vecs))) + + return matrix_pow + +# %% ../../src/utils/math.ipynb 6 +def gram_schmidt(X: torch.Tensor): + """ + Perform Gram–Schmidt orthonormalization on the vectors given by the rows of matrix X. + """ + assert X.dim() == 2, "Only 2-dim tensor supported." 
+ + X_type = X.dtype + X = X.to(torch.float64) + Q = [] + for q in X: + # Take the current row vector + # Subtract projec+tions onto existing basis vectors + for v in Q: + q = q - torch.dot(q, v) * v + # Normalize the vector + q = q / torch.norm(q) + Q.append(q) + return torch.stack(Q).to(X_type) diff --git a/genQC/utils/misc_utils.py b/genQC/utils/misc_utils.py new file mode 100644 index 0000000..08350e3 --- /dev/null +++ b/genQC/utils/misc_utils.py @@ -0,0 +1,235 @@ +"""Miscellaneous util code""" + +# AUTOGENERATED! DO NOT EDIT! File to edit: ../../src/utils/misc_utils.ipynb. + +# %% auto 0 +__all__ = ['MemoryCleaner', 'virtual', 'cache_data', 'DataLoaders', 'infer_torch_device', 'number_of_paramters', + 'normalize_tensor', 'scale_tensor', 'savePdf', 'savePng', 'saveSvg', 'plot_image_grid', 'latents_to_pil', + 'set_seed', 'get_element_matching_indices', 'get_entanglement_bins'] + +# %% ../../src/utils/misc_utils.ipynb 2 +from ..imports import * +import gc, traceback, inspect + +# %% ../../src/utils/misc_utils.ipynb 4 +class MemoryCleaner(): + """CLass with static methods to clean (gpu) memory.""" + + @staticmethod + def _clean_ipython_hist(): + # Code in this function mainly copied from IPython source + if not 'get_ipython' in globals(): return + ip = get_ipython() + user_ns = ip.user_ns + ip.displayhook.flush() + pc = ip.displayhook.prompt_count + 1 + for n in range(1, pc): user_ns.pop('_i'+repr(n),None) + user_ns.update(dict(_i='',_ii='',_iii='')) + hm = ip.history_manager + hm.input_hist_parsed[:] = [''] * pc + hm.input_hist_raw[:] = [''] * pc + hm._i = hm._ii = hm._iii = hm._i00 = '' + + @staticmethod + def _clean_tb(): + if hasattr(sys, 'last_traceback'): + traceback.clear_frames(sys.last_traceback) + delattr(sys, 'last_traceback') + if hasattr(sys, 'last_type'): delattr(sys, 'last_type') + if hasattr(sys, 'last_value'): delattr(sys, 'last_value') + + @staticmethod + def purge_mem(): + """Clear all. 
Purge all memory.""" + MemoryCleaner._clean_tb() + MemoryCleaner._clean_ipython_hist() + gc.collect() + torch.cuda.empty_cache() + + @staticmethod + def free_memory(to_delete: list): + """Remove objs of `to_delete` from namespace""" + calling_namespace = inspect.currentframe().f_back + for _var in to_delete: + del _var + calling_namespace.f_locals.pop(_var, None) + gc.collect() + torch.cuda.empty_cache() + +# %% ../../src/utils/misc_utils.ipynb 6 +def virtual(f: callable) -> callable: + '''Decorator to enfore subclass method implementations and raises error at method calls.''' + @functools.wraps(f) + def inner(self, *args, **kwargs): raise NotImplementedError(f"Virtual method {f.__name__} needs to be implemented by subclass {self.__class__.__name__}.") + return inner + +# %% ../../src/utils/misc_utils.ipynb 8 +def cache_data(file_name, force_recompute): + """ + A decorator that memorizes the result of a function and stores it. + Note, if the function or its arguments change we ignore it, we only check if the file exists! + + Parameters: + - file_name (str): The name of the file to store the memoized results. + - force_recompute (bool): If True, existing cache is ignored. 
+ """ + + #------------------- + def load(): + if os.path.exists(file_name) and not force_recompute: + return torch.load(file_name) + return None + + #------------------- + def save(cache): + if exists(cache): + os.makedirs(file_name[:file_name.rfind("/")] + "/", exist_ok=True) + torch.save(cache, file_name) + + #------------------- + def decorator(func: callable) -> callable: + @functools.wraps(func) + def inner(*args, **kwargs): + + cache = load() + + if not exists(cache): # run function normally + print(f"Computing: {func.__name__}") + cache = func(*args, **kwargs) + + save(cache) + print(f"Result saved") + + else: # loaded already from cache + print(f"Result retrieved from cache: {func.__name__}") + + return cache + return inner + return decorator + +# %% ../../src/utils/misc_utils.ipynb 10 +class DataLoaders: + """Combines train and valid `DataLoader` objects.""" + def __init__(self, *dls: list[DataLoader]): self.train, self.valid = dls[:2] + +# %% ../../src/utils/misc_utils.ipynb 11 +def infer_torch_device(): + if torch.cuda.is_available(): + torch.backends.cudnn.benchmark = True + + dev_cap = torch.cuda.get_device_capability() + + if dev_cap[0] >= 8: # AMPERE and up + print(f"[INFO]: Cuda device has a capability of {dev_cap[0]}.{dev_cap[1]} (>= 8), allowing tf32 matmul.") + torch.backends.cuda.matmul.allow_tf32 = True + torch.backends.cudnn.allow_tf32 = True + + return torch.device("cuda") + return torch.device("cpu") + +# %% ../../src/utils/misc_utils.ipynb 13 +def number_of_paramters(model: nn.Module): return sum([p.flatten().shape[0] for p in model.parameters()]) + +# %% ../../src/utils/misc_utils.ipynb 14 +def normalize_tensor(t: torch.Tensor): + """[0,1] to [-1,1]""" + return t * 2.0 - 1.0 + +def scale_tensor(t: torch.Tensor): + """[-1,1] to [0,1]""" + return (t / 2.0 + 0.5).clamp(0.0, 1.0) + +# %% ../../src/utils/misc_utils.ipynb 16 +def savePdf(filename): plt.savefig(filename + '.pdf', bbox_inches='tight') +def savePng(filename): plt.savefig(filename 
+ '.png', bbox_inches='tight') +def saveSvg(filename): plt.savefig(filename + '.svg', bbox_inches='tight') + +# %% ../../src/utils/misc_utils.ipynb 17 +def plot_image_grid(imgs: Union[list, np.array, torch.Tensor], labels: list=None, labels_fs="medium", + figsize=(16, 4), cols=8, cmap="Greys", show_colorbar=False, **imshow_kwargs): + if type(imgs) is list: n = len(imgs) + elif type(imgs) is np.ndarray: n = imgs.shape[0] + elif type(imgs) is torch.Tensor: n = imgs.shape[0] + else: raise NotImplementedError("err type:", type(imgs)) + + if n == 0: return + + cols = min(n, cols) + rows = math.ceil(n/cols) + + fig, axs = plt.subplots(rows, cols, figsize=figsize, squeeze=False, constrained_layout=True) + for i, (r, c) in enumerate(itertools.product(range(rows), range(cols))): + plt.sca(axs[r,c]) + plt.axis('off') + + if i >= n: continue + + if labels is not None: plt.title(labels[i], fontsize=labels_fs) + p = plt.imshow(imgs[i], cmap=cmap, **imshow_kwargs) #cmap ignored for RGB + if show_colorbar: plt.colorbar(p) + + plt.show() + +# %% ../../src/utils/misc_utils.ipynb 19 +def latents_to_pil(latents:torch.Tensor, channels=None): + if channels is None: + channels = latents.shape[1] if len(latents.shape) > 3 else 1 + + images = scale_tensor(latents) + images = images.detach().cpu().permute(0, 2, 3, 1).numpy() + + if channels == 1: images = images[:, :, :, 0] + + images = (images * 255).round().astype(np.uint8) + + pil_images = [Image.fromarray(image) for image in images] + return pil_images + +# %% ../../src/utils/misc_utils.ipynb 21 +def set_seed(seed: int): + """Sets a seed to pytorch, numpy and python. 
Additionally sets cuda flags.""" + + torch.manual_seed(seed) + np.random.seed(seed) + random.seed(seed) + + # see https://pytorch.org/docs/stable/notes/randomness.html + torch.backends.cudnn.benchmark = False + torch.backends.cudnn.deterministic = True + torch.use_deterministic_algorithms(True) + + # see https://docs.nvidia.com/cuda/cublas/index.html#results-reproducibility + os.environ["CUBLAS_WORKSPACE_CONFIG"] = ":4096:8" + +# %% ../../src/utils/misc_utils.ipynb 22 +def get_element_matching_indices(a: torch.Tensor, b: torch.Tensor) -> torch.Tensor: + """Compares (2d) `a` with `b`. Returns the indices of `b`, where a element of `a` matches with `b`.""" + # Expand dimensions of a to match the shape of b for element-wise comparison + expanded_a = a.unsqueeze(0).expand(b.shape[0], *a.shape) # [b0, a0, a1] + expanded_b = b.unsqueeze(1) # [b0, 1, b1] + + # Compare all vector entries of a with all vectors of b + matches = torch.all(expanded_a == expanded_b, dim=-1) + + matching_indices = torch.nonzero(torch.any(matches, dim=1)).squeeze() + + if matching_indices.dim() == 0: matching_indices = torch.tensor([matching_indices]) + + return matching_indices + +# %% ../../src/utils/misc_utils.ipynb 23 +def get_entanglement_bins(num_of_qubits: int) -> Tuple[List[List], List[str]]: + """Returns all SRV sorted in entangle bins, corresponding to a number of entangled qubits.""" + + dist_srvs = [x for x in itertools.product(*([[1,2]]*num_of_qubits))] + dist_srvs = np.array(dist_srvs, dtype=int)[np.sum(dist_srvs, axis=1)!=num_of_qubits+1].tolist() + dist_srvs = sorted(dist_srvs, key=lambda x: sum(x)) + dist_srvs = np.array(dist_srvs) + + entangle = [1] + [scipy.special.comb(num_of_qubits, i, exact=True) for i in range(2, num_of_qubits)] + + entanglement_bins = np.split(dist_srvs, np.cumsum(entangle)) + + ent_bits = [f"{sum(n[0])-num_of_qubits} qubit entangled" for n in entanglement_bins] + + return [x.tolist() for x in entanglement_bins], ent_bits diff --git 
a/get_started_files/figure-commonmark/cell-3-output-1.png b/get_started_files/figure-commonmark/cell-3-output-1.png new file mode 100644 index 0000000..14303bf Binary files /dev/null and b/get_started_files/figure-commonmark/cell-3-output-1.png differ diff --git a/index_files/figure-commonmark/cell-3-output-2.png b/index_files/figure-commonmark/cell-3-output-2.png deleted file mode 100644 index 23dd0f3..0000000 Binary files a/index_files/figure-commonmark/cell-3-output-2.png and /dev/null differ diff --git a/pyproject.toml b/pyproject.toml index f2c07bf..7df2707 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,3 +1,11 @@ [build-system] requires = ["setuptools>=64.0"] build-backend = "setuptools.build_meta" + +[project] +name="genQC" +requires-python=">=3.12" +dynamic = [ "keywords", "description", "version", "dependencies", "optional-dependencies", "readme", "license", "authors", "classifiers", "entry-points", "scripts", "urls"] + +[tool.uv] +cache-keys = [{ file = "pyproject.toml" }, { file = "settings.ini" }, { file = "setup.py" }] \ No newline at end of file diff --git a/saves/qc_unet_config_Compilation_3_qubit/config.yaml b/saves/qc_unet_config_Compilation_3_qubit/config.yaml deleted file mode 100644 index 9da2141..0000000 --- a/saves/qc_unet_config_Compilation_3_qubit/config.yaml +++ /dev/null @@ -1,77 +0,0 @@ -target: genQC.pipeline.diffusion_pipeline_special.DiffusionPipeline_Compilation -params: - scheduler: - target: genQC.scheduler.scheduler_ddim.DDIMScheduler - params: - device: cpu - num_train_timesteps: 1000 - beta_start: 0.0001 - beta_end: 0.02 - beta_schedule: cos_alpha - input_perturbation: 0.1 - eta: 1 - model: - target: genQC.models.unet_qc.QC_Compilation_UNet - save_path: null - params: - model_features: - - 128 - - 128 - - 256 - clr_dim: 8 - num_clrs: 8 - t_emb_size: 256 - cond_emb_size: 512 - num_heads: - - 8 - - 8 - - 2 - num_res_blocks: - - 2 - - 2 - - 4 - transformer_depths: - - 1 - - 2 - - 1 - unitary_encoder_config: - cond_emb_size: 
512 - model_features: - - 2 - - 32 - - 64 - - 512 - num_heads: 8 - transformer_depths: - - 2 - - 2 - dropout: 0.2 - text_encoder: - target: genQC.models.frozen_open_clip.CachedFrozenOpenCLIPEmbedder - save_path: null - params: - arch: ViT-B-32 - version: laion2b_s34b_b79k - device: cpu - max_length: 77 - freeze: true - layer: penultimate - device: cpu - enable_guidance_train: true - guidance_train_p: 0.1 - cached_text_enc: true - add_config: - dataset: - params: - num_of_qubits: 3 - min_gates: 2 - max_gates: 12 - gate_pool: - - h - - cx - - z - - x - - ccx - - swap - pad_constant: 7 - diff --git a/saves/qc_unet_config_Compilation_3_qubit/model.pt b/saves/qc_unet_config_Compilation_3_qubit/model.pt deleted file mode 100644 index 5d0b913..0000000 Binary files a/saves/qc_unet_config_Compilation_3_qubit/model.pt and /dev/null differ diff --git a/saves/qc_unet_config_SRV_3to8_qubit/config.yaml b/saves/qc_unet_config_SRV_3to8_qubit/config.yaml deleted file mode 100644 index 9a8065c..0000000 --- a/saves/qc_unet_config_SRV_3to8_qubit/config.yaml +++ /dev/null @@ -1,58 +0,0 @@ -target: genQC.pipeline.diffusion_pipeline.DiffusionPipeline -params: - scheduler: - target: genQC.scheduler.scheduler_ddim.DDIMScheduler - params: - device: cpu - num_train_timesteps: 1000 - beta_start: 0.0001 - beta_end: 0.02 - beta_schedule: cos_alpha - input_perturbation: 0.1 - eta: 1 - model: - target: genQC.models.unet_qc.QC_Cond_UNet - save_path: null - params: - model_features: - - 128 - - 128 - - 256 - clr_dim: 4 - num_clrs: 4 - t_emb_size: 256 - cond_emb_size: 512 - num_heads: - - 8 - - 8 - - 2 - num_res_blocks: - - 2 - - 2 - - 4 - transformer_depths: - - 1 - - 2 - - 1 - text_encoder: - target: genQC.models.frozen_open_clip.CachedFrozenOpenCLIPEmbedder - save_path: null - params: - arch: ViT-B-32 - version: laion2b_s34b_b79k - device: cpu - max_length: 77 - freeze: true - layer: penultimate - device: cpu - enable_guidance_train: true - guidance_train_p: 0.1 - cached_text_enc: true - 
add_config: - dataset: - comment: 'Generated with ''from_datasets'' with 6 datasets. Qubits: [3, 4, 5,6, 7, 8].' - params: - gate_pool: - - h - - cx - pad_constant: 3 diff --git a/saves/qc_unet_config_SRV_3to8_qubit/model.pt b/saves/qc_unet_config_SRV_3to8_qubit/model.pt deleted file mode 100644 index 5721c1b..0000000 Binary files a/saves/qc_unet_config_SRV_3to8_qubit/model.pt and /dev/null differ diff --git a/settings.ini b/settings.ini index b3e438a..e62ae21 100644 --- a/settings.ini +++ b/settings.ini @@ -3,9 +3,9 @@ ### Python library ### repo = genQC lib_name = %(repo)s -version = 0.1.1 -min_python = 3.10 -license = apache2 +version = 0.2.0 +min_python = 3.12 +license = apache2 black_formatting = False ### nbdev ### @@ -24,17 +24,19 @@ doc_baseurl = /%(repo)s git_url = https://github.com/%(user)s/%(repo)s title = %(lib_name)s +readme_nb = get_started.ipynb + custom_quarto_yml = True custom_sidebar = True ### PyPI ### author = Florian Fuerrutter author_email = f.fuerrutter@gmail.com -copyright = 2024 onwards, %(author)s +copyright = 2025 onwards, %(author)s audience = Developers -description = Generating quantum circuits with diffusion models -keywords = quantum-information diffusion-models generative-models +description = Generative quantum circuits +keywords = quantum-information diffusion-model generative-model language = English status = 3 -requirements = torch numpy matplotlib scipy pandas omegaconf qiskit tqdm joblib open_clip_torch ipywidgets pylatexenc huggingface_hub -dev_requirements = jupyterlab nbdev cudaq +requirements = torch numpy matplotlib scipy omegaconf qiskit tqdm joblib open_clip_torch ipywidgets pylatexenc safetensors tensordict huggingface_hub +dev_requirements = jupyterlab nbdev cudaq pennylane diff --git a/setup.py b/setup.py index e3281ae..d8de687 100644 --- a/setup.py +++ b/setup.py @@ -1,11 +1,14 @@ -from pkg_resources import parse_version +import shlex from configparser import ConfigParser -import setuptools, shlex + +import 
setuptools +from pkg_resources import parse_version + assert parse_version(setuptools.__version__)>=parse_version('36.2') # note: all settings are in settings.ini; edit there, not here config = ConfigParser(delimiters=['=']) -config.read('settings.ini', encoding='utf-8') +config.read('settings.ini', encoding="utf-8") cfg = config['DEFAULT'] cfg_keys = 'version description keywords author author_email'.split() @@ -18,17 +21,21 @@ 'mit': ('MIT License', 'OSI Approved :: MIT License'), 'gpl2': ('GNU General Public License v2', 'OSI Approved :: GNU General Public License v2 (GPLv2)'), 'gpl3': ('GNU General Public License v3', 'OSI Approved :: GNU General Public License v3 (GPLv3)'), + 'agpl3': ('GNU Affero General Public License v3', 'OSI Approved :: GNU Affero General Public License (AGPLv3)'), 'bsd3': ('BSD License', 'OSI Approved :: BSD License'), } -statuses = [ '1 - Planning', '2 - Pre-Alpha', '3 - Alpha', +statuses = [ '0 - Pre-Planning', '1 - Planning', '2 - Pre-Alpha', '3 - Alpha', '4 - Beta', '5 - Production/Stable', '6 - Mature', '7 - Inactive' ] -py_versions = '3.6 3.7 3.8 3.9 3.10'.split() +py_versions = '3.7 3.8 3.9 3.10 3.12 3.13'.split() -requirements = shlex.split(cfg.get('requirements', '')) +requirements = ['packaging'] +requirements += shlex.split(cfg.get('requirements', '')) if cfg.get('pip_requirements'): requirements += shlex.split(cfg.get('pip_requirements', '')) min_python = cfg['min_python'] lic = licenses.get(cfg['license'].lower(), (cfg['license'], None)) dev_requirements = (cfg.get('dev_requirements') or '').split() +project_urls = {} +if cfg.get('doc_host'): project_urls["Documentation"] = cfg['doc_host'] + cfg.get('doc_baseurl', '') setuptools.setup( name = cfg['lib_name'], @@ -45,13 +52,12 @@ extras_require={ 'dev': dev_requirements }, dependency_links = cfg.get('dep_links','').split(), python_requires = '>=' + cfg['min_python'], - long_description = open('README.md', encoding='utf-8').read(), + long_description = open('README.md', 
encoding="utf8").read(), long_description_content_type = 'text/markdown', zip_safe = False, entry_points = { 'console_scripts': cfg.get('console_scripts','').split(), 'nbdev': [f'{cfg.get("lib_path")}={cfg.get("lib_path")}._modidx:d'] }, - **setup_cfg) - - + project_urls = project_urls, + **setup_cfg) \ No newline at end of file diff --git a/src/404.qmd b/src/404.qmd new file mode 100644 index 0000000..e242c18 --- /dev/null +++ b/src/404.qmd @@ -0,0 +1,13 @@ +--- +title: Page Not Found +--- + +The page you requested cannot be found (perhaps it was moved or renamed). + +$$ +\begin{equation*} + \langle \;\text{you}\;|\;\text{this page}\;|\;\text{you}\;\rangle \;= \;\text{?} +\end{equation*} +$$ + +You may want to try searching to find the page's new location. \ No newline at end of file diff --git a/src/_quarto.yml b/src/_quarto.yml index 35d10bb..7c8c5ea 100644 --- a/src/_quarto.yml +++ b/src/_quarto.yml @@ -3,76 +3,86 @@ project: format: html: - # page-layout: full + page-layout: full theme: - light: simplex + light: [simplex, webpage/custom.scss] # dark: darkl - css: styles.css + css: webpage/styles.css + mainfont: "Lexend" toc: true code-copy: true code-overflow: wrap grid: content-mode: standard - sidebar-width: "350px" + sidebar-width: "380px" body-width: "1100px" - margin-width: "250px" + margin-width: "300px" #"250px" + gutter-width: 2.5rem website: - favicon: "assets/logo.png" + page-footer: + center: "Copyright 2025, Florian Fürrutter" + favicon: "webpage/assets/logo.png" open-graph: true - repo-actions: [issue] + repo-actions: [issue, source] back-to-top-navigation: true page-navigation: true navbar: - logo: "assets/logo.png" - background: "#5cb4c1" + logo: "webpage/assets/logo.png" + logo-alt: "genQC logo" search: true left: - - icon: file-text - href: "https://arxiv.org/abs/2311.02041" - text: "paper-arxiv" + - text: "Overview" + icon: list-ul + href: index.qmd + + - text: "Get Started" + icon: cursor-fill + href: get_started.ipynb + + - text: 
"Tutorials" + icon: cup-hot + href: examples/tutorials.qmd + + - text: "API Reference" + icon: file-text + href: webpage/api_reference.qmd + + - text: Research + icon: book-half + href: webpage/research.qmd + right: - icon: github href: "https://github.com/FlorianFuerrutter/genQC" - text: code repository + text: Code Repository sidebar: - collapse-level: 2 - style: "floating" - background: "#dde8ea" - foreground: "#674ea7" - - contents: - - text: "genQC · Generative Quantum Circuits" - href: index.ipynb - - section: Examples - contents: examples/* - - - section: Lib - contents: - - section: Pipeline - contents: pipeline/* - - section: Scheduler - contents: scheduler/* - - section: Inference - contents: inference/* - - section: Models - contents: models/* - - section: Dataset - contents: dataset/* - - section: Platform - contents: - - auto: platform/* - - section: Simulation - contents: platform/simulation/* - - section: Miscellaneous functions - contents: - - auto: /*.ipynb - - - text: "Release notes" - href: RELEASES.md + #------------------------------------- + - title: "Tutorials" + collapse-level: 1 + header: "Tutorials" + style: "floating" + contents: + - text: "Tutorials Overview" + href: examples/tutorials.qmd + - text: "---" + - auto: examples/**/!(tutorials.qmd)*{.qmd,.ipynb} + + #------------------------------------- + - title: "API Reference" + collapse-level: 1 + header: "API Reference" + style: "floating" + contents: + - text: "Modules Overview" + href: webpage/api_reference.qmd + - text: "Release notes" + href: RELEASES.md + - text: "---" + - auto: "/!(webpage|examples)/**/*" metadata-files: [nbdev.yml] diff --git a/src/benchmark/bench_compilation.ipynb b/src/benchmark/bench_compilation.ipynb new file mode 100644 index 0000000..7d14ceb --- /dev/null +++ b/src/benchmark/bench_compilation.ipynb @@ -0,0 +1,750 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "id": "77b00dc5-b55a-4d1c-a364-b4226779b409", + "metadata": {}, + "source": [ + "# Compilation 
benchmark\n", + "\n", + "> Functions to test and benchmark unitary compilation." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "a36b0d5a-5131-439c-82f5-bc551ecb24e1", + "metadata": {}, + "outputs": [], + "source": [ + "#| default_exp benchmark.bench_compilation" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "a593dee3-594e-4760-a02d-db5559f5f25f", + "metadata": {}, + "outputs": [], + "source": [ + "#| export\n", + "from genQC.imports import *" + ] + }, + { + "cell_type": "markdown", + "id": "7f7792a9-71d4-4443-93db-8b6361a42d76", + "metadata": {}, + "source": [ + "## Special unitaries" + ] + }, + { + "cell_type": "markdown", + "id": "584cfa35-3391-4d21-a130-773b8f701d86", + "metadata": {}, + "source": [ + "#### Quantum Fourier transform\n", + "\n", + "$$\n", + "\\begin{equation}\n", + " \\mathrm{QFT}: |x\\rangle \\mapsto \\frac{1}{\\sqrt{N}} \\sum_{k=0}^{N-1} \\omega_N^{xk}\\;|k\\rangle,\n", + "\\end{equation}\n", + "$$\n", + "where\n", + "$$\n", + "\\begin{equation}\n", + " \\omega_N=\\exp{\\frac{2\\pi i}{N}} \\quad\\text{and}\\quad N=2^{\\text{qubits}}.\n", + "\\end{equation}\n", + "$$" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "e9337291-4d9e-4524-8fe8-9d198a2abb24", + "metadata": {}, + "outputs": [], + "source": [ + "#| export\n", + "class SpecialUnitaries:\n", + " \"\"\"Special unitary matrices to benchmark compilation.\"\"\"\n", + " \n", + " @staticmethod\n", + " def QFT(num_qubits: int) -> torch.Tensor:\n", + " \"\"\"The Quantum Fourier transform (QFT) unitary for `num_qubits`-qubits.\"\"\"\n", + " \n", + " N = 2**num_qubits\n", + " wN = np.exp(2.0j*np.pi/N)\n", + "\n", + " U = torch.zeros((N, N), dtype=torch.complex128) \n", + " for x in range(N):\n", + " U[:, x] = torch.tensor([np.power(wN, x*k, dtype=complex) for k in range(N)])\n", + "\n", + " U *= 1.0/np.sqrt(N) \n", + " return U" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": 
"0b263212-b4e5-4d66-b2dd-d0eb44b4ec91", + "metadata": {}, + "outputs": [], + "source": [ + "# test QFT for N=4\n", + "QFT_2_qubits = 0.5 * torch.tensor([[1, 1, 1, 1],\n", + " [1, 1j, -1, -1j],\n", + " [1, -1, 1, -1],\n", + " [1, -1j, -1, 1j]], dtype=torch.complex128)\n", + "\n", + "assert torch.allclose(SpecialUnitaries.QFT(num_qubits=2), QFT_2_qubits)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "7420fa00-b96b-4ed9-b465-ee8015abf665", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "tensor([[ 0.3540+0.0000j, 0.3540+0.0000j, 0.3540+0.0000j, 0.3540+0.0000j, 0.3540+0.0000j, 0.3540+0.0000j, 0.3540+0.0000j, 0.3540+0.0000j],\n", + " [ 0.3540+0.0000j, 0.2500+0.2500j, 0.0000+0.3540j, -0.2500+0.2500j, -0.3540+0.0000j, -0.2500-0.2500j, 0.0000-0.3540j, 0.2500-0.2500j],\n", + " [ 0.3540+0.0000j, 0.0000+0.3540j, -0.3540+0.0000j, 0.0000-0.3540j, 0.3540+0.0000j, 0.0000+0.3540j, -0.3540+0.0000j, 0.0000-0.3540j],\n", + " [ 0.3540+0.0000j, -0.2500+0.2500j, 0.0000-0.3540j, 0.2500+0.2500j, -0.3540+0.0000j, 0.2500-0.2500j, 0.0000+0.3540j, -0.2500-0.2500j],\n", + " [ 0.3540+0.0000j, -0.3540+0.0000j, 0.3540+0.0000j, -0.3540+0.0000j, 0.3540+0.0000j, -0.3540+0.0000j, 0.3540+0.0000j, -0.3540+0.0000j],\n", + " [ 0.3540+0.0000j, -0.2500-0.2500j, 0.0000+0.3540j, 0.2500-0.2500j, -0.3540+0.0000j, 0.2500+0.2500j, 0.0000-0.3540j, -0.2500+0.2500j],\n", + " [ 0.3540+0.0000j, 0.0000-0.3540j, -0.3540+0.0000j, 0.0000+0.3540j, 0.3540+0.0000j, 0.0000-0.3540j, -0.3540+0.0000j, 0.0000+0.3540j],\n", + " [ 0.3540+0.0000j, 0.2500-0.2500j, 0.0000-0.3540j, -0.2500-0.2500j, -0.3540+0.0000j, -0.2500+0.2500j, 0.0000+0.3540j, 0.2500+0.2500j]], dtype=torch.complex128)" + ] + }, + "execution_count": null, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "np.round(SpecialUnitaries.QFT(3), 3)" + ] + }, + { + "cell_type": "markdown", + "id": "acdc565a-f01b-4e1e-8cc7-ffba730f46b2", + "metadata": {}, + "source": [ + "## Hamiltonian evolutions" + 
] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "86c32afa-32ad-4a52-9aa6-144d8f76f1f2", + "metadata": {}, + "outputs": [], + "source": [ + "#| export\n", + "sigma_x = torch.tensor([[0, 1],\n", + " [1, 0]],\n", + " dtype=torch.complex128)\n", + "\n", + "sigma_y = torch.tensor([[ 0, -1j],\n", + " [1j, 0]],\n", + " dtype=torch.complex128)\n", + "\n", + "sigma_z = torch.tensor([[1, 0],\n", + " [0, -1]],\n", + " dtype=torch.complex128)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "c9c7d034-6310-4f42-a626-686880060cd1", + "metadata": {}, + "outputs": [], + "source": [ + "assert torch.allclose(sigma_x@sigma_x, torch.eye(2, dtype=torch.complex128))\n", + "assert torch.allclose(sigma_y@sigma_y, torch.eye(2, dtype=torch.complex128))\n", + "assert torch.allclose(sigma_z@sigma_z, torch.eye(2, dtype=torch.complex128))" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "485317ad-e46c-4652-bd2b-89c0c03afea2", + "metadata": {}, + "outputs": [], + "source": [ + "#| export\n", + "def qubit_tensor_product(num_qubits: int, *ops: torch.Tensor, pos: int | Sequence[int]) -> torch.Tensor:\n", + " \"\"\"\n", + " Make tensor product with identities, assumes `ops` placed at `pos` in the tensor product ordering.\n", + " \"\"\"\n", + "\n", + " _ops = [torch.eye(2) for i in range(num_qubits)]\n", + "\n", + " if isinstance(pos, int):\n", + " pos = [pos]\n", + " elif isinstance(pos, Sequence):\n", + " assert len(pos) == len(ops)\n", + " else:\n", + " raise NotImplementedError()\n", + "\n", + " for pos_i, ops_i in zip(pos, ops):\n", + " _ops[pos_i] = ops_i\n", + " \n", + " mat = _ops[0]\n", + " for op in _ops[1:]:\n", + " mat = torch.kron(mat, op)\n", + "\n", + " return mat" + ] + }, + { + "cell_type": "markdown", + "id": "f5bddb3e-39fc-496f-9cdc-6cc1423c0b49", + "metadata": {}, + "source": [ + "$\\sigma_x \\otimes I$:" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "26bf121b-5a83-4ef3-b222-3ffe79dc4fea", 
+ "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "tensor([[0.+0.j, 0.+0.j, 1.+0.j, 0.+0.j],\n", + " [0.+0.j, 0.+0.j, 0.+0.j, 1.+0.j],\n", + " [1.+0.j, 0.+0.j, 0.+0.j, 0.+0.j],\n", + " [0.+0.j, 1.+0.j, 0.+0.j, 0.+0.j]], dtype=torch.complex128)" + ] + }, + "execution_count": null, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "qubit_tensor_product(2, sigma_x, pos=0)" + ] + }, + { + "cell_type": "markdown", + "id": "2f43cbd2-e966-46cc-99f5-8f36af63636f", + "metadata": {}, + "source": [ + "$I \\otimes \\sigma_x$:" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "a467fa4e-182f-47df-a4e5-50d98ff98e81", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "tensor([[0.+0.j, 1.+0.j, 0.+0.j, 0.+0.j],\n", + " [1.+0.j, 0.+0.j, 0.+0.j, 0.+0.j],\n", + " [0.+0.j, 0.+0.j, 0.+0.j, 1.+0.j],\n", + " [0.+0.j, 0.+0.j, 1.+0.j, 0.+0.j]], dtype=torch.complex128)" + ] + }, + "execution_count": null, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "qubit_tensor_product(2, sigma_x, pos=-1)" + ] + }, + { + "cell_type": "markdown", + "id": "ae425eac-ae09-4658-8e45-eaf370f6e7ed", + "metadata": {}, + "source": [ + "$\\sigma_z \\otimes \\sigma_z$:" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "c716c26d-86d2-42fd-9414-b5ef833cd000", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "tensor([[ 1.+0.j, 0.+0.j, 0.+0.j, 0.+0.j],\n", + " [ 0.+0.j, -1.+0.j, 0.+0.j, -0.+0.j],\n", + " [ 0.+0.j, 0.+0.j, -1.+0.j, -0.+0.j],\n", + " [ 0.+0.j, -0.+0.j, -0.+0.j, 1.-0.j]], dtype=torch.complex128)" + ] + }, + "execution_count": null, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "qubit_tensor_product(2, sigma_z, sigma_z, pos=[0, 1])" + ] + }, + { + "cell_type": "markdown", + "id": "e7f9d723-3105-47b5-8a6a-0ee492061e21", + "metadata": {}, + "source": [ + "#### Base Hamiltonian" + ] + }, + { + "cell_type": "code", + 
"execution_count": null, + "id": "ecb265cc-521a-44fd-bf8d-c97b46938932", + "metadata": {}, + "outputs": [], + "source": [ + "#| export\n", + "class BaseHamiltonian(abc.ABC):\n", + " \"\"\"Base implementation of a Hamiltonian.\"\"\"\n", + "\n", + " def __init__(self, device: Optional[str | torch.device] = None) -> None:\n", + " self.device = default(device, \"cpu\")\n", + " self._generate_matrix()\n", + " \n", + " if not torch.allclose(self.data.adjoint(), self.data):\n", + " raise RuntimeError(\"Generated Hamiltonian matrix is not self-adjoint!\")\n", + " \n", + " @abc.abstractmethod\n", + " def _generate_matrix(self) -> torch.Tensor:\n", + " \"\"\"Generates the Hamiltonian matrix into `self.data`.\"\"\"\n", + " raise NotImplementedError()\n", + "\n", + " def get_evolution(self, t: float | torch.Tensor, split_complex_channel: bool = False, dtype: Optional[torch.dtype] = None) -> torch.Tensor:\n", + " \"\"\"\n", + " Assuming `h_bar=1`. Returns the unitary evolution in marix form.\n", + " \"\"\"\n", + " U = torch.linalg.matrix_exp(-1j * t * self.data)\n", + "\n", + " if split_complex_channel:\n", + " U = torch.stack([torch.real(U), torch.imag(U)])\n", + "\n", + " if exists(dtype):\n", + " U = U.to(dtype)\n", + " \n", + " return U" + ] + }, + { + "cell_type": "markdown", + "id": "5135f0f1-c46d-4ba7-b27c-d665e5df66d7", + "metadata": {}, + "source": [ + "#### Ising Hamiltonian\n", + "\n", + "Defined as\n", + "$$\n", + "H = -J \\sum_{\\langle i, j \\rangle} \\sigma_i^z \\sigma_j^z - h \\sum_i \\sigma_i^x,\n", + "$$\n", + "where $J$ is the coupling constant and $h$ a magnetic field." 
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "8e1da512-6e2f-4eb4-aade-5cb822b6e6f9", + "metadata": {}, + "outputs": [], + "source": [ + "#| export\n", + "class IsingHamiltonian(BaseHamiltonian):\n", + " \"\"\"Implementation of the Ising Hamiltonian on a qubit chain.\"\"\"\n", + " \n", + " def __init__(self, \n", + " h: float, \n", + " J: float, \n", + " num_qubits: int, \n", + " periodic_boundary: bool = True,\n", + " device: Optional[str | torch.device] = None) -> None:\n", + " \"\"\"\n", + " h: Magnetic field \n", + " J: Coupling constant \n", + " \"\"\"\n", + " self.h = h\n", + " self.J = J \n", + " self.num_qubits = num_qubits\n", + " self.periodic_boundary = periodic_boundary\n", + " super().__init__(device)\n", + " \n", + " def _generate_matrix(self) -> torch.Tensor:\n", + " \"\"\"\n", + " Note: We take big endian convention in placing the `i,j`-sigmas in tensor product ordering.\n", + " For little endian we need to use `pos = self.num_qubits-i`.\n", + " \"\"\"\n", + " \n", + " N = 2**self.num_qubits\n", + " ham = torch.zeros((N, N), dtype=torch.complex128)\n", + "\n", + " pairs = [(i, i+1) for i in range(self.num_qubits-1)]\n", + " \n", + " if self.periodic_boundary:\n", + " pairs.append((self.num_qubits-1, 0))\n", + "\n", + " for (i, j) in pairs:\n", + " Z_term = qubit_tensor_product(self.num_qubits, sigma_z, sigma_z, pos=[i, j])\n", + "\n", + " # Coupling + Perturbation\n", + " ham += -self.J * Z_term\n", + "\n", + " # Magnetic\n", + " for i in range(self.num_qubits):\n", + " ham += -self.h * qubit_tensor_product(self.num_qubits, sigma_x, pos=i)\n", + "\n", + " self.data = ham.to(self.device)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "77a808ae-11db-448e-b52b-bd83f0299c12", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "tensor([[-2.+0.j, 0.+0.j, 0.+0.j, 0.+0.j],\n", + " [ 0.+0.j, 2.+0.j, 0.+0.j, 0.+0.j],\n", + " [ 0.+0.j, 0.+0.j, 2.+0.j, 0.+0.j],\n", + " [ 0.+0.j, 0.+0.j, 0.+0.j, 
-2.+0.j]], dtype=torch.complex128)" + ] + }, + "execution_count": null, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "hamiltonian = IsingHamiltonian(h=0, J=1, num_qubits=2)\n", + "hamiltonian.data" + ] + }, + { + "cell_type": "markdown", + "id": "23dbc4f0-e61f-46f0-9624-eb8e60c2a59e", + "metadata": {}, + "source": [ + "Eigenvalues of this Hamiltonian:" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "1cd6ff54-0f31-44a1-bbfa-d5753b1a8c55", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "tensor([-2., -2., 2., 2.], dtype=torch.float64)" + ] + }, + "execution_count": null, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "torch.linalg.eigvalsh(hamiltonian.data)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "dd201def-ba78-4262-b733-3484a0d916f6", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "tensor([[1.+0.j, 0.+0.j, 0.+0.j, 0.+0.j],\n", + " [0.+0.j, 0.+0.j, 0.+0.j, 1.+0.j],\n", + " [0.+0.j, 0.+0.j, 1.+0.j, 0.+0.j],\n", + " [0.+0.j, 1.+0.j, 0.+0.j, 0.+0.j]], dtype=torch.complex128)" + ] + }, + "execution_count": null, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "e, v = torch.linalg.eigh(hamiltonian.data)\n", + "v" + ] + }, + { + "cell_type": "markdown", + "id": "6935b595-89f5-4734-a80f-44d0342c33a1", + "metadata": {}, + "source": [ + "And the evolution unitary is:" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "715e07a0-f2bd-41ed-9d53-7bf370eebb9c", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "tensor([[0.5000+0.8660j, 0.0000+0.0000j, 0.0000+0.0000j, 0.0000+0.0000j],\n", + " [0.0000+0.0000j, 0.5000-0.8660j, 0.0000+0.0000j, 0.0000+0.0000j],\n", + " [0.0000+0.0000j, 0.0000+0.0000j, 0.5000-0.8660j, 0.0000+0.0000j],\n", + " [0.0000+0.0000j, 0.0000+0.0000j, 0.0000+0.0000j, 0.5000+0.8660j]], dtype=torch.complex128)" + ] + }, + 
"execution_count": null, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "hamiltonian.get_evolution(t=np.pi/6)" + ] + }, + { + "cell_type": "markdown", + "id": "3b01a2f2-14fc-453a-9076-8442b7a51c6a", + "metadata": {}, + "source": [ + "#### XXZ Hamiltonian\n", + "\n", + "Defined as\n", + "$$\n", + "H = -J \\sum_{\\langle i, j \\rangle} ( \\sigma_i^x \\sigma_j^x + \\sigma_i^y \\sigma_j^y + \\Delta \\sigma_i^z \\sigma_j^z ) - h \\sum_i \\sigma_i^x,\n", + "$$\n", + "where $J$ is the coupling constant, $\\Delta$ a perturbation and $h$ a magnetic field." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "bcff4597-de9d-436e-908a-ff683e868478", + "metadata": {}, + "outputs": [], + "source": [ + "#| export\n", + "class XXZHamiltonian(BaseHamiltonian):\n", + " \"\"\"Implementation of the XXZ Hamiltonian on a qubit chain.\"\"\"\n", + " \n", + " def __init__(self, \n", + " h: float, \n", + " J: float, \n", + " delta: float, \n", + " num_qubits: int, \n", + " periodic_boundary: bool = True,\n", + " device: Optional[str | torch.device] = None) -> None:\n", + " \"\"\"\n", + " h: Magnetic field \n", + " J: Coupling constant \n", + " delta: Perturbation\n", + " \"\"\"\n", + " self.h = h\n", + " self.J = J \n", + " self.delta = delta\n", + " self.num_qubits = num_qubits\n", + " self.periodic_boundary = periodic_boundary\n", + " super().__init__(device)\n", + " \n", + " def _generate_matrix(self) -> torch.Tensor:\n", + " \"\"\"\n", + " Note: We take big endian convention in placing the `i,j`-sigmas in tensor product ordering.\n", + " For little endian we need to use `pos = self.num_qubits-i`.\n", + " \"\"\"\n", + " \n", + " N = 2**self.num_qubits\n", + " ham = torch.zeros((N, N), dtype=torch.complex128)\n", + "\n", + " pairs = [(i, i+1) for i in range(self.num_qubits-1)]\n", + " \n", + " if self.periodic_boundary:\n", + " pairs.append((self.num_qubits-1, 0))\n", + "\n", + " for (i, j) in pairs:\n", + " X_term = 
qubit_tensor_product(self.num_qubits, sigma_x, sigma_x, pos=[i, j])\n", + " Y_term = qubit_tensor_product(self.num_qubits, sigma_y, sigma_y, pos=[i, j])\n", + " Z_term = qubit_tensor_product(self.num_qubits, sigma_z, sigma_z, pos=[i, j])\n", + "\n", + " # Coupling + Perturbation\n", + " ham += -self.J * (X_term + Y_term + self.delta * Z_term)\n", + "\n", + " # Magnetic\n", + " for i in range(self.num_qubits):\n", + " ham += -self.h * qubit_tensor_product(self.num_qubits, sigma_x, pos=i)\n", + "\n", + " self.data = ham.to(self.device)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "b4bb213f-bd28-4b4e-97a4-f8f285596394", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "tensor([[-2.+0.j, -1.+0.j, -1.+0.j, 0.+0.j],\n", + " [-1.+0.j, 2.+0.j, -4.+0.j, -1.+0.j],\n", + " [-1.+0.j, -4.+0.j, 2.+0.j, -1.+0.j],\n", + " [ 0.+0.j, -1.+0.j, -1.+0.j, -2.+0.j]], dtype=torch.complex128)" + ] + }, + "execution_count": null, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "hamiltonian = XXZHamiltonian(h=1, J=1, delta=1, num_qubits=2)\n", + "hamiltonian.data" + ] + }, + { + "cell_type": "markdown", + "id": "9f261b77-79ba-41fc-b47a-eeff4c05a7d7", + "metadata": {}, + "source": [ + "Eigenvalues of this Hamiltonian:" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "db06d406-5477-44a5-aec8-f7fab7895130", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "tensor([-4.0000e+00, -2.0000e+00, 8.8818e-16, 6.0000e+00], dtype=torch.float64)" + ] + }, + "execution_count": null, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "torch.linalg.eigvalsh(hamiltonian.data)" + ] + }, + { + "cell_type": "markdown", + "id": "85448086-ac18-4fa4-a887-8887ac062d19", + "metadata": {}, + "source": [ + "And the evolution unitary is:" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "0c9a6406-65f2-41c0-8eb2-26479e4df2f8", + "metadata": {}, + "outputs": 
[ + { + "data": { + "text/plain": [ + "tensor([[ 0.3750+0.6495j, -0.3750+0.2165j, -0.3750+0.2165j, -0.1250-0.2165j],\n", + " [-0.3750+0.2165j, -0.3750+0.2165j, 0.6250+0.2165j, -0.3750+0.2165j],\n", + " [-0.3750+0.2165j, 0.6250+0.2165j, -0.3750+0.2165j, -0.3750+0.2165j],\n", + " [-0.1250-0.2165j, -0.3750+0.2165j, -0.3750+0.2165j, 0.3750+0.6495j]], dtype=torch.complex128)" + ] + }, + "execution_count": null, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "hamiltonian.get_evolution(t=np.pi/6)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "d29002e5-db43-4562-86bc-b2cb0ec8ab57", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "tensor([[[ 0.3750, -0.3750, -0.3750, -0.1250],\n", + " [-0.3750, -0.3750, 0.6250, -0.3750],\n", + " [-0.3750, 0.6250, -0.3750, -0.3750],\n", + " [-0.1250, -0.3750, -0.3750, 0.3750]],\n", + "\n", + " [[ 0.6495, 0.2165, 0.2165, -0.2165],\n", + " [ 0.2165, 0.2165, 0.2165, 0.2165],\n", + " [ 0.2165, 0.2165, 0.2165, 0.2165],\n", + " [-0.2165, 0.2165, 0.2165, 0.6495]]], dtype=torch.float64)" + ] + }, + "execution_count": null, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "hamiltonian.get_evolution(t=np.pi/6, split_complex_channel=True)" + ] + }, + { + "cell_type": "markdown", + "id": "a12db28d-f69a-41bd-863d-9d2cd12494df", + "metadata": {}, + "source": [ + "# Export -" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "7eea527e-0aa8-4814-a413-c5e580d51969", + "metadata": {}, + "outputs": [], + "source": [ + "#| hide\n", + "import nbdev; nbdev.nbdev_export()" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "python3", + "language": "python", + "name": "python3" + }, + "widgets": { + "application/vnd.jupyter.widget-state+json": { + "state": {}, + "version_major": 2, + "version_minor": 0 + } + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/src/config_loader.ipynb b/src/config_loader.ipynb deleted 
file mode 100644 index 8c94d19..0000000 --- a/src/config_loader.ipynb +++ /dev/null @@ -1,247 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "id": "a8980c24-d62e-462b-ba89-3195cfdcc374", - "metadata": {}, - "source": [ - "# Config loader" - ] - }, - { - "cell_type": "markdown", - "id": "1bb62f14-03c7-4d64-b1b9-f1d3ae309b01", - "metadata": {}, - "source": [ - "Code using `omegaconf` to handle IO." - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "a8832bdd-f61c-44e1-8619-a9cb352ba768", - "metadata": {}, - "outputs": [], - "source": [ - "#| default_exp config_loader" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "06272f6f-b4e3-4504-a90a-feebbf6ad821", - "metadata": {}, - "outputs": [], - "source": [ - "#| export\n", - "from genQC.imports import *\n", - "from omegaconf import OmegaConf" - ] - }, - { - "cell_type": "markdown", - "id": "9b6c0b5e-4779-4c4a-98e9-46a3dca8bee6", - "metadata": {}, - "source": [ - "## IO" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "cb173637-3d18-4f94-8b95-76cda4117b1e", - "metadata": {}, - "outputs": [], - "source": [ - "#| export\n", - "def class_to_str(cls):\n", - " return str(cls)[8:-2]" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "e0d5bc35-cc53-42fb-8fcd-8f2bc66c7c9b", - "metadata": {}, - "outputs": [], - "source": [ - "#| export\n", - "def load_config(file_path):\n", - " return OmegaConf.load(f\"{file_path}\")" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "b105c04a-66d1-4450-8ee0-87aae618e60a", - "metadata": {}, - "outputs": [], - "source": [ - "#| export\n", - "def config_to_dict(config):\n", - " return OmegaConf.to_container(config)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "e3d81e5c-cf3d-4152-ab66-acd6e42ec3c9", - "metadata": {}, - "outputs": [], - "source": [ - "#| export\n", - "def save_dataclass_yaml(data_obj, file_path):\n", - " conf = 
OmegaConf.structured(data_obj)\n", - " with open(file_path, 'w') as f:\n", - " OmegaConf.save(config=conf, f=f)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "bf89bbfc-9d4e-442f-96ea-db1ab99505e9", - "metadata": {}, - "outputs": [], - "source": [ - "#| export\n", - "def save_dict_yaml(dict_obj, file_path):\n", - " conf = OmegaConf.create(dict_obj)\n", - " with open(file_path, 'w') as f:\n", - " OmegaConf.save(config=conf, f=f)" - ] - }, - { - "cell_type": "markdown", - "id": "ef21ca53-aa2c-4faa-877f-a9b39eeb8ff4", - "metadata": {}, - "source": [ - "Test" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "ed2b17fc-e4d9-4967-89cb-4a0bb28e39a2", - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "{'target': 'omegaconf.omegaconf.OmegaConf', 'clr_dim': 80, 'features': [1, 2, 3]}" - ] - }, - "execution_count": null, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "@dataclass\n", - "class MyConfig: \n", - " target:str = class_to_str(OmegaConf)\n", - " clr_dim: int = 80\n", - " features: list[int]=None\n", - " \n", - "c = MyConfig()\n", - "c.features = [1,2,3]\n", - "\n", - "OmegaConf.structured(c)" - ] - }, - { - "cell_type": "markdown", - "id": "a3cbe4ce-7e90-413b-b55e-e07a9eeb6d8f", - "metadata": {}, - "source": [ - "## Object config load" - ] - }, - { - "cell_type": "markdown", - "id": "3398beb4-8b77-4a8b-9075-b3f6a9775bcd", - "metadata": {}, - "source": [ - "Mostly taken from: https://github.com/Stability-AI/stablediffusion" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "6547d020-5764-4379-92b2-583d8f6f4bc5", - "metadata": {}, - "outputs": [], - "source": [ - "#| export\n", - "def get_obj_from_str(string, reload=False):\n", - " module, cls = string.rsplit(\".\", 1)\n", - " if reload:\n", - " module_imp = importlib.import_module(module)\n", - " importlib.reload(module_imp)\n", - " return getattr(importlib.import_module(module, package=None), cls)" - ] 
- }, - { - "cell_type": "code", - "execution_count": null, - "id": "ffc7fa40-81aa-42ed-ac23-8562ffdc8e4f", - "metadata": {}, - "outputs": [], - "source": [ - "#| export\n", - "def instantiate_from_config(config):\n", - " if not \"target\" in config: raise KeyError(\"Expected key `target` to instantiate.\")\n", - " if not \"params\" in config: print(f\"[WARNING] Expected key `params` to instantiate.\")\n", - " return get_obj_from_str(config[\"target\"])(**config.get(\"params\", dict()))" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "36032308-bd0e-4409-9db0-9d89fc258e5a", - "metadata": {}, - "outputs": [], - "source": [ - "#| export\n", - "def load_model_from_config(config, ckpt, device):\n", - " \n", - " print(f\"Loading model from {ckpt}\")\n", - " pl_sd = torch.load(ckpt, map_location=torch.device(device).type, weights_only=True)\n", - " \n", - " model = instantiate_from_config(config.model)\n", - " \n", - " sd = pl_sd[\"state_dict\"]\n", - " m, u = model.load_state_dict(sd, strict=True)\n", - " \n", - " return model.to(device)" - ] - }, - { - "cell_type": "markdown", - "id": "f41f26a8-ac40-4e91-8c0e-1ef07a0fd4f4", - "metadata": {}, - "source": [ - "# Export -" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "a0474216-8e0c-4ba7-9a37-571ac7d8e82c", - "metadata": {}, - "outputs": [], - "source": [ - "#| hide\n", - "import nbdev; nbdev.nbdev_export()" - ] - } - ], - "metadata": { - "kernelspec": { - "display_name": "python3", - "language": "python", - "name": "python3" - } - }, - "nbformat": 4, - "nbformat_minor": 5 -} diff --git a/src/dataset/balancing.ipynb b/src/dataset/balancing.ipynb new file mode 100644 index 0000000..23e4603 --- /dev/null +++ b/src/dataset/balancing.ipynb @@ -0,0 +1,157 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "id": "a8980c24-d62e-462b-ba89-3195cfdcc374", + "metadata": {}, + "source": [ + "# Dataset balancing\n", + "\n", + "> Helper functions used to balance a dataset." 
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "a8832bdd-f61c-44e1-8619-a9cb352ba768", + "metadata": {}, + "outputs": [], + "source": [ + "#| default_exp dataset.balancing" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "06272f6f-b4e3-4504-a90a-feebbf6ad821", + "metadata": {}, + "outputs": [], + "source": [ + "#| export\n", + "from genQC.imports import *\n", + "import genQC.dataset.dataset_helper as dahe" + ] + }, + { + "cell_type": "markdown", + "id": "7132df6f-d099-40e0-95a8-9d735211b2dc", + "metadata": {}, + "source": [ + "## Qircuit length balancing" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "e57ce23e-30fc-434f-9443-3cf97f507b89", + "metadata": {}, + "outputs": [], + "source": [ + "#| export\n", + "def get_tensor_gate_length(clr_tensor: torch.Tensor, padding_token: int = 0) -> torch.Tensor:\n", + " \"\"\"\n", + " Returns the gate count of a tokenized circuit.\n", + " Make sure you use use the correct `padding_token`.\n", + " \n", + " \"\"\"\n", + " assert clr_tensor.dim() == 3, \"[b, s, t]\"\n", + " \n", + " red_clr_tensor = (clr_tensor != padding_token).any(dim=1) # [b, t]\n", + " return torch.count_nonzero(red_clr_tensor, dim=1) # [b]" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "36032308-bd0e-4409-9db0-9d89fc258e5a", + "metadata": {}, + "outputs": [], + "source": [ + "#| export\n", + "def add_balance_fn_quantile_qc_length(indices: Union[np.ndarray, torch.Tensor], \n", + " x: Union[np.ndarray, torch.Tensor], \n", + " y: Union[np.ndarray, torch.Tensor], \n", + " *z, \n", + " padding_token: int = 0,\n", + " balance_quantile: float = 0.5,\n", + " device: torch.device = torch.device(\"cpu\"),\n", + " quantile_length_weights: Optional[Callable[[torch.Tensor, torch.Tensor], torch.Tensor]] = None) -> torch.Tensor:\n", + " \"\"\"Balances according to gate length.\"\"\"\n", + " \n", + " xb = x[indices].to(device)\n", + " l = get_tensor_gate_length(xb, 
padding_token=padding_token).to(device)\n", + " \n", + " l_uniques, l_uniques_cnt = torch.unique(l, dim=0, return_counts=True)\n", + "\n", + " #-----------------------------------\n", + " # samples = torch.min(l_uniques_cnt)\n", + " # samples = torch.median(l_uniques_cnt)\n", + " samples = torch.quantile(l_uniques_cnt.float(), balance_quantile, interpolation='nearest', dim=0).to(l_uniques_cnt.dtype)\n", + " samples = max(samples, 2)\n", + "\n", + " #-----------------------------------\n", + " sub_ind = list() \n", + " for l_unique in l_uniques.to(device): \n", + " comp = (l==l_unique)\n", + " ind = comp.nonzero().squeeze().cpu()\n", + " \n", + " if ind.dim() > 0:\n", + " if exists(quantile_length_weights):\n", + " _samples = int(quantile_length_weights(l_unique, samples))\n", + " else:\n", + " _samples = samples\n", + " \n", + " ind = dahe.shuffle_tensor_dataset(ind) \n", + " ind = ind[:_samples]\n", + " else:\n", + " ind = ind[None]\n", + " \n", + " sub_ind.append(ind)\n", + "\n", + " sub_ind = torch.cat(sub_ind, dim=0)\n", + " \n", + " indices = indices[sub_ind]\n", + " \n", + " if indices.ndim < 1: \n", + " indices = indices[None]\n", + " \n", + " return indices" + ] + }, + { + "cell_type": "markdown", + "id": "f41f26a8-ac40-4e91-8c0e-1ef07a0fd4f4", + "metadata": {}, + "source": [ + "# Export -" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "a0474216-8e0c-4ba7-9a37-571ac7d8e82c", + "metadata": {}, + "outputs": [], + "source": [ + "#| hide\n", + "import nbdev; nbdev.nbdev_export()" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "python3", + "language": "python", + "name": "python3" + }, + "widgets": { + "application/vnd.jupyter.widget-state+json": { + "state": {}, + "version_major": 2, + "version_minor": 0 + } + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/src/dataset/cached_qc_dataset.ipynb b/src/dataset/cached_dataset.ipynb similarity index 52% rename from src/dataset/cached_qc_dataset.ipynb rename to 
src/dataset/cached_dataset.ipynb index 5015d3f..a6d5234 100644 --- a/src/dataset/cached_qc_dataset.ipynb +++ b/src/dataset/cached_dataset.ipynb @@ -5,39 +5,45 @@ "id": "a8980c24-d62e-462b-ba89-3195cfdcc374", "metadata": {}, "source": [ - "# Cached quantum circuit dataset" + "# Cached dataset\n", + "\n", + "> Classes to create a dataset with cached labels." ] }, { - "cell_type": "markdown", - "id": "21762ddf-229e-4e48-aab6-b897c30ba1a4", + "cell_type": "code", + "execution_count": null, + "id": "a8832bdd-f61c-44e1-8619-a9cb352ba768", "metadata": {}, + "outputs": [], "source": [ - "Quantum circuit dataset that caches the `y` prompts using the CLIP encoder. This speeds up training significantly!" + "#| default_exp dataset.cached_dataset" ] }, { "cell_type": "code", "execution_count": null, - "id": "a8832bdd-f61c-44e1-8619-a9cb352ba768", + "id": "06272f6f-b4e3-4504-a90a-feebbf6ad821", "metadata": {}, "outputs": [], "source": [ - "#| default_exp dataset.cached_qc_dataset" + "#| export\n", + "from genQC.imports import *\n", + "from genQC.dataset.config_dataset import ConfigDataset, ConfigDatasetConfig\n", + "from genQC.utils.config_loader import *" ] }, { "cell_type": "code", "execution_count": null, - "id": "06272f6f-b4e3-4504-a90a-feebbf6ad821", + "id": "ac8e640e-c614-4d52-b772-c173b2682ad9", "metadata": {}, "outputs": [], "source": [ "#| export\n", - "from genQC.imports import *\n", - "from genQC.dataset.qc_dataset import Qc_Config_Dataset\n", - "from genQC.dataset.config_dataset import Config_Dataset\n", - "from genQC.config_loader import *" + "@dataclass\n", + "class CachedOpenCLIPDatasetConfig(ConfigDatasetConfig):\n", + " pass" ] }, { @@ -48,11 +54,17 @@ "outputs": [], "source": [ "#| export\n", - "class Cached_OpenClip_Dataset(Qc_Config_Dataset):\n", - " \"\"\"Adds `.caching` to the `Quantum circuit dataset` class.\"\"\"\n", + "class CachedOpenCLIPDataset(ConfigDataset):\n", + " \"\"\"\n", + " Adds `.caching` to the `ConfigDataset` class.\n", " \n", - " def 
x_y_preprocess(self, balance_max, max_samples=None):\n", - " x_proc, y_proc, *z = super().x_y_preprocess(balance_max=balance_max, max_samples=max_samples) \n", + " Cached dataset that caches the label `y` prompts using the CLIP `text_encoder`. This speeds up training significantly.\n", + " \"\"\"\n", + "\n", + " #-----------------------------------\n", + " \n", + " def x_y_preprocess(self, balance_max, shuffle=False, max_samples=None, make_unique=True):\n", + " x_proc, y_proc, *z = super().x_y_preprocess(balance_max=balance_max, shuffle=shuffle, max_samples=max_samples, make_unique=make_unique) \n", " y_proc = self.caching(y_proc)\n", " return x_proc, y_proc, *z\n", " \n", @@ -77,74 +89,24 @@ " if y_on_cpu: y_tok = y_tok.cpu()\n", " \n", " \n", - " #now for using cache we need the uniques and the corresponding indices of the uniques\n", - " y_uniques, y_ptrs = torch.unique(torch.cat([self.text_encoder.empty_token.to(y_tok.device), y_tok]), dim=0, return_inverse=True)\n", + " # Now for using cache we need the uniques and the corresponding indices of the uniques\n", + " y_uniques, y_ptrs = torch.unique(torch.cat([self.text_encoder.empty_token.to(y_tok.device), y_tok], dim=0), dim=0, return_inverse=True)\n", " \n", " cached_empty_token_index = y_ptrs[0] #store what index the empty token has \n", " y_ptrs = y_ptrs[1:] #remove the cat empty token\n", " \n", - " #use cache\n", + " # Use cache\n", " print(\" - generate_cache\") \n", " self.text_encoder.generate_cache(tokens=y_uniques, cached_empty_token_index=cached_empty_token_index, y_on_cpu=y_on_cpu)\n", " \n", - " print(\"[INFO]: Generated cache\") \n", - " return y_ptrs\n", + " print(f\"[INFO]: Generated cache, {y_ptrs.shape=}\") \n", + " return y_ptrs.clone()\n", " \n", " #-------------------------------------------\n", " \n", " def get_dataloaders(self, batch_size, text_encoder, p_valid=0.1, balance_max=None, max_samples=None):\n", " self.text_encoder = text_encoder \n", - " return 
super().get_dataloaders(batch_size, p_valid, balance_max, max_samples)\n", - " \n", - " #-------------------------------------------\n", - " \n", - " @staticmethod\n", - " def from_config_file(config_path, device: torch.device, save_path: str=None):\n", - " config = load_config(config_path)\n", - " config[\"target\"] = class_to_str(Cached_OpenClip_Dataset) \n", - " return Config_Dataset.from_config(config, device, save_path)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "9d0e389a-3567-4974-ae7a-a02b15760fb5", - "metadata": {}, - "outputs": [], - "source": [ - "#| hide\n", - "\n", - "# #| export\n", - "# class Cached_QcClip_Dataset(Qc_Config_Dataset):\n", - "# def x_y_preprocess(self, balance_max, use_new_tensor):\n", - "# x_proc, y_proc = super().x_y_preprocess(balance_max, use_new_tensor)\n", - " \n", - "# #-------------------------------------------\n", - "# print(\"[INFO]: Generate cache\") \n", - " \n", - "# #now for using cache we need the uniques nad the corresponding indices of the uniques\n", - "# empty_token = self.text_encoder.empty_token.expand(y_proc.shape)[:1] # [1, ...]\n", - " \n", - "# y_uniques, y_ptrs = torch.unique(torch.cat([empty_token, y_proc]), dim=0, return_inverse=True)\n", - " \n", - "# cached_empty_token_index = y_ptrs[0] #store what index the empty token has \n", - "# y_ptrs = y_ptrs[1:] #remove the cat empty token\n", - "\n", - "# #use cache\n", - "# self.text_encoder.generate_cache(tokens=y_uniques, cached_empty_token_index=cached_empty_token_index)\n", - " \n", - "# print(\"[INFO]: Generated cache\") \n", - "# return x_proc, y_ptrs\n", - " \n", - "# def get_dataloaders(self, batch_size, text_encoder, p_valid=0.1, balance_max=None, use_new_tensor=True):\n", - "# self.text_encoder = text_encoder \n", - "# return super().get_dataloaders(batch_size, p_valid, balance_max, use_new_tensor)\n", - " \n", - "# @staticmethod\n", - "# def from_config_file(config_path, device: torch.device, save_path: str=None):\n", - "# 
config = load_config(config_path)\n", - "# config[\"target\"] = class_to_str(Cached_QcClip_Dataset) \n", - "# return Config_Dataset.from_config(config, device, save_path)" + " return super().get_dataloaders(batch_size, p_valid, balance_max, max_samples) " ] }, { diff --git a/src/dataset/circuits_dataset.ipynb b/src/dataset/circuits_dataset.ipynb new file mode 100644 index 0000000..8c87bbc --- /dev/null +++ b/src/dataset/circuits_dataset.ipynb @@ -0,0 +1,438 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "id": "a8980c24-d62e-462b-ba89-3195cfdcc374", + "metadata": {}, + "source": [ + "# Quantum circuit dataset\n", + "\n", + "> Dataset for quantum circuits." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "a8832bdd-f61c-44e1-8619-a9cb352ba768", + "metadata": {}, + "outputs": [], + "source": [ + "#| default_exp dataset.circuits_dataset" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "06272f6f-b4e3-4504-a90a-feebbf6ad821", + "metadata": {}, + "outputs": [], + "source": [ + "#| export\n", + "from genQC.imports import *\n", + "from genQC.dataset.cached_dataset import CachedOpenCLIPDataset, CachedOpenCLIPDatasetConfig\n", + "from genQC.dataset.mixed_cached_dataset import MixedCachedOpenCLIPDataset, MixedCachedOpenCLIPDatasetConfig\n", + "from genQC.utils.config_loader import *\n", + "from genQC.dataset.config_dataset import ConfigDataset\n", + "from genQC.dataset.dataset_helper import shuffle_tensor_dataset\n", + "from genQC.utils.misc_utils import MemoryCleaner" + ] + }, + { + "cell_type": "markdown", + "id": "93281557-359a-4b89-906d-36ebfc72bf98", + "metadata": {}, + "source": [ + "## Simple Dataset" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "016fc327-f986-4d69-b5f0-1b39466fb528", + "metadata": {}, + "outputs": [], + "source": [ + "#| export\n", + "@dataclass\n", + "class CircuitsConfigDatasetConfig(CachedOpenCLIPDatasetConfig):\n", + " optimized: bool\n", + " random_samples: int \n", + " 
num_of_qubits: int \n", + " min_gates: int \n", + " max_gates: int \n", + " max_params: int\n", + " gate_pool: list[str]" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "36032308-bd0e-4409-9db0-9d89fc258e5a", + "metadata": {}, + "outputs": [], + "source": [ + "#| export\n", + "class CircuitsConfigDataset(CachedOpenCLIPDataset):\n", + " \"\"\"Dataset for quantum circuits, access `gate_pool` directly and all other paras with `.params_config`\"\"\"\n", + " \n", + " req_params = [f.name for f in dataclasses.fields(CircuitsConfigDatasetConfig)]\n", + "\n", + " #-----------------------------------\n", + " def __init__(self, device: torch.device=torch.device(\"cpu\"), **parameters) -> None:\n", + " super().__init__(device, **parameters) \n", + "\n", + " \n", + " if isinstance(list(parameters[\"gate_pool\"])[0], str):\n", + " self.gate_pool = list(parameters[\"gate_pool\"])\n", + " \n", + " else:\n", + " try:\n", + " self.gate_pool = [get_obj_from_str(node) for node in parameters[\"gate_pool\"]] \n", + " except Exception as er:\n", + " print(f\"[WARNING]: error => {er}\")\n", + " print(f\"[WARNING]: gate_pool is passed as str\")\n", + " self.gate_pool = [str(node) for node in parameters[\"gate_pool\"]] \n", + " \n", + " @property\n", + " def params_config(self):\n", + " params_config = super().params_config \n", + " \n", + " if type(self) == CircuitsConfigDataset:\n", + " params_config = CircuitsConfigDatasetConfig(**params_config)\n", + " return params_config " + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "e6322ed9-c703-41df-88a3-6b163c051af1", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "{'target': '__main__.CircuitsConfigDataset',\n", + " 'device': 'cpu',\n", + " 'comment': '',\n", + " 'save_path': None,\n", + " 'save_datetime': '06/01/2025 11:31:35',\n", + " 'save_type': 'safetensors',\n", + " 'params': CircuitsConfigDatasetConfig(store_dict={'x': 'tensor', 'y': 'tensor_list'}, 
dataset_to_gpu=None, optimized=None, random_samples=None, num_of_qubits=None, min_gates=None, max_gates=None, max_params=None, gate_pool=['qiskit.circuit.library.standard_gates.h.HGate', 'qiskit.circuit.library.standard_gates.x.CXGate'])}" + ] + }, + "execution_count": null, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "init = {k:None for k in CircuitsConfigDataset.req_params}\n", + "init[\"gate_pool\"] = [\"qiskit.circuit.library.standard_gates.h.HGate\",\n", + " \"qiskit.circuit.library.standard_gates.x.CXGate\"]\n", + "init[\"store_dict\"] = {\"x\":\"tensor\", \"y\":\"tensor_list\"}\n", + "\n", + "a = CircuitsConfigDataset(**init)\n", + "a.get_config()" + ] + }, + { + "cell_type": "markdown", + "id": "d85fedcd-5a95-4466-956f-055d887fe773", + "metadata": {}, + "source": [ + "## Mixed Dataset" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "75c9a200-f9eb-42f9-b3c3-e074e377737a", + "metadata": {}, + "outputs": [], + "source": [ + "#| export\n", + "@dataclass\n", + "class MixedCircuitsConfigDatasetConfig(CircuitsConfigDatasetConfig, MixedCachedOpenCLIPDatasetConfig):\n", + " pass" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "e804a8d7-dcf3-40e4-83a5-cd98207c8dea", + "metadata": {}, + "outputs": [], + "source": [ + "#| export\n", + "class MixedCircuitsConfigDataset(CircuitsConfigDataset, MixedCachedOpenCLIPDataset):\n", + " \"\"\"\n", + " Dataset that uses multiple cached dataset and combines them with padding, either i) Bucket or ii) Max.\n", + " Also provides a corresponding `collate_fn` for training.\n", + " \"\"\"\n", + "\n", + " req_params = [f.name for f in dataclasses.fields(MixedCircuitsConfigDatasetConfig)]\n", + "\n", + " #-----------------------------------\n", + " \n", + " @property\n", + " def params_config(self):\n", + " params_config = super().params_config \n", + " if type(self) == MixedCircuitsConfigDataset:\n", + " params_config = 
MixedCircuitsConfigDatasetConfig(**params_config)\n", + " return params_config \n", + "\n", + " #-----------------------------------\n", + "\n", + " def _get_cut_sizes(self, z):\n", + " z_0 = torch.max(z[:, 0]) # space\n", + " z_1 = torch.max(z[:, 1]) # time\n", + " z_1 = (torch.ceil(z_1 / self.model_scale_factor) * self.model_scale_factor).to(torch.int32)\n", + " return z_0, z_1\n", + " \n", + " def _cut(self, x, y, z): \n", + " z_0, z_1 = self._get_cut_sizes(z)\n", + " \n", + " x = x[:, :z_0, :z_1] # cut down to max [b, bits, time] of batch\n", + " return x, y\n", + " \n", + " def _cut_compilation_params(self, x, y, p, U, z): \n", + " z_0, z_1 = self._get_cut_sizes(z)\n", + " bit_exp = 2**z_0\n", + " \n", + " x = x[:, :z_0, :z_1] # cut down to max [b, bits, time] of batch\n", + " p = p[:, :, :z_1] # cut down to max [b, nP , time] of batch \n", + " U = U[:, :, :bit_exp, :bit_exp] # [b, Re/Im, 2^n, 2^n]\n", + " return x, y, p, U\n", + " \n", + " #-----------------------------------\n", + " # BUCKET PADDING, all x,y are already passed as batch\n", + " \n", + " def cut_padding_Bucket_collate_fn(self, b): \n", + " \"\"\"this function is called for training for every batch, order in b is store dict\"\"\" \n", + "\n", + " x, y, z = b[0]\n", + " x, y = self._cut(x, y, z)\n", + " return x, y \n", + "\n", + " \n", + " def cut_padding_Bucket_collate_fn_compilation(self, b): \n", + " \"\"\"this function is called for training for every batch\"\"\" \n", + " raise NotImplementedError()\n", + "\n", + "\n", + " def cut_padding_Bucket_collate_fn_compilation_params(self, b): \n", + " \"\"\"this function is called for training for every batch, order in b is store dict\"\"\" \n", + " \n", + " b = b[0] # {'x': 'tensor', 'y': 'numpy', 'params': 'tensor', 'U': 'tensor', 'z': 'tensor'}\n", + " \n", + " x = b[0]\n", + " y = b[1] \n", + " p = b[2]\n", + " U = b[3]\n", + " z = b[4]\n", + " \n", + " #---------------\n", + " \n", + " x, y, p, U = self._cut_compilation_params(x, y, p, U, 
z)\n", + " \n", + " return x, y, p, U\n", + " \n", + " #-----------------------------------\n", + " # MAX PADDING, x are passes as sampled list (batch), std collate them\n", + " \n", + " def cut_padding_collate_fn(self, b): \n", + " \"\"\"this function is called for training for every batch\"\"\" \n", + " x, y, z = torch.utils.data.default_collate(b)\n", + " x, y = self._cut(x, y, z)\n", + " return x, y \n", + "\n", + " def cut_padding_collate_fn_compilation(self, b):\n", + " \"\"\"this function is called for training for every batch\"\"\" \n", + " raise NotImplementedError()\n", + " \n", + " def cut_padding_collate_fn_compilation_params(self, b):\n", + " \"\"\"this function is called for training for every batch, order in b is store dict\"\"\" \n", + " # {'x': 'tensor', 'y': 'numpy', 'params': 'tensor', 'U': 'tensor', 'z': 'tensor'}\n", + " x, y, p, U, z = torch.utils.data.default_collate(b)\n", + " x, y, p, U = self._cut_compilation_params(x, y, p, U, z) \n", + " return x, y, p, U \n", + " \n", + " #-----------------------------------\n", + " \n", + " @staticmethod\n", + " def _preprocess_dataset(dataset, device, balance_max, max_samples, i, shuffle, make_unique, pad_constant, \n", + " model_scale_factor, parameters, max_gates, max_qubits):\n", + "\n", + " dataset = dataset.to(device)\n", + "\n", + " existing_z_type = dataset.store_dict.pop(\"z\", None) # remove z, as it would mess up `ConfigDataset.x_y_preprocess`, it would be put in `*c`.\n", + " if exists(existing_z_type):\n", + " assert existing_z_type == \"tensor\"\n", + " z = dataset.z\n", + " else:\n", + " z = None\n", + " \n", + " x, y, *c = ConfigDataset.x_y_preprocess(dataset, balance_max=balance_max, max_samples=max_samples[i], shuffle=shuffle, make_unique=make_unique) \n", + " x = x.to(device) # [b, s, t] \n", + " \n", + " print(f\" - dataset size after balancing {x.shape[0]}\")\n", + "\n", + " #-------\n", + " # store original size\n", + " if not_exists(z):\n", + " z = torch.zeros((x.shape[0], 2), 
device=device, dtype=torch.int32)\n", + " z[:, 0] = max(dataset.params_config.num_of_qubits, 1)\n", + " \n", + " red_x = torch.sum(x.abs(), dim=1) # [b, t] .. collaps the zeros to get circuit length\n", + " z[:, 1] = torch.count_nonzero(red_x, dim=1) # [b] \n", + " z[z[:, 1]==0, 1] = 1 \n", + "\n", + " # Create masks for space and time padding\n", + " space_mask = torch.arange(x.shape[1], device=x.device).unsqueeze(0) >= z[:, 0].unsqueeze(1)\n", + " time_mask = torch.arange(x.shape[2], device=x.device).unsqueeze(0) >= z[:, 1].unsqueeze(1)\n", + "\n", + " # Apply masks to pad_constant to handle both dimensions\n", + " x = torch.where(space_mask.unsqueeze(2), pad_constant, x)\n", + " x = torch.where( time_mask.unsqueeze(1), pad_constant, x)\n", + " \n", + " z[:, 1] = (torch.ceil(z[:, 1] / model_scale_factor) * model_scale_factor).to(torch.int32) #for cut needs multiple\n", + "\n", + " #-------\n", + " \n", + " # now pad x, padding is defined from last dim forward! \n", + " pad = (0, max_gates-dataset.params_config.max_gates, 0, max_qubits-dataset.params_config.num_of_qubits) \n", + " x = F.pad(x, pad, \"constant\", pad_constant)\n", + " \n", + " #-------\n", + "\n", + " c = MixedCachedOpenCLIPDataset._add_missing_conditions(parameters, dataset, c, x.shape[0], \"cpu\")\n", + "\n", + " dataset = dataset.to(\"cpu\") #helps with gpu mem overflowing\n", + " del dataset\n", + " \n", + " return x.cpu(), y, z.cpu(), *[ic.cpu() for ic in c]\n", + " \n", + " @staticmethod\n", + " def from_datasets(datasets: list[CircuitsConfigDataset], balance_maxes: list, pad_constant, device: torch.device=torch.device(\"cpu\"), bucket_batch_size=None, \n", + " max_samples=None, shuffle=True, make_unique=True, test_split=0.05, pad_with_memmap=False, **parameters):\n", + " if pad_constant == 0:\n", + " print(\"[WARNING]: >pad_constant == 0<; This could be an error!\")\n", + " \n", + " model_scale_factor = parameters[\"model_scale_factor\"]\n", + " \n", + " max_qubits = 
max(dataset.params_config.num_of_qubits for dataset in datasets)\n", + " max_gates = max(dataset.params_config.max_gates for dataset in datasets)\n", + " max_gates = int(np.ceil(max_gates /model_scale_factor) * model_scale_factor)\n", + " max_params = max(dataset.params_config.max_params for dataset in datasets)\n", + " \n", + " parameters[\"num_of_qubits\"] = max_qubits\n", + " parameters[\"max_gates\"] = max_gates\n", + " parameters[\"max_params\"] = max_params\n", + " parameters[\"random_samples\"] = sum([dataset.params_config.random_samples for dataset in datasets])\n", + " parameters[\"min_gates\"] = min([dataset.params_config.min_gates for dataset in datasets])\n", + " parameters[\"comment\"] = f\"Generated with 'from_datasets' with {len(datasets)} datasets. Qubits: {[dataset.params_config.num_of_qubits for dataset in datasets]}.\"\n", + " parameters[\"pad_constant\"] = pad_constant\n", + " parameters[\"bucket_batch_size\"] = bucket_batch_size\n", + " \n", + " parameters[\"store_dict\"] = {}\n", + " for dataset in datasets:\n", + " parameters[\"store_dict\"] |= dataset.params_config.store_dict #needs python 3.9 for union of dict \n", + " parameters[\"store_dict\"][\"z\"] = \"tensor\" #add special item\n", + "\n", + " #-----------------\n", + " \n", + " xs, ys, zs, cs = MixedCircuitsConfigDataset._preprocess_datasets(datasets, device, balance_maxes, max_samples, shuffle, make_unique, pad_constant, \n", + " model_scale_factor, parameters, max_gates=max_gates, max_qubits=max_qubits) \n", + " #-----------------\n", + "\n", + " has_U = \"U\" in parameters[\"store_dict\"]\n", + " has_p = \"params\" in parameters[\"store_dict\"]\n", + " \n", + " if bucket_batch_size > 0:\n", + " collate_fn_name = MixedCircuitsConfigDataset.cut_padding_Bucket_collate_fn.__name__\n", + " if has_U: \n", + " collate_fn_name = MixedCircuitsConfigDataset.cut_padding_Bucket_collate_fn_compilation.__name__\n", + " if has_p: \n", + " collate_fn_name = 
MixedCircuitsConfigDataset.cut_padding_Bucket_collate_fn_compilation_params.__name__\n", + " \n", + " else:\n", + " collate_fn_name = MixedCircuitsConfigDataset.cut_padding_collate_fn.__name__ \n", + " if has_U: \n", + " collate_fn_name = MixedCircuitsConfigDataset.cut_padding_collate_fn_compilation.__name__\n", + " if has_p: \n", + " collate_fn_name = MixedCircuitsConfigDataset.cut_padding_collate_fn_compilation_params.__name__\n", + "\n", + " parameters[\"collate_fn\"] = collate_fn_name\n", + " \n", + " #-----------------\n", + " if bucket_batch_size > 0:\n", + " xs, ys, zs, cs = MixedCachedOpenCLIPDataset._reorder_to_buckets(parameters, bucket_batch_size, xs, ys, zs, cs)\n", + " \n", + " x = torch.cat(xs)\n", + " y = ys # torch.cat(ys) is wrong, y is list of numpy or str!! not a tensor\n", + " \n", + " if isinstance(y, list): \n", + " match parameters[\"store_dict\"][\"y\"]:\n", + " case \"numpy\": y = np.concatenate(y, axis=0)\n", + " case \"tensor\": y = torch.cat(y, dim=0)\n", + " case _: raise NotImplementedError()\n", + " \n", + " z = torch.cat(zs)\n", + " c = cs\n", + " \n", + " #-----------------\n", + "\n", + " params_pad = (max_params, max_gates)\n", + " unitary_pad = 2**max_qubits\n", + " \n", + " ci_list, ci_k_list, memmap_cleans = MixedCachedOpenCLIPDataset._pad_conditions(parameters, bucket_batch_size, c, unitary_pad=unitary_pad, params_pad=params_pad, pad_with_memmap=pad_with_memmap)\n", + " \n", + " #----------------- \n", + "\n", + " mixed_CircuitsConfigDataset, mixed_CircuitsConfigDataset_test = \\\n", + " MixedCircuitsConfigDataset._create_train_valid_datasets(device, parameters, test_split, x, y, z, ci_list, ci_k_list, shuffle=shuffle)\n", + "\n", + " if pad_with_memmap:\n", + " mixed_CircuitsConfigDataset.memmap_cleans = memmap_cleans\n", + " mixed_CircuitsConfigDataset_test.memmap_cleans = memmap_cleans\n", + " \n", + " return mixed_CircuitsConfigDataset, mixed_CircuitsConfigDataset_test" + ] + }, + { + "cell_type": "markdown", + "id": 
"f41f26a8-ac40-4e91-8c0e-1ef07a0fd4f4", + "metadata": {}, + "source": [ + "# Export -" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "a0474216-8e0c-4ba7-9a37-571ac7d8e82c", + "metadata": {}, + "outputs": [], + "source": [ + "#| hide\n", + "import nbdev; nbdev.nbdev_export()" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "python3", + "language": "python", + "name": "python3" + }, + "widgets": { + "application/vnd.jupyter.widget-state+json": { + "state": {}, + "version_major": 2, + "version_minor": 0 + } + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/src/dataset/config_dataset.ipynb b/src/dataset/config_dataset.ipynb index 5441fe6..c226f2b 100644 --- a/src/dataset/config_dataset.ipynb +++ b/src/dataset/config_dataset.ipynb @@ -5,7 +5,9 @@ "id": "a8980c24-d62e-462b-ba89-3195cfdcc374", "metadata": {}, "source": [ - "# Config dataset" + "# Config dataset\n", + "\n", + "> Base class for managing, loading and saving." ] }, { @@ -27,7 +29,8 @@ "source": [ "#| export\n", "from genQC.imports import *\n", - "from genQC.config_loader import *\n", + "from genQC.utils.config_loader import *\n", + "from genQC.dataset.dataset_helper import *\n", "\n", "from huggingface_hub import snapshot_download" ] @@ -41,9 +44,10 @@ "source": [ "#| export\n", "@dataclass\n", - "class Config_Dataset_config:\n", + "class ConfigDatasetConfig:\n", " \"\"\"Config `dataclass` used for storage.\"\"\"\n", - " store_dict: dict " + " store_dict: dict \n", + " dataset_to_gpu: bool" ] }, { @@ -54,13 +58,16 @@ "outputs": [], "source": [ "#| export\n", - "class Config_Dataset(): \n", + "class ConfigDataset(): \n", " \"\"\"Base class for datasets, manages loading and saving.\"\"\"\n", " \n", - " req_params = [f.name for f in dataclasses.fields(Config_Dataset_config)]\n", - " comment = \"\"\n", + " req_params = [f.name for f in dataclasses.fields(ConfigDatasetConfig)]\n", + " comment = \"\"\n", + " add_balance_fn = None\n", " \n", - " def 
__init__(self, device: torch.device=torch.device(\"cpu\"), **parameters):\n", + " def __init__(self, device: torch.device=torch.device(\"cpu\"), save_type=None, **parameters) -> None:\n", + " self.save_type = default(save_type, \"safetensors\")\n", + " \n", " req_params = self.req_params \n", " for p in req_params:\n", " if p not in parameters: raise RuntimeError(f\"Missing parameter `{p}` in argument `**parameters: dict`\") \n", @@ -92,6 +99,145 @@ " setattr(self, str(k), x)\n", " \n", " return self\n", + "\n", + " def memory_summary(self) -> None:\n", + " print(\"##################### Dataset memory summary #####################\")\n", + " print(\"Name || Type || Memory || Device || Shape\")\n", + " print(\"---------------------------------------------------------------\")\n", + " \n", + " total_mem = 0.0\n", + " byte_to_giga = 1 / (1024**3)\n", + " \n", + " for k,v in self.store_dict.items(): \n", + " mem = 0.0\n", + " dev = \"None\"\n", + " shape = \"None\"\n", + " dtype = \"None\"\n", + " \n", + " x = getattr(self, str(k))\n", + " \n", + " if v == \"tensor\":\n", + " mem += float(x.dtype.itemsize) * np.prod([s for s in x.shape], dtype=np.double) * byte_to_giga\n", + " dev = x.device\n", + " shape = x.shape\n", + " dtype = x.dtype\n", + " \n", + " elif v == \"tensor_list\": \n", + " dev = []\n", + " for x_i in x:\n", + " mem += float(x_i.dtype.itemsize) * np.prod([s for s in x_i.shape], dtype=np.double) * byte_to_giga\n", + " dev.append(x_i.device)\n", + " shape = (len(x), x[0].shape)\n", + " dtype = x[0].dtype\n", + " \n", + " elif v == \"list\": \n", + " shape = (len(x))\n", + " dtype = \"python\"\n", + " \n", + " elif v == \"numpy\": \n", + " shape = x.shape\n", + " dtype = x.dtype\n", + "\n", + " \n", + " print(f\" - [{str(k):>8}] ({str(dtype):>15} {str(v):>8}): {mem:3.4f} GB ({str(dev):6}) | {shape}\")\n", + " total_mem += mem\n", + " \n", + " print(\"--------------------------------------\")\n", + " print(f\" Total memory used: {total_mem:3.4f} GB \")\n", 
+ " print(\"---------------------------------------------------------------\")\n", + "\n", + " #----------------------------\n", + " \n", + " def x_y_preprocess(self, balance_max=None, shuffle=False, max_samples=None, make_unique=True):\n", + " z_proc = []\n", + " for k,v in self.store_dict.items(): \n", + " if k != \"x\" and k != \"y\":\n", + " z_proc.append(getattr(self, k))\n", + " \n", + " x_proc, y_proc = self.x, self.y\n", + " \n", + " #---------------------\n", + " if shuffle:\n", + " x_proc, y_proc, *z_proc = shuffle_tensor_dataset(x_proc, y_proc, *z_proc)\n", + " \n", + " if exists(max_samples):\n", + " x_proc = x_proc[:max_samples]\n", + " y_proc = y_proc[:max_samples]\n", + " z_proc = (iz[:max_samples] for iz in z_proc) \n", + " \n", + " #---------------------\n", + " t = self.store_dict[\"y\"]\n", + " if exists(balance_max): \n", + " if t == \"tensor\" or t == \"numpy\": x_proc, y_proc, *z_proc = balance_tensor_dataset(x_proc, y_proc, *z_proc, make_unique=make_unique, shuffle_lables=shuffle, \n", + " samples=balance_max, add_balance_fn=self.add_balance_fn, njobs=1) \n", + " else: print(f\"[WARNING]: Unsupported y type: `{t}`. Not balancing dataset!\")\n", + " else: print(f\"[INFO]: Not balancing dataset! {balance_max=}\")\n", + " \n", + " #---------------------\n", + " if shuffle:\n", + " x_proc, y_proc, *z_proc = shuffle_tensor_dataset(x_proc, y_proc, *z_proc)\n", + " \n", + " return x_proc, y_proc, *z_proc\n", + " \n", + " def valid_split(self, x, y, *z, p_valid=0.1, y_type=None, split_sequential=False):\n", + " \"\"\"\n", + " split_sequential ... if true split data ordered (valid-train order), else split randomly (the same as shuffle and then seq. 
split)\n", + " \"\"\"\n", + " \n", + " if split_sequential: ind = torch.arange(x.shape[0])\n", + " else: ind = torch.randperm(x.shape[0]) \n", + " \n", + " splits = max(int(x.shape[0] * p_valid), 1) \n", + " ind, ind_valid = ind[splits:], ind[:splits]\n", + "\n", + " #### Note: advanced indexing always creates copy not view. So we can skip the .clone()\n", + " x, x_valid = x[ind], x[ind_valid]\n", + " \n", + " t = y_type if exists(y_type) else self.store_dict[\"y\"]\n", + " if t == \"tensor\" : y, y_valid = y[ind], y[ind_valid] \n", + " elif t == \"numpy\": y, y_valid = y[ind], y[ind_valid]\n", + " \n", + " z = list(z)\n", + " z_valid = [None] * len(z)\n", + " for i, iz in enumerate(z):\n", + " # assert tensors for now\n", + " z[i], z_valid[i] = iz[ind], iz[ind_valid]\n", + " \n", + " z, z_valid = tuple(z), tuple(z_valid)\n", + " \n", + " return x, x_valid, y, y_valid, (z, z_valid)\n", + " \n", + " def get_dataloaders(self, batch_size, p_valid=0.1, balance_max=None, max_samples=None, y_on_cpu=False, shuffle=True):\n", + " #-------------------------\n", + " # valid split and to device\n", + " \n", + " x_proc, y_proc, *z_proc = self.x_y_preprocess(balance_max=balance_max, max_samples=max_samples, shuffle=shuffle) \n", + " x, x_valid, y, y_valid, (z, z_valid) = self.valid_split(x_proc, y_proc, *z_proc, p_valid=p_valid)\n", + "\n", + "\n", + " if self.params_config.dataset_to_gpu:\n", + " x, x_valid = x.to(\"cuda\"), x_valid.to(\"cuda\")\n", + " z, z_valid = list(iz.to(\"cuda\") for iz in z), list(iz_valid.to(\"cuda\") for iz_valid in z_valid)\n", + "\n", + " if not y_on_cpu:\n", + " y, y_valid = y.to(\"cuda\"), y_valid.to(\"cuda\")\n", + "\n", + " #-------------------------\n", + " # create dataloaders\n", + " \n", + " ds = TensorDataset(x, y, *z)\n", + " ds_valid = TensorDataset(x_valid, y_valid, *z_valid)\n", + " \n", + " if self.params_config.dataset_to_gpu: \n", + " train_loader = DataLoader(dataset=ds , batch_size=batch_size, shuffle=True)\n", + " valid_loader = 
DataLoader(dataset=ds_valid, batch_size=batch_size, shuffle=True)\n", + "\n", + " else: \n", + " train_loader = DataLoader(dataset=ds , batch_size=batch_size, shuffle=True, pin_memory=True, num_workers=12)\n", + " valid_loader = DataLoader(dataset=ds_valid, batch_size=batch_size, shuffle=True, pin_memory=True, num_workers=12)\n", + "\n", + " self.dataloaders = DataLoaders(train_loader, valid_loader) \n", + " return self.dataloaders\n", " \n", " #----------------------------\n", " \n", @@ -99,16 +245,22 @@ " def params_config(self):\n", " params_config = {} \n", " for p in self.req_params: params_config[p] = getattr(self, p)\n", + " \n", + " if type(self) == ConfigDataset:\n", + " params_config = ConfigDatasetConfig(**params_config)\n", " return params_config \n", - " \n", + "\n", + " #----------------------------\n", + " \n", " def get_config(self, save_path=None, without_metadata=False):\n", " if not without_metadata: \n", " config = {}\n", " config[\"target\"] = class_to_str(type(self))\n", " config[\"device\"] = str(self.device)\n", " config[\"comment\"] = self.comment\n", - " config[\"save_path\"] = self.save_path if hasattr(self, \"save_path\") else save_path\n", + " config[\"save_path\"] = self.save_path if hasattr(self, \"save_path\") and not exists(save_path) else save_path\n", " config[\"save_datetime\"] = datetime.now().strftime(\"%m/%d/%Y %H:%M:%S\")\n", + " config[\"save_type\"] = self.save_type\n", " config[\"params\"] = self.params_config \n", " else:\n", " config = self.params_config \n", @@ -117,22 +269,41 @@ " return config\n", " \n", " def save_dataset(self, config_path: str, save_path: str):\n", + " if exists(config_path): os.makedirs(config_path[:config_path.rfind(\"/\")] + \"/\", exist_ok=True)\n", + " if exists(save_path): os.makedirs(save_path[:save_path.rfind(\"/\")] + \"/\", exist_ok=True)\n", + " \n", " config = self.get_config(save_path, without_metadata=False)\n", " save_dict_yaml(config, config_path) \n", " self.store_x_y(save_path) 
\n", " \n", " #----------------------------\n", + "\n", + " def check_save_type(self, save_path):\n", + " if exists(self.save_type) and exists(save_path):\n", + " if not save_path.endswith(f\".{self.save_type}\"):\n", + " save_path += f\".{self.save_type}\"\n", + " return save_path\n", " \n", " def store_x_y(self, path_str): \n", " for k,v in self.store_dict.items(): \n", " x = getattr(self, str(k))\n", - " torch.save(x, path_str + f\"_{k}.pt\")\n", - " \n", - " def load_x_y(self, path_str):\n", + "\n", + " # torch.save(x, path_str + f\"_{k}.pt\")\n", + " store_tensor({\"0\": x}, self.check_save_type(path_str + f\"_{k}\"), type=v)\n", + " \n", + " def load_x_y(self, path_str, device: Optional[torch.device] = None, make_contiguous: bool = True):\n", " self.save_path = path_str\n", " \n", - " for k,v in self.store_dict.items(): \n", - " x = torch.load(path_str + f\"_{k}.pt\", weights_only=False)\n", + " for k,v in self.store_dict.items(): \n", + " # x = torch.load(path_str + f\"_{k}.pt\", map_location=device)\n", + " x = load_tensor(self.check_save_type(path_str + f\"_{k}\"), device, type=v)\n", + "\n", + " if isinstance(x, dict):\n", + " x = x[\"0\"]\n", + "\n", + " if v == \"tensor\" and make_contiguous:\n", + " x = x.contiguous() #load memmap into memory\n", + " \n", " setattr(self, str(k), x)\n", " \n", " #----------------------------\n", @@ -153,7 +324,7 @@ " if \"save_path\" in config: save_path = config[\"save_path\"]\n", " else: print(\"[INFO]: Found no key `save_path` path in config and no `save_path` arg provided.\")\n", " \n", - " if exists(save_path): config_dataset.load_x_y(save_path)\n", + " if exists(save_path): config_dataset.load_x_y(save_path, device=device, make_contiguous=make_contiguous)\n", " else: print(\"[INFO]: No save_path` provided. 
Nothing loaded.\")\n", "\n", " #--------------------------------\n", @@ -170,7 +341,7 @@ " If this method is called with `ConfigDataset.from_config_file` we use the given `target`, else use the caller class.\n", " \"\"\"\n", " config = load_config(config_path)\n", - " if cls is not Config_Dataset:\n", + " if cls is not ConfigDataset:\n", " config[\"target\"] = class_to_str(cls) \n", " return cls.from_config(config, device, save_path, make_contiguous)\n", "\n", @@ -178,7 +349,13 @@ " def from_huggingface(cls, repo_id: str, device: torch.device, **kwargs): \n", " \"\"\"Load a dataset directly from Huggingface.\"\"\"\n", " dataset_path = snapshot_download(repo_id=repo_id, repo_type=\"dataset\", allow_patterns=[\"*.pt\", \"*.yaml\", \"*.safetensors\"], **kwargs) \n", - " dataset = cls.from_config_file(config_path=dataset_path+\"/config.yaml\", device=device, save_path=dataset_path+\"/dataset\") \n", + "\n", + " try:\n", + " name = repo_id.split(\"/\")[-1]\n", + " dataset = cls.from_config_file(config_path=dataset_path+f\"/{name}.yaml\", device=device, save_path=dataset_path+f\"/{name}\") \n", + " except Exception as e:\n", + " dataset = cls.from_config_file(config_path=dataset_path+\"/config.yaml\", device=device, save_path=dataset_path+\"/dataset\") \n", + " \n", " return dataset " ] }, @@ -207,6 +384,13 @@ "display_name": "python3", "language": "python", "name": "python3" + }, + "widgets": { + "application/vnd.jupyter.widget-state+json": { + "state": {}, + "version_major": 2, + "version_minor": 0 + } } }, "nbformat": 4, diff --git a/src/dataset/dataset_helper.ipynb b/src/dataset/dataset_helper.ipynb index 6ea643c..bd6d3ce 100644 --- a/src/dataset/dataset_helper.ipynb +++ b/src/dataset/dataset_helper.ipynb @@ -5,15 +5,9 @@ "id": "a8980c24-d62e-462b-ba89-3195cfdcc374", "metadata": {}, "source": [ - "# Dataset helper functions" - ] - }, - { - "cell_type": "markdown", - "id": "3d455168-ceb0-4c95-a7d5-3307cf3fb0dd", - "metadata": {}, - "source": [ - "Some comonly used 
functions for datasets." + "# Dataset helper functions\n", + "\n", + "> Some comonly used functions for datasets." ] }, { @@ -35,7 +29,8 @@ "source": [ "#| export\n", "from genQC.imports import *\n", - "from genQC.config_loader import *" + "from genQC.utils.config_loader import *\n", + "from genQC.utils.async_fn import run_parallel_jobs" ] }, { @@ -62,39 +57,7 @@ " comp = (dataset==x) \n", " comp = torch.reshape(comp, [comp.shape[0], -1]) \n", " comp = torch.all(comp, dim=1)\n", - " \n", - " num = comp.nonzero().squeeze().numel() \n", - " return bool(num)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "9369f5e5-3545-49d3-b03a-543cf4620e1d", - "metadata": {}, - "outputs": [], - "source": [ - "#| hide\n", - "#| export\n", - "def check_duplicates_in_dataset_python(xs, dataset):\n", - " cnt = 0\n", - " \n", - " raise NotImplementedError(\"\")\n", - " \n", - " # f = lambda x: int(check_duplicate_in_dataset(x, dataset))\n", - " # res = async_loop_consumer(f, xs)\n", - " # cnt = sum(res)\n", - " \n", - " comp = []\n", - " \n", - " for i,x in enumerate(xs):\n", - " if check_duplicate_in_dataset(x, dataset):\n", - " #print(f\"[INFO] Duplicate in dataset at index={i}\")\n", - " comp.append(i)\n", - " cnt += 1 \n", - " # print(f\"[INFO] Found {cnt}/{xs.shape[0]} duplicates in dataset of {dataset.shape[0]}.\")\n", - " \n", - " return cnt, comp" + " return comp.any().item() " ] }, { @@ -132,12 +95,6 @@ " comp = comp.nonzero()\n", " num = comp.shape[0] \n", " \n", - " # except Exception as er:\n", - " # print(\"[WARNING] check_duplicates_in_dataset:\", er)\n", - " # print(\"We will use python instead.\")\n", - " # raise NotImplementedError(\"\")\n", - " # # cnt, comp = check_duplicates_in_dataset_python(xs, dataset)\n", - " \n", " if return_ind: return num, comp.squeeze() #comp is [i_xs, i_dataset] pairs\n", " return num" ] @@ -198,14 +155,35 @@ "outputs": [], "source": [ "#| export\n", - "def shuffle_tensor_dataset(x, y=None, *z):\n", + "def 
shuffle_tensor_dataset(x, y=None, *z, cpu_copy=True):\n", " '''Assumes numpy or tensor objects with same length.'''\n", " rand_indx = torch.randperm(x.shape[0])\n", " \n", " if exists(y):\n", " assert x.shape[0] == y.shape[0] \n", - " for iz in z: assert x.shape[0] == iz.shape[0] \n", - " return x[rand_indx], y[rand_indx], *(iz[rand_indx] for iz in z)\n", + " for iz in z: assert x.shape[0] == iz.shape[0] \n", + "\n", + "\n", + " if cpu_copy:\n", + "\n", + " def _cpu_array_index(var): \n", + " if type(var) == np.ndarray:\n", + " var = var[rand_indx]\n", + " else:\n", + " device = var.device\n", + " var = var.to(\"cpu\")\n", + " var = var[rand_indx]\n", + " var[:] = var.to(device) \n", + " return var\n", + "\n", + " x = _cpu_array_index(x)\n", + " y = _cpu_array_index(y)\n", + " z = (_cpu_array_index(iz) for iz in z)\n", + " \n", + " return x, y, *z\n", + "\n", + " else:\n", + " return x[rand_indx], y[rand_indx], *(iz[rand_indx] for iz in z)\n", " \n", " return x[rand_indx]" ] @@ -221,12 +199,14 @@ "def get_unique_elements_indices(tensor):\n", " '''Returns indices of unique_elements in `tensor`.'''\n", " tensor_unique, ptrs, cnt = torch.unique(tensor, dim=0, return_inverse=True, return_counts=True)\n", - " _, ind_sorted = torch.sort(ptrs, stable=True) #e.g. gets the index that points to zero at pos [0]\n", + " _, ind_sorted = torch.sort(ptrs, dim=0, stable=True) #e.g. 
gets the index that points to zero at pos [0]\n", " \n", - " cum_sum = cnt.cumsum(0)\n", - " cum_sum = torch.cat((torch.tensor([0], device=tensor.device), cum_sum[:-1]))\n", - " \n", - " return tensor_unique, ind_sorted[cum_sum]" + " cum_sum = cnt.cumsum(dim=0)\n", + " cum_sum = torch.cat([torch.tensor([0], device=tensor.device), cum_sum[:-1]], dim=0)\n", + "\n", + " idx = ind_sorted[cum_sum].cpu()\n", + " \n", + " return tensor[idx], idx" ] }, { @@ -258,47 +238,65 @@ "outputs": [], "source": [ "#| export\n", - "def balance_tensor_dataset(x, y, *z, samples: int=None, make_unique: bool=True, y_uniques=None, shuffle_lables: bool=True, add_balance_fn: callable=None):\n", + "def balance_tensor_dataset(x, y, *z, samples: int=None, make_unique: bool=True, y_uniques=None, shuffle_lables: bool=True, add_balance_fn: callable=None, njobs=1):\n", " '''Assumes `x` is tensor and `y` is tensor or numpy.'''\n", " \n", " y_type = type(y)\n", " assert y_type in [np.ndarray, torch.Tensor]\n", + "\n", + " print(f\" - balance_tensor_dataset, {njobs=}, number of samples={x.shape[0]}\")\n", " \n", " #------------------------------\n", " \n", " if make_unique:\n", " x, y, *z = uniquify_tensor_dataset(x, y, *z)\n", " assert x.shape[0] == y.shape[0]\n", + "\n", + " print(f\" - uniquify_tensor_dataset, number of samples now {x.shape[0]}\")\n", " \n", " #bcs unique sorts, we need to shuffle the dataset before picking the first 'samples' entries\n", " x, y, *z = shuffle_tensor_dataset(x, y, *z) \n", " \n", " #------------------------------\n", + "\n", + " search_y = y_uniques if exists(y_uniques) else y\n", " \n", - " if y_type == np.ndarray: y_uniques_temp, y_uniques_cnt = np.unique(y, return_counts=True, axis=0)\n", - " else: y_uniques_temp, y_uniques_cnt = torch.unique(y, return_counts=True, dim=0)\n", + " if y_type == np.ndarray: _, y_ptrs, y_uniques_cnt = np.unique(search_y, return_counts=True, return_inverse=True, axis=0)\n", + " else: _, y_ptrs, y_uniques_cnt = torch.unique(search_y, 
return_counts=True, return_inverse=True, dim=0)\n", " \n", - " if y_uniques is None: y_uniques = y_uniques_temp\n", - " if samples is None: \n", + " if not exists(samples): \n", " if y_type == np.ndarray: samples = np.min(y_uniques_cnt) # the actual balancing count\n", " else: samples = torch.min(y_uniques_cnt)\n", " \n", + " print(f\" - balancing\")\n", + " # ToDo: make parallel \n", + " \n", " ind = list() \n", - " for y_unique in y_uniques:\n", + " # for y_unique in tqdm(y_uniques, total=y_uniques.shape[0]): \n", + " for y_ptr_index in tqdm(range(y_uniques_cnt.shape[0]), total=y_uniques_cnt.shape[0]):\n", " \n", " if y_type == np.ndarray:\n", - " comp = (y==y_unique)\n", + " comp = (y_ptrs==y_ptr_index)\n", " indices = np.squeeze(np.nonzero(comp))\n", " indices = indices if indices.ndim > 0 else indices[None]\n", " \n", - " else:\n", - " comp = torch.all(y==y_unique, dim=1)\n", - " indices = comp.nonzero().squeeze().cpu()\n", + " else: \n", + " comp = (y_ptrs==y_ptr_index)\n", + " \n", + " indices = comp.nonzero().squeeze() #.cpu()\n", " indices = indices if indices.dim() > 0 else indices[None]\n", - " \n", + "\n", " #special add balncing, e.g., for circuit length\n", " if add_balance_fn is not None: indices = add_balance_fn(indices, x, y, *z)\n", - " \n", + "\n", + " if not y_type == np.ndarray: indices = indices.cpu()\n", + "\n", + " indices = shuffle_tensor_dataset(indices) \n", + "\n", + " #fixes bug: shuffle_tensor_dataset removes dim if numpy array only has 1 element! 
\n", + " if y_type == np.ndarray: indices = indices if indices.ndim > 0 else indices[None]\n", + " else: indices = indices if indices.dim() > 0 else indices[None]\n", + " \n", " ind.append(indices[:samples]) #limit samples\n", " \n", " if y_type == np.ndarray: ind = np.concatenate(ind, axis=0)\n", @@ -314,44 +312,6 @@ " return xb, yb, *zb" ] }, - { - "cell_type": "markdown", - "id": "70824159-6cd4-49eb-a4e6-b6dc3ebdfebe", - "metadata": {}, - "source": [ - "#| hide\n", - "## Converters" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "7c7464a6-6a5f-4e4d-8f15-bc7bd8cc991e", - "metadata": {}, - "outputs": [], - "source": [ - "#| hide\n", - "#| export\n", - "def map_old_tensor_to_new(x):\n", - " raise DeprecationWarning(\"[WARNING] There really should be no more old tensors arround .... delete them\")\n", - " print(\"[WARNING] There really should be no more old tensors arround .... delete them\")\n", - " \n", - " b, gc, bits, t = x.shape\n", - " \n", - " x = x.reshape((b, gc//3, 3, bits, t)) # [b, g-c, bits, t] -> [b, g, c, bits, t] \n", - " x = torch.argmax(x, dim=2) # [b, g, c, bits, t]-> [b, g, bits, t] \n", - " \n", - " gate = torch.concat([torch.zeros_like(x[:,:1]), x], dim=1) # add zeros for empty token \n", - " gate = torch.argmax(gate, dim=1)\n", - " \n", - " control_target = torch.sum(x, dim=1)\n", - " mapped_tensor = torch.zeros_like(control_target)\n", - " mapped_tensor[control_target==1] = -1\n", - " mapped_tensor[control_target==2] = 1\n", - " \n", - " return gate * mapped_tensor # is now [b, space, time] with elements +-gate_number" - ] - }, { "cell_type": "markdown", "id": "f41f26a8-ac40-4e91-8c0e-1ef07a0fd4f4", diff --git a/src/dataset/mixed_cached_dataset.ipynb b/src/dataset/mixed_cached_dataset.ipynb new file mode 100644 index 0000000..79b5419 --- /dev/null +++ b/src/dataset/mixed_cached_dataset.ipynb @@ -0,0 +1,399 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "id": "a8980c24-d62e-462b-ba89-3195cfdcc374", + "metadata": 
{}, + "source": [ + "# Mixed cached dataset\n", + "\n", + "> Dataset that combines and handles multiple cached datasets." + ] + }, + { + "cell_type": "markdown", + "id": "21cae8fe-2a9d-4588-80f2-0a8c8def322b", + "metadata": {}, + "source": [ + "This is useful for multiple qubits. Here we also handle paddings." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "a8832bdd-f61c-44e1-8619-a9cb352ba768", + "metadata": {}, + "outputs": [], + "source": [ + "#| default_exp dataset.mixed_cached_dataset" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "06272f6f-b4e3-4504-a90a-feebbf6ad821", + "metadata": {}, + "outputs": [], + "source": [ + "#| export\n", + "from genQC.imports import *\n", + "from genQC.dataset.cached_dataset import CachedOpenCLIPDataset, CachedOpenCLIPDatasetConfig, ConfigDataset\n", + "from genQC.dataset.dataset_helper import *\n", + "from genQC.utils.misc_utils import DataLoaders, MemoryCleaner\n", + "from tensordict import TensorDict" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "119077c9-999b-44f7-8099-79037503d7e7", + "metadata": {}, + "outputs": [], + "source": [ + "#| export\n", + "@dataclass\n", + "class MixedCachedOpenCLIPDatasetConfig(CachedOpenCLIPDatasetConfig):\n", + " pad_constant: int\n", + " collate_fn: str\n", + " bucket_batch_size: int\n", + " model_scale_factor: int" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "0037efb5-d3a9-46e4-94d1-3dd80297e934", + "metadata": {}, + "outputs": [], + "source": [ + "#| export\n", + "class MixedCachedOpenCLIPDataset(CachedOpenCLIPDataset): \n", + " \"\"\"Dataset that uses multiple cached dataset and combines them with padding, either i) Bucket or ii) Max.\"\"\"\n", + " \n", + " req_params = [f.name for f in dataclasses.fields(MixedCachedOpenCLIPDatasetConfig)]\n", + "\n", + " #-----------------------------------\n", + " @property\n", + " def params_config(self):\n", + " params_config = super().params_config 
\n", + " if type(self) == MixedCachedOpenCLIPDataset:\n", + " params_config = MixedCachedOpenCLIPDatasetConfig(**params_config)\n", + " return params_config \n", + " \n", + " #-----------------------------------\n", + " # functions to combine multiple datasets together\n", + "\n", + " @classmethod\n", + " def _preprocess_datasets(dataset_cls, datasets, device, balance_maxes, max_samples, shuffle, \n", + " make_unique, pad_constant, model_scale_factor, parameters, **kwargs):\n", + " xs = []\n", + " ys = []\n", + " zs = []\n", + " cs = []\n", + " \n", + " if isinstance(max_samples, int):\n", + " max_samples = [max_samples] * len(datasets)\n", + " else:\n", + " assert isinstance(max_samples, (list, np.ndarray))\n", + "\n", + " if isinstance(balance_maxes, int):\n", + " balance_maxes = [balance_maxes] * len(datasets)\n", + " else:\n", + " assert isinstance(balance_maxes, (list, np.ndarray))\n", + " \n", + " for i, (dataset, balance_max) in tqdm(enumerate(zip(datasets, balance_maxes)), total=len(datasets)):\n", + "\n", + " x, y, z, *c = dataset_cls._preprocess_dataset(dataset, device, balance_max, max_samples, i, shuffle, make_unique, pad_constant, model_scale_factor, parameters, **kwargs)\n", + " MemoryCleaner.purge_mem()\n", + " \n", + " #combine datasets\n", + " xs.append(x.cpu()) \n", + " ys.append(y)\n", + " zs.append(z.cpu()) \n", + " cs.append([ic.cpu() for ic in c])\n", + "\n", + " del x\n", + " del y\n", + " del z\n", + " del c\n", + " \n", + " for k in datasets[i].store_dict.keys(): \n", + " setattr(datasets[i], str(k), None)\n", + " del dataset\n", + " \n", + " MemoryCleaner.purge_mem()\n", + "\n", + " return xs, ys, zs, cs\n", + " \n", + " @staticmethod\n", + " def _add_missing_conditions(parameters, dataset, c, batch_size, device):\n", + " # if c is missing something of the union we set it to a zero tensor, e.g. 
used for combining SRV with compilation\n", + " c_temp = []\n", + " c_temp_index = 0\n", + " \n", + " for k,v in parameters[\"store_dict\"].items(): \n", + " if k != \"x\" and k != \"y\" and k != \"z\": \n", + " if k not in dataset.params_config.store_dict:\n", + " empty_tensor = torch.zeros((1,), device=device)\n", + " \n", + " if k == \"U\": #scetchy hardcoded for compilation\n", + " empty_tensor = torch.zeros((batch_size, 2, 1, 1), device=device) # unitary is [b, Re/Im, 2^n, 2^n]\n", + " \n", + " c_temp.append(empty_tensor) \n", + " \n", + " else: # done to conserve the ordering of c args!!!\n", + " c_temp.append(c[c_temp_index])\n", + " c_temp_index += 1\n", + "\n", + " return c_temp\n", + "\n", + " @staticmethod\n", + " def _reorder_to_buckets(parameters, bucket_batch_size, xs, ys, zs, cs):\n", + " for i, (xi,yi,zi, ci) in enumerate(zip(xs, ys, zs, cs)): #cut rest of batch \n", + " b_mult = int(np.floor(xi.shape[0] / bucket_batch_size) * bucket_batch_size) \n", + " \n", + " xs[i] = xi[None, :b_mult].reshape((b_mult//bucket_batch_size, bucket_batch_size, *xi.shape[1:])) \n", + " zs[i] = zi[None, :b_mult].reshape((b_mult//bucket_batch_size, bucket_batch_size, *zi.shape[1:]))\n", + " \n", + " v = parameters[\"store_dict\"][\"y\"]\n", + " if v == \"tensor\" or v == \"numpy\": \n", + " ys[i] = yi[None, :b_mult].reshape((b_mult//bucket_batch_size, bucket_batch_size, *yi.shape[1:])) \n", + " else: raise NotImplementedError(\"\")\n", + " \n", + " #----\n", + " #For U, etc\n", + " add_ind = 0\n", + " for k,v in parameters[\"store_dict\"].items(): \n", + " if k != \"x\" and k != \"y\" and k != \"z\": \n", + " if v == \"tensor\" or v == \"numpy\": \n", + " cs[i][add_ind] = ci[add_ind][None, :b_mult].reshape((b_mult//bucket_batch_size, bucket_batch_size, *ci[add_ind].shape[1:])) \n", + " else: raise NotImplementedError(\"\") \n", + " add_ind += 1 \n", + "\n", + " return xs, ys, zs, cs\n", + "\n", + " @staticmethod\n", + " def _pad_conditions(parameters, bucket_batch_size, 
c, unitary_pad=None, params_pad=None, pad_with_memmap=False):\n", + " ci_list = []\n", + " ci_k_list = []\n", + "\n", + " memmap_cleans = [] #TensorDicts and paths we need to delete later\n", + " \n", + " def _alloc_mem(shape, k, c0_add_ind):\n", + " # allocating zeros is better memory wise than torch.cat(ci_s) and F.pad(ci, pad, \"constant\", 0)\n", + " mem = np.prod(shape) * c0_add_ind.element_size() / (1024*1024*1024)\n", + " print(f\"[INFO]: allocate memory for {k} {shape} on {c0_add_ind.device} approx. {mem:.3f} GB\")\n", + "\n", + " if pad_with_memmap:\n", + " prefix_path = f\"tmp_DELETE_pad_conditions_MixedCachedOpenCLIPDataset_{k}\"\n", + " print(f\"[INFO]: (MixedCachedOpenCLIPDataset._pad_conditions): {pad_with_memmap=} allocating TensorDict using memmap_like at {prefix_path}\")\n", + " \n", + " b, *_ = shape\n", + " tensor_dict = TensorDict({\"ci_s\": torch.empty(shape, dtype=c0_add_ind.dtype),\n", + " }, batch_size=[b])\n", + " tensor_dict = tensor_dict.memmap_like(prefix=prefix_path)\n", + " \n", + " ci_s = tensor_dict[\"ci_s\"]\n", + " memmap_cleans.append((tensor_dict, prefix_path))\n", + " else:\n", + " ci_s = torch.zeros(shape, device=c0_add_ind.device, dtype=c0_add_ind.dtype) \n", + " \n", + " return ci_s\n", + "\n", + " add_ind = 0\n", + " for k,v in parameters[\"store_dict\"].items(): \n", + " if k != \"x\" and k != \"y\" and k != \"z\": \n", + " \n", + " if v == \"tensor\" and k == \"U\": # hardcoded U padding !!\n", + " assert exists(unitary_pad) and isinstance(unitary_pad, int)\n", + " \n", + " n = sum([ci[add_ind].shape[0] for ci in c])\n", + " if bucket_batch_size > 0: shape = (n, bucket_batch_size, 2, unitary_pad, unitary_pad)\n", + " else: shape = (n, 2, unitary_pad, unitary_pad)\n", + " \n", + " ci_s = _alloc_mem(shape, k, c[0][add_ind]) \n", + "\n", + " #tensor product pad, else was zero pad\n", + " if 1:\n", + " run_i = 0\n", + " for i,ci in enumerate(c):\n", + " ci = ci[add_ind] \n", + "\n", + " assert ci.shape[-2]==ci.shape[-1]\n", + 
" U_side = ci.shape[-2]\n", + " for jj in range(unitary_pad//U_side): \n", + " ci_s[run_i:run_i+ci.shape[0], ..., U_side*jj:U_side*(jj+1), U_side*jj:U_side*(jj+1)] = ci.to(ci_s.device) \n", + " \n", + " run_i += ci.shape[0]\n", + " \n", + " ci_list.append(ci_s)\n", + " ci_k_list.append(k)\n", + " \n", + " add_ind += 1\n", + " continue\n", + " \n", + " elif v == \"tensor\" and k == \"params\": # hardcoded paramter padding !!\n", + " assert exists(params_pad) #and len(list(params_pad))==2\n", + " \n", + " n = sum(ci[add_ind].shape[0] for ci in c)\n", + " if bucket_batch_size > 0: shape = (n, bucket_batch_size, *params_pad)\n", + " else: shape = (n, *params_pad)\n", + " \n", + " ci_s = _alloc_mem(shape, k, c[0][add_ind]) \n", + " \n", + " elif v == \"numpy\": raise NotImplementedError(\"\") \n", + " else: raise NotImplementedError(\"\") \n", + " \n", + " \n", + " run_i = 0\n", + " for i,ci in enumerate(c):\n", + " ci = ci[add_ind] \n", + " ci_s[run_i:run_i+ci.shape[0], ..., :ci.shape[-2], :ci.shape[-1]] = ci \n", + " run_i += ci.shape[0]\n", + "\n", + " ci_list.append(ci_s)\n", + " ci_k_list.append(k)\n", + " \n", + " add_ind += 1\n", + "\n", + " return ci_list, ci_k_list, memmap_cleans\n", + " \n", + " @classmethod\n", + " def _create_train_valid_datasets(dataset_cls, device, parameters, test_split, x, y, z, ci_list, ci_k_list, shuffle: bool = True):\n", + " splits = max(int(x.shape[0] * test_split), 1)\n", + "\n", + " if shuffle:\n", + " x, y, z, *ci_list = shuffle_tensor_dataset(x, y, z, *ci_list)\n", + "\n", + " x, x_test = x[splits:], x[:splits]\n", + " y, y_test = y[splits:], y[:splits]\n", + " z, z_test = z[splits:], z[:splits]\n", + "\n", + " print(f\"Split: Train {x.shape[0]} - Test {x_test.shape[0]} \\n\")\n", + " \n", + " dataset = dataset_cls(device, **parameters) \n", + " dataset.x = x\n", + " dataset.y = y\n", + " dataset.z = z\n", + " \n", + " dataset_test = dataset_cls(device, **parameters) \n", + " dataset_test.x = x_test\n", + " dataset_test.y = 
y_test\n", + " dataset_test.z = z_test\n", + " \n", + " for ci, k in zip(ci_list, ci_k_list): \n", + " ci, ci_test = ci[splits:], ci[:splits]\n", + " \n", + " setattr(dataset , str(k), ci)\n", + " setattr(dataset_test, str(k), ci_test)\n", + " \n", + " return dataset, dataset_test\n", + " \n", + " #-----------------------------------\n", + " \n", + " def get_dataloaders(self, batch_size, text_encoder, p_valid=0.1, y_on_cpu=False, return_tensor_datasets=False, shuffle=True, shuffle_cpu_copy=True, caching=True):\n", + " #-------------------------\n", + " # caching\n", + " \n", + " self.text_encoder = text_encoder\n", + "\n", + " print(\"[DEBUG]: run get_dataloaders.x_y_preprocess\", flush=True)\n", + " x_proc, y_proc, *z_proc = ConfigDataset.x_y_preprocess(self, \n", + " balance_max=None, \n", + " shuffle=False, \n", + " max_samples=None, \n", + " make_unique=False) # ... z_proc is `'z' and all other 'c'\n", + " if caching:\n", + " if self.bucket_batch_size <= 0: \n", + " y_proc = self.caching(y_proc, y_on_cpu=y_on_cpu)\n", + " \n", + " else: \n", + " y_proc = self.caching([yi.reshape((-1)) for yi in y_proc], y_on_cpu=y_on_cpu)\n", + " y_proc = y_proc.reshape((-1, self.bucket_batch_size))\n", + " \n", + " #-------------------------\n", + " # valid split and to device\n", + "\n", + " print(\"[DEBUG]: run get_dataloaders.valid_split\", flush=True)\n", + " x, x_valid, y, y_valid, (z, z_valid) = self.valid_split(x_proc, y_proc, *z_proc, p_valid=p_valid, y_type=\"tensor\", split_sequential=False)\n", + "\n", + " if self.params_config.dataset_to_gpu:\n", + " x, x_valid = x.to(\"cuda\"), x_valid.to(\"cuda\")\n", + " z, z_valid = list(iz.to(\"cuda\") for iz in z), list(iz_valid.to(\"cuda\") for iz_valid in z_valid)\n", + "\n", + " if not y_on_cpu:\n", + " y, y_valid = y.to(\"cuda\"), y_valid.to(\"cuda\")\n", + "\n", + " #-------------------------\n", + " # create dataloaders\n", + "\n", + " ds = TensorDataset(x, y, *z)\n", + " ds_valid = TensorDataset(x_valid, y_valid, 
*z_valid)\n", + "\n", + " if return_tensor_datasets:\n", + " return ds, ds_valid\n", + "\n", + " if isinstance(self.collate_fn, str):\n", + " collate_fn = getattr(self, self.collate_fn, None)\n", + " else:\n", + " collate_fn = self.collate_fn\n", + " \n", + " if not exists(collate_fn):\n", + " print(\"[WARNING]: self.collate_fn does not exist, using torch.utils.data.default_collate.\")\n", + " collate_fn = torch.utils.data.default_collate\n", + "\n", + " if self.params_config.dataset_to_gpu: \n", + " train_loader = DataLoader(dataset=ds , batch_size=batch_size, shuffle=True, collate_fn=collate_fn)\n", + " valid_loader = DataLoader(dataset=ds_valid, batch_size=batch_size, shuffle=True, collate_fn=collate_fn)\n", + "\n", + " else: \n", + " train_loader = DataLoader(dataset=ds , batch_size=batch_size, shuffle=True, pin_memory=True, num_workers=4, collate_fn=collate_fn)\n", + " valid_loader = DataLoader(dataset=ds_valid, batch_size=batch_size, shuffle=True, pin_memory=True, num_workers=4, collate_fn=collate_fn)\n", + "\n", + " self.dataloaders = DataLoaders(train_loader, valid_loader) \n", + " return self.dataloaders\n", + " \n", + " #-----------------------------------\n", + " \n", + " @staticmethod\n", + " def from_datasets(*args, **kwargs):\n", + " raise NotImplementedError()" + ] + }, + { + "cell_type": "markdown", + "id": "f41f26a8-ac40-4e91-8c0e-1ef07a0fd4f4", + "metadata": {}, + "source": [ + "# Export -" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "a0474216-8e0c-4ba7-9a37-571ac7d8e82c", + "metadata": {}, + "outputs": [], + "source": [ + "#| hide\n", + "import nbdev; nbdev.nbdev_export()" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "python3", + "language": "python", + "name": "python3" + }, + "widgets": { + "application/vnd.jupyter.widget-state+json": { + "state": {}, + "version_major": 2, + "version_minor": 0 + } + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git 
a/src/dataset/mixed_cached_qc_dataset.ipynb b/src/dataset/mixed_cached_qc_dataset.ipynb deleted file mode 100644 index fb778fb..0000000 --- a/src/dataset/mixed_cached_qc_dataset.ipynb +++ /dev/null @@ -1,641 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "id": "a8980c24-d62e-462b-ba89-3195cfdcc374", - "metadata": {}, - "source": [ - "# Mixed cached dataset" - ] - }, - { - "cell_type": "markdown", - "id": "21cae8fe-2a9d-4588-80f2-0a8c8def322b", - "metadata": {}, - "source": [ - "Dataset that combines and handles multiple cached datasets, e.g. for multiple qubits. Here we also handle paddings." - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "a8832bdd-f61c-44e1-8619-a9cb352ba768", - "metadata": {}, - "outputs": [], - "source": [ - "#| default_exp dataset.mixed_cached_qc_dataset" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "06272f6f-b4e3-4504-a90a-feebbf6ad821", - "metadata": {}, - "outputs": [], - "source": [ - "#| export\n", - "from genQC.imports import *\n", - "from genQC.dataset.qc_dataset import Qc_Config_Dataset_config, Qc_Config_Dataset\n", - "from genQC.dataset.config_dataset import Config_Dataset\n", - "from genQC.dataset.cached_qc_dataset import Cached_OpenClip_Dataset\n", - "from genQC.config_loader import *\n", - "from genQC.dataset.dataset_helper import *\n", - "from genQC.util import DataLoaders\n", - "import dataclasses" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "119077c9-999b-44f7-8099-79037503d7e7", - "metadata": {}, - "outputs": [], - "source": [ - "#| export\n", - "@dataclass\n", - "class Mixed_Cached_OpenClip_Dataset_config(Qc_Config_Dataset_config):\n", - " pad_constant: int\n", - " collate_fn: str\n", - " bucket_batch_size: int\n", - " num_down_scales: int # for flex pad attn mask" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "0037efb5-d3a9-46e4-94d1-3dd80297e934", - "metadata": {}, - "outputs": [], - "source": [ - "#| export\n", - 
"class Mixed_Cached_OpenClip_Dataset(Cached_OpenClip_Dataset): \n", - " \"\"\"Dataset that uses multiple cached dataset and combines them with padding, either i) Bucket or ii) Max. Also provides a corresponding `collate_fn` for training.\"\"\"\n", - " \n", - " req_params = [f.name for f in dataclasses.fields(Mixed_Cached_OpenClip_Dataset_config)]\n", - " \n", - " cut_multiple = 4 #needed for proper downscaling!\n", - " \n", - " @property\n", - " def params_config(self):\n", - " params_config = {}\n", - " for p in self.req_params: params_config[p] = getattr(self, p) \n", - " params_config[\"gate_pool\"] = [class_to_str(gate) for gate in params_config[\"gate_pool\"]]\n", - " params_config = Mixed_Cached_OpenClip_Dataset_config(**params_config)\n", - " return params_config \n", - " \n", - " #-----------------------------------\n", - " # CAUSAL ATTENTION PADDING\n", - "\n", - " def flexPadAttn_padding_collate_fn(self, b): \n", - " \"\"\"this function is called for training for every batch\"\"\"\n", - " z_0 = max(x[2][0] for x in b) # space\n", - " z_1 = max(x[2][1] for x in b) # time\n", - " \n", - " #round time to next multiple of 8 for conv layers!\n", - " z_1 = (torch.ceil(z_1 / self.cut_multiple) * self.cut_multiple).to(torch.int32)\n", - " \n", - " #---------------\n", - " # key_padding_mask ... [N, S] -inf where we want no attention\n", - " # we will create here [N, s, t] and then reshaping is easy\n", - " # note this is key pad mask not directly attention mask! 
we need this for loss masking\n", - " # Nb: add rnd to the padding, so we train with pad and on smaller systems\n", - " \n", - " #we need 3 different ones for the different unet layers \n", - " key_padding_mask = torch.zeros((len(b), z_0, z_1), device=self.device) \n", - " \n", - " padd_rnds = torch.randint(low=0, high=2, size=(len(b),2), dtype=torch.int32) #roll 50/50 if we allow padding\n", - " \n", - " xs=[]\n", - " ys=[]\n", - " for i,((x,y,z), padd_rnd) in enumerate(zip(b, padd_rnds)):\n", - " # for i,(x,y,z) in enumerate(b):\n", - " x = x[:z_0, :z_1] # cut down to max [bits, time] of batch\n", - " \n", - " #------------------- \n", - " space, time = z[0], z[1]\n", - " \n", - " if space < z_0 and padd_rnd[0]: space = torch.randint(low=space, high=z_0+1, size=(1,), dtype=torch.int32) \n", - " if time < z_1 and padd_rnd[1]: time = torch.randint(low=time , high=z_1+1, size=(1,), dtype=torch.int32) \n", - " \n", - " time = (torch.ceil(time / self.cut_multiple) * self.cut_multiple).to(torch.int32)\n", - " \n", - " key_padding_mask[i, space:, :] = float('-inf') \n", - " key_padding_mask[i, :, time:] = float('-inf') \n", - " \n", - " #------------------- \n", - " \n", - " xs.append(x)\n", - " ys.append(y)\n", - " \n", - " key_padding_mask_list = [key_padding_mask]\n", - " for j in range(1, self.num_down_scales):\n", - " key_padding_mask_list.append(F.max_pool1d(key_padding_mask_list[j-1], kernel_size=2)) \n", - " \n", - " xs=torch.stack(xs)\n", - " ys=torch.stack(ys) \n", - " return xs, ys, key_padding_mask_list \n", - " \n", - " def flexPadAttn_TimeOnly_padding_collate_fn(self, b): \n", - " \"\"\"this function is called for training for every batch\"\"\"\n", - " z_0 = max(x[2][0] for x in b) # space\n", - " z_1 = max(x[2][1] for x in b) # time\n", - " \n", - " #round time to next multiple of 8 for conv layers!\n", - " z_1 = (torch.ceil(z_1 / self.cut_multiple) * self.cut_multiple).to(torch.int32)\n", - " \n", - " #---------------\n", - " # key_padding_mask ... 
[N, S] -inf where we want no attention\n", - " # we will create here [N, s, t] and then reshaping is easy\n", - " # note this is key pad mask not directly attention mask! we need this for loss masking\n", - " # Nb: add rnd to the padding, so we train with pad and on smaller systems\n", - " \n", - " #we need 3 different ones for the different unet layers \n", - " key_padding_mask = torch.zeros((len(b), z_0, z_1), device=self.device) \n", - " \n", - " padd_rnds = torch.randint(low=0, high=2, size=(len(b)), dtype=torch.int32) #roll 50/50 if we allow padding\n", - " \n", - " xs=[]\n", - " ys=[]\n", - " for i,((x,y,z), padd_rnd) in enumerate(zip(b, padd_rnds)):\n", - " # for i,(x,y,z) in enumerate(b):\n", - " x = x[:z_0, :z_1] # cut down to max [bits, time] of batch\n", - " \n", - " #------------------- \n", - " time = z[1]\n", - " \n", - " if time < z_1 and padd_rnd: time = torch.randint(low=time , high=z_1+1, size=(1,), dtype=torch.int32) \n", - " time = (torch.ceil(time / self.cut_multiple) * self.cut_multiple).to(torch.int32) \n", - " key_padding_mask[i, :, time:] = float('-inf') \n", - " \n", - " #------------------- \n", - " \n", - " xs.append(x)\n", - " ys.append(y)\n", - " \n", - " key_padding_mask_list = [key_padding_mask]\n", - " for j in range(1, self.num_down_scales):\n", - " key_padding_mask_list.append(F.max_pool1d(key_padding_mask_list[j-1], kernel_size=2)) \n", - " \n", - " xs=torch.stack(xs)\n", - " ys=torch.stack(ys) \n", - " return xs, ys, key_padding_mask_list \n", - "\n", - " #-----------------------------------\n", - " # BUCKET PADDING, all x,y are already passed as batch\n", - " \n", - " def cut_padding_Bucket_collate_fn(self, b): \n", - " \"\"\"this function is called for training for every batch\"\"\" \n", - " \n", - " b = b[0]\n", - " \n", - " x = b[0]\n", - " y = b[1]\n", - " z = b[2]\n", - " \n", - " #---------------\n", - " \n", - " z_0 = torch.max(z[:, 0]) # space\n", - " z_1 = torch.max(z[:, 1]) # time\n", - " \n", - " #round time to next 
multiple of cut_multiple for conv layers!\n", - " z_1 = (torch.ceil(z_1 / self.cut_multiple) * self.cut_multiple).to(torch.int32)\n", - " \n", - " #--------------- \n", - " \n", - " x = x[:, :z_0, :z_1] # cut down to max [b, bits, time] of batch\n", - " \n", - " return x, y\n", - "\n", - " def cut_padding_Bucket_collate_fn_compilation(self, b): \n", - " \"\"\"this function is called for training for every batch\"\"\" \n", - " \n", - " b = b[0]\n", - " \n", - " x = b[0]\n", - " y = b[1] \n", - " U = b[2]\n", - " z = b[3]\n", - " \n", - " #---------------\n", - " \n", - " z_0 = torch.max(z[:, 0]) # space\n", - " z_1 = torch.max(z[:, 1]) # time\n", - " \n", - " #round time to next multiple of cut_multiple for conv layers!\n", - " z_1 = (torch.ceil(z_1 / self.cut_multiple) * self.cut_multiple).to(torch.int32)\n", - " \n", - " #--------------- \n", - " \n", - " x = x[:, :z_0, :z_1] # cut down to max [b, bits, time] of batch\n", - " \n", - " bit_exp = 2**z_0\n", - " U = U[:, :, :bit_exp, :bit_exp] # [b, Re/Im, 2^n, 2^n]\n", - " \n", - " return x, y, U\n", - "\n", - " def cut_padding_Bucket_collate_fn_compilation_params(self, b): \n", - " \"\"\"this function is called for training for every batch, order in b is store dict\"\"\" \n", - " \n", - " b = b[0] # {'x': 'tensor', 'y': 'numpy', 'params': 'tensor', 'U': 'tensor', 'z': 'tensor'}\n", - " \n", - " x = b[0]\n", - " y = b[1] \n", - " p = b[2]\n", - " U = b[3]\n", - " z = b[4]\n", - " \n", - " #---------------\n", - " \n", - " z_0 = torch.max(z[:, 0]) # space\n", - " z_1 = torch.max(z[:, 1]) # time\n", - " \n", - " #round time to next multiple of cut_multiple for conv layers!\n", - " z_1 = (torch.ceil(z_1 / self.cut_multiple) * self.cut_multiple).to(torch.int32)\n", - " \n", - " #--------------- \n", - " \n", - " x = x[:, :z_0, :z_1] # cut down to max [b, bits, time] of batch\n", - "\n", - " p = p[:, :, :z_1]\n", - " \n", - " bit_exp = 2**z_0\n", - " U = U[:, :, :bit_exp, :bit_exp] # [b, Re/Im, 2^n, 2^n]\n", - " \n", - " 
return x, y, p, U\n", - " \n", - " #-----------------------------------\n", - " # MAX PADDING, x are passes as sampled list (batch), std collate them\n", - " \n", - " def cut_padding_collate_fn(self, b): \n", - " \"\"\"this function is called for training for every batch\"\"\" \n", - " z_0 = max(x[2][0] for x in b) # space\n", - " z_1 = max(x[2][1] for x in b) # time\n", - " \n", - " #round time to next multiple of cut_multiple for conv layers!\n", - " z_1 = (torch.ceil(z_1 / self.cut_multiple) * self.cut_multiple).to(torch.int32)\n", - " \n", - " #--------------- \n", - "\n", - " x_sample = b[0][0]\n", - " xs = torch.zeros((len(b), z_0, z_1), dtype=x_sample.dtype, device=x_sample.device)\n", - " \n", - " # xs=[]\n", - " ys=[]\n", - " for i,(x,y,z) in enumerate(b):\n", - " #x = x[:z_0, :z_1] # cut down to max [bits, time] of batch\n", - " xs[i] = x[:z_0, :z_1]\n", - " \n", - " #xs.append(x)\n", - " ys.append(y)\n", - " \n", - " #xs=torch.stack(xs)\n", - " ys=torch.stack(ys) \n", - " \n", - " return xs, ys \n", - "\n", - " def cut_padding_collate_fn_compilation(self, b):\n", - " \"\"\"this function is called for training for every batch\"\"\" \n", - " z_0 = max(x[3][0] for x in b) # space\n", - " z_1 = max(x[3][1] for x in b) # time\n", - " \n", - " #round time to next multiple of cut_multiple for conv layers!\n", - " z_1 = (torch.ceil(z_1 / self.cut_multiple) * self.cut_multiple).to(torch.int32)\n", - "\n", - " bit_exp = 2**z_0\n", - " \n", - " #--------------- \n", - "\n", - " x_sample = b[0][0]\n", - " xs = torch.zeros((len(b), z_0, z_1), dtype=x_sample.dtype, device=x_sample.device)\n", - "\n", - " y_sample = b[0][1]\n", - " ys = torch.zeros((len(b), *y_sample.shape), dtype=y_sample.dtype, device=y_sample.device)\n", - "\n", - " U_sample = b[0][2]\n", - " Us = torch.zeros((len(b), 2, bit_exp, bit_exp), dtype=U_sample.dtype, device=U_sample.device)\n", - " \n", - " for i,(x,y,U,z) in enumerate(b):\n", - " xs[i] = x[:z_0, :z_1]\n", - " ys[i] = y\n", - " Us[i] = 
U[:, :bit_exp, :bit_exp]\n", - " \n", - " return xs, ys, Us \n", - "\n", - " def cut_padding_collate_fn_compilation_params(self, b):\n", - " \"\"\"this function is called for training for every batch, order in b is store dict\"\"\" \n", - " # {'x': 'tensor', 'y': 'numpy', 'params': 'tensor', 'U': 'tensor', 'z': 'tensor'}\n", - " \n", - " z_0 = max(x[4][0] for x in b) # space\n", - " z_1 = max(x[4][1] for x in b) # time\n", - " \n", - " #round time to next multiple of cut_multiple for conv layers!\n", - " z_1 = (torch.ceil(z_1 / self.cut_multiple) * self.cut_multiple).to(torch.int32)\n", - "\n", - " bit_exp = 2**z_0\n", - " \n", - " #--------------- \n", - "\n", - " x_sample = b[0][0]\n", - " xs = torch.zeros((len(b), z_0, z_1), dtype=x_sample.dtype, device=x_sample.device)\n", - "\n", - " y_sample = b[0][1]\n", - " ys = torch.zeros((len(b), *y_sample.shape), dtype=y_sample.dtype, device=y_sample.device)\n", - "\n", - " p_sample = b[0][2]\n", - " ps = torch.zeros((len(b), p_sample.shape[-2], z_1), dtype=p_sample.dtype, device=p_sample.device)\n", - " \n", - " U_sample = b[0][3]\n", - " Us = torch.zeros((len(b), 2, bit_exp, bit_exp), dtype=U_sample.dtype, device=U_sample.device)\n", - " \n", - " for i,(x,y,p,U,z) in enumerate(b):\n", - " xs[i] = x[:z_0, :z_1]\n", - " ys[i] = y\n", - " ps[i] = p[:, :z_1]\n", - " Us[i] = U[:, :bit_exp, :bit_exp]\n", - " \n", - " return xs, ys, ps, Us \n", - " \n", - " #-----------------------------------\n", - "\n", - " def get_dataloaders(self, batch_size, text_encoder, p_valid=0.1, y_on_cpu=False):\n", - " self.text_encoder = text_encoder\n", - " \n", - " excepts = []\n", - " if y_on_cpu: excepts.append(\"y\")\n", - " if self.params_config.dataset_to_gpu: self.to(\"cuda\", excepts=excepts)\n", - " \n", - " x_proc, y_proc, *z_proc = Qc_Config_Dataset.x_y_preprocess(self, balance_max=None, shuffle=False) # ... 
z_proc is `'z' and all other 'c'\n", - " \n", - " if self.bucket_batch_size <= 0: \n", - " y_proc = self.caching(y_proc, y_on_cpu=y_on_cpu)\n", - " \n", - " else: \n", - " y_proc = self.caching([yi.reshape((-1)) for yi in y_proc], y_on_cpu=y_on_cpu)\n", - " y_proc = y_proc.reshape((-1, self.bucket_batch_size))\n", - " \n", - " x_proc, y_proc, *z_proc = shuffle_tensor_dataset(x_proc, y_proc, *z_proc) #only possible after str y is cached as tensor\n", - " x, x_valid, y, y_valid, (z, z_valid) = self.valid_split(x_proc, y_proc, *z_proc, p_valid=p_valid)\n", - " \n", - " ds = TensorDataset(x, y, *z)\n", - " ds_valid = TensorDataset(x_valid, y_valid, *z_valid)\n", - "\n", - " collate_fn = getattr(self, self.collate_fn)\n", - " \n", - " if self.params_config.dataset_to_gpu: \n", - " train_loader = DataLoader(dataset=ds , batch_size=batch_size, shuffle=True, collate_fn=collate_fn)\n", - " valid_loader = DataLoader(dataset=ds_valid, batch_size=batch_size, shuffle=True, collate_fn=collate_fn)\n", - "\n", - " else: \n", - " train_loader = DataLoader(dataset=ds , batch_size=batch_size, shuffle=True, pin_memory=True, num_workers=12, collate_fn=collate_fn)\n", - " valid_loader = DataLoader(dataset=ds_valid, batch_size=batch_size, shuffle=True, pin_memory=True, num_workers=12, collate_fn=collate_fn)\n", - "\n", - " self.dataloaders = DataLoaders(train_loader, valid_loader) \n", - " return self.dataloaders\n", - " \n", - " #-----------------------------------\n", - " \n", - " @staticmethod\n", - " def from_datasets(datasets: list[Qc_Config_Dataset], balance_maxes: list, pad_constant, device: torch.device=torch.device(\"cpu\"), bucket_batch_size=None, max_samples=None, **parameters):\n", - " assert pad_constant != 0, \"can NOT be 0! 
and not any other gate!\"\n", - " \n", - " xs = []\n", - " ys = []\n", - " zs = []\n", - " cs = []\n", - " \n", - " cut_multiple = Mixed_Cached_OpenClip_Dataset.cut_multiple\n", - " \n", - " max_qubits = max(dataset.params_config.num_of_qubits for dataset in datasets)\n", - " max_gates = max(dataset.params_config.max_gates for dataset in datasets)\n", - " max_gates = int(np.ceil(max_gates /cut_multiple) * cut_multiple)\n", - " \n", - " parameters[\"num_of_qubits\"] = max_qubits\n", - " parameters[\"max_gates\"] = max_gates\n", - " parameters[\"random_samples\"] = sum([dataset.params_config.random_samples for dataset in datasets])\n", - " parameters[\"min_gates\"] = min([dataset.params_config.min_gates for dataset in datasets])\n", - " parameters[\"comment\"] = f\"Generated with 'from_datasets' with {len(datasets)} datasets. Qubits: {[dataset.params_config.num_of_qubits for dataset in datasets]}.\"\n", - " parameters[\"pad_constant\"] = pad_constant\n", - " parameters[\"bucket_batch_size\"] = bucket_batch_size\n", - " \n", - " parameters[\"store_dict\"] = {}\n", - " for dataset in datasets:\n", - " parameters[\"store_dict\"] |= dataset.params_config.store_dict #needs python 3.9 for union of dict \n", - " parameters[\"store_dict\"][\"z\"] = \"tensor\" #add special item\n", - "\n", - " if isinstance(max_samples, int):\n", - " max_samples = [max_samples] * len(datasets)\n", - " else:\n", - " assert isinstance(max_samples, (list, np.ndarray))\n", - " max_samples = np.array(max_samples, dtype=int)\n", - "\n", - " if isinstance(balance_maxes, int):\n", - " balance_maxes = [balance_maxes] * len(datasets)\n", - " else:\n", - " assert isinstance(balance_maxes, (list, np.ndarray))\n", - " balance_maxes = np.array(balance_maxes, dtype=int)\n", - " \n", - " for i, (dataset, balance_max) in tqdm(enumerate(zip(datasets,balance_maxes)), total=len(datasets)):\n", - " # do x_y_preprocess now, we can't balance all together with mixed conditions\n", - " \n", - " dataset = 
dataset.to(device)\n", - " \n", - " x, y, *c = dataset.x_y_preprocess(balance_max=balance_max, max_samples=max_samples[i], shuffle=True) \n", - " x = x.to(device) # [b, s, t] \n", - " \n", - " print(f\" - dataset size after balancing {x.shape[0]}\")\n", - "\n", - " #-------\n", - " # store original size\n", - " z = torch.zeros((x.shape[0], 2), device=device, dtype=torch.int32)\n", - " z[:, 0] = max(dataset.params_config.num_of_qubits, 1)\n", - " \n", - " red_x = torch.sum(x.abs(), dim=1) # [b, t] .. collaps the zeros to get circuit length\n", - " z[:, 1] = torch.count_nonzero(red_x, dim=1) # [b] \n", - " z[z[:, 1]==0, 1] = 1 # make sure we don*t have 0, so we cheat and set it to 1 (there's only 1 unique zero gate circuit anyways). Needed for padding attn mask \n", - " \n", - " for i in range(x.shape[0]):\n", - " x[i, z[i, 0]:, :] = pad_constant\n", - " x[i, :, z[i, 1]:] = pad_constant\n", - " \n", - " z[:, 1] = (torch.ceil(z[:, 1] / cut_multiple) * cut_multiple).to(torch.int32) #for cut needs multiple\n", - "\n", - " #-------\n", - " # now pad x, padding is defined from last dim forward! 
\n", - " pad = (0, max_gates-dataset.params_config.max_gates, 0, max_qubits-dataset.params_config.num_of_qubits) \n", - " x = F.pad(x, pad, \"constant\", pad_constant)\n", - " \n", - " # if c is missing something of the union we set it to a zero tensor\n", - " for k,v in parameters[\"store_dict\"].items(): \n", - " if k != \"x\" and k != \"y\" and k != \"z\":\n", - " \n", - " if k not in dataset.params_config.store_dict:\n", - " empty_tensor = torch.zeros((1,), device=device)\n", - " \n", - " if k == \"U\": #scetchy hardcoded for compilation\n", - " empty_tensor = torch.zeros((x.shape[0], 2, 1, 1), device=device) # unitary is [b, Re/Im, 2^n, 2^n]\n", - " \n", - " assert len(c) == 0\n", - " c.append(empty_tensor) #scetchy bcs if c is not empty we could break ordering!!!\n", - " \n", - " #combine datasets\n", - " xs.append(x.cpu()) \n", - " ys.append(y)\n", - " zs.append(z) \n", - " cs.append([*c])\n", - "\n", - " dataset = dataset.to(\"cpu\") #helps with gpu mem overflowing\n", - " #-----------------\n", - "\n", - " has_U = \"U\" in parameters[\"store_dict\"]\n", - " has_p = \"params\" in parameters[\"store_dict\"]\n", - " \n", - " if bucket_batch_size > 0:\n", - " collate_fn_name = Mixed_Cached_OpenClip_Dataset.cut_padding_Bucket_collate_fn.__name__\n", - " if has_U: \n", - " collate_fn_name = Mixed_Cached_OpenClip_Dataset.cut_padding_Bucket_collate_fn_compilation.__name__\n", - " if has_p: \n", - " collate_fn_name = Mixed_Cached_OpenClip_Dataset.cut_padding_Bucket_collate_fn_compilation_params.__name__\n", - " \n", - " else:\n", - " collate_fn_name = Mixed_Cached_OpenClip_Dataset.cut_padding_collate_fn.__name__ \n", - " if has_U: \n", - " collate_fn_name = Mixed_Cached_OpenClip_Dataset.cut_padding_collate_fn_compilation.__name__\n", - " if has_p: \n", - " collate_fn_name = Mixed_Cached_OpenClip_Dataset.cut_padding_collate_fn_compilation_params.__name__\n", - "\n", - " parameters[\"collate_fn\"] = collate_fn_name\n", - " \n", - " #-----------------\n", - " if 
bucket_batch_size > 0:\n", - " for i, (xi,yi,zi, ci) in enumerate(zip(xs, ys, zs, cs)): #cut rest of batch \n", - " b_mult = int(np.floor(xi.shape[0] / bucket_batch_size) * bucket_batch_size) \n", - " \n", - " xs[i] = xi[None, :b_mult].reshape((b_mult//bucket_batch_size, bucket_batch_size, *xi.shape[1:])) \n", - " zs[i] = zi[None, :b_mult].reshape((b_mult//bucket_batch_size, bucket_batch_size, *zi.shape[1:]))\n", - " \n", - " t = parameters[\"store_dict\"][\"y\"]\n", - " if v == \"tensor\" or v == \"numpy\": \n", - " ys[i] = yi[None, :b_mult].reshape((b_mult//bucket_batch_size, bucket_batch_size, *yi.shape[1:])) \n", - " else: raise NotImplementedError(\"\")\n", - " \n", - " #----\n", - " #For U, etc\n", - " add_ind = 0\n", - " for k,v in parameters[\"store_dict\"].items(): \n", - " if k != \"x\" and k != \"y\" and k != \"z\": \n", - " if v == \"tensor\" or v == \"numpy\": \n", - " cs[i][add_ind] = ci[add_ind][None, :b_mult].reshape((b_mult//bucket_batch_size, bucket_batch_size, *ci[add_ind].shape[1:])) \n", - " else: raise NotImplementedError(\"\") \n", - " add_ind += 1 \n", - " \n", - " x = torch.cat(xs)\n", - " y = ys # torch.cat(ys) is wrong, y is list of numpy or str!! 
not a tensor\n", - " z = torch.cat(zs)\n", - " c = cs\n", - " \n", - " #----------------- \n", - " \n", - " mixed_Cached_OpenClip_Dataset = Mixed_Cached_OpenClip_Dataset(device, **parameters) \n", - " mixed_Cached_OpenClip_Dataset.x = x\n", - " mixed_Cached_OpenClip_Dataset.y = y\n", - " mixed_Cached_OpenClip_Dataset.z = z\n", - " \n", - " add_ind = 0\n", - " for k,v in parameters[\"store_dict\"].items(): \n", - " if k != \"x\" and k != \"y\" and k != \"z\": \n", - " \n", - " if v == \"tensor\" and k == \"U\": # hardcoded U padding !!\n", - " \n", - " n = sum([ci[add_ind].shape[0] for ci in c])\n", - " if bucket_batch_size > 0: shape = (n, bucket_batch_size, 2, 2**max_qubits, 2**max_qubits)\n", - " else: shape = (n, 2, 2**max_qubits, 2**max_qubits)\n", - " \n", - " # allocating zeros is better memory wise than torch.cat(ci_s) and F.pad(ci, pad, \"constant\", 0)\n", - " mem = np.prod(shape) * c[0][add_ind].element_size() * 1e-9\n", - " print(f\"[INFO]: allocate memory for {k} {shape} on {c[0][add_ind].device} approx. 
{mem:.3f} GB\")\n", - " ci_s = torch.zeros(shape, device=c[0][add_ind].device) \n", - " \n", - " run_i = 0\n", - " for i,ci in enumerate(c):\n", - " ci = ci[add_ind] \n", - " if bucket_batch_size > 0: ci_s[run_i:run_i+ci.shape[0], :, :, :ci.shape[-2], :ci.shape[-1]] = ci \n", - " else: ci_s[run_i:run_i+ci.shape[0], :, :ci.shape[-2], :ci.shape[-1]] = ci \n", - " run_i += ci.shape[0]\n", - "\n", - " elif v == \"tensor\" and k == \"params\": # hardcoded paramter padding !!\n", - "\n", - " max_params = max(ci[add_ind].shape[-2] for ci in c)\n", - " \n", - " n = sum(ci[add_ind].shape[0] for ci in c)\n", - " if bucket_batch_size > 0: shape = (n, bucket_batch_size, max_params, max_gates)\n", - " else: shape = (n, max_params, max_gates)\n", - "\n", - " # allocating zeros is better memory wise than torch.cat(ci_s) and F.pad(ci, pad, \"constant\", 0)\n", - " mem = np.prod(shape) * c[0][add_ind].element_size() * 1e-9\n", - " print(f\"[INFO]: allocate memory for {k} {shape} on {c[0][add_ind].device} approx. 
{mem:.3f} GB\")\n", - " ci_s = torch.zeros(shape, device=c[0][add_ind].device) \n", - " \n", - " run_i = 0\n", - " for i,ci in enumerate(c):\n", - " ci = ci[add_ind] \n", - " if bucket_batch_size > 0: ci_s[run_i:run_i+ci.shape[0], :, :ci.shape[-2], :ci.shape[-1]] = ci \n", - " else: ci_s[run_i:run_i+ci.shape[0], :ci.shape[-2], :ci.shape[-1]] = ci \n", - " run_i += ci.shape[0]\n", - " \n", - " elif v == \"numpy\": raise NotImplementedError(\"\") \n", - " else: raise NotImplementedError(\"\") \n", - " \n", - " setattr(mixed_Cached_OpenClip_Dataset, str(k), ci_s)\n", - " add_ind += 1\n", - " \n", - " return mixed_Cached_OpenClip_Dataset\n", - " \n", - " #------------------------------------\n", - " \n", - " # def plot_example(self): print(\"plot_example not implemented for Mixed_Cached_OpenClip_Dataset\")\n", - " # def plot_distribution(self): print(\"plot_distribution not implemented for Mixed_Cached_OpenClip_Dataset\")\n", - " \n", - " @staticmethod\n", - " def from_config_file(config_path, device: torch.device, save_path: str=None):\n", - " config = load_config(config_path)\n", - " config[\"target\"] = class_to_str(Mixed_Cached_OpenClip_Dataset) \n", - " return Config_Dataset.from_config(config, device, save_path)" - ] - }, - { - "cell_type": "markdown", - "id": "f41f26a8-ac40-4e91-8c0e-1ef07a0fd4f4", - "metadata": {}, - "source": [ - "# Export -" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "a0474216-8e0c-4ba7-9a37-571ac7d8e82c", - "metadata": {}, - "outputs": [], - "source": [ - "#| hide\n", - "import nbdev; nbdev.nbdev_export()" - ] - } - ], - "metadata": { - "kernelspec": { - "display_name": "python3", - "language": "python", - "name": "python3" - }, - "widgets": { - "application/vnd.jupyter.widget-state+json": { - "state": {}, - "version_major": 2, - "version_minor": 0 - } - } - }, - "nbformat": 4, - "nbformat_minor": 5 -} diff --git a/src/dataset/qc_dataset.ipynb b/src/dataset/qc_dataset.ipynb deleted file mode 100644 index 
87c7984..0000000 --- a/src/dataset/qc_dataset.ipynb +++ /dev/null @@ -1,286 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "id": "a8980c24-d62e-462b-ba89-3195cfdcc374", - "metadata": {}, - "source": [ - "# Quantum circuit dataset" - ] - }, - { - "cell_type": "markdown", - "id": "233e6242-46fc-451b-9e58-abf60b1216ef", - "metadata": {}, - "source": [ - "Dataset for quantum circuits." - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "a8832bdd-f61c-44e1-8619-a9cb352ba768", - "metadata": {}, - "outputs": [], - "source": [ - "#| default_exp dataset.qc_dataset" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "06272f6f-b4e3-4504-a90a-feebbf6ad821", - "metadata": {}, - "outputs": [], - "source": [ - "#| export\n", - "from genQC.imports import *\n", - "from genQC.dataset.config_dataset import Config_Dataset, Config_Dataset_config\n", - "from genQC.config_loader import *\n", - "from genQC.dataset.dataset_helper import *\n", - "from genQC.platform.qcircuit_dataset_construction import decode_circuit\n", - "from genQC.platform.simulation.qcircuit_sim import schmidt_rank_vector, instruction_name_to_qiskit_gate\n", - "import qiskit.quantum_info as qi" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "016fc327-f986-4d69-b5f0-1b39466fb528", - "metadata": {}, - "outputs": [], - "source": [ - "#| export\n", - "@dataclass\n", - "class Qc_Config_Dataset_config(Config_Dataset_config):\n", - " optimized: bool\n", - " dataset_to_gpu: bool\n", - " random_samples: int \n", - " num_of_qubits: int \n", - " min_gates: int \n", - " max_gates: int \n", - " gate_pool: list[str] " - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "36032308-bd0e-4409-9db0-9d89fc258e5a", - "metadata": {}, - "outputs": [], - "source": [ - "#| export\n", - "class Qc_Config_Dataset(Config_Dataset):\n", - " \"\"\"Dataset for quantum circuits, access `gate_pool` directly and all other paras with `.params_config`\"\"\"\n", - " 
\n", - " req_params = [f.name for f in dataclasses.fields(Qc_Config_Dataset_config)]\n", - " add_balance_fn = None\n", - " \n", - " def __init__(self, device: torch.device=torch.device(\"cpu\"), **parameters):\n", - " super().__init__(device, **parameters) \n", - " self.gate_pool = parameters[\"gate_pool\"] #[get_obj_from_str(gate) for gate in parameters[\"gate_pool\"]] \n", - " \n", - " @property\n", - " def params_config(self):\n", - " params_config = super().params_config \n", - " #params_config[\"gate_pool\"] = [class_to_str(gate) for gate in params_config[\"gate_pool\"]]\n", - " params_config = Qc_Config_Dataset_config(**params_config)\n", - " return params_config \n", - " \n", - " #----------------------------\n", - " \n", - " def x_y_preprocess(self, balance_max=None, shuffle=False, max_samples=None):\n", - " #params_config = self.params_config\n", - " #if params_config.dataset_to_gpu: self.to(\"cuda\")\n", - " \n", - " z_proc = []\n", - " for k,v in self.store_dict.items(): \n", - " if k != \"x\" and k != \"y\":\n", - " z_proc.append(getattr(self, k))\n", - " \n", - " x_proc, y_proc = self.x, self.y\n", - " \n", - " #---------------------\n", - " if shuffle:\n", - " x_proc, y_proc, *z_proc = shuffle_tensor_dataset(x_proc, y_proc, *z_proc)\n", - " \n", - " if exists(max_samples):\n", - " x_proc = x_proc[:max_samples]\n", - " y_proc = y_proc[:max_samples]\n", - " z_proc = (iz[:max_samples] for iz in z_proc) \n", - " \n", - " #---------------------\n", - " t = self.store_dict[\"y\"]\n", - " if exists(balance_max): \n", - " if t == \"tensor\" or t == \"numpy\": x_proc, y_proc, *z_proc = balance_tensor_dataset(x_proc, y_proc, *z_proc, make_unique=True, \n", - " samples=balance_max, add_balance_fn=self.add_balance_fn) \n", - " else: print(f\"[WARNING]: Unsupported y type: `{t}`. Not balancing dataset!\")\n", - " else: print(f\"[INFO]: Not balancing dataset! 
{balance_max=}\")\n", - " \n", - " #---------------------\n", - " if shuffle:\n", - " x_proc, y_proc, *z_proc = shuffle_tensor_dataset(x_proc, y_proc, *z_proc)\n", - " \n", - " return x_proc, y_proc, *z_proc\n", - " \n", - " def valid_split(self, x, y, *z, p_valid=0.1):\n", - " splits = max(int(x.shape[0] * p_valid), 1)\n", - " x, x_valid = x[splits:].clone(), x[:splits].clone() \n", - " \n", - " t = self.store_dict[\"y\"]\n", - " if t == \"tensor\" : y, y_valid = y[splits:].clone(), y[:splits].clone() \n", - " elif t == \"numpy\": y, y_valid = y[splits:] , y[:splits] \n", - " \n", - " else: raise NotImplementedError(\"Not implemented\")\n", - " \n", - " try:\n", - " z = list(iz[splits:].clone() for iz in z)\n", - " z_valid = list(iz[:splits].clone() for iz in z) \n", - " except:\n", - " z = list(iz[splits:] for iz in z)\n", - " z_valid = list(iz[:splits] for iz in z) \n", - " \n", - " return x, x_valid, y, y_valid, (z, z_valid)\n", - " \n", - " def get_dataloaders(self, batch_size, p_valid=0.1, balance_max=None, max_samples=None, y_on_cpu=False):\n", - " \n", - " excepts = []\n", - " if y_on_cpu: excepts.append(\"y\")\n", - " if self.params_config.dataset_to_gpu: self.to(\"cuda\", excepts=excepts)\n", - " \n", - " x_proc, y_proc, *z_proc = self.x_y_preprocess(balance_max=balance_max, max_samples=max_samples) \n", - " x, x_valid, y, y_valid, (z, z_valid) = self.valid_split(x_proc, y_proc, *z_proc, p_valid=p_valid)\n", - " \n", - " ds = TensorDataset(x, y, *z)\n", - " ds_valid = TensorDataset(x_valid, y_valid, *z_valid)\n", - " \n", - " if self.params_config.dataset_to_gpu: \n", - " train_loader = DataLoader(dataset=ds , batch_size=batch_size, shuffle=True)\n", - " valid_loader = DataLoader(dataset=ds_valid, batch_size=batch_size, shuffle=True)\n", - "\n", - " else: \n", - " train_loader = DataLoader(dataset=ds , batch_size=batch_size, shuffle=True, pin_memory=True, num_workers=12)\n", - " valid_loader = DataLoader(dataset=ds_valid, batch_size=batch_size, 
shuffle=True, pin_memory=True, num_workers=12)\n", - "\n", - " self.dataloaders = DataLoaders(train_loader, valid_loader) \n", - " return self.dataloaders\n", - "\n", - " #----------------------------\n", - " \n", - " def plot_example(self):\n", - " params_config = self.params_config\n", - " enc_tensor = self.x[0]\n", - " \n", - " while enc_tensor.dim()>2: enc_tensor=enc_tensor[0]\n", - "\n", - " params = None\n", - " if hasattr(self, \"params\"): params=self.params[0]\n", - "\n", - " if isinstance(self.gate_pool[0], str):\n", - " gate_pool = [instruction_name_to_qiskit_gate(gate) for gate in self.gate_pool]\n", - " else:\n", - " gate_pool = self.gate_pool\n", - " \n", - " qc = decode_circuit(enc_tensor, gate_pool, params_tensor=params)\n", - " \n", - " t = self.store_dict[\"y\"]\n", - " if t == \"tensor\" : label = self.y[0].cpu().tolist()\n", - " elif t == \"tensor_list\": \n", - " print(\"Not implemented\")\n", - " return\n", - " else : \n", - " label = self.y[0]#.tolist()\n", - " while len(label.shape)>0: label=label[0]\n", - " \n", - " print(f\"Label: ``{label}`` SRV is: {schmidt_rank_vector(qi.DensityMatrix(qc))}\")\n", - " display(qc.draw(\"mpl\", plot_barriers=False))\n", - "\n", - " def plot_distribution(self):\n", - " if hasattr(self, \"dataloaders\"): x, y, *z = self.dataloaders.train.dataset.tensors \n", - " else: x, y = self.x, self.y\n", - " \n", - " t = self.store_dict[\"y\"]\n", - " if t == \"tensor\" : data={\"svr\":[iy for iy in y.cpu().tolist()]}\n", - " elif t == \"numpy\": data={\"svr\":[iy for iy in y.tolist()]}\n", - " else: # list tensor_list \n", - " print(\"Not implemented\")\n", - " return\n", - " \n", - " print(\"Train dataset (x, y):\", x.shape, y.shape)\n", - " print(\"Train uniques x :\", torch.unique(x, dim=0).shape) \n", - " \n", - " #real data distribution \n", - " df = pd.DataFrame(data) \n", - " cnts = df['svr'].value_counts(normalize=True)\n", - " for n,v in zip(cnts.index, cnts.values): print(f\"{n}: {v*100:.1f}%\") \n", - " ax 
= df['svr'].value_counts().plot(kind='bar')" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "e6322ed9-c703-41df-88a3-6b163c051af1", - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "{'target': '__main__.Qc_Config_Dataset',\n", - " 'device': 'cpu',\n", - " 'comment': '',\n", - " 'save_path': None,\n", - " 'save_datetime': '08/26/2024 21:37:39',\n", - " 'params': Qc_Config_Dataset_config(store_dict={'x': 'tensor', 'y': 'tensor_list'}, optimized=None, dataset_to_gpu=None, random_samples=None, num_of_qubits=None, min_gates=None, max_gates=None, gate_pool=['h', 'cx', 'x'])}" - ] - }, - "execution_count": null, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "init = {k:None for k in Qc_Config_Dataset.req_params}\n", - "init[\"gate_pool\"] = [\"h\", \"cx\", \"x\"]\n", - "init[\"store_dict\"] = {\"x\":\"tensor\", \"y\":\"tensor_list\"}\n", - "\n", - "a = Qc_Config_Dataset(**init)\n", - "a.get_config()" - ] - }, - { - "cell_type": "markdown", - "id": "f41f26a8-ac40-4e91-8c0e-1ef07a0fd4f4", - "metadata": {}, - "source": [ - "# Export -" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "a0474216-8e0c-4ba7-9a37-571ac7d8e82c", - "metadata": {}, - "outputs": [], - "source": [ - "#| hide\n", - "import nbdev; nbdev.nbdev_export()" - ] - } - ], - "metadata": { - "kernelspec": { - "display_name": "python3", - "language": "python", - "name": "python3" - }, - "widgets": { - "application/vnd.jupyter.widget-state+json": { - "state": {}, - "version_major": 2, - "version_minor": 0 - } - } - }, - "nbformat": 4, - "nbformat_minor": 5 -} diff --git a/src/examples/0_hello_circuit.ipynb b/src/examples/0_hello_circuit.ipynb deleted file mode 100644 index 19a5d0e..0000000 --- a/src/examples/0_hello_circuit.ipynb +++ /dev/null @@ -1,534 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "id": "69a855f1-55dd-482e-94f2-9ad02804be4d", - "metadata": {}, - "source": [ - "# Generate a circuit" - ] - }, 
- { - "cell_type": "markdown", - "id": "e41e2465-49d8-46b8-b046-6ae1becfb268", - "metadata": {}, - "source": [ - "A minimal example to generate a circuit. We load a pre-trained (SRV, 3 to 8 qubit) model and condition on a given Schmidt-Rank-Vector (SRV)." - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "3bde494e-9091-41a4-a601-bbcf9712c564", - "metadata": {}, - "outputs": [], - "source": [ - "from genQC.imports import *\n", - "from genQC.pipeline.diffusion_pipeline import DiffusionPipeline\n", - "import genQC.inference.infer_srv as infer_srv\n", - "import genQC.util as util" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "029be4f3-0d9a-4d0a-93d9-2338fda7a983", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "[INFO]: Cuda device has a capability of 8.6 (>= 8), allowing tf32 matmul.\n" - ] - } - ], - "source": [ - "device = util.infer_torch_device() # use cuda if we can\n", - "util.MemoryCleaner.purge_mem() # clean existing memory alloc" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "4f36be1a-a5c6-4484-a096-4a37c9772e84", - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "device(type='cuda')" - ] - }, - "execution_count": null, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "device" - ] - }, - { - "cell_type": "markdown", - "id": "f77a3020-247c-4ac0-aaf1-ee5c371b5f06", - "metadata": {}, - "source": [ - "## Setup and load" - ] - }, - { - "cell_type": "markdown", - "id": "742ae430-46f2-4099-ac8f-f422a4ddc1dc", - "metadata": {}, - "source": [ - "Load the pre-trained model directly from [Hugging Face: Floki00/qc_srv_3to8qubit](https://huggingface.co/Floki00/qc_srv_3to8qubit)." 
- ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "e5d60c23-9514-4432-bc82-622c088fced6", - "metadata": {}, - "outputs": [ - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "dc1b8a80999f45f1a474f9ee25f32f4f", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "Fetching 2 files: 0%| | 0/2 [00:00" - ] - }, - "metadata": {}, - "output_type": "display_data" - } - ], - "source": [ - "fig, axs = plt.subplots(2, 4, figsize=(18,5), constrained_layout=True)\n", - "for qc,is_srv,ax in zip(qc_list, srv_list, axs.flatten()): \n", - " is_srv = [int(x) for x in is_srv]\n", - " qc.draw(\"mpl\", plot_barriers=False, ax=ax, style = \"clifford\")\n", - " ax.set_title(f\"{'Correct' if is_srv==srv else 'NOT correct'}, is SRV = {is_srv}\")\n", - "plt.show()" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "0244290d-5c57-4b70-b670-a839876a9ccf", - "metadata": {}, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "52132fa0-9208-442d-a31d-af65bcfba714", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "genQC Version 0.1.0\n" - ] - } - ], - "source": [ - "import genQC\n", - "print(\"genQC Version\", genQC.__version__)" - ] - } - ], - "metadata": { - "kernelspec": { - "display_name": "python3", - "language": "python", - "name": "python3" - }, - "widgets": { - "application/vnd.jupyter.widget-state+json": { - "state": {}, - "version_major": 2, - "version_minor": 0 - } - } - }, - "nbformat": 4, - "nbformat_minor": 5 -} diff --git a/src/examples/2_unitary_compilation.ipynb b/src/examples/2_unitary_compilation.ipynb deleted file mode 100644 index e6d40fe..0000000 --- a/src/examples/2_unitary_compilation.ipynb +++ /dev/null @@ -1,653 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "id": "69a855f1-55dd-482e-94f2-9ad02804be4d", - "metadata": {}, - "source": [ - "# Compile unitaries" - ] - }, - { - 
"cell_type": "markdown", - "id": "e41e2465-49d8-46b8-b046-6ae1becfb268", - "metadata": {}, - "source": [ - "In this notebook we want use the unitary compilation model." - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "3bde494e-9091-41a4-a601-bbcf9712c564", - "metadata": {}, - "outputs": [], - "source": [ - "from genQC.imports import *\n", - "from genQC.pipeline.diffusion_pipeline import DiffusionPipeline\n", - "from qiskit import QuantumCircuit\n", - "from genQC.inference.infer_compilation import generate_comp_tensors, get_gate_and_U_acc\n", - "from genQC.printing import display_colums\n", - "from genQC.platform.simulation.qcircuit_sim import instruction_name_to_qiskit_gate\n", - "import genQC.platform.qcircuit_dataset_construction as data_const\n", - "import qiskit.quantum_info as qi\n", - "import genQC.util as util\n", - "import ast" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "029be4f3-0d9a-4d0a-93d9-2338fda7a983", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "[INFO]: Cuda device has a capability of 8.6 (>= 8), allowing tf32 matmul.\n" - ] - } - ], - "source": [ - "device = util.infer_torch_device() # use cuda if we can\n", - "util.MemoryCleaner.purge_mem() # clean existing memory alloc" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "3d4ad484-835f-45fd-8772-212ec7ff00c5", - "metadata": {}, - "outputs": [], - "source": [ - "def str_cond_to_gate_indices(y): # helper function, used to check if only allowed gates were used by the model!\n", - " assert y[:15] == \"Compile using: \"\n", - " c = ast.literal_eval(y[15:]) \n", - " gate_classes = data_const.gate_pool_to_gate_classes([instruction_name_to_qiskit_gate(gate) for gate in pipeline.gate_pool])\n", - " gate_clrs = [0] + [gate_classes[ic] for ic in c] # 0 is empty, always allowed!\n", - " return gate_clrs" - ] - }, - { - "cell_type": "markdown", - "id": 
"f77a3020-247c-4ac0-aaf1-ee5c371b5f06", - "metadata": {}, - "source": [ - "## Setup and load" - ] - }, - { - "cell_type": "markdown", - "id": "742ae430-46f2-4099-ac8f-f422a4ddc1dc", - "metadata": {}, - "source": [ - "Load the pre-trained model directly from [Hugging Face: Floki00/qc_unitary_3qubit](https://huggingface.co/Floki00/qc_unitary_3qubit)." - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "e5d60c23-9514-4432-bc82-622c088fced6", - "metadata": {}, - "outputs": [ - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "95b3e6a8ad944d45b38703e92799bc84", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "Fetching 2 files: 0%| | 0/2 [00:00" - ] - }, - "metadata": {}, - "output_type": "display_data" - } - ], - "source": [ - "compile_and_plot(U, prompt)" - ] - }, - { - "cell_type": "markdown", - "id": "8914651c-a30e-4a5b-aaa4-d98debd7147a", - "metadata": {}, - "source": [ - "#### Exercise 2" - ] - }, - { - "cell_type": "markdown", - "id": "3d0c618b-2e8d-4037-a1ec-482324112fb8", - "metadata": {}, - "source": [ - "Inspired from [(quantumcomputing.stackexchange.com/questions/12439/procedures-and-intuition-for-designing-simple-quantum-circuits/12440)](https://quantumcomputing.stackexchange.com/questions/12439/procedures-and-intuition-for-designing-simple-quantum-circuits/12440). Note, this unitary WAS in the training set." 
- ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "1b170062-d7aa-4bef-b1d1-a68e85e682ba", - "metadata": {}, - "outputs": [], - "source": [ - "U = np.matrix([[1,0,0,0,0,0,0,0],\n", - " [0,0,0,0,0,0,0,1],\n", - " [0,1,0,0,0,0,0,0],\n", - " [0,0,1,0,0,0,0,0],\n", - " [0,0,0,1,0,0,0,0],\n", - " [0,0,0,0,1,0,0,0],\n", - " [0,0,0,0,0,1,0,0],\n", - " [0,0,0,0,0,0,1,0]], dtype=np.complex128) \n", - "\n", - "assert np.allclose(U.H@U, np.identity(2**num_of_qubits)) and np.allclose(U@U.H, np.identity(2**num_of_qubits)) #check if unitary" - ] - }, - { - "cell_type": "markdown", - "id": "dc81558e-a227-4490-94bc-044ba6dcd502", - "metadata": {}, - "source": [ - "Plot correct (exact) compiled circuits:" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "3886fbf5-3f6f-4a44-89b7-d0a332a69334", - "metadata": {}, - "outputs": [ - { - "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAABxMAAAETCAYAAAD9HCj7AAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjkuMiwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8hTgPZAAAACXBIWXMAAA9hAAAPYQGoP6dpAACQw0lEQVR4nOzddXgU1xoG8Hd24x5IIGgSLCS4u7u7liJtcatAb0txaCmlUJxCi7sUKe5OcScJEoJ7iPvuzv0jTUpggcxmd2fl/T3PfZ7b7J45H5PJnJnzHRFEURRBRERERERERERERERERPQWhdwBEBEREREREREREREREZFpYjKRiIiIiIiIiIiIiIiIiLRiMpGIiIiIiIiIiIiIiIiItGIykYiIiIiIiIiIiIiIiIi0YjKRiIiIiIiIiIiIiIiIiLRiMpGIiIiIiIiIiIiIiIiItGIykYiIiIiIiIiIiIiIiIi0YjKRiIiIiIiIiIiIiIiIiLRiMpGIiIiIiIiIiIiIiIiItGIyUWaCIGD8+PEZ/71s2TIIgoB79+7JFtOHvB2v3Hr37g1BECAIAkqWLPnB76af2/PnzxspOiICgKioqIy/U0EQ8Ouvv8odEhERERERERERERFlkUUkE8PCwtC/f38UKlQIDg4OcHNzQ40aNTBr1iwkJibKHR4ZmJeXF1auXImff/4508/9/Px0TnzWrVsXvXv31qns+PHj4efnp1PZrMjOv6t3796oW7eupDJHjhzJVoJbEAQsW7ZMcrnsnMfsxqwP5h5/drx9nTk7O2PlypX47bff5AuKiIiIiIiIiIiIiHRiI3cA2bVz50506tQJ9vb26NmzJ0qWLImUlBScOHECI0eOxI0bN7Bo0SK5w3yvxMRE2NiYz6/BFON1dnZGjx495A6DiN7D1tYWPXr0wL179/DVV1/JHQ4RERERERERERERSWBaWSGJwsPD0bVrV/j6+uLQoUPIkydPxmeDBw/GnTt3sHPnThkj/DgHBwe5Q5DE3OIlIiIiIiIiIiIiIiIi3Zn1Mqe//PIL4uLisHjx4kyJxHRFihTB8OHDM/5bpVJh0qRJKFy4MOzt7eHn54dRo0YhOTk5Uz
k/Pz+0bNkSR44cQcWKFeHo6IhSpUrhyJEjAIDNmzejVKlScHBwQIUKFXDp0qVM5Xv37g0XFxfcvXsXTZo0gbOzM/LmzYuJEydCFMVM383qHoS7d+9GrVq14OzsDFdXV7Ro0QI3btz4aLnx48dDEIR3fq5tb8bz58+jSZMm8PLygqOjI/z9/fHZZ599MN7049+5cwe9e/eGh4cH3N3d0adPHyQkJGQqm5iYiGHDhsHLywuurq5o3bo1Hj9+rPUchIaG4sGDBx/99+kiOTkZX3/9Nby9veHs7Ix27drh5cuXBqnrTatWrULlypXh5OQET09P1K5dG/v27QMAHDp0CAqFAmPHjs1UZs2aNRAEAQsWLDBITHXr1s20l92b/9NladKPSUxMRPHixVG8ePFMSxC/fv0aefLkQfXq1aFWq/Veb7ozZ86gefPm8PT0hLOzM0qXLo1Zs2Zl+k5oaCg6d+4Mb29vODo6IiAgAD/88IPs8d++fRsdOnSAj48PHBwckD9/fnTt2hXR0dEAgPbt26N8+fKZyrRq1QqCIODvv//OdA4EQcDu3bszYh8xYgRKlSoFFxcXuLm5oVmzZrhy5UqmY6Uvvbp+/XqMGjUKPj4+cHZ2RuvWrfHw4UOD/JuJiIiIiIiIiIiISH5mnUzcvn07ChUqhOrVq2fp+1988QXGjh2L8uXL47fffkOdOnUwZcoUdO3a9Z3v3rlzB927d0erVq0wZcoUREZGolWrVli9ejW++uor9OjRAxMmTEBYWBg6d+4MjUaTqbxarUbTpk2RO3du/PLLL6hQoQLGjRuHcePGSf53rly5Ei1atICLiwumTp2KMWPGIDg4GDVr1tTbnmovXrxA48aNce/ePXz33XeYM2cOPvnkE5w+fTpL5Tt37ozY2FhMmTIFnTt3xrJlyzBhwoRM3+nduzfmzJmD5s2bY+rUqXB0dESLFi20Hi8wMBA9e/bM9r9Lm6FDh+LKlSsYN24cBg4ciO3bt2PIkCEGqSvdhAkT8Omnn8LW1hYTJ07EhAkTUKBAARw6dAgAUL9+fQwaNAhTpkzBxYsXAQBPnz7F0KFD0bBhQwwYMMAgcf3www9YuXJlpv81adIEAJArVy691+fo6Ijly5fjzp07GQk6IG0mcXR0NJYtWwalUqn3egFg//79qF27NoKDgzF8+HBMnz4d9erVw44dOzK+c/XqVVSpUgWHDh1C3759MWvWLLRt2xbbt2+XNf6UlBQ0adIEp0+fxtChQzFv3jz069cPd+/eRVRUFACgVq1auHLlCmJiYgAAoiji5MmTUCgUOH78eMaxjh8/DoVCgRo1agAA7t69i61bt6Jly5aYMWMGRo4ciWvXrqFOnTp48uTJO7H8+OOP2LlzJ/73v/9h2LBh2L9/Pxo2bMj9aYmIiIiIiIiIiIgslWimoqOjRQBimzZtsvT9y5cviwDEL774ItPPR4wYIQIQDx06lPEzX19fEYB46tSpjJ/t3btXBCA6OjqK9+/fz/j5woULRQDi4cOHM37Wq1cvEYA4dOjQjJ9pNBqxRYsWop2dnfjy5cuMnwMQx40bl/HfS5cuFQGI4eHhoiiKYmxsrOjh4SH27ds3U9zPnj0T3d3d3/n528aNGydq+zW/Xc+WLVtEAOK5c+c+eLy3400//meffZbpe+3atRNz5syZ8d8XLlwQAYhffvllpu/17t37nWOm11OnTp0PxiKKaefa19f3o98Txf/+zQ0bNhQ1Gk3Gz7/66itRqVSKUVFRWTqOVLdv3xYVCoXYrl07Ua1WZ/rszTji4+PFIkWKiCVKlBCTkpLEFi1aiG5ubpmuN0M7efKkaGtr+87vU9++//57UaFQiMeOHRM3btwoAhBnzpxpsPpUKpXo7+8v+vr6ipGRkZk+e/N3ULt2bdHV1fWdc/7md+SI/9KlSyIAcePGje/9zrlz50QA4q5du0RRFMWrV6+KAMROnTqJVapUyfhe69atxXLlymX8d1JS0jvXZX
h4uGhvby9OnDgx42eHDx8WAYj58uUTY2JiMn6+YcMGEYA4a9asj/47wsPDRQDitGnTPv6PJiIiIiIiIiIiIiKTYLYzE9Nn37i6umbp+7t27QIAfP3115l+/s033wDAO3srBgUFoVq1ahn/XaVKFQBpM8gKFiz4zs/v3r37Tp1vznYTBAFDhgxBSkoKDhw4kKWYgbTZVFFRUejWrRtevXqV8T+lUokqVarg8OHDWT7Wh3h4eAAAduzYgdTUVMnl3545V6tWLURERGT8nvbs2QMAGDRoUKbvDR06VOvxRFHMWFZW3/r165dp6ddatWpBrVbj/v37Bqlv69at0Gg0GDt2LBSKzH9yb8bh5OSEZcuWISQkBLVr18bOnTvx22+/ZbreDOnZs2fo2LEjypYti/nz5xu0rvHjx6NEiRLo1asXBg0ahDp16mDYsGEGq+/SpUsIDw/Hl19+mXGtp0v/Hbx8+RLHjh3DZ5999s45f3upYGPH7+7uDgDYu3fvO8sHpytXrhxcXFxw7NgxAGkzEPPnz4+ePXvi4sWLSEhIgCiKOHHiBGrVqpVRzt7ePuO6VKvViIiIgIuLCwICAjJmyb6pZ8+eme67HTt2RJ48eTLusURERERERERERERkWcw2mejm5gYAiI2NzdL379+/D4VCgSJFimT6uY+PDzw8PN5JJL2dTEjvzC9QoIDWn0dGRmb6uUKhQKFChTL9rFixYgAgaWnS27dvA0hLYnp7e2f63759+/DixYssH+tD6tSpgw4dOmDChAnw8vJCmzZtsHTp0nf2k3yft8+Xp6cngP/OS/r59/f3z/S9t38fxvCxWPUtLCwMCoUCQUFBH/1ujRo1MHDgQJw9exZNmjR5Z89KQ1GpVOjcuTPUajU2b94Me3t7g9ZnZ2eHJUuWIDw8HLGxsVi6dKnWvT31JSwsDABQsmTJ934nfUDAh76Tztjx+/v74+uvv8aff/4JLy8vNGnSBPPmzcvYLxEAlEolqlWrlrGk6fHjx1GrVi3UrFkTarUap0+fRnBwMF6/fp0pmajRaPDbb7+haNGisLe3h5eXF7y9vXH16tVMx09XtGjRTP8tCAKKFCmityWXiYiIiIiIiIiIiMi0mHUyMW/evLh+/bqkclnt8H/fvmfv+7koipLiyKr0vRhXrlyJ/fv3v/O/bdu2fbD8+/69arX6ne9t2rQJ//zzD4YMGYLHjx/js88+Q4UKFRAXF/fROI19XrLDlGNNTk7OmJEZFhb23llo+jZy5Ej8888/2LBhA/Lnz2+UOvfu3QsASEpKykiamxNjxz99+nRcvXoVo0aNQmJiIoYNG4YSJUrg0aNHGd+pWbMmzp07h6SkpIxkooeHB0qWLInjx49nJBrfTCb+9NNP+Prrr1G7dm2sWrUKe/fuxf79+1GiRIl39oIlIiIiIiIiIiIiIutjtslEAGjZsiXCwsLwzz//fPS7vr6+0Gg073T6P3/+HFFRUfD19dVrbBqN5p2lT2/dugUA8PPzy/JxChcuDADIlSsXGjZs+M7/6tat+8Hy6bPuoqKiMv38fUt6Vq1aFT/++CPOnz+P1atX48aNG1i3bl2W432f9PMfHh6e6ed37tzJ9rFNXeHChaHRaBAcHPzR744bNw4hISH49ddfER4eju+++87g8a1btw4zZ87Er7/+ijp16hi8PgC4evUqJk6ciD59+qBcuXL44osvtM6C05f0v6MPDT5In0mclQEKxo4/XalSpTB69GgcO3YMx48fx+PHj/H7779nfF6rVi2kpKRg7dq1ePz4cUbSsHbt2hnJxGLFiiF37twZZTZt2oR69eph8eLF6Nq1Kxo3boyGDRu+c89I9/Y9VBRF3LlzR9J9jYiIiIiIiIiIiIjMh1knE7/99ls4Ozvjiy++wPPnz9/5PCwsDLNmzQIANG/eHAAwc+bMTN+ZMWMGAKBFixZ6j2/u3LkZ/18URcydOxe2trZo0KBBlo/RpEkTuLm54aefftK6l+
HLly8/WD49iZK+jxoAxMfHY/ny5Zm+FxkZ+c7MvLJlywJAlpc6/ZAmTZoAwDt78c2ZM0fr90NDQ/HgwYNs12sK2rZtC4VCgYkTJ74z0+vNc37mzBn8+uuv+PLLL/HNN99g5MiRmDt3Lo4ePWqw2K5fv44vvvgCPXr0wPDhww1Wz5tSU1PRu3dv5M2bF7NmzcKyZcvw/PlzfPXVVwars3z58vD398fMmTPfSZKl/w68vb1Ru3ZtLFmy5J1r783fkxzxx8TEQKVSZfpZqVKloFAoMv19VqlSBba2tpg6dSpy5MiBEiVKAEhLMp4+fRpHjx7NNCsRSJup+/bf/saNG/H48WOtsaxYsSLT8tKbNm3C06dP0axZs2z9G4mIiIiIiIiIiIjINNnIHUB2FC5cGGvWrEGXLl0QGBiInj17omTJkkhJScGpU6ewceNG9O7dGwBQpkwZ9OrVC4sWLUJUVBTq1KmDs2fPYvny5Wjbti3q1aun19gcHBywZ88e9OrVC1WqVMHu3buxc+dOjBo1Ct7e3lk+jpubGxYsWIBPP/0U5cuXR9euXeHt7Y0HDx5g586dqFGjRqak5dsaN26MggUL4vPPP8fIkSOhVCqxZMmSjGOkW758OebPn4927dqhcOHCiI2NxR9//AE3N7eMRGx2VKhQAR06dMDMmTMRERGBqlWr4ujRoxmzNd9ejjUwMBB16tTJWPLTFNStWxdHjx6VvBxqkSJF8MMPP2DSpEmoVasW2rdvD3t7e5w7dw558+bFlClTkJSUhF69eqFo0aL48ccfAQATJkzA9u3b0adPH1y7dg3Ozs7vrSN9VpjUfev69OkDABlLXL6pevXq7+z7me7IkSOoV68exo0bh/Hjx0uqc/Lkybh8+TIOHjwIV1dXlC5dGmPHjsXo0aPRsWPHD15v48ePx4QJE3D48OGPzsp9k0KhwIIFC9CqVSuULVsWffr0QZ48eRAaGoobN25kLFk6e/Zs1KxZE+XLl0e/fv3g7++Pe/fuYefOnbh8+bJs8R86dAhDhgxBp06dUKxYMahUKqxcuRJKpRIdOnTI+J6TkxMqVKiA06dPo1WrVhl/V7Vr10Z8fDzi4+PfSSa2bNkyY5Zl9erVce3aNaxevfq9v/scOXKgZs2a6NOnD54/f46ZM2eiSJEi6Nu3b5b/PURERERERERERERkPsw6mQgArVu3xtWrVzFt2jRs27YNCxYsgL29PUqXLo3p06dn6uD+888/UahQISxbtgxbtmyBj48Pvv/+e4wbN07vcSmVSuzZswcDBw7EyJEj4erqinHjxmHs2LGSj9W9e3fkzZsXP//8M6ZNm4bk5GTky5cPtWrVykgGvY+trS22bNmCQYMGYcyYMfDx8cGXX34JT0/PTGXTk6vr1q3D8+fP4e7ujsqVK2P16tXw9/eXHLM2K1asgI+PD9auXYstW7agYcOGWL9+PQICAuDg4KCXOgwpLi4OPj4+OpWdOHEi/P39MWfOHPzwww9wcnJC6dKl8emnnwIARo0ahTt37uDUqVMZ58LOzg7Lly9H1apVMXLkyHdmdb4pPj4eRYoUkRzXy5cvER8fj379+r3z2dKlS9+bUErfRzNPnjyS6rt48SJ++uknDBkyJFMC/7vvvsO2bdvQt29f3LhxAx4eHu+tVxAEnX4PTZo0weHDhzFhwgRMnz4dGo0GhQsXznSPKFOmDE6fPo0xY8ZgwYIFSEpKgq+vLzp37ixr/GXKlEGTJk2wfft2PH78GE5OTihTpgx2796NqlWrZvpu+izEmjVrZvzMx8cHRYoUwZ07d95JJo4aNQrx8fFYs2YN1q9fj/Lly2Pnzp3vXWJ31KhRuHr1KqZMmYLY2Fg0aNAA8+fPh5OTk6R/ExERERERERERERGZB0GUOs2KPqp3797YtGlTRsKF3u/y5csoV64cVq1ahU8++URy+d69e+PQoUO4ePEibGxs3pvEya7Y2FjkyJEDM2fOxODBgw1Sh66Cg4
NRokQJ7NixwyDL9Wrz7bffYu3atbhz5w7s7e2NUicAVK5cGb6+vti4caPR6tQnc44/fTbqxo0b0bFjR0llRVFEREQEHj58iPLly2PatGkYMWKEgSIlIiIiIiIiIiIiIn0y+5mJZD4SExPh6OiY6WczZ86EQqFA7dq1dT7uw4cP4e3tjRIlSuD69evZDVOrY8eOIV++fCa5lOPhw4dRrVo1oyUS0+scM2aMUROJMTExuHLlyjv7fZoLc48/O6KjoyUt70xEREREREREREREpoPJRDKaX375BRcuXEC9evVgY2OD3bt3Y/fu3ejXrx8KFCig0zG//fZb9OjRAwDg4uKiz3AzadGihVGTdVIMHjzY6LMlz507Z9T6gLT9Q5OTk41er76Ye/zZ4eLigv3792f8d7FixWSMhoiIiIiIiIiIiIikYDKRjKZ69erYv38/Jk2ahLi4OBQsWBDjx4/HDz/8oPMxg4KCEBQUpMcoiUjfbGxs0LBhQ7nDICIiIiIiIiIiIiIdcM9EIiIiIiIiIiIiIiIiItJKIXcARERERERERERERERERGSamEwkIiIiIiIiIiIiIiIiIq2YTCQiIiIiIiIiIiIiIiIirZhMJCIiIiIiIiIiIiIiIiKtmEwkIiIiIiIiIiIiIiIiIq2YTCQiIiIiIiIiIiIiIiIirZhMJCIiIiIiIiIiIiIiIiKtmEwkIiIiIiIiIiIiIiIiIq2YTCQiIiIiIiIiIiIiIiIirZhMJCIiIiIiIiIiIiIiIiKtmEwkIiIiIiIiIiIiIiIiIq2YTCQiIiIiIiIiIiIiIiIirZhMJCIiIiIiIiIiIiIiIiKtmEwkIiIiIiIiIiIiIiIiIq2YTCQiIiIiIiIiIiIiIiIirZhMJCIiIiIiIiIiIiIiIiKtmEwkIiIiIiIiIiIiIiIiIq2YTCQiIiIiIiIiIiIiIiIirZhMJCIiIiIiIiIiIiIiIiKtmEwkIiIiIiIiIiIiIiIiIq2YTCQiIiIiIiIiIiIiIiIirZhMJCIiIiIiIiIiIiIiIiKtmEwkIiIiIiIiIiIiIiIiIq2YTCQiIiIiIiIiIiIiIiIirZhMJCIiIiIiIiIiIiIiIiKtmEwkIiIiIiIiIiIiIiIiIq2YTCQiIiIiIiIiIiIiIiIirZhMJCIiIiIiIiIiIiIiIiKtmEwkIiIiIiIiIiIiIiIiIq2YTCQiIiIiIiIiIiIiIiIirZhMJCIiIiIiIiIiIiIiIiKtmEwkIiIiIiIiIiIiIiIiIq2YTCQiIiIiIiIiIiIiIiIirZhMJCIiIiIiIiIiIiIiIiKtmEwkIiIiIiIiIiIiIiIiIq2YTCQiIiIiIiIiIiIiIiIirZhMJCIiIiIiIiIiIiIiIiKtmEwkIiIiIiIiIiIiIiIiIq2YTCQiIiIiIiIiIiIiIiIirZhMJCIiIiIiIiIiIiIiIiKtmEwkIiIiIiIiIiIiIiIiIq1s5A6AiExXRFQSLgS/woXgCISGRyExWQ1BAFydbFGqqCcqBHmhbPGccHGylTtUo1CpNAgNj8KF4AhcDHmFl5FJSEnVwN5OiXy5nFAhyAsVgrxQuIArBEGQO1yjeB2djIshr3Ah+BVC7kYjIUkFQQBcnGxRorAHKgZ5oVxgTrg628kdKhGRZLHxKbgUEoELIRG4ficScQmpEEXAycEGxf3dM+77Odzt5Q7VKERRxN1Hsf8+G7zCo+cJSE5Rw85WAW9PB5QP9EKFoJwo7u8BGxvrGLMYl5CKy6ERuBD8CtduRyL232vE0V6JAL//rhEvTwe5QyUikiwpWYVrtyNxIfgVLt98jajYZKjVIhzslSiUzw0VS6Td9/PmcpY7VKMQRRFPXiRkvCPffRyDpGQ1bJQKeLjaoUxADlQI8kLpYjlgb6eUO1yjSE5R4/qdf6+R0AhExqRApdbAwV4Jv7yuaddIoBfy5XaymndkIrIcKpUGIXejcCHkFS6GROBVZBJSVe/2AxbKb139gOnvg6
Hh//UDujrZomSRtL7icoHW01esVmsQGh6NC8Fp18iL14kZfcV5vZ1QISjnv33FblAozP8aYTKRiDJJTdVg6+H7mL8+BEfOPf3o922UAtrW98XgrkGoU9HHIhvP8EexWLgpFIs338SrqOSPft83rwsGdCqOz9sVg3cORyNEaFwqlQbbjz7A/PUhOHD6yUe/r1AIaF23IAZ1CUSDKnktovEkIssliiIOnnmC+etD8PeRB1CrxY+WaVAlLwZ1CUTrugUtMon28nUilmy9hd83hOLek7iPfj+nuz0+b18MAzoFwj+/qxEiNC5RFHHswjPMXx+CzQfuQZWFa6R2BR8M7hqItvV9YWdrHR3MRGS+Lga/wvz1IVizKwyJyeqPfr9c8ZwY1CUQ3ZoVgrMFdh7GJ6Riza4wzN8Qgsuhrz/6fScHG3RvXgiDugSiXKCXESI0vqu3XmP++hCs2nEH8Ymqj36/VFFPDOoSiE9aFOZAUyIyeWEPY7BwYyiWbLmFiOiP9wP653PBgE6B+KxdMYscRJiaqsHfR9L6ig+d/XhfsVIpoE1dXwzqEoj6VfJYZF/x/SexWLjxJv7cfBMvI5M++v2CeZzRv2NxfN4+ALlzmm9fsSCK4sfffonI4omiiLW77mLE9DN4+ipRp2OULOKJ38fUQI1yufUcnTyeRyRi+NR/sGFvOHS5U9rZKjCwcyB+HFrBYl6qN+0Lx1fTzuDR83idyhf3d8f8H6qjXuW8eo6MiCj7jpx7ikE/nkLI3SidyufP7YQZI6uiU2N//QYmk4REFUbPPY9560KQkqqRXF4QgE6N/TH7u2pm/cL0plOXn2Pg5FO4euvjncna+Hg5YtrXlfFJi8IW+VJNROYtOCwSAyadxPGLz3Uq7+5ii7EDymH4JyWgVJr/4Bq1WoOZq25g0sJLiI5L1ekYtSv4YOHYGiju76Hf4GRy+340Bkw6maXOZG1cnWwxqm8ZjOhVyiIHYBGReXv2KgHDfj6NTft16we0t1NgSNcgTBxcAU6O5j+HSxRFbNgbjm9+PYPHLxJ0OkZQYQ8s+KE6alfMo+fo5PHydSK+/OUM1u25C41G+kVia6NA/07FMWV4RbOcvclkIhHh2asEDJh0EtsOP8j2sQQB+LJHCUweUtFsG870xnLwj6eyNALpYwoXcMWSCbXMuuF8+ToRQ6akJVb1YVCXQEz9qpJZNpxEZHniElLx/azzmLs2WC/H69jID/NGVUcuM06gHb/wDH3GHkPYw9hsHyuHmz3mjqqGrs0KmW0CLTFJhdFzL+C3ldd16lh4W+u6BfH7mBrI4+2U/YMREWWTSqXB9BXXMHbeRZ0Gj7ytetlcWDqxNor5ueshOnmEhkehz5hjOH31ZbaPZW+nwKTBFfB1z5Jmm2TVaETMXn0Do2afz9Js1Y+pVNILSyfWRokinnqIjogoe0RRxJpdYRg65R9ExqRk+3hFC7ph6aTaZj3Z4nlEIgZNPoXNB+/p5XjDugfhp2EVzXqyxaZ94Rj046kszUT8GP98Llg8oZbZTbZgMpHIyl279RqN++/BswjdZiO+T/nAnNizoInZLfMpiiK++fUMflt5Q6/HFQRg3qjqGNglUK/HNYbQ8Cg06rcbj57rNgrpfUoW8cS+hU3ZkUpEsnr2KgFNBuzVeabZ++TL5YT9i5ohsJCHXo9rDIs2hWLApJN6SZq9afgnJTBjZBWzW+76VWQSmg3ai/M3Xun1uLlzOmDfwmYoXSyHXo9LRCRFQqIKHb85iN0nHun1uM6ONtg6qyEaVs2n1+Maw/5/HqPt8P1ISMp+0uxNLWoXwMZf68PRwbwG3SanqNH9f0f01qGczsFOiU0z6qNF7YJ6PS4RkRQajYjhU0/rbWBpOkEAFo2tiS86BOj1uMYQHBaJRv324MlL/fYDlgnIgb2/NzW7VWtEUcR3M8/hl6XX9H7sWf+rimGflND7cQ2FyUQiK3b99mvU6bMTr/Uw6kabEoU9cHRpC+T0MI
/1wkVRxNAp/2DeuhCD1TH7u6oY2t18Golb96JRq/cOvHid/VE32hQt6IZjy1rAx4sJRSIyvhcRiajdZydu3os2yPG9PR1wbFkLs1rabP66YAz+6R+DHX9g5+KY90N1s5mh+Do6GXU/24lrtyMNcnxPNzscWdKCCUUikkVSsgoth+zHwTMf3wddF3a2Cuyc19isEor7Tj1Cq6H79TJDU5uGVfNix9zGsLczj/1zU1LVaPflAew6rt9kczobpYAtMxuiZR0mFInI+ERRxIBJJ7Fo002D1fH7mBro36m4wY6vb8FhkajTZydeRWV/pTZtivu749jSFmYz+UQURXw97QxmrtLvpJM3TR9RGV/3LGWw4+uTea6vQETZ9iIiEY377zFYIhEAboRFodXQ/VCpDPMipm/Tll4zaCIRAIb9fBpbD90zaB36EhmTjEb9dhsskQgAtx/EoPmgfUhJ1e+oXyKij0lJVaP54H0GSyQCwMvIJDTutwev9bBktjH8ffi+QROJALBgQyh+XnzVoHXoi1qtQZth+w2WSASAyJgUNO6/G8/1vEIEEVFWfDb2uMESiQCQkqpB2+EHcOOO4e6j+nT99mu0+/KAwRKJAHDg9BN8Pu64wY6vb4MmnzJYIhEAVGoRnb45hIvB+p39T0SUFT/+cdmgiUQAGDj5JHYczf62UsYQEZWExv33GCyRCACh4dFoMWQfUg3Y1urTzFU3DJpIBIBvfj2Ljfv0s62UoTGZSGSlhkz5B09fSeu4urenC+7t6SKpzD9XXmDaMv1PA9e367dfY/TcC5LK6HI+AKDfhJN4+dr0Ow2/+uUMHjyLl1RGl3NyKTQCkxZellSGiCi7fvrjCi5I7LjS5R738Hk8hk81bIJOHyKiktB3wgnJ5XQ5J+PmX9T7srKGMGPFdZy49FxSGV3Ox/OIJAycfBJcMIaIjGnjvnCs3X1XUhld7nHxiSr0HnPM5AeYpqZq0Gv0MclLm+pyTlbvDMNf+02/03DH0QdYvOWWpDK6nI+kFDV6jznGAaZEZFSXQl5hwoJLksroco8TRaDv+BNmMcB02M+n8fiFtKVNdTkn566/wpTFVySVkUPI3Sh8P/OcpDK69hUPnHTSLAaYMplIZIU27gvXacSDu4st3F2kb5Q7fsFFkx6NmpqqQe8xx5Eq8QVX1/PxMjIJQ3827Y7lncceYPnftyWX0/WcTPnzCkejEpHRXA6NwI9/XJZcTtd73KodYfj78H3J5Yxp6JR/dJqJrss5SVVp0Hv0MZMejRpyNwpjJA4yAnS/RrYcvI/1e6R16hMR6epFRCIGTT4puZyu97jzN17h1+WmPcD0l6VXcTEkQnI5Xc/JwMmn8CrScCvAZFdkTDL66TDISNfzce12JCYvuiy5HBGRLlJS1egz9jhUammD+XS9xz2LSDT5AaZbD93Dml1hksvpek4mLbyEKzelt7vGolZr0GfMMSRLfGfV9XxERCebxQBT89r1mQzi6csE/HXgHp5HJMJGKaBIQTe0b+BndpuC64tGI+LA6cc4d/0VEpJUcHexQ7Oa+VHKQvayUas1GDn9rFHrTEnVYNTs89g2u5FR682qtbvDJM9Oya71e8LxVY8XqFI6l1HrzQpRFPHNr8a9RtQaEf+beQ77FzUzar1EABAdm4JN+8Px4Gk8RIjIn9sZHRv5I4e7vdyhyebc9Zc4eOYJYuNT4exogxrlcqN2BR+z2efuY/4385zkF8fsGjHjLFrWKQiFwvTO4fkbLyXPTsmuS6ERWL3rDnq3KWbUerPqhznnJb84ZtfIGefQsZE/bGw43pOMKzlFjW2H7yM0PBopqWp4ezqgfUM/FPBxkTs02dy+H42/jzzA6+hkONgrUbpYDrSoVcBi/j5/XnLFoEuYaTPx90vo36k4PN1M7/kqIirJ6Imsl5FJmLrkKqZ9U9mo9WbV9OXXJK9klF0/L76KwV2DkDuneeyjRZZDoxGx79RjXAj+rx+wea38KFnUMv
oBdfE8IhGb9ofj2atEKBX/9RU7OVpGX/HK7Xdw5aZxV0pZtSMMX/UoifJBXkatNys0GhEjjNxXrFKL+H7Weeya38So9WbVhr3hOHPtpVHr3HLwPk5dfoEa5XIbtV4pLOMOQDq5fvs1Ji68hM0H7kMjilAqBUBM+2N2cz6Fvh0CMLpfWXiY4MO+IWg0IuauDcZvK6/j3pM4KBUCFIr/khzVyuTCD33LoEVt894YfNfxR7j/NM7o9e449hD3n8TCN6+r0ev+mPnrDbtP4vss2BBqksnEQ2eeGnQPsfc5cPoJboZHIcDfw+h1k3V68iIeExdexvK/byMpWQ0bZVqSR60WMXTKP+jevDDGDyxnkvctQ9m4LxxT/ryMS6GvoVAIUCoAjSatLSzm646RvUvh8/bFzDqpePt+NPadeixDvTE4eOYJGlXLZ/S6P0audnD++hCTTCY+ehaPbYeNv6/Jo+fx2HHsAdrW9zN63WSd4hNS8eMfV/D7xhBExqTARilAENLeB7+adgat6hTEuAHlTLLTy1COnX+KSYsu48DpJ1AIgFIpQPz3HdnHyxFDugZhZJ9SsLNVyh2qzhISVVi6VfoKJNmVmKzG8m238eWnJY1e98cs23YbSSnGX2JzydZbmDi4vMkN5E5JVeOPvwy7h5g2qSoNFm++iVF9yxq9brJOarUGc9YE47dV1/Hgafx//YDqtH7AGuVyY3TfsmhaM7/coRpNyN0oTFhwEX8duAe1JnNf8SCnU/iiQzGM6V/OJAeGZJUoipi3Tq5+wBD8Mb6WLHV/yL5TjxH2MNbo9e45+QhhD2NQuICb0ev+GDnfkU05mWgZw+r0ID4+HsOHD0euXLng6uqK3r17Y9myZbC1tUVSkukuPaGrw2efoMon27H54H2oNWLaC5JKzBilHxOfipmrbqBqj+14+lLaWsnmKDVVgy4jD2H41NMZiTa1RkSqSoTm30HpZ669RMsh+zHdxJdn+Ri5boYajWjwTY11cSH4ldFHmqRbtycMEVGmd3+R6xoBgN83hspWN1lXW3gzPAoVu23Dn5tvIik5rfNIpU5rB0WkzaheteMOKnTdZtJLb+jTmLkX0HnEIVz5dy87zb/toFqT9mxw+0E0+k44gX4TT0CjMe2lNz5EzvuMnPfX93kdnYy1u+RZXvPc9Vc4d12eNvhDFv0VKts1borXiDWxpnYwIioJNXvtwNSlVxEZkwIgrR1MVaW9G4oisPP4Q1T7dDt2HDV+cl0OK7ffRr0vduPw2acAAI0IpL7xjvzsVSLGzLuApgP2Ij4hVc5Qs2XdnruIik2Rpe75G0JM7hlCoxGxYIM8997X0cnYsNf09k7cfOCeTkuf68PvG0OhVpvuMuiWzprawZRUNTp8fRBfTTuDh8/iAbzRD/jvber0lRdoPngvZq++IWOkxnPs/FNU6rYNmw7cS3svfquvODYhFbNXB6NK97/x+Hm8zNHq7uy1l7gUKs87/ppdYYiKMb29E+V6BxFFYKEJ9gNevfUaJy49l6XujfvC8cKE905kMhGASqVC8+bNsWvXLvz222/YtGkTwsPDMWrUKAQEBMDBwUHuEPXqxp1ItByyH0kpKqg/sMSXWiPizsMYNB2wB4lJKiNGaHxDppzCXwfuAUi7kWmT/tIzYvpZrNpxx0iR6Vd8Qir2/WP82Rjpthwyvf2itsoYU3KKBntOPpKtfm1SUtXYcUy+DiNTvEashTW1ha8ik9Cw3x68eJ30wXZQpRYRFZOCRv324MkL831Zyoo5a25kLO+leU//TXr7+OdftzB6jvS95EzFloPy3Wd2HnuIZBlmPnzI3pOPZJmNkU7Odvh95LxGDpx+gth4eTr5rZ01tYMqlQYth+zDtduRH0zsqNUiUlUadPj6oEkm/vVp36lH6DX6GDSa/wbRaCOKwNELT9Htf4dNfk+b99ly8J5sdd++H4OQu1Gy1a/N9TuRsszGSLfl0D3Z6n4fOd/JHj6L12nvSso+a2
oHAWDApJP4+0ha38f7bufpky+GTz1t8Xtbh4ZHocWQfUhM/nhf8d3HsWgyYA8SEs2zr1jOe1xCkhr7Tz+RrX5tkpJV2H3ioWz1m2I/oJzvqKkqDXbJ+Pv4GCYTAcyaNQuXL1/G8ePH8cknn6BJkyZYsWIFnj59irJly8odnt5NWnQJyanq93YWvkmtFnH1diTW77XcRvPOgxgs2nTzvQ8P2nz721moVOY3Wu7yzdeyjgQNDY9CnImN4jX2XommVv/bbtyJRIqR94h60/0ncSY5W9MaWFNbuGBDCJ68jP/gS1I6tUbE6+hkzFxluaNRExJVkpOD05ZdxXMTHi33Pq+jkxH+WL4Ow1SVBtfvRMpWvzZyt0Ny1/+2hEQVgmXs6BZF4BI7UWVhTe3g30ce4PTVlx9MmqUTxbR3wrHzLhohMnmIooiR088iqwt4azTA9qMP8c+VFwaNy1AuhPC+/ya547kQbHr3fPnPiWldI9bCmtrBkLtRWLr1tqR+wJEzzlr0rNmf/riMxOSs9xXfCIvC6p3mOdFC7nuM3PW/7eqtyIzZp3K48yAG0TKtmPA+cv+OTPHZIJ3VJxNFUcSMGTPQt29f+Pj4ZPzc19cXNjY2KFOmDAAgJCQElSpVQrFixVC/fn08ffpUrpCz5dmrBPy1/16WOlDTKRTAnDXBBoxKXr9vCIFSIW3vp6cvE7HzuOmOEngfuW+GoghclmkpAW1EUZT9nJhaA2EK8cj9O7FG1tQWqlQazFsXkqWXpHRqjYhFm0Itdpb++r13ERMvbaCHRiNi8WbTW7r6Yy7K3IEKmN497oLMiasLIREmNbvnyq0I2Zfgk/t3Yo2sqR0EgDlrgyW9/6g1IvaeeoS7j2IMGJV8zl57iau3IyHlT99GKci231J2PH2ZgKcv5R0MZHLtoMzxPHoeb1IDtKJikmWdqQnI/zuxRtbWDurSD/jwWTz2yrDvujG8ikzC2j13JfUVC0La84QpPcdnhWn0A5rWPc4U4jGF9/Q3yX1O5K7/Q6w+mRgSEoInT56gbdu2mX7+9OlTqFSqjNE3AwYMwOjRo3Hr1i20adMG3333nfGD1YMt/+6RKIVGA1wMiUDYQ8t8eVy1M0zyOVEqBKzdHWagiAzn9gP5f4emEEO62PhU2faCSHfrfrSs9b/t9gP547l133SuEWthTW3hycvPdeqwiY5LxQETW45EX9bsCoNC4hOhRgRWmuGS37dN4P5y657899k33ZG5XX4VmSTb3l3amMI1ctvEng2sgTW1gy9fJ+LIuaeS338EQTDJvd30Yd2eu7BRSutUVqlFbNx31+xWqzGF+4upPeubwvupKfxe0t0xgX4fU7tGrIE1tYOAjv2ASvPsB8yKrYfuQ6WSdj5EEbh2OxI3Tezd5mNeRydn7BUtF/YDaovBdO77iUkqPH6RIGsMpnaNvEkQzW0IgZ7t378fjRs3xs2bN1GsWLGMn69YsQK9evXCixcvoNFoUK5cOTx5ktaJGBcXh7x58yImRvcL3dfXF9HRxr8wktwaI9m9ISAoJZd1fjYbNimmt45xdkUX+EWn86FMugOXFwsMEJHhJOTojFSXKlo/u7enC9xdbD9Y3t3VDgA+Ov08Oi4Vfk3Xa/3M4fVfsI87lYVoDU+jcEFs/gnv/fxj5yTb50MUATEJ7o9GZy1gI0j0bIsU11paPzPaNRK5HfaxRz4aq1Tu7u64f9/y7mH6IEdbKFc7mOpYGgnevXQq6xixAXbxZ/QckfxifUZCY+fz8S++TR0P98dj9R+QASW71kaSZxutnxnrHmcXexKOkZuzEK1xxOSbCFHhlDa8+C36OicfOh8A4Pp4AhRq03iBTHapiqQcnbR+ZqxrxDbuLJxev/986Yrt4PtZUzuotsmNuLzfSi8oqtLuX1F/6z8omSXk/ASpTmV0eid0fTQWCo357Kuc6hCAhFz9tH5mrHucqb1Hx+UaBLVDYa2fGeucOL1YCNukW1mI1vBU9v6Izz
1E62dGu0aSH8Dl+awsRCsd20LtrKkdFCEgpuCvOpVVJt6Ey8tFeo5IfkluDZDs3kS3vuLn82CTbD5bY2mU7ojNp/0d1ij3OFGEoImD2+PxWYrXGBI9OyDFtbrWz4zXD7gV9rHHsxCt4WkUjojNP/m9nxu8rxgANClwf/T9x4PVQXbbQaufmZgzZ04AQFjYf6NL4uPjMXnyZOTJkwfe3t549OgRChQokPG5i4sLHBwcEBFhjksQqWUqa8p0HE0qmuP5MIWRs6YQQzr5x1IIoimdD7x/53GjMrFzYgWsqy3MzvVljvf9rNDt3yWY5d8q73HvMoF4TKgtFEyiHTSFGKyLdbWDfB98Rzbe6wTR3JZAN4X7iynE8Cb54zGpd0KTaAdN6HxYCetqB0Xdnj1FEYKFtoNp9yBpM/QzmF3fqCnc40whhjeZQDwm1A6axvug6ZyPt9nIHYDcSpYsCV9fX3zzzTdQqVRQqVSYOnUqYmNjUa5cOYPVK9dIqA1776LLyMOSyykVAu7dPA8vTwcDRCWvEm3/Qkh4lKRnZqVSwIA+bTF31C+GC8wARvx6BtNXXNf62YdmDKSLPNEDAOBZc5XOMSxeOA/dW2gf+WlsiUkqOFVe/t7PP3ZOsn0+BAEFC+TGvRtRupU3gLHzLmDSwstaPzPWNTL7t1/Qt2NxncuTdHK0hXK1g9dvv0apDlt0Kntg52pUL5tbzxHJr9u3h7Fpf7ikTdeVCgHVyxfBMRO6f2XF0q238NlY7SMejXWPG/HVEPw4bJnO5fWtSIsN790byVjn5OmjcDg7fXjEq7Gs33MXXb/V/qxsrPMxeMBn+O1byxv1bsqsqR2MS0iFV+1VSE6R1kkhCDaYN30svuiw2kCRyWfyoksYN/+S5P1Sc3rY4+WVZxC0zOw2VWeuvkDVHtu1fmase1zLZg2wddbPOpfXt9ZD92P70QdaPzPWOTl8cDcqlfTWubw+Xbv1GqU7an9WNtb5qF+3OvYtnKRzeZLOmtpBACjWciPuPIyR1g9oo8CQvp3w27fTDReYTLYcvIf2Xx2UXE4QgDvB/8DHy8kAURlGbHwK3Kqt1PqZUe5xgoAihfLjlgm9R38/6xx+XnxV62fGuu//Pm8merUpqnN5fUpJVcOx0vL3PhcavK8YQF6fHHh8PUrn8oZk9TMT7ezssGnTJjg6OqJLly6YOHEiRo8eDQ8Pj4w1wfPnz4+HDx9mlImLi0NSUlLGyB1z0qaeLzzd7CSVsVEKaN/Q1yITiQAwqGug5EEYarWIvh0CDBOQAZUqmkPuEFC6mKfcIWRwdLBBUV83WWMobQK/kzeZxjUifwzWxprawpJFc6BySW8oFFnv+BMEIMDPHdXK5DJgZPLp36m4pEQiAKg1IgZ2DjRQRIZTqqj8bZCp3ePkvu8XLuBqMolEgNeItbKmdtDFyRaftiwieY9ARwcbdGnqb6Co5NWnTbGPf+ktCoWAgZ0DzSqRCAAlinhqW9XaqEztHif3fV+hEFCisPxtT7pifu6wtZG3q9DU3pGtgTW1gwAwqIv09xhz7QfMipa1C0ru87VRCmhdt6BZJRIBwNXZDn55XWSNwdTucaWKyB+PKfUV29kqUdzfXdYYTO0aeZPVJxMBoGLFirhw4QISEhJw6dIl1K9fH7du3UKZMmUAALlz50aRIkWwbds2AMDixYvf2ZTYXNjbKTGgU6CkTlSVWsSgLkEGjEpen7YsAgd7ZZZfqpQKAVVKeaNMgPk9MFUs4SVr/Y72ShT395A1hrdVDJL3nFQIMq3rSO7zoVQKJtfBYC2sqS0c2j1I2uwDERjWPcjsOgyzqk5FHxQt6AZlFp8NBAHI4WaP9g39DBuYAZQqmkP2DrIKMt9n3yZ3O2Rq5yPAzx3OjvIu3mJq58RaWFM7OLBzoOTZ6L3bFIWrs7RBqeYiX25ntK1XEEopCVZRRL+O5tep7OJkK/v7mKnd4+R+Rw70d4eTzO3Om+ztlLK/j5naNWItrK
kd7NWmKOxss/5OoFQKqFkuN4JMKPGvT7a2CgzqLL2veHBX8+wrlvu+b2r3OLnPh52tAiWKmNbfltx9o3K/o38Ik4laXL16FRqNJmP0DQAsWLAAkyZNQtGiRbF161b8/LPpLMsh1fdflEaJwh5ZelkSAAzsUhx1KvoYPjCZuLnYYfGEWlla3kCpEODooMTiCbUMH5gBFPd3h6uzfKP/KwR5wUbmTty3VZZ5ORm563+bXz4XeMs4C7l00RxwdDCdl2lrZsltYbdmhdC6bkFk5V1JoRBQv0oe9O1guUvvCoKAlT/VgY2NAMVHbtGCkPb9VVPqwN5OaZwA9cjeTokyAfJ1kOVwt0fhAq6y1a+N3O2Q3PW/TalUyPpC7exog0ATG3hlrSy5HSwf5IXvPi+dpe/aKAX45XPBpCEVDByVvGb+rypyuttnOaE4Y2QVFPCRd2aDruS+71aSudPybZVKyNwOljKtdhAAKpeU93dkiufEGllyO+jpZo8/xtXM0neVCgHODjb4Y3zWvm+uRvYphdJFPbPcV/xFh2JoWDWv4QMzALnbQbnrf1uRgm7wcJVvwFi54jlhZ2tafQtyt0Omdo28ybR69U3E5cuX4eTkhKJF/1urt0SJEjh//jxu376Nw4cPI29e87xhAmlTug8saoay/3amaRt5kr7szcAugZjzXTWLnY2Rrlvzwlg2qTaUCuG9MzMEAfBwtcOhP5ub3IiJrFIqFfikuXz7FfZsVUS2ut+na7NCkpd50pc8Xo5oWDWfLHW/jyAI+FTG31Ov1qZ3jVgrS24LlUoF1v1SD+0a+P373+/eA9J/1qhqXmyd1Qi2EkaumqMqpXNhz4KmcHa0fW9CUakQYGujwKbp9dGsVgHjBqhHcrZFPVsVMblnqvqV8yKvtzzLE9koBXRrVkiWuj+kZyv59uvo3rywxd9vzIUlt4MA8OPQivjfZ2kJRW3PwultQYCfO44sboEc7vbGDM/oCvi44PiylsiXywkCoHXVmvTzNO3ryhjeo6RxA9QjOdvBRtXyIm8uZ9nq1ya/jzPqV84jW/29WpvGHlFvkrMdrFEuNwoXkHcrEkpj6e3gp62K4s/xNaFQCFrfB9MGUaYNBjyypLnss7oNzcXJFvsXNUOFwLTBBNr6RtPbwS86BGDBDzVM7r0mq7o1K5zlVXn0rWAeZ9StZFoTdhQKefsBTbGvuHNjf0mzl/UpVw4HNKmRX5a6s4JvqloMGDAA8fHxUHxseL4Zy5XTESdXtMLqKXXfGXVmoxTQoaEfji5pjrmjqkGptNzz8KZebYoiZFsHDO9RAq5v7d1TMI8zfvmqMm5u72gyG6PraqAOa8Prg5uzLbrLmMh8Hx8vJ3Ro5CdL3f06FjfJDsMBneSZgeVorzTJl2lrZeltoaODDTb8Wh+75jVGk+r53uk0rFcpD7bOaoid8xrDxYT2czOkupXy4Nb2jpgwqDx8vBwzfebpZodv+5TGre0dM5Kw5qpnq6JwkmkGtFz31w+xsVGgv0xxtWvgZ3KdygDQtWkh2Ubn6rKHDxmGpbeDCoWAn7+shNOrWqUlsd9aPaRsQE4snVQL59a2QX4f0/s7NYRifu64vrk95o6qhmK+mffKcbBX4vP2AbiyqR1G9C4lU4T6Ua9yHgT4ybMXkKne4+SKK6iwB2pXMK1OZQCoUtob5YrLs8TaIDPck9tSWXo7CACftw9A8Jb2GNI1CC5Omd8PfPO44NdvKiP0744oF2haM6oNxcvTAceXt8DaqXVRpXTmvk+lQkC7Br44vLg5Fo6tYXKrjkmR38cZber5ylJ3/47FTbKffaBM914XJxv0aGl6yUTvHI7o3ESevcK/aB9g0qtAmd7VS0Zjb6dE9xaF8c+q1ri/twucnWzg4mSD50c+wbpp9VG7Yh6zHWWiq6K+7pg+ogqeH+kOFycbODvZIGRbB4Tv7oIRvUshp4d8yz/qS+liOVC3kvFHXn7RIQDOJtohP6x7CaPXaW+nMN
nNu4v6uqN5LeOPgunVuig83Cx71DuZFoVCQLNaBbBzXhM8PZR233dxssGTg92wf1EztKnna5IP+obk4+WE0f3K4eG+rgje2iHj2eDpoe74aXhF+OY1rSU6deHuaofebYw/cKFpjfwIMNERzX07BMBBhheWod1Mc58VJ0cbWdroWuVzo6xMnbdkvaqUzoXlP9bBizfef+7u6owL69uid5tiVrf8vKuzHQZ1DULItg64vaNTRjv44kh3/D6mhux7yemDIAgY1t3491+/vC5oWbug0evNitZ1feGbx/jL1g7rXsIk+1wEQcCwT4x/jeTxcpRtoC9ZrwB/D8z8X1U8P/xJRjsYuq0DwnZ1xtc9S1n8zPy32dkq0bVZYZxc0QoP9r3ZV9wdG35tgLqVLKOvWI57nOO/A5NMUWAhDzSqZvyZxn3aFoObi2nuyT20m/H7iu1s5Rvom1XW1UNG71UwjwtslAoolQqrayi1cXSwgVKpgI1SgeL+HpI2ITYH83+oDnsjzogr4OOMcQPKGa0+qaqXzY3P2xUzap2Th1REvtymO8p7zvfV4ORgvI7l3Dkd8OOwikarj+htuXM6QvlvO5hHpiUfTYmNjQKBhTwyng1MeWScLiYPrYA8b82+NCRHeyXmfF/NaPVJlcfbCT8OM+5+aH3aFkUtE5yNkW5M/7JG7Vi2s1Vg/g/VjVYf0ds83Owz3n/885v/wJHsEgQBRQq6ZbSDrs6m2dGlq34dixt9P55F42qa7EwWW1sFFo6tYdQ6q5b2xhftjfsOKkXPVkVRp6Jx2+nfx9SwuGdOMh9Ojv/1AwZYYD+gLgr4/NdXbAmTK95Up2Ieoy+vOWV4ReTOabx3UKnmfl/dqANM83o7YeKg8karT6rKpbyNvrLQ+IHlUVCGwU1SmOaTHBEZVGAhD0wcLL3TMDouFdFxqZLL/Tm+psmONEk3fUQV5M8tLYGg6/moWtobX31q/BEuUhTK74apX1WWXE7Xc7JwbE0OZCAio/F0s8fCsTUll9P1HjdleEUUKWja+/8M/6QEqpfNJbmcLuckXy4nzBhRRXJdxuTqbIclE2tJLqfrNTJuQDmULGr+M56IyDzY2CiwbHJtyQNMdb3H9e0QgEbVTGuv+Lc1qZFfpwGmupwTezsFlk6qbdIrYCgUAhZPqCV5gKmu18gnLQqjtUzLDhKRdZr5v6qSB5jqeo+rWS43hsqwKpoUxfzc8dNw6YP8dT0nf4yvafKrk/3ydSXJA0x1PR8VS3hhpBkspS+IoijKHQSZBo8aKwEAUSc/lTkS02Dp50Ol0qDFkH3Yd+qxQesZ2bsUfvlaelJKDkfPP0Xj/nuQkqoxWB1eHvY4tbIVivrKs0+JFBqNiHZfHsDfRx4YtJ5BXQIxj7MxyARY+n1fF5Z+Tob9/A/mrAk2aB0taxfAttmNzGJ0c9jDGFTrsR0vI5MMVoedrQJ7FjRBvcrGX0ZHF9/POoefF181aB0NquTFngVNTHbGDlkPS7/n68LSz8nvG0IwcPIpg9ZRqqgnTixvafKDSwEgOjYFNXpux42wKIPWs2hsDfTtaNrLmKVb8fdt9Bp9zKB1BPi549TKVhxcSrKz9Hu+Liz9nBw8/QTNBu1Fqspw/YC5cjjgn1WtUCi/aQ8uBQC1WoPWw/Zj1/FHBq3nyx4l8Nu3VQ1ah76cvPQcDfvuRlKK2mB15HCzx8mVLVHcRLdFeRPfWImslI2NAptnNECt8rkNVke/jgGY+lUlgx1f3+pUzIONv9aHrYE68zzd7LDn96ZmkUgE0kajrp9WDw2rGq7Dt0fLwpj9nXk8QBCR5Zn5bVX0am24/RPrV86DDb/WN4tEIgAULuCGfQubIoeBRoja2iiwflo9s0kkAsBPwyoadHmbGuVyY+ushkwkEpEsBnQOxBQdZiFkVYCfO/YtbGoWiUQgbV/l/YuaoagBVxP45atKZpNIBICerYti1v8M977mn88F+xc1ZSKRiGTRoGperPulHm
yUhnlfy+luj30Lm5pFIhEAlEoFNv7aAPUq5TFYHX3aFsV0E1+l5k01yuXGX781gJ2Btgtzd7HF7gVNzCKRCDCZSGTVnJ1ssWdBU7SpV1Dvx/7fZ6Xx+5gaZrcxc+t6vtgxtxE8XPX7wlvQxxnHlrZAhSAvvR7X0BzsbbB9TiN0buKv92MP6x6E5ZPrmPTyPkRk2RQKAUsm1jLI0tMdG/lh57zGcHSw0fuxDals8Zw4tqyF3vcLdHexxd+zG6FtfT+9HtfQBEHA/NHV8f3nZfR+7FZ1CmLvgiZwcbLV+7GJiLLqu8/LYO6oanrvSK1SyhvHlraAj5d57UWdx9sJx5e3RKWS+n1vs7VJ2xt3ZJ/Sej2uMQz7pAQWT6il947UcsVz4sTylijgY9r7QxGRZWvf0A/bZjeCm7N+n8n987ng2LIWKBOQU6/HNTQnRxvsnNcY7Rv46f3Y3/QsiT/H1zKbwbbpmtcqgF3zmsDTTb99xflzO+Ho0haoXMq4+1hnB3twiayck6MNtsxsiCUTa8HdJfsNp19eFxz6sxl+/rKS2SUS0zWunh83trRHy9oF9HK8/p2K4/qW9ma7F5KDvQ3W/VIPq6fU1ctslfy5nbBnQRPM+q6a2T1AEJHlUSgEzBhZFXt/b4ICPs7ZPp6nmx1WTamDDb/Wh4O9eSUS05Uo4olrm9thYGf9zJxoUbsAbmzpgKY18+vleMYmCAJ+Gl4Rhxc3R6H8rtk+npuzLRZPqIVtsxvCmYlEIjIBg7sG4fy6NihXPPsdnna2CkwZXhEnlrdErpzS9qIyFblzOuLk8lb4cWgFvaxaUz4wJy6sa4OBXQL1EJ08PmtXDJc2tEXlktnv8LRRChg/sBxOr26FvLmy/+xFRJRdzWulva8009P7yuCugbj6V3sEFfbUy/GMzdHBBptm1MeKH2vrZbJFQR9n7F/UFL+OqGK2/YANquZF8NYOepuQ80X7Yri+uYPZJZu5ZyJlsPR1sKWyxvPx6Fk8xi+4iDW7wpCYLG0taC8Pe/TtUByj+paxmBH2oihi3e67mLr0Kq7cfC25fN1KeTC6b1k0MOAyocb27FUCxi+4hFU77iA+USWprKebHb5oH4Af+paFu55nfhLpgzXe9z/G2s5JTFwKfvzjMv7cfAuvo5MllXVyUKJHyyIYP7A88nib1yyMDzl89gkmL7qMQ2efSi5bulgO/K9PaXRrXshsBxi9LT4hFVMWX8GiTTcl7y3pYKdEt+aFMGFQec7CIJNkbff8rLC2c5KaqsHsNTcwe80NPHgaL6msUiGgXQNfjB9YHiWKmGfnqTbXb7/G+AWXsPXQfag10rrPfPO6YFj3IAztVgK2BloezdhUKg3mrw/BzFXXEf44TlJZhUJA67oFMX5gObPrPCXrYG33/KywtnMiiiJW7wzDL0uv4trtSMnlG1TJi9H9yqKuAZcJNbYnL+IxfsElrN55BwlJ0vqKc7jbo2+HAPzQtwxcnS2jH1AURWzcF46fF1/FpdAIyeVrV/DBD33LoHF1Mx1oy2QipbO2BuJjrPl8RMYkY/m229h04B4uhUQgIUl70iiHuz0qlfBCj5ZF0KmxP+ztlEaO1DhEUcTpqy+wcONNHL/4DHcfxWr9nkIhoLi/OxpUyYsBnYqb7QikrIiOTcHKHXewYe9dXAqNQFyC9mvEw9UOFUt44ZMWhdGlSSGzW+6PrIs13/ffx1rPSWKSChv2hmP1rjs4d/0VomJTtH7P2dEG5YrnROcm/ujZqqhFD5QIuRuF3zeE4OCZJwgJj4bmPR2qhfK7olZ5H/TrGIBqZXJZTBLxbckpavy1/x5W7riDs9dfvjf57ORgg7LFc6BDQz/0blOMe0KRSbPWe/6HWOs5Uas12HX8EZZuu4XTV1/g6ctErd+zs1WgdLEcaFGrAPp1DLDoWWaPn8dj0aab2Hn8Ia7dfo2UVI3W7+XxdkTV0rnwWdtiaFYzv8Vu6aDRiNh78hGWbL2Ff668wO
MXCVq/Z2ujQKminmhWMz/6dyrOwTRk0qz1nv8h1npORFHEqcsvsGhTKI5ffI7wx+/vBwz0d0fDqvkwoHNxs9n3ThdRMclYsf0ONu4Lx6XQiPdOMPB0s0Olkt74pHlhdG7ib7ar9XyMKIo4e+0lFm4KxbELzxD2UPs1IghAcX8P1K+cBwM6FTfbVevSMZlIGay1gXgfno80arUGoeHRuHkvGj1GHQEArP+lPkoW8YRfPheL7ST8kMiYZFwKiUDLofsAAMsm1Ua+XM4oG5DDKpcrU6s1uHU/BqHhUfjk+yMAgLVT66FkEU8Uyu9qldcImSfe99/Fc5L2khD+OBbXb0ei6/8OAwBW/VQXgYU8UMzXzWI7CT8kPiEVl2++xuMX8eg95hgAYPvsxigXmNMqk2WiKOL+kzhcux2JLt8eApB2jQT4uaO4v7tVXiNknnjPfxfPSZonL+Jx5eZrdPjmIABgxeQ6KJTfFSWLesLO1jIHlH5ISqoa129H4u6jWPQcfRQA8Nf0BihbPKdFrU4gxbNXCbgc+hrtvz4AAFg+uQ7887mgVNEcFjvomCwP7/nv4jlJ8zo6rR+w1bC0fsDlk+sgXy4nlClmvf2AN+9FIzT8v77idVProWRRT/jns85+wKiYZFwKjUCLIf/1Fef1dkLZ4jktZgU/ALDM1DAR6Y1SqUCJIp4oUcQTduPSXgJa1dXP+tDmytPNHvWr5M14ce7cpJDMEclLqVQgsJAHAgt5ZJyTNvV8ZY6KiEg/BEFAofxuKJTfLeMe176hn7xByczZyRY1yuUGAPSbeBIALGpJb6kEQYBfPlf45XPlNUJEFilvLmfkzeWccY/r2Nhf5ojkZWerRPkgL5QP8sIXE04AAJrVKiBzVPLy8XJC05pOGddIJyu/RojIsuRwt0eDqnl5j/uXUqlAUGFPBBX+r6+4tZX3A3q42aNeZcvvK+YwWSIiIiIiIiIiIiIiIiLSislEIiIiIiIiIiIiIiIiItKKyUQiIiIiIiIiIiIiIiIi0orJRCIiIiIiIiIiIiIiIiLSislEIiIiIiIiIiIiIiIiItKKyUQiIiIiIiIiIiIiIiIi0orJRCIiIiIiIiIiIiIiIiLSislEIiIiIiIiIiIiIiIiItKKyUQiIiIiIiIiIiIiIiIi0orJRCIiIiIiIiIiIiIiIiLSislEIiIiIiIiIiIiIiIiItKKyUQiIiIiIiIiIiIiIiIi0orJRCIiIiIiIiIiIiIiIiLSislEIiIiIiIiIiIiIiIiItKKyUQiIiIiIiIiIiIiIiIi0orJRCIiIiIiIiIiIiIiIiLSislEIiIiIiIiIiIiIiIiItKKyUQiIiIiIiIiIiIiIiIi0orJRCIiIiIiIiIiIiIiIiLSislEIiIiIiIiIiIiIiIiItKKyUQiIiIiIiIiIiIiIiIi0orJRCIiIiIiIiIiIiIiIiLSislEIiIiIiIiIiIiIiIiItKKyUQiIiIiIiIiIiIiIiIi0orJRCIiIiIiIiIiIiIiIiLSislEIiIiIiIiIiIiIiIiItKKyUQiIiIiIiIiIiIiIiIi0orJRCIiIiIiIiIiIiIiIiLSislEIiIiIiIiIiIiIiIiItKKyUQiIiIiIiIiIiIiIiIi0orJRCIiIiIiIiIiIiIiIiLSislEIiIiIiIiIiIiIiIiItKKyUQiIiIiIiIiIiIiIiIi0orJRCItomNToNGIUGtE3H8Si6RkldwhERERGU1Cogr3HsdCrRGh0YiIjU+ROyQiIiKjSUlV48HTuIx2MDImWe6QiIiIjEat1uDRs3io/+0bfRGRCFEU5Q6LiGRmI3cARKYgMiYZq3eG4fjFZ7gQ/AphD2MzPvNrugE2SgEliniiQpAXGlXNi/YN/WBnq5QxYiIiIv1JTFJh475wHDr7FBeCXyH4bhQ0mv9eFt2rr0QxX3dUCPJC7Qo+6NasENxc7GSMmIiISH/Uag12HnuIXSce4ULwK1y99RopqZqMz3
PUXIWCeZxRIcgL1cvkRo+WheHj5SRjxERERPojiiJOXHyOzQfv4ULwK1wKjUBcwn8TK3LXW4NcORxQIcgLlUt645MWhVHU113GiIlIDkwmklW7dus1Zq6+gTU7w5CUon7v91RqEVduvsaVm6+xZMst5MrhgL4dAjC0ewnkzuloxIiJiIj05+GzOMxadQNLtt5CZMz7Zx+KInDzXjRu3ovGml1hGDH9DD5tWQRf9iiJYn58iSQiIvMUFZOM+etDsHBTKB48jf/gdx88jceDp/HYcvA+vp91Dh0a+eHLT0qiaplcRoqWiIhIv1JS1Viy5Rbmrw/BtduRH/zui9dJ2H3iEXafeIQJv19C4+r5MLRbEFrULgBBEIwUMRHJicucklVKSVVjzNwLKNd5K5ZsufXBRKI2L14n4cc/riCwzSas3nmHU/2JiMisiKKIhRtDEdT2L0xfcf2DiURt4hJUWLAhFKU6bMbUJVegUmk+XoiIiMiE7Dr+ECXabcYPcy58NJH4NpVaxPo94aj26XYMnfIP4hJSDRQlERGRYVwMfoWKXbdh4ORTH00karPv1GO0Grof7b48gGevEgwQIRGZGiYTyercDI9Cxa7bMHnRZag12UsCRsakoMf3R9H+q4OIieN+UkREZPpeRSahcf89GDDpZKala3SRkqrBdzPPo0bPHbj/JPbjBYiIiGSWnKLG5+OOo8XgfXjyMvudn3PXBqN0h804d/2lHqIjIiIyLFEUMWnhJVTu/rdOScS3bTv8AEFt/8LWQ/eyHxwRmTQmE8mqXA6NQM1eO/TSWL5p66H7aNB3N15HJ+v1uERERPr05EU8avfZiQOnn+j1uGevv0TNXjtw6160Xo9LRESkTwmJKrQcsg9LttzS63HDH8eh/ue7cOTcU70el4iISJ80GhEDJ5/C2HkXsz3B4k2RMSno8PVBLNum3/aViEwLk4lkNW6GR6FRv914FWWYhN/5G6/QbOBexMZzhiIREZmeiKgkNOq3ByF3owxy/EfPE9Cw3248eBpnkOMTERFlR0qqGh2+Pqj3ATXp4v5NVJ65+sIgxyciIsoOURTx5S+nsXBjqEGOr9EAn409jvV77hrk+EQkPyYTySqkpKrRacQhSYnEe3u64N6eLpLqOXv9Jb759azU8IiIiAxKFEX0m3ASwRITiVLbwofP4vHJd0eg0eMoVyIiIn2YvOgy9px8JKmM1HYwPlGFTiMOITqWA0yJiMi0rN9zF3PWBEsqI7UdFEWgz5hjuH2fK9YQWSImE8kqTF50WfLSpu4utnB3sZVc1x9/3cS+U9JeUomIiAxpw95wbD54T3I5XdrCE5eeY/bqG5LrIiIiMpSLwa/w0x9XJJfTpR18+CweI6afkVwXERGRoTyPSMSQn/6RXE6XdjAxWY3Pxh7nAFMiC8RkImUQRRGiaHk3+ht3InV6ccyOL8afQGKSyqh1GoOlXiOkP7xGyJxZ6vUbGZOMwT+eMmqdo2afx/0nsUatk+RhiX8z2WGp9xGyDpZ6/Wo0Ij4bd1yve0N9zJ+bb+HwWcMspyonS71GsoPng8iyWOrf9JdTTyMi2jDbPmlz4tJz/L4hxGj1kXz4bPAuSz4fTCZauVv3ovH1tNPwrr0KMXGpiIlLRbGWG/HbyuuIjDFeI2NIM1ddN+qLI5A2GnXjvnCj1mkodx/F4NsZZ5G77uqMa6Rw8w2YtvQqXkUmyR0emYD7T2IxatZ55Km/JuMa8WuyHj/9cRkvIhLlDo/og56+TMCkhZdQsPG6jOs3X4O1GDP3Ah4+s4y9/5Ztu23UF0cgbTTq/PV8ebREoiji8Nkn6Pj1QThXXo6YuFREx6agTp+d2LQvHKmpGrlDNLqIqCT8uuwaijTfkHEfyVVnNUZOP4uwhzFyh0f0QTFxKZi3Lhgl2v6Vcf3mqLkSQ346hRt3pK3sYqoOnH6MKzdfG73e6SuuG71OQ4hLSMXCjaEo3WFzxjXiUWMl+k88gSs3I+QOTx
aXQyPQb8IJeNRYmdEOlu6wGYs2hSIuIVXu8IhIguQUNdbuCkP1T7cjOjYFMXGpcKmyHN2+PYwTF59ZRFLg3uNYrN9r/H0Mp6+4ztmJFir8USy+m3kOPvX+6wcs1Gw9pi65gpevra8fUKXSYMvBe2jwxa6M+4hT5WVoM2w/9v/z2KL+DgTREu6KJFlqqgaDfzqFP/66CaVSgFqd+TIQBMDOVoGFY2qiV5uiMkWZfVExycjbYC0Sk9WSy0ae6AEA8Ky5Sqe6q5TyxunVrXUqawrUag2++fUsZq+5AYUgvJOQFQTARqnAnO+roX+n4jJFKS+PGisBAFEnP5U5EnloNCJGzT6PX5Zefe81olQImPZ1ZXz5aUmZoiTSThRFTFt6DaNmn4coinj72U6hEABRxA/9ymLCoPIQBEGeQLNJoxFRvM0m3L6vW0IjO21hTnd7PDrQFQ72NjrVbYqs/b7/9GUCWg3ZhwshEe88PyoVae1AvlxO2DmvMcoE5JQxUuP5Y1MoBv/0D1TqtCTqm29W6edkcNdAzPy2KmxsOI6TTMumfeHoNfooEpPUgPDW9fvv33j35oWweEIts76Xtx2+H9sOP9CpbHbaQUEAwnZ2hn9+V53qNgU7jj5At/8dRlyCCgoBmZ6X0q+R9g38sPKnOnByNN9rJKviE1LRY9RRbD10/512MP38uDjZYP20+mheq4CMkcrH2p+VyLycufoCbYbtx/PXSVAohEwd/jZKASq1iBplc2HrrEbw8nSQMdLsGTXrPKYs1m3Ftuz2je6e3wRNa+bXqawpsvZ7nEYj4tsZZzFj5XWt/YAKIe354Ldvq2Jw1yCZojSuG3ci0WLIPtx/Epfx/pcu/VmhdLEc2Dm3MfL7OMsYqX7wjfYN8fHxGD58OHLlygVXV1f07t0by5Ytg62tLZKSLGcGlkYjovt3h/Hn5psA8E4iEUh7kUxO0aD3mGP4Y1OosUPUmzW7wnRKJOrDmWsvce2W8UfA6oMoiugz5jhmr74BUYTWmZ2iCKSqNBgw6SRmrbKMUbeUdaIoYvBPpzB1ydUPXiMqtYivpp3BlD+Nu9Qw6cZa2kEAGD//Ev438xzUmncTiUBaW6kRgUkLL+Praea779GxC890TiRmV0R0MrYeui9L3aR/LyISUe3T7bj877PN28+P6e3As4hE1Oi5A1fN9BlIirlrg9Fv4kmkqjQQxcyJGOC/czJ/XQh6jzlmESPbLZ01tYPrdoeh88hDSExWQ4SW6/ffv/F1u++i7fADUKnMc9bxs1cJ2H70oSx1iyKwZOstWerWh22H76P1sP2IT0zbvuPt56X0a2TroXtoNmgvUlLlee82luQUNZoO3Iu/j6Q927zdDqafn4REFVoN3Y/tR3RLYBPJyZrawbPXXqLuZ7vwKiptBZe3Zw6p/v0bP33tJWr22oEoM129TaMRsXjLTdnq/+Mv+eom/RJFEf0mnsD0Fdff2w+oEYFUlYghP/2DX5ddkyFK4wq5G4Xqn27Ho+fxAN49J+nPCjfCIlG1x994+jLB6DHqG5OJ/1KpVGjevDl27dqF3377DZs2bUJ4eDhGjRqFgIAAODiY7wiUty3YEIJN+++988L4PgMmn0LI3SiDxmQoJy+/kLn+57LWr6tl225j5Y47yGqX11fTzuBSyCuDxkSmZcPecPy+IesDDUbNPo9TZvr3YC2sqR08dOYJJi68lOXvz1x1A9sOm2dS7OQlef/u5G6HSX8+H3ccj57Hax2E9ia1WkRSihqth+2HWm2eyYesuHIzAsN+/idL3xUBrN4ZhsWbzTepYA2sqR188DQOPX84CuDdJOLbNCKw75/H+GXpVSNEpn+nr76QdWkpudthXT2PSESXkYcAZO0aOX7xGSb+nvVnK3M0YcElnLr8HJqPNG0aMa3DtfOIQ1a51BuZL2tqB5NT1Gg9dB9SVZqPboukVou48yAGg4y8B72+3HkQgxev5UsEn7z8nAPqLITU95mRM87i7L
WXBoxIXhqNiDbD9yM+SZWld+RnEYn4dNRRI0VnOEwm/mvWrFm4fPkyjh8/jk8++QRNmjTBihUr8PTpU5QtW1bu8PRGFEXMWHEdUlZrE4S0BKQ5On9D3gSX3PXrQpdrRKkQMHedeV4jpJvfVl6HQkILYqMUMHv1DcMFRNlmLe0gAMxafQNKZdZvckqFgN9WmucM7PPBcreDlvvyYE3uPorBzuMPP/qSlE6tFnH/SRx2n3hk4MjkM29dCJSKrN9HBAH4bdV1dqaYMGtqBxdtCoVG8/EkUTpRBGavCTbL2Ylyv49dDIkwy7/7xZtvIjVVI+kambcuBMkpljk7MTFJhfnrg7WuZqGNKAIpqWqznplK1sea2sHNB+7h+eukjyYS06k1IjbsDcezV+Y3q+iCzO+DzyMS8fi5+Z03epcu/YDz1gUbLiCZHTzzBLfvx0h6Rz545glCw6MMG5iBMZmIf5MnM2agb9++8PHxyfi5r68vbGxsUKZMGQBA//79kS9fPrPdNwkADp15iruPYrP8UgCkXexLttwyu43E4xJScet+tKwxXAwxvw3pT199get3IiVdIyq1iNU77iDSTJd9IGmu3IzAmWsvPzoq900qtYi/DtzD8wiOzjVF1tQOPnwWh+1HH2T5gQ9Ie3k8ev6ZWT70XQqVtx26fPO1RW02bq0WbgyFQuLfvVIpYO5ay3x5jI5NwcrtdzKWv8oKUQSCw6JwirN1TZI1tYMpqWos2BCa5Q7UdM8jErH9qPkt2yh3Oxgdl4K7j2JljUEqtVqDueuynjhLFxWbgk37ww0TlMw27gtHdJy0/hCNCMxZE8znIDIL1tQOAsCctcFQSBgUBqSdo/TtosyJ3O0gAFzkSmZm7/yNl7gYEiG5H3Dt7rt4FWlZSySnm7s2WNIgdSBtoLqUVd5MkSCa4zA5PQsODkaJEiVw/Phx1KxZM+PnT58+Rd68ebFv3z40atQIx44dQ0BAAHx8fLI9utDX1xfR0cZPdCW71kOSR3NAkJ5Hdnk6HcrUJwaIyjA0Sg/E5huj9bN7e7rA3cX2g+XdXe0ApHUYfUh0XCr8mq7X+pmgeg23Jz9mIVrTkexSE0mebSFpauK/nJ/NgU3KPb3HZKqi808GALg/Gi1zJMaV4lwZiTm76FTW6cVC2CbJM0LX3d0d9++b51KVhmZN7WCqYxASvD/Xqazjq9WwS7io54gMKzr/j4BC+5JE+moLP9QOAoDbw+8hiB9uS82Ftd7343INhNqhiPSC6gS4P9b+LGbOVHYFEe8zXHpBUQOHqL9hH3tc/0FlAdvB97OmdlCjzIHYfD9ILyiqYR9zGA7Ru/UflAHF5R4Gtb2v1s+M9U7o/GwmbFLk2bdRFxqFK2Lzj5deUFTBLvYkHKP+1ntMckv0bIsUl+qAoJRc1vXRWCg08QaIyjSZ+rMS20LtrKkdBIDoAlMBwUZaIVGETeINOL9aapigDCQhR2ekulTR+pmx2kHHiHWwiz+XhWhNn6nf4wwl2aUaknJ01Kms8/N5sEm+q+eI5BeTbxxEpZvkcsrke3B5PscAEWVNdttBiXdOy/T48WMAQK5cuTL9fP/+/QCQMZ2/du3aRo3LEESFHZDlnfDeKivY6TcYg5N54q0oApD+siE7wRZp14j0ZGLa9UWWThTs0q5vXUYjmt19xDpYVTuYnWvQHK9fHTq99M8UYqDs0PnvRvHhzgmzpfO9QDTD52nrYFXtoM7P6+Z5/Yo6DKLVO6kd1nITdL13Cxb7Ppj950frSSaSebKqdhCCbvdlQTDLdpDvg6QXgi0ganSanGSWfzdZIOr4vCQK9nqOxLjM7KnWMHLmzAkACAsLQ7FixQAA8fHxmDx5MvLkyQNvb2+91ynXSKgZK65h5PSzkpcsAYAzpw6jRBFP/QdlII+fxyN/o3VaP/vQDIp0kSd6AAA8a67SLQBBgJ9vftwNjtKtvEx+3xCCgZN121j6yIGdqFRS/38vpsqjxkoAQNT1KH
kDMbIVf99Gr9HHdCq78++NqFspj54jouyypnZwz4lHaDZor05lly2Zj85NCuk5IsNyr74CMe9ZlssobSGAZ08fwtHBMh45rfW+36T/Huw//VjSEugAkCunK55fizJITHK6EPwKFbtuk15QUGLGL5MwqOtf+g+KssWa2sGHz+JQsPHH7/9vU9rY4vtvh2P8oOUGiMpwqn+6Hf9c0b68sLHawSOH9qFiCfN5R4qISoJX7dWSy9nY2ODLIf3wy9d/GiAqeX3z6xnMXn1D0vLW6R6Eh8LDzbw7DqWw1mclc2dN7SAAOFdejoQklaQyCoWAdq0bY+P0qQaKyjD6jj+OPzdrXx3KWO3g7wvmoGfrojqXNyXWeo9bvPkmvhh/Qqey+/dsQ/WyufUckfwKNFqHR8+lDxaqWbUcji6L0n9ARmICw/TkV7JkSfj6+uKbb77B9u3bsWXLFjRo0ACxsbEWt8lw81oFdEokFvBxRnF/d/0HZEDeORxgZyvvJV7Ax1nW+nXRtEZ+nSaceXs6oGxATv0HRCanUbV8UErcXwAA3F1sUaWU+XSkWBNragdrls8NJx0SW7Y2CtSvnNcAERlWgdzytkM5PezhYM+RqOauZZ0CksvYKAW0qad9aUFzV6ZYDuTKoX354A8RBKBpzfwGiIiyy5rawfy5nRFYyEPy875aLaJFben3ArmZwvtYAR8XuUOQJKeHAyoGeUneT0xlptdIVrSoVUByIlGhAKqU8raqRCKZL2tqBwGgVZ0CsJG415lGY573OFNog0yhLabsaVI9v+TnAgDI4W6PCkFeBohIfm3r+0reM1EQgFZ1CxooIuNgMhGAnZ0dNm3aBEdHR3Tp0gUTJ07E6NGj4eHhYXGNZnF/D9Su4CMpESAIwOCugVAqzetysbNVonSxHLLGUCHI/JJrfvlc0bRGfknXiEIhYGDnQNjKnLwl48jj7YR2DaQ1mkqFgL4dilvM7CRLY03toIuTLfq0LSrp+rVRCujWrBC8PKUnD+Qm94N7hSAvCLqMUCGT0rNVUdjbSksKq9QiBnYubqCI5GVjo8CgLoGSXqiVSgGNq+VDofzS99Ugw7OmdlAQBAztFiRpprFCAZQJyGGWK5BUCJS3HcyXywm5czrKGoMuhnQLgkbCKGRBAIr5uqF2BR8DRiWfepXzoHABV0lJeI0m7TwSmQNragcBYFCXQMkDBNycbdHFzFapAYDygfL3S5aXuS2m7Mvv44yWtQtI7gfs37E47O0sc3DxgE7FoZZ4H7FRKtCnrXnP0mXP/78qVqyICxcuICEhAZcuXUL9+vVx69YtlClTRu7Q9O6HvmWgyeLbo1IhIIebPT5vF2DgqAzDFDpRzdH3n5fJ8s6aCoUAVycb9O9kntcI6ebbPqWzvKumQgE42CsxuGugQWOi7LGmdnBY9xKws1EgK3kAQUjreP26Z0nDB2YAcrdDcnfikn64u9ph2CdBWb7vKxUCGlXLh3IW/Pvv17E43Jxts5xQ1GhEfP+F5d1PLYk1tYM9WhZGHm+nLHcIaTTAmH5lDRuUgcjeDprp+2DnJv7wzeuS5WtEFIEx/ctZ7AAiQRAwtn+5LCfhlUoBfnld0LGRn0HjItIna2oHa1XwQbUyuSQlRkb0LmWWg6PlboeKFHSDu6tl7plnbb77vHSW20GFAnBytMHALpY5uBQAShTxROu6BbP8PigIwIDOxZHTw/wGqb+JycT3uHr1KjQaTaYROL1790b+/GlLE+XPnx+ffvqpTNFlT+Pq+TH7u2of/Z5SIcDRQYndC5qY5WwMAGhcLZ9sdSsVAuqZ6d5wtSr4YNHYGv92or//e0qFAHs7BXbMbYy8ubhsgTWpVNIbyyfXgUIhfPAaUSgE2NoosG1WI/jlczVegJRtltwOFvNzx18zGkCpVHzwwU8hAApBwJqf66KMmS7j3EjGdjCtfvNbGpa0+3FoRbSuV/CjCUWFQkBQYQ9smFbPKHHJJY+3E3bOawwHO+UHV3
NIf5ZaOKYG6lQ0z+dCa2XJ7aCrsx32/d4Ebs62WepI/WlYRXRo5G+EyPSvSmlvuDrZyla/3O2wrhwdbLB/YVPkcLfP0jUyul9Z9GhZxAiRyadn66L4/vOPJ1WUSgE53e2xf1EzONibX+KBKJ0lt4OCIGDbrIYolN81S6ty9WhRGD/0LWv4wAwgj7cTShbxlK3+RlX5PmgpqpXJjaUTa2Wpr9jORontcxqZxDK7hrRqSh2UC8jx0YSiIKRtKzb9mypGisxwmEx8j8uXL8PJyQlFi/439XTZsmV49OgRRFHEo0ePsHLlShkjzJ4h3YKwflq9jHWr31wrPP3/Vy7ljdOrWpvlcjbpWtUpiLzeTrLU3ba+r1kn2D5vH4AtMxvCN0/ajV/bNVIhyAunVrRCzfKWuZwNfVj3FoWxc25jFC6QtmSbtmukTDFPHFvaEg34AGl2LL0dbFarAA4vbo4ShT0AaL9+i/q6Y+/vTdGxsXl2oAJAYCEP1JVpYEtxf3fZ6ib9s7FR4K8ZDfD9F2Xg7JjWOZr+ziQIaf/fRimgR4vCOLmipVXsEVW9bG6cWtkyY8T3m/eR9M533zwu+GtGA/TtaLmjci2VpbeDJYvmwLm1bVDr3+d4pZbrN4+3I5ZOqmXWs2pdnGzRs7U8SS5nRxt8asYJtqK+7ji/tg0a/Ltn9Jsd7un/39vTAb+PqYFJQyrIEqOx/TS8IhaMrp4x2FrbOWlYJS/Or2uDIgW5rDWZN0tvB71zOOLM6tbo2MgPCoWQKRmg+Le33M3ZFhMHl8fyH+votF+cqZBz64GBnblClSXp2boots9pBP9/Jwto60cpVzwnTqxoaRUDKV2d7XB0aQv0blMUNjZCpkSrIAACAEcHJUb0KoVtsxpZxPZggihK2S2BLI1GI2LvyUdYszsMa3ffhYC0tcO/aB+AUjLvN6gvExZcxPgFlySXizzRAwDgWXOVTvUe/KMZ6lcx/wSKRiPi4JknWLXjDp6+TIBSKaBwATd83q6YRS9flhUeNdIenKNOmudoPH0RRRFHzj3Fiu13sHLHHQBA3/YB+KxdMbMejEDWQRRFnLn6Eku33cLiLbcAAL1aF0WvVkVQq4KPRSzXtXFfODqPOKRT2ey0hbO/q4qh3UvoVK+p4n0/TVxCKtbsCsOek48QFZsCVydbVCnljS/aByCXGe4Npg+XQyPw5+abWLgxFCKAT5oXxictCqNh1Xxm3flE1iE0PAp//HUTs1ffgAigc2N/dGlaCC1qFYCNjfl3ety4E4mS7TfrVDY77WD/TsXx+5gaOtVrau48iMGiTaEIDotCYooK3p4O6NjQH23q+VpEx5hUqakabD18H38dCMem/fcgAPi6Z0n061g8Y6ClteKzEpmjJy/i8efmW5i06BJEEWhbzxctahdAlyaF4ORo/jOMY+JSkK/hWsQlqCSXzU47WKt8bhxb1lJyOVPGe1waURRx6MxTrNxxB6t23oEAoG+HAHzePkD2pXXl8vJ1IpZuvY1TV54jJj4VHq52aFglL3q0LAI3F8tZ6pfJRMpgqTfEV5FJCGyzCa+ikiWVy06DWbuCD44saW4RndD0fpb6N5MdPCdkziz1+k1N1aBS9224cvO15LK6toUFfZxxfUt7uDpbzkMzYLnXCOkPrxEyZ5Z8/XYecQgb94VLLqdrO+hor8S1ze2tPrFkDSz570YXPB9kziz5+h0//yIm/G7ciRYHFjWzuFWqLPka0RXPiXWxviFkZHW8PB0wf7T0EaHRcamIjkuVXM7JQYklE2sxkUhERCbB1laBZZNqZ1qCJKt0bQsXT6xlcYlEIiIyX3O/rwYvD+nLL+vaDv78ZSUmEomIyGSM6lsGZQKkr0CnazvYv1Nxi0skEhGTiWQlOjX2R+cm0va88mu6Hn5N10uuiy+ORERkasoWz4nR/cpKLqdLWzigU3E0rJpPcl1ERESGkiunIxboMMBUl3awdgUfDOkWJLkuIiIiQ7GzVWLZpN
qwlbh8uS7toG8eF0z7upKkMkRkHphMJKuxeEItVC1t2P3bBnUJ5IsjERGZpDH9y6Fbs0IGraNx9XyY+b+qBq2DiIhIFx0b+2Pi4PIGraO4vzv+mtGAe6USEZHJKVs8J1b/XBcKA2YDcrrbY/eCJlylhshCMZlIVsPFyRa75zdBjXK5DXL8QV0CMef7alzelIiITJJCIWD55Dr4pEVhgxy/SfV82PJbQ9jbKQ1yfCIiouwa3a8sJgwyTEKxZBFPHPqzObw8HQxyfCIiouzq1Ngfq36qq9MWGB+TK4cDDv7ZDIGFPPR+bCIyDUwmklXxcLPHvt+bYnDXQL0d08lBiTnfV8PcUdU4ApWIiEyara0CK36sgynDK8LOVj+PgQqFgG/7lMLfcxrBydFGL8ckIiIyBEEQMHZAOayeUheebvqbNdG9eWEcW9YCebyd9HZMIiIiQ+jWvDAO/NEMfnld9HbMOhV9cHpVa5QJyKm3YxKR6WEykayOk6MN5o6qjkN/NoN/vuw1nHUq+uDqX+0xpFsQZyQSEZFZUCgEfPd5GVxc3xaVSnpl61jF/d1xakVLTP2qMuxsOSORiIjMQ/cWhXFjSwe0rlswW8fJndMBW2Y2wOqf68LTzV5P0RERERlWnYp5cG1z+2xPtkifYHHoz+bwz++qp+iIyFRx+DhZrXqV8yJkW0dsPnAP89eH4MSl51kqZ6MU0K6BHwZ1CUSdij5MIhIRkVkqUcQTp1e1xr5TjzF/fQh2HHsAUcxa2QZV8mJQl0C0qlMQtnqa4UhERGRMebydsHVWQ5y7/grz14dg3Z4wJKdoslS2XPGcGNQlEN2aFYKzk62BIyUiItI/FydbzB1VHcO6l8DvG0OwdOttRMWmZKmsfz4XDOwciD5ti3F5byIrwmQiWTV7OyW6NS+Mbs0LI+RuFE5eeo4Lwa9wMSQCEdFJSEnVwMFeify5nFEhyAvlA3OibqU8XL6GiIgsgkIhoGnN/GhaMz8ePI3DsQvPcCH4FS4Ev8LTV4lITlHDzlaBXDkcUT4wJyoEeaFWeR8UKegmd+hERETZJggCKpfyRuVS3pg+ojKOnHuKC8ERuBDyCuGPY5GUrIatjQIernYoVzytHaxa2htli+fkoFIiIrIIxfzcMWNkVUweUhGHzz1JaweDX+HW/WgkJKmgEAS4OtuiVNEcqBCUE5VKeKNGudzc6onICjGZSPSvwEIeCCzkgS86BMgdChERkdEVzOOCHi2LoEfLInKHQkREZHQ5PRzQoZE/OjTylzsUIiIio3NytEGL2gXRonb2lgAnIsvFdamIiIiIiIiIiIiIiIiISCsmE4mIiIiIiIiIiIiIiIhIKyYTiYiIiIiIiIiIiIiIiEgrJhOJiIiIiIiIiIiIiIiISCsmE4mIiIiIiIiIiIiIiIhIKyYTiYiIiIiIiIiIiIiIiEgrJhOJiIiIiIiIiIiIiIiISCsmE4mIiIiIiIiIiIiIiIhIKyYTiYiIiIiIiIiIiIiIiEgrJhOJiIiIiIiIiIiIiIiISCsmE4mIiIiIiIiIiIiIiIhIKyYTiYiIiIiIiIiIiIiIiEgrJhOJiIiIiIiIiIiIiIiISCsmE4mIiIiIiIiIiIiIiIhIKyYTiYiIiIiIiIiIiIiIiEgrJhOJiIiIiIiIiIiIiIiISCsmE4mIiIiIiIiIiIiIiIhIKyYTiYiIiIiIiIiIiIiIiEgrJhOJiIiIiIiIiIiIiIiISCsmE4mIiIiIiIiIiIiIiIhIKyYTiYiIiIiIiIiIiIiIiEgrJhOJiIiIiIiIiIiIiIiISCsmE4mIiIiIiIiIiIiIiIhIKyYTiYiIiIiIiIiIiIiIiEgrJhOJiIiIiIiIiIiIiIiISCsmE4mIiIiIiIiIiIiIiIhIKyYTiYiIiIiIiIiIiIiIiEgrJhOJiIiIiIiIiIiIiIiISCsmE4mIiIiIiIiIiIiIiIhIKyYTiYiIiIiIiIiIiIiIiEgrJhOJiIiIiIiIiI
iIiIiISCsmE4mIiIiIiIiIiIiIiIhIKyYTiYiIiIiIiIiIiIiIiEgrJhOJiIiIiIiIiIiIiIiISCsbuQMg+cTGp+BSSAQuhETg1r1oJCSqAADDf/4HpYvlQIUgL5Qo7AlbW+aciYjI8iQlq3D1ViQuBL/CjbDIjHZw8I+nEFjIAxWCcqJMsZxwcuTjEhERWR6VSoOQu1G4EPIKV26+zmgHB0w6iSIF3FAhKCfKB3rB3dVO5kiJiIj0T6MREfYwBheCX+FSaAQSklSACPSbcAIFfJxRIcgLFYK8kDuno9yhEhGZBPaOWRmVSoMdxx5g/voQHDj9BKL47ndmrwnO+P9ODjbo1qwQBnUJRPkgLyNGSkREpH+iKOL4hWeYvyEEmw/cR6pK88535q8Pyfj/SqWA1nULYlCXQDSokheCIBgzXCIiIr27dus1FmwIwaqdYYiNT33n84UbQzP9d52KPhjcNQht6/lyoCkREZm9B0/jsHBjKBZvuYXnEYnvfP7HXzcz/XeZgBwY2DkQn7QoDBcnW2OFSURkcphMtBKiKGLD3nCMnHEWD5/FZ7lcQpIKi7fcwuItt1CjXG7M/6E6ShfLYcBIiYiIDOP4hWcY/NMpXLsdmeUyarWILQfvY8vB+wjwc8ec76uhUbV8BoySiIjIMG6GR2HQj6dw6OxTSeWOnn+Go+efIY+XI34aXhG9Whfl4BoiIjI7zyMS8eXU09iwLxwajZbZFe9x5eZrDJh0EiOnn8W3n5XC//qU4eAaIrJKvPNZgRcRiej49SF0/fawpETi205eeo6KXbdh0sJLSE19dyYHERGRKUpIVOHLqadR57OdkhKJb7t5LxqN++9B/4knEBOXoscIiYiIDEet1mD68mso22mr5ETim56+SkSfMcfRcsg+PH6u+3slERGRMYmiiPV77qJE27+wbs9dSYnEN8UmpGLM3Iuo2uNvXL31Ws9REhGZPiYTLdz1269RttMWbD54Ty/HS1VpMHbeRTQesAex8exIJSIi0/bsVQKqfbods1bf0Lq0ty4WbbqJSt224cHTOP0ckIiIyEASElVoM/wARkw/i6QUtV6Ouev4I5TpuAXnrr/Uy/GIiIgMRaMRMXzqaXT99jAiopP1csyLIRGo1G0b/tofrpfjERGZCyYTLdi1W69Rp88uPH317vrf2XXk3FM07r8HcQnv7rFBRERkCp5HJKJOn50GGTV6634MavXagYfPmFAkIiLTlJSsQquh+7Dz2EO9HzsiOhn1P9+Fs9eYUCQiItMkiiIGTDqJOWuC9X7slFQNOo88jA177+r92EREporJRAv17FUCGvffg9cx+hl1o83pqy/RecQhiPqa6kFERKQnySlqNB+0F7fuxxisjgfP4tG4/x7Ec2ANERGZGFEU0XvMsWwta/oxcYkqNB24B/cexxqsDiIiIl1NWngZf/x102DH12hE9Pj+KI5feGawOoiITAmTiRZIFEUMnHwKzyKkzUi8t6cL7u3pIqnM7hOPsGiT4RpmIiIiXUxaeAkXQyIkldGlHQwNj8b3s89LKkNERGRo63bfxfo90pZf06UdjIxJwRfjT3CAKRERmZTzN15i4u+XJJXRpR1MVWnQe8wxDjAlIqvAZKIFWrf7LrYeui+5nLuLLdxdbCWXGzH9DEejEhGRybgQ/Ao/L74quZyu7eCcNcE4et5wMz+IiIikeB6RiCE//SO5nK7t4MEzT7BwY6jkckRERIaQnKJGnzHHodZIG+iiazt491EsB5gSkVVgMtHCpKZqMGL6WaPWGZegwth5F41aJ5GcRFHE6SsvkJyiRlKyGnPW3EDI3Si5w5JVTFwKVvx9G0nJaiQnq7F+z10kJqnkDous1IjpZyW/OGbX19POcFaGBUtKVmHD3rtITk677y/fdhvRsSlyh0UmJDlFjb/2h2dcI0u33kKkAbcbIPqQSQsvGXS7C22+n3UeCYl89iPr8eRFPBZuDE17/0lRY/fxh1CrNXKHJR
uNRsS+U48y3pF/3xCCR8/i5Q6LrNTizTdx/U6kUeucsyYYdx4YbosNkpcoijh1+XnGPW7u2mDcuhctd1iyio5NwfJt//UDbtwXjqRkPgtaOkFkz5dF2bQvHJ1GHNKpbOSJHgAAz5qrJJe1s1Xg8YFu8PJ00KluInMgiiJWbr+DX5dfw7Xb/z2YCgBEAHUq+uC7z8qgac38ssVobA+fxeGnP65g+d+3kZiszvSZu4st+nUsjv99Vho5PXhvIOMIDotEiXabdSqbnXYQAE6vaoUqpXPpVJZM0+voZExdcgWLNt1E1FvJQwd7JXq1LopRX5RBwTwuMkVIcouJS8HUJVfx+8ZQvI7OnLyxt1WgR8siGNW3DArld5MpQrI2sfEpyNdgHWJ1WG4tu+3g4gm18Fm7YjqVJTIXl0Je4cc/rmDLofvQvDV4LX9uZwztFoQvPy0BO1ulTBEal0qlwew1NzB7TTDuP4nL9JlCIaB1nQIY1bcsKpX0lilCsjaiKKJU+824ERYluWx228FvepbEryOq6FSWTJMoili27Tamr7iOG3fe7QesXzkPvvu8DBpVyydbjMZ2/0ksfvrzClb8fQdJKZn7AT1d7dCvU1o/oKebvUwRkiFxZuIb4uPjMXz4cOTKlQuurq7o3bs3li1bBltbWyQlJckdXpbMXx8iS70pqRos2XJLlrqJjEGjSduLtNfoY5keIIC0BwgAOHHxOZoN2osZK64ZP0AZXLv1GhW6bMMfm2++k0gEgOi4VMxYcR1VPvkbD57GaTkCmRpLaAcXbJBvmTW52mAyjIfP4lDlk78xffn1dxKJAJCUrMafm2+iQtdtuHJT2v6cZBmevUpA9U+34+clV99JJAJAcqoGy/6+jQpdtuHc9ZcyREhSWUI7uHpnmE6JRH2Yty6Ys/TJom0/8gBVe2zH1sPvJhIB4NHzeHw36xyaDdxrFfunJSap0HLIPoz49azW9z2NRsT2Yw9Rved2/LVf2h6uJA9LaAePX3imUyJRH5ZsvcUVmiyIRiPii/En8NnY4wgO094PePT8MzQZsAezV98wfoAyuBwagQpdtmHxllvvJBIBIDI2Bb8uu4Yqn/yNx885O90SMZn4L5VKhebNm2PXrl347bffsGnTJoSHh2PUqFEICAiAg4Ppz6p5HZ2Mw+fk27PprwP3ZKubyNDGzL2QsRfM+1ZPTF9W8Ztfz2LF37eNFZosHj+PR4N+u/E6Jhlq9fs7jdQaEfeexKFRvz2IieOSgKbMEtpBANgsY1ukbYQ6mafY+BQ07r8H4Y9jP7hkrlotIjI6GQ377eFSXlYmMUmFpgP2IvRe9Af/7tVqEbHxqWjcfw/uPuLSV6bMYtrBg/dkq/tiSAQHkJHFOn3lBTp8fRCpKs0H339EEThy/hm6fHvYopProiii5w9Hsf/0Y4hI+3dro1aLUKtFdP32MI5xj3GTZjnt4H3Z6o6MScERGftlSb++m3kuY+LMe+9xGhGiCAyfehprd4UZMTrje/A0Dg377UZUbMpH+wHvPopF4/57EGcFA2usjY3cAZiKWbNm4fLly7h58yZ8fHwAAMWLF4efnx/q168vc3RZczHklaz1X7kVgdRUDWxtmaMmy/L0ZQKmLrkqqcyI6WfRrVlhi/17mL7iGl5HJWdpXzq1WsTtB9FYsuUWvvy0pBGiI11YQjv49GUCnrxMkK3+2PhU3L4fjQB/D9liIP1Ytu02bt6Lfu9L45vUmrSE4q/Lr2Hm/6oaPjgyCWt2heHKrddZ+q5aIyI2IRU/L76KReNqGjgy0pUltIOiKOL8DXnfCS8ER8A3r6usMRAZwnezzkHzb6fxx2g0InYee4ij55+hbqU8hg9OBmeuvsSm/fey9F1RTBuQ++1v53B6dWvDBkY6s4R2EADOB8vcDoa8QrNaBWSNgbLv0bN4TF8ubdWxb6afQafG/rCxscx+wGnLrqUlErPYDxhyNwor/r6NQV2DjBAdGYtlXt0SiaKIGT
NmoG/fvhkNJgD4+vrCxsYGZcqUQUREBJo1a4aAgACUKlUKn332GZKTjbup/cdckLnBTE7RIPiucTc4JjKGPzfflDyq9GVkErYdlm9EnCElJKrw51+3svQAkU4Ugdlrgjlry0SxHdRnDFzu0tyJoojZa4IllVFrRCzectMqljSjtGtk1uobUAhZL6NWp+27HBVjWvdNSmMp7eC9x3GIjJF3JQhTaIuJ9C3kbhSOnn8m6f3HRilg3jppzxPmZN76YNgos94QajQizlx7icuhfFY2RZbSDqrVGlwKkfca4/ugZVj0VyggSHjYB/D0ZSJ2Hn9ooIjkFZeQiiVbbn1wRuI7hLR+QEuepW+NmEwEEBISgidPnqBt27aZfv706VOoVCqULVsWgiDg+++/x82bN3HlyhUkJiZi7ty58gT8Hrfuy7900s170XKHQKR3y7fdfu/Spu+jVAhYueOOYQKS2e4TD3Xaiyf8cazso+VJO0tpB02hDTKFGCh7LoVE4M6DmCzNPHhTXILKYl8eKbOb96Jx7Xak5GeDpBQ1tlroQCNzx3ZQf27dlz8GIn1buzsMSgmJMwBQqUVsOXjfIgcapaZqsH7PXaikdCojLcG6eqdlLwNoriylHXz8IgEJMu9ZaAptMWXf8m23JQ+GVyoErLLQfsAdRx9I/tsSxbS/hys3s7aaC5kHLnMK4PHjxwCAXLlyZfr5/v37AQBly5ZFjhw5ULt2bQCAQqFAxYoV8eDBA53r9PX1RXS0fhuYhJyfAM7ltX52b08XuLvYfrC8u6sdACDyRI8Pfi86LhV+Tddr/az3Z/3RL/5cFqIlMh/R+X8CFPaSyqg1InbsPQkPj04Giko+yS41AM92kkdpAUC9xm1hmyjPxtTu7u64f5+duNpYSjuY5NYQ8Gim9TNjtYM/T5uBWT/syEK0ZKpSHQKBXF9ILyiK6PX5MPSLO6H/oMikqOwLA7kHSS8oajBg6Hf4stch/QeVBWwH389S2sFUx5KAdx+tnxmrHdz69y54LO6QhWiJzEdCjk5QO1cCBKWkcmqNiDwFikKhjjJMYDLRKFyQmn+C5HIqlQqz5i/FH5MaGyCqrGFbqJ2ltINqG28g73daPzNWOxh66y48PDw+HiyZtOgCPwPCh6+Xt6k1IrbsOGKRz0HJrrUAj9aAIH1eWo26zWGbdNMAUZEustsOcmYigJw5cwIAwsL+GyEVHx+PyZMnI0+ePPD29s70/aSkJCxbtgzNmmnvsLRuGrkDIDIA3a5rAWo9x2EqNDolEgEAoqWeE/NmOe2gKSyfYQoxUPZk51mGz0HWQfffsyDyGjFFltMOmgK2g2R5snfvtsT7fjbe6dgOmiTLaQdNoQ0yhRgo23RdmtNS+7xEEYBu/YB8/7EsgsiFa5GSkoJixYrByckJU6dOhUqlwtSpU3H//n2UK1cOu3btyviuRqNBt27dkDt3bsyePVvGqN81YNJJLNwYqnP59JE3njVX6XyMzb81QLsGfjqXJzJF5btsxeXQCEnPEjZKAd2aFcKKn+oaLC657P/nMRr336NT2ZBtHVDc30O/AVG2WUo7OHv1DQyfelrn8vpoBycNKY/R/crpXJ7kd/t+NIq12qRT2d3zm6Bpzfx6johMzaNn8SjYZJ1OfQx/zWiA9g399B4TZY+ltIMHTz9Bw367dS6vj3awW7NCWDO1ns7liUzRr8uu4X+/nZW8vLWTgw0iT/aAna20GY2mTq3WIGetVYiOk7aEq0IhYPzAchjTn8/KpsZS2sGnLxOQt8Fancvrox0sXSwHrmxqp3N5Mg2l2m/GjbBIyf2APVsXxeIJtQwXmEx2HX+IFoP36VQ2bFcnFMrvpueISC6cmQjAzs4OmzZtgqOjI7p06YKJEydi9OjR8PDwQNmyZTN9d/DgwVAoFJg5c6YssX5IqaKecoeAUkVzyB0Ckd4N6FRccoehSi2ib4fihglIZvUr50H+3M6SyigVAqqVycVEooliO6jPGN
gOmruivu6oWS43lAppIy/zejuhUbW8BoqKTEl+H2c0rpZP8v5ZOT3s0bJOAQNFRdnBdlCfMbAdJMvzaasiUEh8LlAqBXzWrpjFJRIBQKlUoG+H4pKflQCgT5tiBoiIsstS2kEfL0fk9JC2RY2+mUJbTNk3oHNxyZNMVWoRfdsHGCYgmTWulg8+Xo6SyigVAupU9GEi0cIwmfivihUr4sKFC0hISMClS5dQv3593Lp1C2XKlMn4zrfffouHDx9ixYoVUChM79RVCPSStX53FzsULuAqawxEhtC9eWE4O2Z9i1mFQkBxf3fULJ/bgFHJR6lUYEi3QEkrnao1IoZ2CzJcUJRtltAOlpe5HQTkb4tJP4Z2D4JawvQDhQAM7hoIpdL0/i7IMIZ0C4JaLeEaUQgY0Km4RXYqWwpLaAdz5XREAR9pA770rUJQTlnrJzKE3Dkd0bGRv6RBJGq1iIGdLXNwKZDW0a6RMOJWqRTQqk4B5Jf5HkXvZwntoCAIqBgk7/tYBZnrJ/34tGURODpk/bldqRBQqqgnqpT2/viXzZCNjQKDuwZCyhgStUbEkK7sB7Q0pnfnNxFXr16FRqPJGIFz48YNTJs2DWFhYahUqRLKli2LkSNHyhvkW8oE5ICtjXy/0kolvSDouo8akQlzcbLFn+NrZum7CgVgqxSwbFJti/57GNqtBCoGeWVpNKpCAbSqUxCdm/gbITLSF3NsB91d7RDg5y5b/Xm8HZEvt5Ns9ZP+dGjohzb1CmbpZUmpEFAuMCe+7FHS8IGRyWheqwC6NSuUpYE1SqWAoEIe+LZPacMHRnpjju0gAFQqIV8nliCYxsAeIkOY9nVleHk4ZDmhOLpfWQQVttwZSoULuGHS4ApZ+q5SKcDD1Q4zv61q4KhIn8y2HSwpbzKnssz1k364udhh4VgJ/YA2CiyZWMui+wG/6lESZQJyZKkdVAhA+wa+3N7BAmV9qo2VuXz5MpycnFC0aFEAQIkSJWDq20s6OtigYyM/rN19V5b6P21ZRJZ6iYyha7PCSE7R4PNxxwEBWmcjKBQCHO2V+Ht2I1QpnUuGKI3HydEGe35vipZD9uGfKy+gUAjQvDWLR6kQoNaIaFWnINZOrccZO2bGHNtBIK0tGj33gmx1W/LLgzVRKhVYO7Ueun93BFsP3YdSKbxz30+/71Us4YWd8xrDScIMdjJ/CoWAZZNrAwDW7r77nmsE0GiAUkU8sef3pnBzsZMjVNKR2baDrQpj88F7stTdvFYBeHk6yFI3kaHl93HG0SXN0aj/Hjx6Hq91GwwbpQCVWsR3n5fGxMHljR+kkY3qWwapKg0m/H4p49/+pvTH4lw5HLB/YTP45eNKVubEXNvBHi0KY/Kiy7LUXSi/K6qVsey+IGvSo2URJCWr0X/SSQgf6Ad0drTB9jmNUFHGAV3G4Oxki30Lm6HF4H04e/2l9n7Af9+J2jXww6opdSQvEU6mTxDNoSWgLDtx8Rlq9d6pU9nsbDScw90ejw90hYM9O9LIsoXcjcL89SFYsuUWEpJUGT/P6WGPQV0C0a9DcatauiU5RY2N+8IxZ00wzl5/memzBlXyYGj3EmhZuwATiWQ0z14loGDj9UhVaSSXzU47KAjAnZ3cWNzSaDQidhx9gDlrg3Hg9JNMn1Uq4YWh3YPQuUkh2Ntx6UprJYoidh1/iHnrQrDn5KNMnctlAnJg+Ccl0LVpITg68BmZjEOl0sC/2QY8eh4vuWx22kEA2DmvMZrX4r6gZNmiYpKxbNttzF4TjPDHsRk/t1EK6NjIH4O7BqJmeR8ZIzS+01deYO66YKzfexcq1X8NoW8eFwztHoTP2hWDp5u8+9iRdWnYdzcOnnny8S++Jbvt4LSvK2NE71I6lSXTdeNOJOavD8HSbbeQmKTO+Lm3p0NaP2DHAOTNZV39gOv33MWcNcE4H/wq02eNq+XDkG5BaFG7ABOJForJRAsjiiLKd9mKy6GvJZfNTqP53eelMWV4Jc
nliMxVXEIqgsOiEJeQCndXO5Qq6mn1+yDdeRCDJy/ioVAIKJjHBQXzuMgdElmpHt8fweqdYZLLZacdbFWnIP6e00hyOTIfD5/F4f6TOKg1IvLlckaRgkwcU2aPn8fj3pM4qNQa5PFyQjEZl10m6zblzysYNfu85HLZaQeLFHRD6LYOHEBGVkOjEXHjTiReRSXBwV6JIgXc4J3DUe6wZBURlYTb92OQmKxCTg8HlCziyc5kksW2w/fRdvgByeWy0w46OSjxYF9X5PTgDH1LFRufgpC70YhLSIWHqx1KFc0BW1vrfu65fT8aT14mQKkQ4JvXBQV82A9o6ZhMtEBnrr5A9Z473plq/DG6Npp+eV1wbXN7uDjZSipHRERkCE9exKNEu82Iik2RVE7XdtDJQYlrm9tzViIREZmEpGQVynfZhpC7UZLKZacT9eAfzVC/Sl7J5YiIiPRNFEW0HrofO449lFQuO+3g3FHVMLhrkORyRETmxLrT5xaqSulcGNGrpORy0XGpiI5LlVxu8YRaTCQSEZHJyJvLGbP+V1VyOV3bwalfVWYikYiITIaDvQ2WTaoteUaQru3ggE7FmUgkIiKTIQgCFo6tAQ9XaftV69oO1q2UBwM7B0ouR0Rkbjgz0UIlJatQ97NdOHPt5ce/nA3/+6w0fv6Sy5sSEZFpEUURPb4/ijW7pC93+v/27v9Vy7uO4/jrPt/1eDx6jqDLpmY7HrY2dZvJoomrySaZEm6x/WQJ1hpjBEmMWDvBatsPwljIVktkQyQwZi2q0foyYsuKwKEwk7O5TfvGMr8cbe6c4/E+p3/gE3Q6993O8TweP1/3+/rAfeDmXM/PdV3jsXHtojz/7XUe4QTApPPorkN5cOfBup7jup65ObDn0+loH98FWwCotx/+6nju3P7r1PPK9/zutvxh76YsWdhRv5MATBLuTLxMtbU25YWnbs+K3q66neMLd/TmsS+vqtt8APhfVSqVPPPNNdm4dlHdzvHJ1Vdk345PCIkATEpf27Yi27eM/4k1/63eJZ35xdPrhUQAJqXN65bke303p1Knf9e6O1vz4nfXC4nAtCEmXsa6Olvzm92fytpVC2o++6ufvy5P9308lXr9IgPABLU0N2b/47dmy8araj57861L8rMnb8uMtqaazwaAWqhUKtmxfXUevu+Gml9I/ei18/LyMxuyYN7M2g4GgBradkdv9j56S9paGms6d/EVs/Lysxuyore7pnMBJjOPOZ0GqtXRPLH3SL6+82CGLlYnNGvRgvbsfnhN1t20sEarA4D6+8GLb+W+R36XUwPDE5rTOas5TzxwUz63qceGGgCmjN+++k629r2SY38+P6E5zU0N6fvSyjywdUWam+1NBmBq6H97IFv7XsnvD5+c8Kwv3tmbHV9Zndmz3JkPTC9i4jTS//ZAHtx5MM+/dCLV0fF97XM6WrJtc28eumelH0sApqSTpwfz0JMHs/enx/Le0Pg217S2NOTu9R/OI/ffmIXz2+u0QgCon/cGL+Wx3YfznX1Hc/rc+DbXNDRUsmHNlfnW/Tdm+bL6vUoDAOqlWh3NU/uO5vE9r+X4398d9+dvvn5+vnHv9W6wAKYtMXEa+ts/LmTX/v786KUTOfLm2VSr5T+B9hlNWfWRedmysSd3r1+amTM8yg2AqW/g/HD2/ORYvv/CmznUfzrDF0eLx7U0N2T5sq7cdfvSbP1MT7rntP2fVwoAtTc0fCnP/fJ4nv3xG/nja//Mvy6MFI9raKjk6g91ZtMti3PPZ3uz+APeCQXA1FetjubnB/6aXfv7c+DQyZw6O/Qfj136wY7c9rGFufeuq22mAaY9MXGaGxy6lMP9Z/L6iXMZHK6msaGSjvbmLF/WlWWLZ6ex0aNrALh8jYyM5k9vnc2RYwO5MDiSsSQz25pyzdI5ubZnblqaa/tuDQCYTEZHx/LGiXM5/PqZnH93JNXRsbS1NOaqRbOzsrcr7TOb3+8lAkDdjI2N5S/vXM
irR0/lzLmLuThSTVtrY66cPys3XNOdubNb3+8lAkwaYiIAAAAAAABQ5LYzAAAAAAAAoEhMBAAAAAAAAIrERAAAAAAAAKBITAQAAAAAAACKxEQAAAAAAACgSEwEAAAAAAAAisREAAAAAAAAoEhMBAAAAAAAAIrERAAAAAAAAKBITAQAAAAAAACKxEQAAAAAAACgSEwEAAAAAAAAisREAAAAAAAAoEhMBAAAAAAAAIrERAAAAAAAAKBITAQAAAAAAACKxEQAAAAAAACgSEwEAAAAAAAAisREAAAAAAAAoEhMBAAAAAAAAIrERAAAAAAAAKBITAQAAAAAAACKxEQAAAAAAACgSEwEAAAAAAAAisREAAAAAAAAoEhMBAAAAAAAAIrERAAAAAAAAKBITAQAAAAAAACKxEQAAAAAAACgSEwEAAAAAAAAisREAAAAAAAAoEhMBAAAAAAAAIrERAAAAAAAAKDo3xgOUFps3dnnAAAAAElFTkSuQmCC", - "text/plain": [ - "
" - ] - }, - "metadata": {}, - "output_type": "display_data" - } - ], - "source": [ - "compile_and_plot(U, prompt)" - ] - }, - { - "cell_type": "markdown", - "id": "46d42b16-2fcf-422a-a206-3eee667f4d4b", - "metadata": {}, - "source": [ - "#### Exercise 3" - ] - }, - { - "cell_type": "markdown", - "id": "c9824ae0-f3c6-4755-8a5b-aacb22678ec9", - "metadata": {}, - "source": [ - "A randomly generated unitary (from a random circuit). This unitary WAS NOT in the training set, it is new to the model!" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "63c51c9b-638a-42c4-8029-9add147d2255", - "metadata": {}, - "outputs": [], - "source": [ - "U = np.matrix([[ 0.70710678, 0. , 0. , 0. , 0.70710678, 0. , 0. , 0. ],\n", - " [ 0. , -0.70710678, 0. , 0. , 0. , -0.70710678, 0. , 0. ],\n", - " [-0.70710678, 0. , 0. , 0. , 0.70710678, 0. , 0. , 0. ],\n", - " [ 0. , 0.70710678, 0. , 0. , 0. , -0.70710678, 0. , 0. ],\n", - " [ 0. , 0. , 0.70710678, 0. , 0. , 0. , 0. , 0.70710678],\n", - " [ 0. , 0. , 0. , 0.70710678, 0. , 0. , 0.70710678, 0. ],\n", - " [ 0. , 0. , -0.70710678, 0. , 0. , 0. , 0. , 0.70710678],\n", - " [ 0. , 0. , 0. ,-0.70710678, 0. , 0. , 0.70710678, 0. 
]], dtype=np.complex128)\n", - "\n", - "assert np.allclose(U.H@U, np.identity(2**num_of_qubits)) and np.allclose(U@U.H, np.identity(2**num_of_qubits)) #check if unitary" - ] - }, - { - "cell_type": "markdown", - "id": "5b5e50fd-da8d-47fb-aabb-92044aaba2ce", - "metadata": {}, - "source": [ - "Plot correct (exact) compiled circuits:" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "d6d5023f-b3f4-4cc6-81cb-eead8ffee190", - "metadata": {}, - "outputs": [ - { - "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAABxMAAADoCAYAAAAkPsqVAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjkuMiwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8hTgPZAAAACXBIWXMAAA9hAAAPYQGoP6dpAACOmElEQVR4nOzdd1gUVxsF8DNb6F1QbNhQVOy9gb3XxBpj7yXGmFiixpoYY4saaxJ7b5+9N2xR7BUroGKng7Rly3x/EIhIW2DZXeD8nodHd3Zm7svOsmXO3HsFURRFEBERERERERERERERERF9RmLoAoiIiIiIiIiIiIiIiIjIODFMJCIiIiIiIiIiIiIiIqJUMUwkIiIiIiIiIiIiIiIiolQxTCQiIiIiIiIiIiIiIiKiVDFMJCIiIiIiIiIiIiIiIqJUMUwkIiIiIiIiIiIiIiIiolQxTCQiIiIiIiIiIiIiIiKiVDFMJCIiIiIiIiIiIiIiIqJUMUwkIiIiIiIiIiIiIiIiolQxTDQwQRAwc+bMpNsbNmyAIAh48eKFwWpKz+f1GtqAAQMgCAIEQUClSpXSXTfxsb1x44aeqiMiAAgPD0/6OxUEAQsXLjR0SURERERERERERESkpTwRJvr5+WH48OEoXbo0zMzMYGNjg4YNG2Lp0qWIjY01dHmUwxwdHbF582b89ttvyZaXLFkyy8FnkyZNMGDAgCxtO3PmTJQsWTJL22ojO7/XgAED0KRJk0xtc+7cuWwF3IIgYMOGDZneLjuPY3Zr1oXcXn92fP48s7S0xObNm7F48WLDFUVEREREREREREREWSIzdAHZdeTIEXTv3h2mpqbo168fKlWqhPj4eFy6dAkTJkyAj48P/vrrL0OXmabY2FjIZLnnMBhjvZaWlujTp4+hyyCiNMjlcvTp0wcvXrzAuHHjDF0OEREREREREREREWWCcaVCmfT8+XP06tULJUqUwNmzZ1G4cOGk+0aPHg1fX18cOXLEgBVmzMzMzNAlZEpuq5eIiIiIiIiIiIiIiIiyLlcPczp//nxERUVh7dq1yYLERK6urhg7dmzSbZVKhZ9//hllypSBqakpSpYsiSlTpkChUCTbrmTJkujQoQPOnTuHWrVqwdzcHJUrV8a5c+cAAHv37kXlypVhZmaGmjVr4vbt28m2HzBgAKysrODv74/WrVvD0tISRYoUwezZsyGKYrJ1tZ2D8NixY/Dw8IClpSWsra3Rvn17+Pj4ZLjdzJkzIQhCiuWpzc1448YNtG7dGo6OjjA3N0epUqUwaNCgdOtN3L+vry8GDBgAOzs72NraYuDAgYiJiUm2bWxsLL799ls4OjrC2toanTp1wps3b1J9DB4/foyAgIAMf7+sUCgU+P777+Hk5ARLS0t88cUXCAoKypG2PrVlyxbUqVMHFhYWsLe3h6enJ06ePAkAOHv2LCQSCaZPn55sm23btkEQBKxatSpHamrSpEmyuew+/cnK0K
QZiY2NRfny5VG+fPlkQxCHhoaicOHCaNCgAdRqtc7bTXT16lW0a9cO9vb2sLS0RJUqVbB06dJk6zx+/Bg9evSAk5MTzM3N4ebmhqlTpxq8/mfPnqFr165wdnaGmZkZihUrhl69eiEiIgIA8OWXX6JGjRrJtunYsSMEQcDBgweTPQaCIODYsWNJtY8fPx6VK1eGlZUVbGxs0LZtW9y9ezfZvhKHXt25cyemTJkCZ2dnWFpaolOnTnj16lWO/M5EREREREREREREZHi5Okw8dOgQSpcujQYNGmi1/pAhQzB9+nTUqFEDixcvRuPGjTF37lz06tUrxbq+vr7o3bs3OnbsiLlz5yIsLAwdO3bE1q1bMW7cOPTp0wezZs2Cn58fevToAY1Gk2x7tVqNNm3aoFChQpg/fz5q1qyJGTNmYMaMGZn+PTdv3oz27dvDysoK8+bNw7Rp0/Dw4UM0atRIZ3OqBQYGolWrVnjx4gV+/PFHLFu2DF9//TW8vb212r5Hjx74+PEj5s6dix49emDDhg2YNWtWsnUGDBiAZcuWoV27dpg3bx7Mzc3Rvn37VPdXoUIF9OvXL9u/V2rGjBmDu3fvYsaMGRg5ciQOHTqEb775JkfaSjRr1iz07dsXcrkcs2fPxqxZs1C8eHGcPXsWANCsWTOMGjUKc+fOxa1btwAA7969w5gxY9CiRQuMGDEiR+qaOnUqNm/enOyndevWAICCBQvqvD1zc3Ns3LgRvr6+SQEdkNCTOCIiAhs2bIBUKtV5uwBw6tQpeHp64uHDhxg7diwWLVqEpk2b4vDhw0nr3Lt3D3Xr1sXZs2cxdOhQLF26FF26dMGhQ4cMWn98fDxat24Nb29vjBkzBitWrMCwYcPg7++P8PBwAICHhwfu3r2LyMhIAIAoivjnn38gkUhw8eLFpH1dvHgREokEDRs2BAD4+/tj//796NChA37//XdMmDAB9+/fR+PGjfH27dsUtcyZMwdHjhzBpEmT8O233+LUqVNo0aIF56clIiIiIiIiIiIiyqvEXCoiIkIEIHbu3Fmr9e/cuSMCEIcMGZJs+fjx40UA4tmzZ5OWlShRQgQgXr58OWnZiRMnRACiubm5+PLly6Tlf/75pwhA9PLySlrWv39/EYA4ZsyYpGUajUZs3769aGJiIgYFBSUtByDOmDEj6fb69etFAOLz589FURTFjx8/inZ2duLQoUOT1f3+/XvR1tY2xfLPzZgxQ0ztMH/ezr59+0QA4vXr19Pd3+f1Ju5/0KBBydb74osvxAIFCiTdvnnzpghA/O6775KtN2DAgBT7TGyncePG6dYiigmPdYkSJTJcTxT/+51btGghajSapOXjxo0TpVKpGB4ertV+MuvZs2eiRCIRv/jiC1GtVie779M6oqOjRVdXV9Hd3V2Mi4sT27dvL9rY2CR7vuW0f/75R5TL5SmOp65NnjxZlEgk4oULF8Tdu3eLAMQlS5bkWHsqlUosVaqUWKJECTEsLCzZfZ8eA09PT9Ha2jrFY/7pOoao//bt2yIAcffu3Wmuc/36dRGAePToUVEURfHevXsiALF79+5i3bp1k9br1KmTWL169aTbcXFxKZ6Xz58/F01NTcXZs2cnLfPy8hIBiEWLFhUjIyOTlu/atUsEIC5dujTD3+P58+ciAHHBggUZ/9JEREREREREREREZBRybc/ExN431tbWWq1/9OhRAMD333+fbPkPP/wAACnmVqxYsSLq16+fdLtu3boAEnqQubi4pFju7++fos1Pe7sJgoBvvvkG8fHxOH36tFY1Awm9qcLDw/HVV18hODg46UcqlaJu3brw8vLSel/psbOzAwAcPnwYSqUy09t/3nPOw8MDISEhScfp+PHjAIBRo0YlW2/MmDGp7k8UxaRhZXVt2LBhyYZ+9fDwgFqtxsuXL3Okvf3790Oj0WD69OmQSJL/yX1ah4WFBTZs2IBHjx7B09MTR44cweLFi5M933LS+/fv0a1bN1SrVg
0rV67M0bZmzpwJd3d39O/fH6NGjULjxo3x7bff5lh7t2/fxvPnz/Hdd98lPdcTJR6DoKAgXLhwAYMGDUrxmH8+VLC+67e1tQUAnDhxIsXwwYmqV68OKysrXLhwAUBCD8RixYqhX79+uHXrFmJiYiCKIi5dugQPD4+k7UxNTZOel2q1GiEhIbCysoKbm1tSL9lP9evXL9nrbrdu3VC4cOGk11giIiIiIiIiIiIiyltybZhoY2MDAPj48aNW6798+RISiQSurq7Jljs7O8POzi5FkPR5mJB4Mr948eKpLg8LC0u2XCKRoHTp0smWlStXDgAyNTTps2fPACSEmE5OTsl+Tp48icDAQK33lZ7GjRuja9eumDVrFhwdHdG5c2esX78+xXySafn88bK3twfw3+OS+PiXKlUq2XqfHw99yKhWXfPz84NEIkHFihUzXLdhw4YYOXIkrl27htatW6eYszKnqFQq9OjRA2q1Gnv37oWpqWmOtmdiYoJ169bh+fPn+PjxI9avX5/q3J664ufnBwCoVKlSmuskXhCQ3jqJ9F1/qVKl8P3332PNmjVwdHRE69atsWLFiqT5EgFAKpWifv36SUOaXrx4ER4eHmjUqBHUajW8vb3x8OFDhIaGJgsTNRoNFi9ejLJly8LU1BSOjo5wcnLCvXv3ku0/UdmyZZPdFgQBrq6uOhtymYiIiIiIiIiIiIiMS64OE4sUKYIHDx5kajttT/inNe9ZWstFUcxUHdpKnItx8+bNOHXqVIqfAwcOpLt9Wr+vWq1Osd6ePXtw5coVfPPNN3jz5g0GDRqEmjVrIioqKsM69f24ZIcx16pQKJJ6ZPr5+aXZC03XJkyYgCtXrmDXrl0oVqyYXto8ceIEACAuLi4pNM9N9F3/okWLcO/ePUyZMgWxsbH49ttv4e7ujtevXyet06hRI1y/fh1xcXFJYaKdnR0qVaqEixcvJgWNn4aJv/76K77//nt4enpiy5YtOHHiBE6dOgV3d/cUc8ESERERERERERERUf6Ta8NEAOjQoQP8/Pxw5cqVDNctUaIENBpNipP+Hz58QHh4OEqUKKHT2jQaTYqhT58+fQoAKFmypNb7KVOmDACgYMGCaNGiRYqfJk2apLt9Yq+78PDwZMvTGtKzXr16mDNnDm7cuIGtW7fCx8cHO3bs0LretCQ+/s+fP0+23NfXN9v7NnZlypSBRqPBw4cPM1x3xowZePToERYuXIjnz5/jxx9/zPH6duzYgSVLlmDhwoVo3LhxjrcHAPfu3cPs2bMxcOBAVK9eHUOGDEm1F5yuJP4dpXfxQWJPYm0uUNB3/YkqV66Mn376CRcuXMDFixfx5s0brF69Oul+Dw8PxMfHY/v27Xjz5k1SaOjp6ZkUJpYrVw6FChVK2mbPnj1o2rQp1q5di169eqFVq1Zo0aJFiteMRJ+/hoqiCF9f30y9rhERERERERERERFR7pGrw8SJEyfC0tISQ4YMwYcPH1Lc7+fnh6VLlwIA2rVrBwBYsmRJsnV+//13AED79u11Xt/y5cuT/i+KIpYvXw65XI7mzZtrvY/WrVvDxsYGv/76a6pzGQYFBaW7fWKIkjiPGgBER0dj48aNydYLCwtL0TOvWrVqAKD1UKfpad26NQCkmItv2bJlqa7/+PFjBAQEZLtdY9ClSxdIJBLMnj07RU+vTx/zq1evYuHChfjuu+/www8/YMKECVi+fDnOnz+fY7U9ePAAQ4YMQZ8+fTB27Ngca+dTSqUSAwYMQJEiRbB06VJs2LABHz58wLhx43KszRo1aqBUqVJYsmRJipAs8Rg4OTnB09MT69atS/Hc+/Q4GaL+yMhIqFSqZMsqV64MiUSS7O+zbt26kMvlmDdvHhwcHODu7g4gIWT09vbG+fPnk/VKBBJ66n7+t7979268efMm1Vo2bdqUbHjpPXv24N27d2jbtm22fkciIiIiIiIiIiIiMk4yQxeQHWXKlMG2bdvQs2dPVKhQAf369U
OlSpUQHx+Py5cvY/fu3RgwYAAAoGrVqujfvz/++usvhIeHo3Hjxrh27Ro2btyILl26oGnTpjqtzczMDMePH0f//v1Rt25dHDt2DEeOHMGUKVPg5OSk9X5sbGywatUq9O3bFzVq1ECvXr3g5OSEgIAAHDlyBA0bNkwWWn6uVatWcHFxweDBgzFhwgRIpVKsW7cuaR+JNm7ciJUrV+KLL75AmTJl8PHjR/z999+wsbFJCmKzo2bNmujatSuWLFmCkJAQ1KtXD+fPn0/qrfn5cKwVKlRA48aNk4b8NAZNmjTB+fPnMz0cqqurK6ZOnYqff/4ZHh4e+PLLL2Fqaorr16+jSJEimDt3LuLi4tC/f3+ULVsWc+bMAQDMmjULhw4dwsCBA3H//n1YWlqm2UZir7DMzls3cOBAAEga4vJTDRo0SDHvZ6Jz586hadOmmDFjBmbOnJmpNn/55RfcuXMHZ86cgbW1NapUqYLp06fjp59+Qrdu3dJ9vs2cOROzZs2Cl5dXhr1yPyWRSLBq1Sp07NgR1apVw8CBA1G4cGE8fvwYPj4+SUOW/vHHH2jUqBFq1KiBYcOGoVSpUnjx4gWOHDmCO3fuGKz+s2fP4ptvvkH37t1Rrlw5qFQqbN68GVKpFF27dk1az8LCAjVr1oS3tzc6duyY9Hfl6emJ6OhoREdHpwgTO3TokNTLskGDBrh//z62bt2a5rF3cHBAo0aNMHDgQHz48AFLliyBq6srhg4dqvXvQ0RERERERERERES5R64OEwGgU6dOuHfvHhYsWIADBw5g1apVMDU1RZUqVbBo0aJkJ7jXrFmD0qVLY8OGDdi3bx+cnZ0xefJkzJgxQ+d1SaVSHD9+HCNHjsSECRNgbW2NGTNmYPr06ZneV+/evVGkSBH89ttvWLBgARQKBYoWLQoPD4+kMCgtcrkc+/btw6hRozBt2jQ4Ozvju+++g729fbJtE8PVHTt24MOHD7C1tUWdOnWwdetWlCpVKtM1p2bTpk1wdnbG9u3bsW/fPrRo0QI7d+6Em5sbzMzMdNJGToqKioKzs3OWtp09ezZKlSqFZcuWYerUqbCwsECVKlXQt29fAMCUKVPg6+uLy5cvJz0WJiYm2LhxI+rVq4cJEyak6NX5qejoaLi6uma6rqCgIERHR2PYsGEp7lu/fn2agVLiPJqFCxfOVHu3bt3Cr7/+im+++SZZgP/jjz/iwIEDGDp0KHx8fGBnZ5dmu4IgZOk4tG7dGl5eXpg1axYWLVoEjUaDMmXKJHuNqFq1Kry9vTFt2jSsWrUKcXFxKFGiBHr06GHQ+qtWrYrWrVvj0KFDePPmDSwsLFC1alUcO3YM9erVS7ZuYi/ERo0aJS1zdnaGq6srfH19U4SJU6ZMQXR0NLZt24adO3eiRo0aOHLkSJpD7E6ZMgX37t3D3Llz8fHjRzRv3hwrV66EhYVFpn4nIiIiIiIiIiIiIsodBDGz3awoQwMGDMCePXuSAhdK2507d1C9enVs2bIFX3/9daa3HzBgAM6ePYtbt25BJpOlGeJk18ePH+Hg4IAlS5Zg9OjROdJGVj18+BDu7u44fPhwjgzXm5qJEydi+/bt8PX1hampqV7aBIA6deqgRIkS2L17t97a1KXcXH9ib9Tdu3ejW7dumdpWFEWEhITg1atXqFGjBhYsWIDx48fnUKVEREREREREREREpEu5vmci5R6xsbEwNzdPtmzJkiWQSCTw9PTM8n5fvXoFJycnuLu748GDB9ktM1UXLlxA0aJFjXIoRy8vL9SvX19vQWJim9OmTdNrkBgZGYm7d++mmO8zt8jt9WdHREREpoZ3JiIiIiIiIiIiIiLjwTCR9Gb+/Pm4efMmmjZtCplMhmPHjuHYsWMYNmwYihcvnqV9Tpw4EX369AEAWFlZ6bLcZNq3b6/XsC4zRo8erffektevX9dre0DC/KEKhULv7epKbq8/O6ysrHDq1Kmk2+XKlTNgNURERERERERERESUGQ
wTSW8aNGiAU6dO4eeff0ZUVBRcXFwwc+ZMTJ06Ncv7rFixIipWrKjDKolI12QyGVq0aGHoMoiIiIiIiIiIiIgoCzhnIhERERERERERERERERGlSmLoAoiIiIiIiIiIiIiIiIjIODFMJCIiIiIiIiIiIiIiIqJUMUwkIiIiIiIiIiIiIiIiolQxTCQiIiIiIiIiIiIiIiKiVDFMJCIiIiIiIiIiIiIiIqJUMUwkIiIiIiIiIiIiIiIiolQxTCQiIiIiIiIiIiIiIiKiVDFMJCIiIiIiIiIiIiIiIqJUMUwkIiIiIiIiIiIiIiIiolQxTCQiIiIiIiIiIiIiIiKiVDFMJCIiIiIiIiIiIiIiIqJUMUwkIiIiIiIiIiIiIiIiolQxTCQiIiIiIiIiIiIiIiKiVDFMJCIiIiIiIiIiIiIiIqJUMUwkIiIiIiIiIiIiIiIiolQxTCQiIiIiIiIiIiIiIiKiVDFMJCIiIiIiIiIiIiIiIqJUMUwkIiIiIiIiIiIiIiIiolQxTCQiIiIiIiIiIiIiIiKiVMkMXQARERFRfvFRCcSoDF1FShYywFpu6CqIiIiIiIiIiMgYMUwkIiIi0oOPSqDDKSDaCMNESxlwuCUDRSIiIiIiIiIiSonDnBIRERHpQYzKOINEIKEuY+wxSUREREREREREhscwkYiIiIiIiIiIiIiIiIhSxTCRiIiIiIiIiIiIiIiIiFLFOROJiIgoVxJFEa/eR+Pmw2A8fRmB2Dg15DIJihayQM0KjqhQ2g4yGa+bIiIiIiIiIiIiyg6GiURERJSrvH4fjb/+9xhr9j7Fu6CYNNczN5Wie6tSGNWzAupUdoIgCHqskoiIiIiIiIiIKG8QRFEUDV0EERERUUaiYpT4ccl1rNr5CBoREARA208xjaoXwtpZHihX0jZni0zHh1ig/SmDNZ+hIy2BQuaGroKIiIiIiIiIiIwNeyYSEVGWqDSAygguR5EJAEeyzPv+uf0BfSafw4u3UUnLMnM51KXbH1Cl217MH1cHY3pXZC9FIiIiIiIiIiIiLTFMJCKiTFNpgNYngYh4Q1cC2JoAJ1oxUMzLDp0LQLcfzkCp0mRrP/FKDcbO88bzNx/x+4S6DBSJiIiIiIiIiIi0wFOvRESUaSrROIJEIKEOY+ghSTnD69pbdP0+IUjM7sDsidsv2eKDactvZr84IiIiIiIiIiKifIBhIhERERmlsEgFvpp0Dmq1dkHi9nlNsX1eU632Pefvuzh79W02KyQiIiIiIiIiIsr7GCYSERGRUfpunjc+hMRCo2WPRLeStnAraavVuoIADJx+AVExymxUSERERERERERElPdxzkQjoohX4/6zUNx+FILgcAUEAXCyN0P18gVQqaw9TORSQ5dIRESkF3efhGDTId8c278oAgHvorFs20NMHlI1x9oh0tbdJyHYc+oFwiIVsLKQo12jYvCo6cy5PXO5eKUa+868xLUHQYhXalC8kCX6dCiDIgUtDV0aERERERERkdYYJhqBq/cCsXLnI+w45o94lQYAIPn3vFFibwxTEwm+bu+KUT0roGZFRwNVSkREpB8rdz7K8TYEAVi96xEmDqwMqZSDNZBh3H8aioHTL+DmwxCYyiVQqkXIpAIWrL+H0sVs8Of0hmhWt4ihy6RMEkURK3c+wrTlN/Hx3x7QogaQyQRMXnodXzQvib9nNoK9jamBKyUiIiIiIiLKGM+cGVBgSCy6fX8G9focwtajfklBIpAQIn46rJsiXoONB56hVq8D6DP5HEIjFAaomIiIcrM9e/agXLlysLS0hKenJ3766Sc0bardHIP6FB2jxOYc7JWYSBSBgPfROHH5TY63pSvhVw/i4XfVkv3c7mmNlyuHG7o0yoJbD4NRr89B3HkcCgBQKDXQaETEKzXQiIDvq0i0Gn4ch88HGLhSyqypy25g7DxvhEXGQ6USoVKJUGtEKOITju2h8wGo2/sgwiL5mZ6IiIiIiIiMH3smGsiFG+
/Q5bvTiIxKuFJZrc54Qij1v+ni9mP+OHn5DQ4ta4m6VQrmaJ1ERJQ3bNmyBZMmTcKuXbvQoEEDbNq0CYMHD8bYsWMNXVoKtx6FIFah1lt7F2+9RzuP4nprLzvs6naCXd1OSbejn16D75xOcO42xYBVUVaoVBp0+OYk4uITAsS0qDUiuv9wFq9O9YKjvZkeK6SsOnn5Neatu5/ucY1XavDyXRSGzbqE3Yua67E6IiIiovyt/Ukg2EDXczmaAkdaGaZtIqLsYs9EAzh/4x1aDj+OiChlUkCYGRqNiNAIBZoOPgrvu4E5UCEREeUlcXFxGDduHFavXo2GDRtCEAT069cPUqkU1atXB5AQNtavXx/169fHmTNnDFrvjYfBemtLEIAbPvprT5dUUeHwX9gLJUb9BdOCJQxdDmXSofMBCAqLSzdwSiRCxPr9T/VQFenC75seQJuZLuOVGuw98xLvgmJyvCYiIiIiShCsANSiYX4MFWISEekCw0Q9e/0+Gh2+OQmVWkz35JGluQx1KjvB0jz1zqNqjQiFUoN2o08gMCQ2p8olIqI84OLFi1AoFOjYsWPSssjISMTHx6N69eoIDw/H/Pnz4eXlhUOHDuG7776DWq2/noGfe+gXpre2RBG4/0x/7enSy2WDYFe3S7KeipR7/PW/J1oFiUDCcPerdz/O4YpIFwJDYnHy8hutLxiUywRsO+qXw1URERERERERZQ+HOdUjURQxeOZFxMapMzx55O5qj6tbO6Hu1wdx7X5QqutoNCIio5QY+cs/2PN7cwiCNtdAExFRfhMYGAgnJ6dky7Zs2QJzc3OUL18ep0+fhoeHB8zMzGBmZobixYvDz88P5cqVy1Q7vXr1wpMnT7Jd7wtNKwBlgc/69myf1xRuJW3T3M6tVMJ9t3Z2SfX+Jy8i8NUkrxTLg0Iiknpo5iibgpD+cEInu/pwaCmUoW9ResJOnewPANq0aQ1EcsQDfXmk7g0NHLRe//nrMP08TylbYsUCEPGV1usr4pX47fe/sGXJpRys6j9ubm7YsWOHXtoiIiIi+pQoingbGIOPMUqYyCVwcbaCTMZ+LkREuQXDRD06cuEVTl5+o9N9qjUi9p55iXPX36FpnSI63TflD4p4NfaffYmnLyMgl0nQrE4R1KnslPGGpDd+ryJx8FwAIqPiUdjJAt1blYK9jamhy6JcpGLFinjx4gXOnDkDT09PHDx4ENOmTUPlypUhlUoREhICe3v7pPXt7e0REhJisHoFaPJ0e9kV/ewG3u+Zi/ILrkKQyQ1dDmWRgMz1/s3s+mQYWXk9kfDYEhERUR4Vr1Tjf6deYO2+p7j2IAgfo5VJ98llEri72uHrdq4Y2KUsCthxfnAiImOW58JElUqFadOm4c8//4SlpWXSHFFPnxp+npll2x9CKhGyNE9ieqRSASt2PGKYSJkiiiKWbXuIn5bdhEZMGDZXKgAzV95CmeI22DavCaq6FTB0mflaYEgsvv7xHM7dfAe5VAKlSgNTEynG/HoFQ7qWw5KJ9SGX8yo+ylj16tUxY8YM9OzZEwDQvXt3eHh4oEiRhPeNAgUKICzsv6E+w8PDUaBA5v/+ddXb5fsF3li82SfF8tR6FX4qsUdijZ77M9VemRIFcfvQ7UxtkxUfYoH2p7K3D3V0BPwX9kSJUX/qfJ7E48dPoJC5TndJ6Rj72xWs3v0Y8cqMwyepREDTOqVw6q+cf55S9iji1SjYZCsio5QZrwxAJpVh5fwJ6NqyVA5XRnmd8psfgBA9DdtdwB7y5Yv005YBVfrif3gTqJ85TYsWtMCDfV2ztC2PPREZq/1nX2DozEsIi4yHRhQhfnY6VKnS4M7jUPj43sCUP25g+ohqmDSwKs9zEBEZqTwXJk6aNAmPHj2Cv78/oqKiUK9ePdSpU8fQZeHNh2id90pMpFaL2Hf2JUIjFHCwZW8l0s4vf93Br3/fRVz8f1fDq/7999HzcDTodxhXNndElXLaD8FGuhMSHo
favQ/gXXAsVCoRKlXCcVLFJhyl9fufIeB9NA4sbQmJJHcNcRx8ai1Czm1Ouh0fFACzYhVQdvoRA1aV902fPh3Tp09Pul2pUiW0b98eAFC3bl1MnDgRCoUC0dHRCAgIQJkyZQxVKmpUcNRrezUr6re97Ag8thLK4Nd4u30G3m6fkbTcokwtlByzxoCVUWaN6lkBf2x7qPX64/pWysFqSFdMTaQY0b08lmzx0SootrM2Qeemur0wgPKpkDAgOtrQVeQpbwJjEP4x3tBlZIzHnoiMjFKpwZCZF7HpkK9266sSPjPNWnUbu088x/HVbVDYySInSyQioizIU2Hi27dvsWbNGvj6+sLOzg52dnZo2LBh0pxPEydOxJUrV+Dq6oo1a9ZAKpXqrbaracx7qCsajYgbPkFo1aBYjrZDeYP/60j8/OedpA9snxNFIDZOhYHTLuBmGnOPUc6asfIW3gfHQpnGichYhRpnvN9i/9mX+LJFSf0Wl02OLQfDseVgAIDqYyieTPFEsQELDFxV/hIfH48nT54kzb9mZ2eHH374AU2aNAEA/P7773p9j/ycvsO93BQmFu42GYW7TTZ0GaQDbqXs0KtNaew98yLd0Ekuk6BKOXu0blBUj9VRdnzb2x1/7XkClSoe6Q1IIpUImPNtLc4VRERERHmGSqVBj/FncfhCQOa3VYsJF7f3PQTvrZ1QqACHTSEiMiZ56pvrmTNnUKtWLTg5/TffW0hICNzd3XH37l0EBQXh4sWLKFmyJA4fPqzX2m49CoY0B3sPSQTg5kPDzW9FucuKHY+Q0dNRFIEHz8Jw/2mofoqiJDGxKqzf/zTDHg2xCjUWbLinp6p0TxRFvFjSD85df4S5S0VDl5OvPH78GABQuXLlpGX9+vXDlStXcOXKFbRs2dJQpQEAypeyhVtJWwh66nTLXkFkKOt/9kCzOoUhTyNMMpFLUKGULY6vagOpNE99bM/TihayxJm/28LGygSmqQzTJZUIkAjA9BHVMaxbeQNUSDlJFEXExKogfj6WG2VZvFINRTznFiUiyg0WbLiPwxcCoFKn/T4okwkoXcwaMlnKL3xKlYg3gTH4aqIX30uJiIxMnuqZGBISkixIDAwMxJUrV7B48WJcunQJbdq0AQC0adMGe/fuRefOnbXed1RUFHx8Us7fpK17j94ASP4maGkug7urfarru5exS/Zvanx8wxD975CHggDc8XmOq1fjslwj5R/7Tz+DQouhtyQSEZv3eaNrE86dqE8P/GOg0WR8fADg+oMgeHt7Q9BX6vKveFEAkL0hpD/8bx7kDkVQoEmfbNdz/fo1mAjaf9Fwd3eHlZVVttvNrapUqQKlUrv5vAxBEASM7lUB3/7mncPtAK0bFIOri02OtkOUFjNTGQ4ta4Xtx/yweLMPbj/+78Kw0sWsMa5vJQzqUg4W5nnqI3u+UKOiIx7s/RLLdzzEqp2PERGVMFSiRADaeRTH9/0qoUntwgauknTl5duPWL37Mdb87ylCIuIgignvMQVszTCkazmM7FEBLoXz7+eOzFKrNTh68TV+3/wAl+98SLrAzkQuQS13R3zftxI6NSnBObWIiIzMQ78wTF9xM90gEQBcnK3gd7QHyrTbBf/XH1Pcr1RpcP7me6zd+xRDurrlVLlERJRJeerMRLly5TB//ny8e/cOgiBg4MCBiI+Ph5ubGw4dOoRSpUoBAGxtbREWlrkJyn18fFCvXr2sF1d0AGBfHxD+GzbO3dUeV7d2SnezdbM907yv7tcHce3f4VPVKhV27dqNXUu2Zb1Gyj/K/gyYZXwCKy4uDgsW/I4FP57RQ1GUxMIVKPkdIDXLcFW1WoP69evnfE2fEUzMUGN3bJa3//jgPMKu/A9ucy/qpJ7GjRtDjNf+Ygpvb2/UrVtXJ21TzujXsSymrbiFyKh45NQFqaIIfNfHPWd2TqQlmUyCvh3Lom/HsnjzIRrNhx2FRBDgs6+r3i8UId0qWsgSc8fWxsyRNV
C9xz6IIvDPpo6c4zwPuf80FBN+v4ZTV95ALpdAEf/fxWCiCASHx2HJ5geYv+4eWtYvigXf10FlzkeeJo1GxJItDzBv3T2Ef4yHUqVJ9hkgXqnB5TuBuOlzDlYWcozrVwmTBlbhUMFEREZi7pq7/35+zf4XOI1GxPSVNzGwS1mO0EGkJ6/fR2PJlgfYcsQP0bFKFC9kie/7VUafDmVgZpqnYiTKojz1LGjTpg1atmyJcuXKoXTp0ujRowdevnwJExMT2NnZISIiAgAQEREBe/vUewSmxd3dHd7eWe8hsWTnO+w+GwL1J52NfHzDUPfrg6m3V8YO62Z7YtD0C/DxC091HR/f/wJRiUyG/v16YniXb7NcI+UfP/0ZgLM3I9OdxwcATE3NsGDBZNSpOEc/hREAIDRShY4TH0OtxWhOhR1NsS8br01ZFS8KGBeYtW2V4R/wcuVwlJ12BBKTjANTbZw/fz7TPRPJuNlam2D55ProO+V8jrXxVdvSaN2Qcw2T8ShayBLm/35JY5CYd5iaSGFqknBBIYPEvOPEP6/RZexpxKs00IhIFiR+Ku7f5ae836LO1wdxYGkLznOfijiFCl9N9MKxS68zHEFFodRAEaHAz3/exvnr77BvSQtYWsj1VCkRkXEJCAjA0KFD8ebNG5QvXx5PnjzB8ePHUbSofufbDgmPw84T/lCqdHcl6PvgWBy79BodGrvobJ9ZFXTiL4Re+K8DR/Tjy6j4xwOYFS1nwKqIdMf7biBajzyO2Dg1lKqEz2KPnkfgu/neWL7jIS5uaA9rSxMDV0mGlqfCRIlEgo0bN2Ljxo0AgJUrVyadMG7QoAGWLVuGnj174sSJE5nuyWNlZZWtXixtPzzDjtMXki2LjlUl9SxMi49feIbrAIBGA3RoXgV165bMco2Uf8w2K4mLg4+medIjUQE7c4zu3wySHJzvk1LXziMGh88HpNsjy9xMiinDaqFuXf3PNxinBnAka9u+2zEb6pgIvFg+OGmZiUNRlPpha5brqV27DsykGa9HucvX7ctg98nnOHguQKv1n7yI0Go9QQAc7cywbLL+e/USEVHud+HGO3QcczJTJ0w1GhFxCjU6fHMSZ/5uB4+azjlYYe6iVmvQ7YezOO39RqupGBIp4jW4cOs9Oo45hROr23DYUyLKd9RqNTp37oxFixahWbNmWLVqFS5cuKD3IBEALt56D3UGw5tmlkwqwakrb4wiTHRqPQxOrYcBAEIv7oRpwVIMEinPiI5Rot3oE4iMSjkdTnSsCo+fh2Pw9IvYtai5AaojY5KnP20/efIkKUysVq0a7O3t4eHhAT8/P3To0EGvtdRyd8wTbVDeUK9KQTStXQRmJmmnL6ZyCZZOqscg0UB+HVMz3eMjl0ngXMAc/Tu56rEq3XAZsQJVN7yD25xzST/ZCRIp7xIEAZvmNEb18trN2/rVJC98Nckrg30ClmYyHF3ZCgXsdNMzloiI8o8PIbFo/83JNOeDEgSggJ0p0upcrFKLaDf6BAJDsj5cfF7z8593EoLENC50TO8xVcRrcPnuB0xaci2HqyQiMj7Hjh1D6dKl0axZMwAJI/BUq1YNMTExGDBgAEaNGoUlS5bopZabD0Mg1/Gw00qVBpfvZnFIpByiDHuP93vnofjQpYYuhUhnth3zgzKdC7oU8Rocufian18pb/VM/NyTJ08wePB/PV8WLlxosFoqlLaDexk7PPIPz3BoycySSATUdneES2Er3e6Y8ixBELB3cXP0nHAWJy6/gUYjJp0QMTWRQBSBVdMaolurUgauNP+qVNYBp/9ui3ajTkCpFhETqwKQcDLFzESKUkWtceqvNhxigPI8W2sTnFmT8LfgfS/jnvoZsbM2wYnVbVDL3UkH1RERUX7z9/8eQ6UW0xw9wsHWFMEX+sDRcwtCwhUp7hdFQKUSsWbfE0wZUi1ni80FYuNUWLTpfrojpmT0mCriNVi18zFmjqwBGyt+Ns4r7jwOwfUHQTA3k6
Fto2K8CCybYmJVOHrxFUIiFKhQyhYeNZ05pHoecOfOHdSsWTPp9u3bt1GtWjXs3bsXHTt2RNeuXdGjRw+MHj0acnnGw0ErlUoEBGg3KsznfJ6+Q/xnYYRMJsDFOeW5Shdny2T/fi7gfRRU//b+f/46An5+flmqKU1iaQBZe/6/XDEMxfrPh9TCJotti/Dz88/atkQ5ZPOBh4j697xjWiSCiF1H76Btg4J6qopymouLi1bvDZ/K02Hi8ePHDV1CEkEQMKa3O0b8/I/O963RiBjTW//DHFLuZm4mw8FlrXD7UTCW73gE77uBeOgfjiFfumHWqBr8smYEGlQrhHdne2PXiedYseMhrvsEo3FNZ0wZWg3N6xZhr1HKN+xtTHF+fXvM+esu5qy5A41aRGauyxGEhJO37T2L46/pDVGkYOpfWomIiNKjUmnwx9aHiFNoMbF1OuLi1Vi6xQeTBlaBVJqnBwvK0O6Tz5NOGGeLAGw57ItRvfi9OLd7/T4a7b85gVfvoxEVo4JMKsDURIq+HV2xZCJHzsmKpVseYNbq21CqNIiLV8PaQg5rSzn2L2mB6hU4wlVuVqBAAVy+fBkA4O/vj/nz52PhwoV48eIFWrRoAQBwcnJCcHAwChcunOH+AgIC4OqaxdGPig0C7Orj027kLs5W8DvaI81NvNa1T3V5mXa74P/6IwAgJCQ06zWlofr/4iGRZX6u3eDT62Di5AKbai2y3LZSpdL570OUbaV+AKwqpLtKVFQUxoz5DojgaBB5ha+vL8qUKZOpbfJ0mGhs+nV0xYIN9/DiTRTUOuqeKJUIqFDaDt3Zg4yyqHoFR6yd5YGr9wJRr88h9O3gyiDRiJibydC/c1mUL2WLen0O4bfvaqNuFV4FRPmPiVyKWaNroEszF8xafRuHzgVAI/4XFH7u0+VVyjlg0sAq6NW2NK/AJiLKA44fP47Zs2fDwsICpUuXRtGiRTFjxowcb/fIxVeIiIrXyb7CP8bj6MXX6NjEMPNAnQ8OxNWwEEwsm3DiqMvVi9hf10PvdSza9ABx8dkLZwEgTqHGok0PMLJnBYO811crXwAje5TH8Nn/oF8nV1iay7Fq5yO915ERYznuaYmNU6Fhv0N49SE66XOcUgXEKtRYv+8pZFIBv0+oZ9gic5kNB55i+opbiIz+bx6ssMh4hEXGo+Ww47i9uwuKp9JzjHKH3r17Y9u2bahYsSLq16+PggULolq1alCr1Xj16hVq166N4OBgODpqFxq7uLjA19c3S7X8ut4Xm4++STafcMD7KJRptytlO86W8FrXHk0HHUHA++gU9we8j0r6f5mShXDiUNZqSktvHxky+84THxSAoKMr4Tb3QrbalstkWX6MiXLKsp3Psfp/AVAo084rzC0scfDgGpQqaqHHyignubhk/nsIw0Q9MjeTYdOcxmjU/7BO97v518Ywkac9txoREVFeUb2CI/YvbYmAd1HYePAZrtwNxHWfYASHxSWtY24qRfUKBVCroiN6tS2NelUKMkQkIsojnj9/junTp+P06dOwsbFBvXr10LBhQ/Tt2xcajQZz587N0hdjbRy/9BpKVdrDcWaGUqXBsUuvDBYmGoOIj/G49zRUZ/vzf/0Rbz7EoFgaw+blpDuPQ+DjF44ZI6ujTDEb9Jt6Xu815AVbj/ghJEKR6oViUbEqrN//DDNG1ICtNYez1YZGI2LqHzeTBYmfCv8Yj/nr72PZ5Pp6rox0xdbWFhcvXgQAaDQaODs7w83NDSVKlMDo0aNx7tw5NGjQQOth7ORyeaZ7qSRqWk/EpiNvki1TqcSkHoapCXgfne79UqkAz1pFs1xTmh4CmRrqBsD7//0GVVQons1ul7SsWP/5sCxXJ3M7EgTd/z5E2TR5eBGsOfAGCmXqQ50KAlC5XAG08Kys58rI2DBM1LMG1Qrh9/F1MW7B1XTX8/ENQ92vD8LHNyzd9Vb+1ADVyhfQZYlERERGz6WwFaYNrw4AEEURER/j0W
jAYQgCcHvnF5DJ8vewcUREedXOnTsxePBg2NgkzFUkl8tx6dIl/Pbbb1Cr1Vi/fr1WvRQ1Gg1CQzMXZAW8i0gWcghCwnx+n0q8/fnyRKH/BiWiCLx6H4Hg4OBM1ZAWa1GDzL7zHXz/Bi9jE3qEvI2L1Xo7jajRSd0v36XsjZKdx1QQAP+ADzCTZXEeq8+IaU2MmYbNh57hxfGeaDk889OtiKKY5cc0s8c+q8cd0N2xT8vf/3uI6HTmbFKp1Nh36jE6eBbJsRrykicvIhGrSD1IBAC1RsTO436YMbSsHqsyHg4ODpBI8s53hqdPn6J06dKQSCSwsLDA+vXr9dp+o+qFoFLrZhS2T3nWdNb5PrPCZcRKQ5dAlGMKFTDH0kn1MHaed4r3YYlEgL21CbbPa2KY4sioMEw0gO/6VoJaI2L8omuQSoRUhzyNjlXh2v2gVLeXSgRoRBErpjTAsG7lc7pcIiIioyYIAuxsTCH/N0BkkEhElHfFxMTA2toaAODl5QVfX19UrFgRRYoUgSiKePfunVb7CQ0NhZOTU+YaLzEGsKmadNPB1hTBF/qkuurTQ91TXe7ouQUh4QoAwOHDx+C0IvX5ojLrQ+susDfJXG+tTs5Fkw13qa2I8AgUyuxjlxqTgoDbr8kWZecxFTVqNG7cFIgLyH5tAFBhKSDTvpfj0kn10W70SUwZUhXdfjiTqbkgI8LDM/98/Fdmj31Wjzugw2OfljJTAYu0p3CJiorBwEFDgIjrOVdDXmJWHCj1PSCzTnOVoKCQLD/3crugoCCth//MDcqXLw9vb2+DtV+yqDWa1ymMczfe62xqJzMTKXq0Kq2TfRFR+gZ/6YYSRawweekNPHgWhrh4NcxNpejaoiR+HVuLQ2ITAIaJBvND/8qoUaEA+k+9gNeB0akO4/E5QQAgAiWLWGHTr43RoFqhHK+TiIiIiIjIWPTt2xddu3bF4cOHUaJECVStWhWFCxfG27dvodFoULhwYa324+DggKCg1C/eTMu3829h+/FXSbdDIxRw9NySfL+2pnh6qDvKddyN0AhFin18uqxPr05YPF43cz1afzMeiMlcL7OssrWzzfRjl5qQCAXKf5G8F1+2HlNBipvXL8LFWTdz+bh2OoqIqLR7dX1qZM8K8Lr+Fv/c/gATuQQ/j66JyUtvaN2WrZ0dfLP4mObGY5+Wn/9+iFW7fZPNuZasfVsrnN6+GyWL6H8o29woVqFG5e4n0n0eN29QGjsu5NwxNWYODg6GLiHP+XFIVZy5pt1FPRmRyQR8+3VFWJjz1DWRvrSoVxQt6hXFDZ8g1P7qIK5t64RKZflaSf/hK7IBNa1TBA8PdMXKHY+wYsdDBLyPhiAk9DxMHBpAKhGg0YgQAZQqYo1velfEiO7lYW7GQ0dERERERPlL2bJlce/ePQAJQ57evXsXgwYNwtSpU6HRaPDLL79otR+JRJLpHikNqhfDPq+3iFOoASQMVZrYy/BzoRGKNO8DADNTKepXK6qzXjFKIXO98hs7FkRjx4JJt/fX9dB6W4mQ+ccuNQ4OIhztzZLNe5ydx9TGUo5KbkVhIpdmuzYAmZpvedXOR0n/97r2Dl6ZPJkuCEKWH9PMHPvsHHdAd8c+LZMG18Smwy8R/jE+xX0yqZAwJ3aVEjnWfl7Ut6Mr1u59ith/X7c+ZWtlgl/G1M1TvfPIsFrUK4r+nVyx7ah/tuYYlkoEuDhbYdqw6jqsjoi0ZW+TMLQ88wf6HJ8RBmZlIcfEQVUwfkBl/HP7A677BOHWoxD4BkTi6v0gtKxfBK0bFEOdyk6oV6UgJBLtv9AQERERERHlVffu3UONGjXg4uKCDRs25Hh7fTu44oeFV3WyL1EU0aeDq072lVtJJAK+7V0Rv665mxTQZpWpiQTDu5fXWZBIhlGkoCW2zWuCr388h+hYFeKVCWGEtaUcRQta4H+/tzBwhbnPovF18cg/HDcfhiSFtFKJACsLOX75pibqVS2YwR6IMmfppP
q4ei8Ifq8j0+xlHPA+CmXa7ULA+6gU90kkAkxNpNizqBmDDCIiI8NXZSMhkQjwqOkMj38nFr56LxD1+hzCzJE1ULcKP9wRERHldhYywFIGRKsyXlffLGUJ9RER5SZz5szRa3u21ibo08EVmw/5ZqvHhYlcgr4dXGFjlbk5DvOioV3dMGvV7WzvR6nUYGSPCjqoiAytbaPieHKwG1bteowTl1/j8p1ALJ9cH73bleG82FlgIpfi1F9tceHme/y+6QEOngvAkC/L4cfBVVGyaNpzKRJlla21Cc6vb4/mQ4/h8fPwpJHXPqVSifB//THFcrlMAjNTKU6uboPqFdhjlojI2PC0EREREZEeWMuBwy2BGCMMEy1kCfUREVH6vu1dERsPPMvWPtRqEd/2dtdRRbmbs6MFujQrgUPnA5J6oWWWXCZB09qFUaoYg5G8wsnBHNNHVMeonhXg1Hgr2nkUZ5CYDYIgoHGtwnAvYw+nxlvxy5hacLQ3M3RZlIcVLGCO69s7YcbKW1iw4T7kMkm6r/EyacJ0T83qFMa62R4oUpDzohIRGSOGiURERER6Yi1naEdElJtVdSuAuWNrYeqym1nqnSiXSTB3bC1ULueQA9XlTqunNcT1B0F4GxSTag+W9EgkAhxsTbHhF88cqo6IiLLCzFSGeePqYFCXcli1+zHW7XuKj9FKSCQCpBIkDYEqkwro2NgFY3pXRJPahTM1Xy0REekXw0QiIiIiIiIiLY0fUBnB4Qos3nw/xXxQoREKOHpuQWiEIsV2MqmAH/pXwvf9Kumr1FzB0d4MXuvawXPAEQSFxaXovZLWYyqXCbCzNoXX2nYo7GShz5KJiEhLbqXssGRiPfw+vi78X3/E3Sch8H/9ERMXX8eBP1qgTcNinO+WiCiX4DgRRESUaTIBsDWSaX5sTRLqISIiItIHQRAwb1xtLJ/SAJbmMpiZ/HcSVBSBkHAFxE8yRjMTKSzNZVg5tQHmjq3NXhepKF3MBrd2dkG9KgUhERLmlUz0+WMql0kgkQioUcERt3d1QYXSdoYpmoiItCaRCHB1sUHXlqXwZYuSAAD3MvYMEomIchH2TCQiokyTSYATrQBV5kaiyplahIR6iIiIiPRpWLfy6NPeFTtP+GPhxvt47B8OuVwCRbwGpiYSKJUaVChth/EDKqNn69IwN+PX7/QULGCO8+vbw8c3DCt2PMT6A8+gVouQSgXEKdQJIaIA9OngijG9K6KqWwFDl0xERERElG/w2wwREWWJTMI3ESIiIsrfLMxlGNilHAZ2KYdH/uF46BeGbj+cxda5TVCxjD17zWWBu6s9Vv7UEPPG1ca9p2F4+fYjvp58HnsXN4dHDWfYWhvJ8BhERERERPkIzwMTERERERERZVOF0nZwsjcDADSuVRiO//6fssba0gQNqxeCW0lbAEC9KgUZJBIRUbY5mgLBKac21lvbRES5FcNEIiIiIiIiIiIiIsrzjrQydAVERLkTZ5kiIiIiIiIiIiIiIiIiolSxZyIRERERERFRblbAPm+2ZUBFC1rkjrZ47ImIiIhIDxgmEhEREREREeVi8uWLDF1CnvNgX1dDl6AVHnsiIiIi0gcOc0pEREREREREREREREREqWKYSERERERERERERERERESpYphIRERERERERERERERERKlimEhEREREREREREREREREqWKYSERERERERERERERERESpYphIRERERERERERERERERKlimEhEREREREREREREREREqWKYSERERERERERERERERESpkhm6ACIiIiIiyj0+KoEYlaGrSMlCBljLDV0FERERERERUd7DMJHyLVGlAtQaQ5cBSCUQZPxT/JxKA6hEQ1cByARAxj7cREREABKCxA6ngGgjDBMtZcDhlgwUiYiIiIiIiHSNCQblS6JKBVX/EcDHj4YuBbC2hmzjagaKn1BpgNYngYh4Q1cC2JoAJ1oxUCQiIgISeiQaY5AIJNQVo2KYSERERE
RERKRrPD1O+ZNaYxxBIpBQhzH0kDQiKtE4gkQgoQ5j6CFJRERERERERERERGQI7ApFREREREREREREREQ5QjloFBAWrv+G7e0gX7dS/+1Stri03IF3ITF6b7dwAQsEnOqVrX3k5ec6w0QiIiIiIgOLiVXh5bsoxMSpIAhAxMd42FqbGLos0oH3wTGIjVNBBPDsZQRKFbWGjOOnExFRPtHDCwiK009bTmbArqb6aYuIMiksHFCrDdMu5TrvQmKgMsBQcToJMPPwc51hIlE+p9GI8H/9EY+fhwMAwiIVhi2IUvgYHQ/fV5EAgOdvPqJ6hQIwkUsNXBUREWWHKIq4fCcQGw48xZW7gXjkHw7NJ9+V7BpuhmtxG9Sp7ISv25dB6wZFIZUygMoNYmJV2H7MD/vPvsR1n2B8CIlNuq9cxz0wM5GiWnkHNK5VGEO7uqFMcRsDVktERJSzguKAj0pDV0FERETZxTCRKB+KjIrHlsO+2HHcHzcfhiAmTpV0X9tRJ1HY0RyeNZ0xpKsbmtUpAolEMGC1+dPdJyFYvesxTl15A/83HyH+e4L5q0nnIJcJqORqj85NS2BoVzcUKWhp2GKJiChT9p15gRkrb+H+szAAgAAgtWsufV9FwvdVJLYd9UPJIlaYNKgKhnUrz/dlIxUVo8Ts1bfx5+7HiIxWQhCQ9P79qbh4Na7eD4L3vSDMW3cPrRsUxbxxtVHVrYD+iyYiIiIiIiLSAsNEonxEqdTgt3V38dvau4iJU6d5kutdcCz2nH6BnSeeo6yLDVZPa4hmdYvov+B86PHzcAyf9Q8u3HoPqUSAWpPyAClVIm4/DsWdJ6GY/ecd9O/kikXj68LextQAFRMRkbaCw+IwZu4V7DjuD+GTPFCbwVtevovCyF8uY9eJ51g7ywOlilnnWJ2Ueeeuv8PAaRfw4m1U0rLUPmOldt/JK29w+upbTB9eHZMHV4Vczh6oRESkG0qlBgfPvcTWo34IeJfwHtVxzElULeeA4d3Lo3oFRwNXSERERLkFv6kS5RNPnoejZq/9mL7iFmLiEsZtTu8kl1qdcKff649oPvQYRs+5jHilAcZ7zidEUcTSLQ9Qpes+/HP3AwCkGiQm3yZhmNqNB5+hQuc9OO39Rh+lEhFRFjx7GYEaPfdjx3F/AOm/B6cmcX2v6+9Qrfs+/HP7g44rpKxatfMRmg05ipfvojJeORWimPC5a8bKW2g76jiiYjgWHBERZU9gSCxmrrqFws224esfz2HfmZe4+TAEAOB9Lwjr9j9Dra8OoFr3fdh86BlUKo2BKyYiIiJjxzCRKB+4/zQU9fsewkO/8Exvq/k30Fq16xE6f3saingGiromiiJ+XHId382/CqVKkxTkakujAYJC49B25AnsO/MiZ4okIqIse/76IzwHHsHrD9E62d/HGCVaDT+Ga/eDdLI/yrrVux5h1JzLADIfEKfmzNV36PjNScR+MgQ9ERFRZtx9EoKKX/wPv629i5AIBRTKlEGhUqWBRgPcfRKKobMuoc1IXsxCRERE6WOYSDr10C8s09uERyrwRkcn1yil98ExaDbkKCKjlBn2dEuPKAInLr/G4BkXdVgdAcAfW30wf/39bO1DIyb0ZOwx/iwu32FvFSLKPeKVasxceSvLJ7AOer3EGe+3Oq5Kd5RKDb4YdxofgmO1Cpu2z2uK7fOapruOKAKxCjU6jjmJ0AiFjiqlzLpy9wNGzbmc5rDxn9LmuCY6d+M9vl9wVQcVEhFRfnP7UTDq9zmEsAgFFPHa9TZUxGtw6dYHeA44jJhYXsxCBCR8hs/J9XOKRiNmuqexsdSem+XW5wtRZjFMJJ05evEVKn25F+v3P9V6m/BIBVqNOI6OY04l9YAj3RFFEcNn/4OwyPgMg0RLcxnqVHaCpXnaU6mKIrD1iB/+d+q5rkvNtx75h2PC79czXE/b4yOKQN8p5/klkIhyjf1nX2LW6ttoO/JEpgPFg14v0e2Hsxg157LRDs/165
o7uPskVKt5EQHAraQt3EraZrieKAKBoXEYO+9K9gqkLImNU6H/1AsAtOuRqO1xTbR692OjDsmJiPKjqBglek08ixdvPma4bsTHePSaeBav3+vvwul3QTFoMew4FPFqpPb1XxCAAnamyeZtTqRQavDQPxw9J5zN+UKJjNyhcwGo2Ws/PoTEarX+pVvvUaHLnqR5SQ1p0uLr6DvlvNbfjX7fdB/Nhx6FWm2c36VyA9+ASJTvvEfrUWNev49G1e77cOoKpyqi3IdhohGKjIrHI/9wAEBMLhriqHWDoviqbRkMnnFRq0AxMUh85B+B5VPqQyJJ5RMtZcv+sy9x8FyAVj0S3V3tcXVrJ7i72qe7niAAw2b/wyFQdGTorEvQaHEWUtvjo9aIeP7mI35dc0dHFRIZP1EUcedxCD5GKxEVo+TwgLlMj9alMWtUDVy6/SFTgWJikOjsaI6jK1tBJjO+j7X+ryPx8193crSNLYf94HWNoZO+zV9/D88CInUytGlqBCHhMwJP7OQ+H6OVyf6l7BFFMWmIaF58mr8olRq8eJtxaKdP5qZSmMqlaDL4aLqBYsTHeLQecRzvgmJhZ2Oit/qWbHmA6BhlqkEiADjYmiL4Qh842Jqmer8iXoNjl17j+gMOo045JzRCgacvIgxdRroaVCsIiURA08FHMwwUL916jzYjT8CzpjOKFbLUU4Vp69G6FI7981qrQPH3TfcxftE1DPqiHKRS4/sulVuULGKFupWd0GrE8QwDxdfvo9F0yFFYW8hR291RTxUS6U6ee6VQqVSYPHkyHBwcULx4cfz+++8oV66cocvSSsC7KAz46TycGm/FyF8S5l5pM/IEhs68iLeBxj8MqFQqwaY5nloFip8GiSdWt0aDaoX0WGn+sWjTA0h1HNKKYsKHv+1H/XS63/zo5sNg/HP7Q6bnSMyIKALLtz9koEJ5nlKpwR9bfVCm3S5U77Effq8+wjfgIwo22Ypx871zxXsnJZg+onqmAsVPg0Svte1QpriNnirNnFU7H+v8NT41f2x7mONt0H/ilWqs2Pko1Z4duiKKwPM3H3H04uuca4R0KuBdFNqPPoFq3fcBAKp134f2o0/g1XvD91LIrdbsfQKXljvQbMhRAECVrnuxbKsPxJxK8ckoKJUaTFp8DYWbbUPr4ccBAPX7HsLxS4Z/PZRKJVg32wONazqnGSgmBommJlIcWdEKVhZyvdSmiFdj9a7Hqc6PmBmCBFi61UdHVRH95/HzcLQbdQLFWm5Hr4leAIB2o07g2MVXBq4spQJ2Zjjzd1uYyCXpBoqJQWKP1qWwZqaHUXSSqF3JCaf+bJNhoJgYJK6b7YEBnY33vPn54EBMeXQv6fbsJw9wIvCdAStKSSaTYNOcxmjXqFi6gWJikOhgY4oTq1vDzib1Czso62pWdMSuhc0AACZyCa5s6QgbK/28D2dXbniuA3kwTJw0aRLu3r0Lf39/XLlyBb///jsqVapk6LIy9OR5OKp134etR/wQr9QgLl4NAIhXarDx4DNU7bYfz18b11V5qdEmUMztQeKLmGi0uOyVbFnZ04cNVE3aHvqFJQRVOXAFr0SSEFYZo+BTa/FkapOkn/vDSuPZ7PaGLitVf+5+rPOwN1FElBJ7OBwtfWbPnj0oV64cLC0t4enpiZ9++glNm2o3h5exiY1TodWI45jw+zU8f5P8RG1UjAordz5Cte778fh5uGEKpEzTNlDMLUFibJwKa/c+0UtbB71e5qrAIvzqQTz8rlqyn9s9rfFy5XBDl6aVfWdeIig0Lsd6JSYSBGDFDuP8vGVox48fR4MGDdCiRQsMGzYMs2bNMmg9bz5Eo07vgzh26TUi/+2RGBmtxLFLr1Gn90HOD58FM1fewg8LruJ1YAzCIuMBAB9C4zB1+U38wDlF8yyNRkT70SewfPtDhEQoEPrvsfcNiESviWex+4S/gStMP1A0VJAIAHtOPUe8DubgUqlE7Drhj5DwOB1UlXlhl/
+HlytH4OXKEXg6rQX8F/U2SB25SUBAAFq3bo1KlSqhW7duqFy5Mt68Ma7hE+89DUW9rxPeJ2Pj1EnvlU9fRqL7+LN6+8ycGRkFisYYJCbKKFDMLUFibpJRoMggUT9uPgxGdKwSjWs54/t+lbBu31NERnG0EF3KU2Hi27dvsWbNGmzcuBF2dnYoVqwYGjZsCHd3dygUCtSrVw9WVlbw9fU1dKnJiKKIdqNPIiJKCVUqV68rVSLCPirQcczJXHEVZnqBYm4PEnMTr+s5d/WCRgPcexaG8EhFjrWRVY4tB8Ntzjm4zTmHMj/uhcTUAsUGLDB0Wak6eflNjoS9ACCTCjn6HKDcZ8uWLRg7dizWr1+PqKgoDB48GL/99htq1Khh6NKyZPDMi7h850OaJ03ilRqERirQbPBRDsuci2QUKOaWIBEAvO8FIuxjvF7a0ogwih4b2rKr2wkVl9xJ+ikx6i9IzCzh3G2KoUvTyuELAXppRxSBM1ffIk7BkQY+9fz5c0yfPh3Hjx/H6dOnce/ePdjZ2aFnz5745ZdfDFLTxMXXERgamyJgFkXgQ0gsJi/NeH5s+s+HkFgs3eqTdLL5Ux+jlVh/4BleGtnwl6Qbp73f4NqDIMTEqVPcFxGlxOhfrxjFHMmpBYqGDBIBYP3+Z0kXpWeXTCrBAS/9vNd9zr5BV5QYtRpFes2AxNQcJUasMkgduYVarUbnzp0xadIkPHjwAM2bN8eHDx9QtGhRQ5eWTM8JZxGRxgn96FgVvv3NG8Fhhgmw05NWoGjMQWKitAJFBok5J61AkUGifk1eegPzx9VBO4/iWGOEFyrkdjJDF6BLZ86cQa1ateDk5JS0LCQkBO7u7pDL5Thw4AAmTZpkwApTd/bqO7wLikl3Dgi1WoTvq0hcvReEelUL6rG6rEkMFAFg8IyLAIAvmpVgkKhHN3yCIZMKqQbUunLrUQia1S2SY/vPDlEU8WJJPzh3/RHmLhUNXU4K4ZEKvMzByblVahHe9zjXBSWIi4vDuHHjsG7dOjRs2BAA0K9fPwwbNgzVq1cHAHh4eODRo0f45ptvMHPmTANWm7EXbz5ix1F/ZPTqplaLCIlQYOsRPwzvXl4vtVH2TR+R8JycsfIW2o48gWOrWsPKQp6rgkQAuPkwRL/tPQrBUL22qBuqqHD4L+yFEqP+gmnBEoYuRys3fIL11pZKLeLe0zDUqeyU8cr5xM6dOzF48GDY2CS8BsjlcnTr1g2dO3fGli1btN6PRqNBaGhotuvRaEQcPv8yzZ6qoggc8HqJwMAgozzZaIz+2u2L6Ni0LwT6GK3Esq138OPACnqsivRhwfrbaQYOABCvVGH/6cdoUss4zonM/7Yivl2gQKN+h2BjJYOdtQk2zq6DuJgIxMXot5aAd5HJbgsCUsyNmHg7rTkTQyMUEMWE1zX/V8EIDnbQSW2ixgGZ6cugjonEy5XD4TJyNaSWtplsS4PgYO1e2x0cHCCR5O4+FseOHUPp0qXRrFnCsH7u7u6oVq0aAgMD8eOPP+LZs2e4ePGiQWu8+TAYbwLT/4MQRRFr9j7Bj4Or6qkq7SUGis2HHkPTwUfx29ha6P3jOaMOEhMlBoothx9H3ynnUaNiAUxafD3XBYk7XwfgWljCd6uXMdGoa1/AwBWlLTFQ7Df1PFqNOI7Ncxrj+4VXGSTq0fvgWGhEEccuvc7xkWR0LTc81/NUmBgSEpIsSAwMDMSVK1ewePFiSCQSFCqU9fAqKioKPj45M278ss1vEKfI+AoypVKDPzZdhtCjcI7UkRNGdzZDcLAtBs+4iAmLriAmToOl35WEVPECV6++MFhdglKJ6tnY/m5EeLKhTt8rsncF1fXr1yDKdXvl4q0Hb1IEiZbmMri72qe6vnsZu2T/psbHNwzRsf9dHX/qwl1YQvfj28eLAoA62drHh//Ng9yhCAo06ZPteq5fvwYTQbfvQI9fpj7mflrHKCvHxzcgAlevchio1Li7u8PKysrQZe
jNxYsXoVAo0LFjx6RlkZGRiI+PTwoTt23bhjNnzuDFixcGqlJ7f+55DBMTCRTxGV+ZHq/UYPHmBwwTc5nPA8VvvqqAvlMu5JogEQDuPNFfmCgIwO1H+gu4dOnlskGwq9sFdnU7GboUrcTGqfDkRYRe27z9OIRh4idiYmJgbW0NAPDy8oKvry+KFi2a6fev0NDQZN8ds0wwAcovAGSWaa4SGRGBQs5FAZE95bXi3ANwapXm3WqNiEVL12HRxA36q4n0w3UaYJ72hSURkdHo3msgEHFNj0VlQGIBuP2KNzIrwHcuSm31M0wdbr8BJo5JNx1sTRF8IfXvwk8PdU91uaPnFoSEK6BQxGPOrwsxZ+w+nZRWdWsoZFapn4f4nEYZjxd/DELRfnNhUiDzvevCIyK0fm0PCgqCo6NjxisasTt37qBmzZpJt2/fvo1q1aqhYMGCWLduHbp06ZKp/SmVSgQE6LZX6smL76BUpX/OM1ahxql//NG9ifF+R18zpQK6TrqJLt+dRtsGjpjctwiePzf80MsZcTAH1k+rhK+m3sGO4/74eURZeFSSws9P/69VxUURWYleexZzwa8VqgBImEcus0RR1PvvO2NwMQSFRKDz2FNwcTbDsvGVEBL0GiG55Jr/l+8Szlm+fBkAxJsbpogsnobt0qwEbvgEo1MTFyzf/hAfUxnpIqN2s/t8yS3PdRcXF8gzmUfkqTCxXLlymD9/Pt69ewdBEDBw4EDEx8fDzc0t2/v28fFBvXr1dFBlKlxGALa1MlxNIwLbdx7A9t835EwdOUViAZT/DSERFkDoJQzrM8jQFcFMIkVk+65Z3r6qrR1ON/hvnrHszpnYuHETxGl0MyRJkjJTAYtSyRa5u9rj6tb0T9atm+2Z5n11vz6YbNzv3+YtwG8TvdJcP6sEEzPU2J162KaNjw/OI+zK/+A2VzdX4DVu3BhivI6H3DAvDbimHM4to2OUmeOjVKpy7nUrl/P29kbdunUNXYbeBAYGpvhSvWXLFpibm6N8+YSQrXjx4tlup1evXnjyJOeHkfBVd4YC2tf75EU4qlWrDsF4LxqlNDgLtXHpdl1cuv0eMkTBKnADunWaZ+iytOKn7gigOD6/En/7vKZwK5n2VfZupRLuu7WzS6r3P3kRga8mJX/vFUXgzgO/pIsDcpRNQUh/OKGTXX04tBTK0LcoPWGnTvYHAG3atAYiA3W2v88pRQuIYsrPstk9rkDqxxYAZv2yEKt/vZn5YlOrw80NO3bs0Mm+DKVv377o2rUrDh8+jBIlSqBq1az1YnBwcEBQUPbP6IiiCLcvjiEsMu0TFQ4O9nj84Q0EvhFpZcuRl5iy4j5iUxnqEgBMTSSYOnU4RnY3zqkMKOtGzLmB/51Je643O1sr7Fi1GTUr6KbHXHZFRinRY9IVACJuPgpHkfqzcGipB1ycLfRei8fgs3j8/L/hf0MjFHD0TN5b28HWFE8PdUe5jrsRGpFyypLEZWZmJpj40wSM6fWXTmrrdtUWUVqe7ni3fQbig18h8PAyAIBJgaIo3HOa1m3Z2dpq/dru4GAcz6PsKFCgAC5fvgwA8Pf3x/z587Fw4cIs7y8gIACurq66Ki+BXT2gSB9AapbuamdPn4Tr+i9127YuWbgCpcYBElMcPfsQR9f0A1SRGW9nDBxbAs7dAUGCafP3YNroFQD0P2R0dPtukBugN7BKpdL98zojMnug9HhAaoWX74C6Tb8GIm/rt4bsMHEC3OaiefNmQLyBEtBKqwEhc9GViVyCSYOqoPWI42hVvyimDq2GH5dkbroBlUqZ7edLbnmu+/r6okyZMplqI0+FiW3atEHLli1Rrlw5lC5dGj169MDLly9hYmKS7X27u7vD29tbB1WmtHjHW+zxCoU6g9dxmRTo1/cLDOsyIkfqyAkfY9QYu/gFfN/EIl4JCA6NMPX7nujQULur0nKKoFQC85cZtIZPnT9/Tuc9E0fO98ftZ8mHkvDxDUPdrw+mur
57GTusm+2JQdMvwMcvPNV1fHzDkt2eOnkiOjaaq5N6PxUvChiXxfOAyvAPeLlyOMpOOwKJSfofVrV1/vx5nfdMfPYqFn1np7xaJK1jlJXjY2Yqw7kcet3K7dzd3Q1dgl5VrFgRL168wJkzZ+Dp6YmDBw9i2rRpqFy5MqRSqaHLyzQx01M+C//+5LIxLgjmQlBCUgYBcsRAhqxfaELGJfrZDbzfMxflF1yFINPvvFLZo/8wiK9cyZUtWxb37t0DkDDk6d27d/H27VtMnToVz549Q6VKlbTqiSGRSHTWI2XIl+WxdKtPqvP4msglGNLVTTe9IPOJod1tMfvvh2mGiWamMnzzdXXYc6iwPGfqsFo4fTUwzaFO7axN0apRWaMI5iM+xqP32OOwtDDFxl88UartLjSqURhfjr+Cc2vboWRRa73WU76UA56+/AjNvy9DogiEhKcMDIGE0DCt+xIIqOhaSGevkYIEgJZhYtF+c5Gd2f4EHb625wa9e/fGtm3bULFiRdSvXx8FCxZEtWrVsrw/FxcX+Pr66q5AAB9CFWg+6iriFGmf9LQ0l2Lu+H5o13C8TtvWlRsPwzHo53vwrF4Ax68EoUyZkkCZ1dj6czU42mX/nHNOWnvgFX7b6IfxfUpjwWZ/WBaqgabt9mPhd+Uhk+o37JD98BOSXqT02a5MpvPndXreBcehz/S7sLOSYcaQsvhy0i1Ylv8Wm2ZWRdVyxj/CDpDQM7H5qKs4c+YsShQ2TM/E8t3OZ3rqru/6VMLGg88QGaXEnlMvMLx7eZQqao3nb7Sfa1smk+NxNp8vueW57uLikvk2Mr2FEZNIJNi4cSM2btwIAFi5cqXOThhbWVnlWC+WyXYh2HfhINQZPMkEQYIfRzRG2RKZGzPeUMIjFWg14jgCAlVYPrkhhs3+By0bFMWcjW9QunRpDOxiuPG5RUU8VBmvpje1a9eBYKrbDyD1a6hx3/9xshfe6FhVsp5rqfHxC89wnUQdWtRA3RyYwzNODeBI1rZ9t2M21DEReLF8cNIyE4eiKPXD1izXU7t2HZjpOG+pUlWF/r/4p5grNaNjlJnjU6lsgXzV+47SVr16dcyYMQM9e/YEAHTv3h0eHh4oUkS3c57qq7fL8FmXsP7AMyhV2n04K+hghjvnbuVwVaRriXMkyiWArbUcwWGFYFt1etIcisau+w9n8L/TL1LM05Baz7NPJfZcq9Fzf6baq1CuFG7vyfmrXT/EAu1PZW8f6ugI+C/siRKj/tT5PInHj59AoRz8vhsZFQ/bBptTLM+p4woAkyeOw7df56+LYLR179491KhRA0WKFMHWrVn/rJddM0ZUx7FLr+EbEIm4+P/OmJuZSlHWxQbTh+uh13AeYmkhx5qZjTB4xiWEf4xPdp+tlQn+mFSPQWIeVb2CI4Z3L4/Vux8j8pNAURAAextT/O/35kYTJLYecRymJlIcWdEqadqaPybVw8TF19Fk8FG9B4ojupfHkQuvoNHByUuZTECnJpk/wUj6Z2trmzQnokajgbOzM9zc3KBQKDB27FjcvXsXo0ePxooVK7Tan1wuz3QvlYyUKQO0bfQWRy++SnOaCisLE4z4qi7kcuObw/LSrfcY/Msl9GxTBpMHV0W5jnuwY0ELDJl5CQN/fgivte1QqICBhoHMwO+b7uO3jX5YN9sDHjWcsWCzPzb/2gQDp1/EzDWvsfnXxpDJ9PeYK7Pw+t3YsSAaO/533nG6W6VM70MQBJ0/r9Py+n00Bo49ikIFrHBidWuERCgA3EKzOkUw6JcHOLm6Te6YvsAkEsBVlCjhYrgpRoTzmd5k/vp7yW63HHY8C+0i28+XvPxcN75XaR168uRJsjCxS5cuOHnyJPr374/du3cbsLLkqroVQN0qTjBJ503T1ESCFvWK5Log8ZF/BE6sbo0q5RKGj5gxojq+alsGg2dcxPr9Tw1cZdaUtLBMNsQpADxr0cFA1aStZkXHTF/BkRkSiYCqbsY3LIjLiB
WouuEd3OacS/rJTpCYU8zNZOkOh5ZdMqmAOpVywQcU0pvp06cjODgYwcHBWLVqFfz89DQkYg4Y2s1N6yDRVC7B6F4Vc7gi0rXEINHZ0RyuLtYoVsgSs0bVwKXbH9B25AlExRj/vGOVXO31OuF75XKGHfUhMwKPrYQy+DXebp+Bh99VS/p5sWyIoUvLkI2VCYoXSntuvJxQuWzuObb6NmfOHHTr1s3QZcDSQo6rWzth+ojqKFIwYXjDIgUtMGNEdVzd2gmWueACCGPzZYtSOP13W7T3LA47axPYWZugZb0iOL6qNfp1Kmvo8igHzRtXB1vnNkHNigVga5Xwt9OjdSlc394JNSoavsfZ50Hipxc4SaUSrJvtgcY1ndFk8FG8yERPiOxqWb8oCjpkf2QeUxMJhnUtD3OzPNX3IF94+vQpSpcuDYlEAlNTU6xevRrPnz/XOkjMSRt/8UT5Unawskj+vDI1kcDB1hRn/m5rtEFim5En0KN1KayZ6QGJJCEgsLdJqNlELkHTwUfxIcT4Rk/5fdN9jF90Detme2BA5/86c1Qp54BTf7bBsX9eo++U81Bp+b2aMvb6fTSaDjkKBxtTnFjdGnafXPi04Ic6aNeoGFqNOK51BwEiY2R8r9Q69HmYuH//frx9+xb//PMPundPfcJpQ9m/pCXKlbCFeSrdn8zNpKjkao8d85umsqXx+TxIbFCtUNJ9UomATXM8c32gmBu0blA06YOOrkklAprUcuYXjGzq1MQlx46RSi2ivWf258CjvCk+Ph5PnjxJFib2798fCxYswKZNm9CiRQsDVpexWu5OqF3JEXItrqIUBAFDvjRcT3jKvE+DRK+17WBqkvDZaPqI6rkqUKzlrt8TnrWM4ASrtgp3m4wa/1Og4pI7yX5Kjllj6NK0UruSk17nYK1RIfcc2/zMwlyGyUOq4u7uLwAAd3d/gR8HV+Xn5WyoWdERh5e3Qtg/fRH2T1+c/Kst6uXAqChkfDo0dsGNHV3ge6QHAGD55AYoXczwQ8NFxSjTDBITfR4ovnofpZfaJBIB4/pWgplJ9obUUSo1GNWrgo6qIn0qX758jk3PlF3Wlia4tq0TVk9riOrlHWBvkzAy1ze9KuLpoW5wdzW+C6fSChITFbAzM9pAMa0gMVHtSk4MFHUsvSARAGRSCTbNacxAkXK9PB0mHj9+3OhCw7Q42Jri2rZOWDyhHlw/6T5corAVlk9ugH82dYSNlXGPww2kHyQmkkolDBT1oGghS3Ru4gKpVPdnu9QaEd98xZ4+2TW8W3mIOdRtpbizJVo3yM5sE5SXPX78GABQuXLlpGUbN26Ej48P/P39cfr0aUOVprX9S1qgUAGzNANFQUjoobt3cXMUKajfXkSUdZ8HiZ8PqZKbAsX6VQulO+qErjWto9thiyltzeoW1kuvU0EAalQoAFtr4/8OQESUH1iYydC1Rck0g8REiYHi6J4V4KDHIXlHdC+PMi42Wl1wlxoTuQQTBlYx3JB2lKeZyKX4ur0rbu36Ate3dwYAjOxZAQXsst+jVtf+uf0h3SAx0eeBYqARBIqLNz9IN0hM9HmgqFYzUMyqt4HpB4mJZLL/AsWWw4/h+gMGipT75OkwMbcxN5NhePfyeHakOy5tbA8A2LmgKQZ9US7pqnxjpk2QmIiBon5MHFglxZx82SWVCChXwgYdG3MOhewqVcwaPVqVgjQHeidOHlwVUj1Ppk25R5UqVaBUKmFmZnxf3LRVpKAlbuzogi+bl4BMKsDURApBQFJvoVrujji3rj3aerCHbm6RUZCYKLcEig62pujVpnSOtyMIQP2qBZOGlKec16e9K8xNc/6zuSgCo3qydwgRkbGQSARMGFhFq7mbpdKEYE6fwxxbWshx+q82KOxonuoFTaERCjh6bkFohCLFfaZyCbq3KoW5Y2vpo1Qio1bQwQyjelZIN0hMlBgoNqtT2CjmdS9ZxCrDIDFRYqBYydU+x0bNyg+sLeVo3aBouk
FiosRAcciXbnCyz73nYyj/4plmIyXLhSHAvaehePEmKsMgMdGngeKO4/46D70IqFe1IMZ+7a7Tobg0oojNvzbR6yTNedkfP9aHjZVcZ8dIKhXgUaMQhncvr5sdEhmxQgXMsWNBM7w5/RWWTqqHQgXMUdjJAnf3fIFr2zqjYfWM34vIOGgbJCbKLYGiPoIgUQRGczgyvbK1NsnxOdsEAbC1kuOrtmVytB0iIspbnB0tcHNnF1Quaw8TuSTZ90xRBELCFcl615vIJZAIwLdfu2PTnMYQ9DmON5GRKlvCFvO/r6N1wFbAzgzLpzSAhbnhhzb/onlJrYLERLUrOWHqsGr8288Ga0sTLJ/SIMMgMZFMJsGi8XVRsqh1DldGpHtMA0hnPGsVhv+xHloFiYkSA8X9S1rwKpgcMmdMLVQua59h7zcf3zDU/fogfHzD0l3v59E1Uaeyky5LzNcKFjDHpjmNM1xPm+MjlQiwtTLBhl88+fdE+UrBAuYY3r08nB3NUdDBjD20cplTV95kKkhM9Gmg2G7UCaOc66NulYLo0yHnwiABCb0S9dEDkpKbObI6bK1NcmzuRFEEFnxfxyhOShERUe7iaG+Gq1s7Ye/i5mhauzAEATAzlSZNgSKTCTA3lcLMVIpBXcrh/t4vMxWcEBERUf7Eb6ekU1np0i+VSmCeC3ti5hYW5jKc+rMtmg89hof+4Wn2AI2OVaU5AbAgJJzUmjiwMqYMrZqT5eZLHRq7YPOvjdFv6gUASPUYpXd8gIQeibZWJji7pi1KF+McF0SUe1Qt54AW9Ypg2eT6mZ6jZ/qI6hAEwNpCbrQ95pdOqo+Tl98gKCxOq3n2nryI0Gq/gpDQm2DDz54c1toAnB0tsGJKffSZfF6r9bU9rola1iuCIV3dslIaERERpFIJ2nu6oL2nC/xeRWLv6RcIeBeF5Tse4ZteFVHJ1R49WpeCtSXn5SUiIiLtMEwkygcKFjDHpY0dMGbuFWw+7AuJAGg7qqxUkjAX2R8/1sOgL8px6IMc8nV7VxRxskC/qefxNjBG6+OTqF5lJ2z6tTGDRCLKdQoWMMfRla2zvP204dV1WI3uOdiaYtfCZmg1/DiUKk2GgeJXk7wy3GfiRT5/z2iEciVtdVQpZVbvdmVw4eZ7/LXnSYbranNcgYTepsULW2LDL578zEVERDpRprgNJgysguCwOCzf8QhTh1aDI+fqIiIiokziZcxE+YSttQk2/doYh5a1hLurPQAkDXPyOYlEgCAkBIndWpXEowNdMfhLN57UymFN6xTBw/1d8V3fSrAwS7jWI61jJPt3eZGCFlg+pT4ubOjAIJGIyEg1rlUYB5a2hKlcmu1hMRO3/2t6Q/TtmLPz9lH6BEHAyqkNMLBLwnHQxacklyJW8FrbDkUKWupgb0RERERERES6wZ6JRPlMh8YuaO9ZHFfvBWHnCX9cfxCEO09CER2rApDQg6JR9UJoWK0Q+nVyhbOjhYErzl+sLU2waHxdzBpVA9uP+uHcjfe4ei8Qz99GQaMRIZdJULGMHepWdkJ7j+Jo71mcw9sREeUCbRoVw8WN7dF3ynk8fp65IS8/VaiAOdbN8kBbj+I6rI6ySiqVYM1MD1QsbY+py25AqdQgk4MLJPU0be9ZHH/PaITCTvzsRURERERERMaFYSJRPiQIAupVLYh6VQsmLfO++wH1+x7G0RWtULdKwXS2Jn2wspBjaLfyGNqtPADg6r1A1OtzCBc3tOfxISLKpWq5O+H2ri6Ytfo2lmz2QVy8OilISkvi/VKJgP6dy2LhD3Vgb2Oqv6IpQxKJgPEDKqND4+IY+fM/OHfjvVbbJR5bJ3szzB9XB/06uXIUCCIiynOc9Diiqj7bIiIiym8YJhIRAPDkFRERkR6Ymcowd2xtTBxYBRsOPMP6/U/xwDcszUCxdFFr9OngiqFd3VC0EIe+NGblS9nBa1173HkcglW7HmHv6RcIDlekuq6JXIKG1QphePfy+KJ5CZ
jIpXquloiISD92NTV0BURERKQLDBOJiIiIiPTM3sYU4/pWwri+lRAVo8SdxyHwfRWJactvQhAErJvtgRoVHOFgy16IuU218gXw5/RGWD2tIV5/iMatRyEYM/cKRIj4bWxtuJexh3sZe8jlHKaciIiIiPIJezsgLNww7VKuU7iABd6FxBik3WzLw891holERERERAZkZSFHoxrOaFTDGUu3+gAAWtQrauCqKLsEQUBxZysUd7bCzFW3AABft3c1cFVERERERPonX7fS0CVQLhJwqpehS8iyvPxc5+WwlD9JJYC1taGrSGBtnVAPJZEJgK2JoatIYGuSUA8RERERERERERERUX7EnomULwkyGWQbVwNqjaFLAaQSCDL+KX5KJgFOtAJUacwfpddahIR6iIiIiIiIiIiIiIjyIyYYlG8JMhn/AoyYTMLDQ0RERERERERERERkaOxvQ0REREREWrGQAZZGerWPpSyhPiIiIiIiIiLSLX7dJiIiIiIirVjLgcMtgRiVoStJyUKWUB8RERERERER6RbDRCIiIiIi0pq1nKEdERERERERUX7CYU6JiIiIiIiIiIiIiIiIKFUME4mIiIiIiIiIiIiIiIgoVQwTiYiIiIiIiIiIiIiIiChVDBOJiIiIiIiIiIiIiIiIKFUME4mIiIiIiIiIiIiIiIgoVTJDF0BEREREREREWaf85gcgJEw/jRWwh3z5Iv20RURERERERoFhIhEREREREVFuFhIGREcbugoiIiIiIsqjOMwpEREREREREREREREREaWKPROJiIiIiIiIiMjgNBoRp73f4IFvGN4GxgAA1u57gu6tSqF0MRsDV0dERET6oBw0CggL13/D9naQr1up/3ZzCYaJRERERERERERkMKERCqzf/xSLNz9AYGgcZFIBKrUIAJi16jYmL72BprUL4/t+ldCmYTFIpRxoi4iIKM8KCwfUasO0S2limEhERERERERERAax++Rz9Jl8DhKJgDhFwolDpeq/+2P/XeZ1/R0u3f6AUkWtcfqvtijmbGmIcomIiIjyJV7KRUREREREREREerdy50N8NckL8UpNUpCYFlEE4pUa+L/+iGo99uHZywg9VUlEREREDBOJiIiIiIiIiEiv9p15gW/nekP973CmnxMEoICdKQQh+XKlSoPwj/FoPvQYgkJj9VBpAlEUseZ/T6CIz3jYtcysS0REZGhRMUoc8HqJ6StuYuxv3gCAiYuvYfHmB7h85wNEMfX3aspfOMwpERERERERERHpTZxChQE/XYBak/bJSQdbUwRf6ANHzy0ICVcku0+tFvEhJBYzVt7Cyp8a5nS5AICP0Uos3eqDfWdfYO/iFjA1kaa6niiKGDvPG7tOPEfzukVQqpi1XuojIiLKrLeB0fh1zV2s2/8UCoUacrkEingNAGD/mZc4cv4VFEoNShaxwg/9K2NE9/KQydg/Lb/ikSciIiIiIiLKBqVSg7/3PEaDfocAAA37HcLfex5DqdQYuDIi47Tn1AvEZ/PvI16pwfoDzxAVo9RRVemzsTLB2TVtEfAuGl+OO51qr8NPg0Svte2MIkiMjIrH3DV3UbPnfgBAq+HHsP/sC/YyISLK57Yc9oVbpz34a88TxMapoRGRFCQCSLj973v1i7dR+G6+N2r1OoCHfmGGKpkMjGEiERERERER5RrHjx9HgwYN0KJFCwwbNgyzZs0yaD2KeDU8Bx7GuAVX8exlJADg6ctIjFtwFY0HHUG8ksMcEn1u0cb7iNPREKBbj/jpZD/acHIwTzNQ/DxIrFDaTm91pSU4LA7Vuu/DrNW3EPA+GgBw+3Eo+k+9gH5TzzNQJCLKp6avuIl+U88jKkYFpUq7i3vUahE+fmGo/dVBXL7zIYcrJGPEMJGIiIiIiIhyhefPn2P69Ok4fvw4Tp8+jXv37qFGjRoYOnQoevTogcePH+u9pjl/38HdJ6GIjlUlWx4dq8KdxyGYu+au3mvKyPngQMx/9ijpdp
erFw1YDeU3Pr5huPdMN70a4hRq/LHVRyf70lZageKUP24aVZAIAINnXETA++hkPU0AIDJaiYNeATjg9dJAlREZL41GxNMXEZna5snz8JwpJpPCIxX4EKL9XLLxSjX8X0fmYEX5Q2aP/9MXEQa9mGPFjof45a87yEoJKrWIOIUKLYcdz/TfCeV+DBOJiIiI9ESMioYYHGJ8P1HRhn5oiIi0snPnTgwePBg2NjYAALlcjho1auDvv//G1KlTcfLkSb3WI4oi/tz9GLGK1HtYxSrUWLnzEXv/EH3C71UkzEx0dzrq5bsone1LW58GigOmXQAAHDj30qiCxNAIBS7cfA+1OvXXn8hoJeb8ZXwXOxAZ2rajfqjZ6wAu3Xqv1fq/b7qPyl334eXbjzlcWca+X3gVTQcf1SpQjFeq0WuiFzqOOQW1msOyZ9WT5+Fw/3IvVu18lPHKAM54v0W1Hvuw9/SLnC0sDU+eh2Pc/KvpBokymYDSxawhkwmp3q8RE54/fSaf43Mnn5EZugAiIiKi/ECMioZq8GggVvsrRfXG3ByytSsgWFkauhIionTFxMTA2jphDjIvLy/4+vqiaNGiEEURf/31F3766Set9qPRaBAaGprteqJjVanOm/YpRbwar98GwtxUmu320mItajJ9pfDB92/wMjbhYpK3cdq/N2lEDYKDgzPZGtF/Xr9L/rcnCICDrWmK9RKXpXZfaIQi6URoTJwKQUFBEITUT3rmFAHArnl10GDAWQDA31NrwMlGZTR/H3efhkOSwUPi9yrCaOrVNwcHB0gk7GOhT0qlBscuvcKNhwnPuYiP8QauKHW925XB2Wtv0XbUSRxb2QqNajinue7vm+5j/KJrWDfbAyWKGH6O1AXf10HzocfQdPBReK1th0IFzFNdLzFIPH/jPc6uaQuplH8LWeVWyg7LfqyP0b9eBgCM7FkhzXXPeL9Fx29PYkCnsviyRUk9VZjc6F+vZHiRm4uzFfyO9kCZdrvg/zr1kFylFnHrUQg2HvTFoC/K5USpZIQYJhIRERHpQ1yccQaJQEJdcXEAw0QiMnJ9+/ZF165dcfjwYZQoUQJVq1YFAPz4448YOHAgChcurNV+QkND4eTkpIOKBKDiUkBqkeYaERGRcClWGEDO9U780LoL7E1MMrVNJ+eimFg24YRXZoY5jQiPQCGdPHaUb9nUBIoNAqT/hYXBF/qkufrTQ91TLHP03IKQcAUAQFQrUbBgwZypNSOFvwLs6gAya3QZ9jfwchkgqjLeTh/kjoDrT4DMKs1VwkLe6ei1MPcJCgqCo6OjocvIN7Ye9sU3c69AI4qIjUv4G2nU/zCGfOmGxRPrGlWYJZEIWDPTAwDSDRQ/DRIHdDaOMKWAnRnO/N023UDx8yCxqlsBA1WbdyQGiOkFip8GiSumNtD7BTAA8OxlBM5cfauz/ak1In7fdB8Du5Q1yO/zufPBgTgR9B6/VqgCAJj95AHq2hdA64LafT+gjDFMJCIiIiIiolyhbNmyuHfvHoCEIU/v3r2LtWvX4syZM4iIiMDbt2/RqVOnDPfj4OCAoKAgndQ0cMY1HL74Ls37OzYrg3XnA3XSVlqsvxkPxOjnghVbO1udPXaUP12+G4xuEy5DqUoI2EMjFHD03JJiPQdbUzw91B3lOu5GaIQi2X2f3nYqYIWHen5OiqKIKcvv48C5t9i7sAGkUgGDZlqjWItm2DCrNkxNcq4ncmY0GHAGzwJSHwbWRC5g/KBGGNcnf/49Ozg4GLoEnQkICMDQoUPx5s0blC9fHk+ePMHx48dRtGhRQ5cGANh+1BfDf/4nxdzCsQo11u57ioioeGyc09hA1aUutUCxsNN/Fw4ZY5CYKLVAMVG8UsMgMYekFyhevvMBw3/+x6BBIgBsPeIHE7kE8UrdDU3q4xeOh37hcHe119k+yXgxTCSdUqk0kMkydzWRKIpQq8VMb0dERERERPnXvXv3UKNGDXTr1g2DBw/O1LYSiURnPV
KWTm4E7/sHERyuSHGfo50plk5qBEfHnB36TClk7rtUY8eCaOz4X0+u/XU9tN5WIujusaP8qa2nA6wtbyQFgqKIpF6GqQmNUKR5v6mJBH07lNXrc1IURYyd541DF97j/PoOSXMkXtjgjGZDjmHYnDvYu7iFUQSK62Y3RscxpxD+2XCSUomAwo6WmDSkNmysMtermYyLWq1G586dsWjRIjRr1gyrVq3ChQsXjCZIVKk0GDPXO0WQmCgmToX/nX6ByUOqonwpO/0Wl4HPA8U1MxsBANbue4rf1t41yiAx0eeB4vrZCb/Hd/O8ceNhMIPEHPJ5oNiqQcLf4bDZ/2BgZ8MGiQDwz50POg0SgYT5Fa/7BDFMzCeY3pDOXH8QhPKd9+DBM+3nHhFFEd/N80bvH89Bo8m5YX+IiIiIiChvmTNnDrp162boMlC6mA0ubeyAelWcYG9tggJ2prC3NkG9Kk64tLEDShUz/BxKRMZELpfgm14VYKaDsC1eqcGoXmnPT6VriUHirhPP4bW2XVKQCABODuY4u6YtAt5F48txpzOcT1UfGtVwxsE/WsKtpC0cbE1RwM4UdtYmaNuoGK5v78QgMQ84duwYSpcujWbNmgEA3N3dUa1aNRw5cgRDhgzB119/jY0bNxqsvtPeb6FSpx9eKOLVWL3rsZ4qypzEQLF7q5IYPCNhSHBjDxITJQaKJnIJev94DgBw7UEQg8QcNrJnBayY0gCjf72Mn/+8DQD4snlJgweJAHD3SfbnC/+cAAH3nup+v1m183UAWlz2QovLXtj86oWhy8lz2DORdKZYIUvIZRI0HXIMXmvaolLZ9IeMSAwS/9j2EPPH1YYko5nBiYiIiIiIjJBbKTtc2dIJbwOj8TYoBkWcLFCkIOehJUrLsG7l8cvfd7O1D0EAmtYujDLFbXRUVfrSCxITJQaKzYYcw5fjThtFD0WPms54fLAb/F9HIjRCgVJFrVHAzsygNZHu3LlzBzVr1ky6ffv2bVSrVg3t27dH+/btAQCdO3dG//79M9yXUqlEQECATuu7ef9thsG6Si3izqN38PPz02nbujS5bxFcvv0WT16q0KtVEXhUkhp1vZ9aNdENLUdfAwD8NLA0rGTh8PMLN2xReVyrWibo164oNh70BQAMal8A/v7+Bq4KiI5N3ktdJhPg4pxyXl0XZ8tk/34u4H0UVP8OVa7WaPDuQ6jO/x6KiyKykhT0LOaSbM7EzBJFMdf8bWeXi4sL5HJ5prZhmGhEVCoNDp4LwKJN93H7UQgAYMTP/2D26Jpo51HMqCYjTk1hJwucXdMOzYYczTBQ/DxInDCwip6rJQCIiVVhx3F//L7pAfxeRQIAJi6+jl++qYlGNQoZ/IoZAh4/D8cfW32w7WjCG1nnsacw9mt3DPnSDU4O5hlsTURERET6VKSgJUNEIi0ULWSJ4d3csG7fUyiyOOSaVCLglzE1M15RR6JilHjyIiLNIDFRYqDYZ8p5vA2MMZreyaWL2aB0MUNXQbpWoEABXL6cMKSiv78/5s+fj4ULFybdP3fuXAwdOlSrfQUEBMDV1VW3BdrUAIoNBKTpnL8QNTh/+gBcN36p27Z1ybEl4NwdECTYfuw5tq/8FojxNXRVGROkQPHhgFV5QGqB8QvOY/zIeYAq0tCV5W2W5YGSY4DYt4BlKbToNgUIPGzoqoAKSwHZf59TXZyt4He0R5qre61rn+ryMu12wf/1RwCARqXCjm1bsGPRdp2WGt2+G+QS/WchKpVK96+DRsrX1xdlypTJ1DZ5MkxUqVSYNm0a/vzzT1haWmLcuHFYvXo1nj59aujS0hQdo0SbkSdw61EwYuL+u2LnzpNQ9JxwFo2qF8LBZS1hZmrch0ybQJFBonF4GxgNzwFH8C44Jtlz7uKt92g1/DgGfVEOy6fUZ6BoQGv2PsHoOZchioBSlfAF+0NIHH7+8w7mrr2HM3+3Re1KTgaukoiIiIiIKPP++LE+ngVE4uKt91DEpw
wUQyMUcPTckjS34qdkUgEbfvFE/aqF9FEqAMDa0gQnVrfRal0nB3Ot1yXKjt69e2Pbtm2oWLEi6tevj4IFC6JatWoAgJ9//hlly5ZFhw4dtNqXi4sLfH11G5DFKtSoO+BysvNOn7O0kGP9b9+iRvlpOm1bV9YeeIXfNvphzshyqF3RDqv+9xInrkzF2mmVUauinaHLS1O8UoPvFj3ENZ9wrJ9RBaIITFnxBMoyq7H152pwtOMwxznh8r0wDJtzH182c8ZPg1rgz72v8MfOLpg5fQK+bmPYuUy7/3gLt5/8FyQHvI9CmXa7Uqzn4mwJr3Xt0XTQEQS8j05xf8D7qKT/y03kmDplBPq0/Vmntcp++AnQ6HZ+R63alcl0/jporFxcXDK9jXEnU1k0adIkPHr0CP7+/oiKikK9evVQp04dQ5eVrq9+PIcbPsGIS6Xrf6xCjYu3P2DgtIvYPr+pAarLnNQCxUQMEo2DWq1BsyHH8PJdFFTq5HNViiIQF6/G+v1PUbSQBaYMqWaYIvO5U1fe4Js5l1OdGDlWoQYUajQfegxPDnZDYScLA1RIRERERESUdTKZBIeXt8LXP57DofMBKb77iCIQEq74bBsBAoAtc5ugR+vSeqyWyDjZ2tri4sWEufw0Gg2cnZ3h5uaGv//+Gzt27ICHhwceP36MmTNnZrgvuVye6V4q2pg4KAoL1t9HdKwqxX0mcgmqliuAbu2qG+XF7L9vuo/fNvolmyOxWaNKGDLzIobM8cGxla3QqIazgatMKV6pRq+JXrjx+CPOre+QNEeiZ70KaD70GAb+/BBea9uhUAGOeKVLZ7zfYvjcixjYpVzSHIlLppSDW5lHGP3rZTg5OmFkT/3N8/u5ZnWD4eP/MOn9VqUSk3oYpibgfXS69wOAUiWibeMKKFNGt50dlFl4PWjsWBCNHQsm3Z7uVinT+xAEIUdeB/MK4x43Mwvevn2LNWvWYOPGjbCzs0OxYsXQsGFDuLu74/z586hXrx4aNWqEcePGGbrUJE+eh+P4pVepBomJ4hRq7Dn1HAHvotJcx5gkBoqOdqZoOuRY0hCaizf7MEg0AkcuvMKr9ymDxE/FKtT4bc09o5g0Pj/6adnNDIf7Uao0WLHjkZ4qIiIiIiIi0i1TEyl2LWyGtbM8UNXNARIJIJelPFVlZiqFXCZBz9alcWNHFwaJRKl4+vQpSpcuDYlEgqFDh8LHxwerV6/WKkjMSdOHV8fQrm6wMJfBRJ7w9y0IgLWlHHUqOeHYqtZGGySOX3QtWZAIABKJgDUzPdC9VUm0HXUSl269N2CVKSUGiedvvMfZNW2TgkQAKGBnhjN/t4WJXIKmg4/iQ0isASvNW854v0XHb09iQKeySUFiopE9K2DFlAYY/etlrNppuPN4PVqXTrXTQnYULWiBmhULZLwi5Ql5Lkw8c+YMatWqBSen/9LwkJAQuLu7w9XVFefPn8elS5cQGBiI+/fvG7DS/6zb/zThXTQDUqmAjQef6aEi3fg0UBz96xUAwK6TzxkkGoEVOx6lO8REIo0o4vB53U6+TRl78eYjbj0KznC9OIUaq3czTCSi3O/mw4xf89Ly+n00AvklmIiIKNeSSAT06eCKO7u/wPVtndG3gyuqujmgRGFLuLrYoG5lJ/zyTU28O/sVtsxtgirlHDLeKVE+VL58eXh7exu6jBQEQcDiifXw+EBXTBpUBT1alcLonhVw5u+2uLixA2ysjG+4zbSCxETGGiimFyQmYqCoe+kFiYmMIVCsXckRlcvaQyLRTXgvl0kwpndFSKV5LmKiNOS5YU5DQkKSBYmBgYG4cuUKFi9ejKJF/xuXWCaTQSqVar3fqKgo+Pj46LTWRNfvBkCpxVUBingNrt3xx9Wr8TlSR05ZMLIQek5PGGu4ZW0beFaMxdWrVw1cVf72yD9Iq/VUKjX+ufYQxWwCc7gi+tSdp9GQy4R0e44mCglXwNvb2yiv4jNm7u7usLKyMnQZRATggN
dLdBl7GjNGVMfMUTUyte2r91FoOvgoLM3luLmjM2Sp9GQgIiKi3KNGRUesne1h6DKIKAcUd7bC7NE1DV1Ghnad8E83SEyUGCgCQNtRJ/Fg7xcoUcRaX2Wmauxv3ukGiYkSA8XmQ4+h7cgTuL69EwOhLHr6IiLDIDFR4hCno3+9jGKFLNGxSebnrMsOQRCwfHJ9NBl0NNv7kkgEODua45teFXVQGeUWeS5MLFeuHObPn493795BEAQMHDgQ8fHxcHNzS1rn9u3bCA4ORsWK2j/ZfXx8UK9evZwoGSjaH7BvlHHvRFGDwwf24PDqHTlTR04p3AtwbAEAOOX9Dqe2fgco3hq2pvzOdRpgXiLD1RRxsVi8aB4W/3RRD0VREjMXoMyPgESLq/M0KtSvXz/na8pjvL29UbduXUOXQUQAmtctAs+azpi1+jYAaB0oJgaJz99EYdtvTRgkEhERERFRtnXwdMHh5a3QzqN4husmBoodPF3gUtjwFyx/368SRvQon26QmCgxULz3NJRBYjaULWGDLb82wRfNS2h1of/InhVQ3NkSLeoV0UN1KXnWKowxX1fEyh2PtOrEkCZRxJZfG8PSQq674sjoCaIoZuNZY3w0Gg0GDhyIvXv3onTp0ujRowe2bt2Khw8fAkjoqdi1a1fs2bMHhQoV0nq/Odkz8dLdSEz96xUU8ekfChO5gMXflkDN8oZ/c9KGKIpYvPM9dp0JwbDOBVHOxQx/7HqPyGg1VowvhTJFzQxdYr61+VgQ1hwKhEKZ/nNOKgH2zXVDQQe+MeiTSi2i/fjHiIhKfyhaQQA8qlpj/uiMg2FKjj0T847nrz9i1a5H2HToGQJD4yAIQJdmJTC2tzs8ajobVa9dMTgEqkGjDF1GmmTrVkJwNMxcB1ExSrQffRIXbr5P0UOxeo99AIDbu75IWvZ5kNizTe6eN+nx83Cs2PEQ24/5IzRCAUEQ0LttaYzp7Y46lXU7kT3pj0qlwZGLr7Bk8wNcuPkeIgCXwlYY2aM8BnUpBycHc0OXSHmI8qtBQHS0fhqztIR8+zr9tEVEREREOqVSadD9h7M4fCEg1UBRJhPg4myFgPdRUKlS3i8IwMZfPNG3Y9kcq1H5RW9AnfEUXTonlUK+b5v+280l8lyY+LmVK1fCy8sLu3fvhkKhQPv27TFv3jzUrGk83erVag2Kt9yB9yGxSOtoCAJQsogV/I72MKoTo2kRRRHfzfPGH9seJpsj8V1QDJoNOYrgcAW81rRFpbKc68AQgsPiUKzFdijSGV5XLpegRd0iOLqytR4ro0Rz/rqDOX/fQawi7TdOUxMJTv/VFo1qOOuxMsrt9uzZgylTpuDNmzeoWbMmPD098c8//8DLy8vQpWXa0i0PMG7BVZjIJMlez2RSARqNiHaexbFrQTOYmxnHQAy6CBNNDu1CfMceSbdfxESj5WUvPGvRIbvlGTRMBNIOFD8PE/NSkCiKImatuo1Zq2/DRC5B/CfP48Thrr9uXwbrZnlCLufVwrnJh5BYtB5+HD5+YdBoRGg++YxvapJwLHcvbK73oY0o71J+8wMQEqafxgrYQ758kX7aIiIiIiKdU6k0mPLHDSzceB+CAGgynoENcpkEVhYybJrTGB0a5+z3GIaJxsk4zq7loCdPnsDd3R0AsGnTJty/fx8//PADAGDu3LlGMTygVCrBoeWt4DngCGIVqhSBokQCWJjJcPCPlrk6SASAwk4WOLumHZoNOYqmQ44xUDQQR3szbJvXFL0neaUaKMplEhRyMMeGnz0NUB0BwISBlXHK+w2u3g9CXCqBopmJFOMHVGKQSJmyZcsWTJo0Cbt27UKDBg2wadMmDB48GGPHjjV0aZn25+7H+H7hNYgiUryOJV5Zd/LyG3T74QwOLWulswnGKedYWchxZEUrtB99Ms0hT/NSkAgAc9fexZy/7wBAsiARAJT/XgG66/hzCIKATXMa67s8yqKoGCWaDjoC31cfU73SVxGfcKy/+O
40Tv7ZBs3qGmaII8pbGO4RERERkbZkMgnmf18HXzQvgR8WXMOVe4EwlUsQr9IkyyakEiHpfEq/jq747bvacLTnaIP5VZ7vmdimTRsMHjwY3bt3N3QpGfLxDcMPC6/izNW3MDdNyHljFSq0aVgMi8bXRbmStgauMGPpBYmfYg9F43D+xjtM+P0a7jwOhZmpFKIoIl6pwVdtS2PB93U4/JaBxSvVmL36NpZtfwiVSoRUIkCp0qCQozl+Hl0jR4cToLwnLi4OxYsXx7p169CxY0cACa/ZZmZmWLt2LapXr47hw4dDEISEYaoXL0bt2rUNXHXqomKUKNRkK2LiMr5KTSoRcHRlK7RqUEwPlaWPPRO183kPxQPnXgIADv7RMk8FiYEhsSjSfDvUGu0+il/f3gm13DnkaW6wePMD/LjkeoqA+P/t3X9s3HUdx/HX3fW6rptbB+tgA3T80CnZj0wyQiSb+IshG8y4TYhkEcM0zolRI9E/YBM1osZ/NBkYowJGxogGlAxCQGQKDiXKNCBsGQUEVH4MHftl13ZX/1g2nX43rlvb67rHI+kfvXyv9276R7+5530+nyJTJo/Nkz9feFR8YBAAABie/vzUP3PHL5/NI4+9kj93bE1Xdy1jR1cza2p7zpk+IR+ae2rGjRkxaPNYmTg0DfuYeDR64cWdeWzzP1IqlTJjynGZ2N7a6JHqUm9I3EdQHDo2PbM1Tz2/LdWmcs6e2p62QfznwOvr6t6T9X98Odt3dmdi+8icdeZ4bzrSZ/fdd18WLlyYbdu27X/stddeS1tbWx5//PG0t7enubk5bW1teeKJJ7J06dKsX7++gRMf3Pd+ujFXXvdwXW/UV8qlzD33pNy1qvFbNvdXTJw+pm3/9121Wv61p2dYxcTkwKB4wvEtOb5tRHZ31YZNSEyS677/p1z73Uf3r1I7lGpTOR++8LTc9FWrE4e6Wq03p15wW557sb6z60ql5MGb5ufcmfWf5Q4AADCciYlDk5hIv1n7q+dy0ZX31RUS99kXFEc0V/LobR+wDR3AALnllluyYsWKdHR07H9s1apVueqqq7J9+/ZUKpX9j3d0dOTyyy/Pgw8+2KfXuPTSS7Np06Z+m/lgOvbMy/ac2odn1DKjfH0a3eAnVJqydtKRRbCBXJk4/29P5+U9PUf8c/rLnt5qnq7Nz86clGRvcHtT6d6MKz/V2MH6yaY9i/Ov1B+QKunMtMr3B3Ai+sPu3rF5srak7utL6cmE0h8zsfzbAZzqP6ZMmZI1a9YMymsBAAAcDjFxaBr2ZyYyeObNOSW/vnFeZp9V/xluE9tb88APLsy2Hd1CIsAAOvPMM/Pss8/m/vvvz5w5c3LnnXfmmmuuybRp0w4IiT09PVm+fHmuvvrqBk57aLX0dfV0Ob0pp5Q6ThRnyKiUunNK6YFs7P1wknLG5qlhExKTpJZqH69323402NPHv2tvSn1+DgAAAAw270rQb0qlUp9C4j4njm/NieMHYCAA9ps5c2ZWrlyZSy65JEmyePHizJ49O5MmTdp/Ta1Wy5IlS7JgwYLMndv3bUEHa7XLws/+Inf88i+pd2+FluZK/vT7PwzsUHXoj21OB9I999wzZLY5TZLnX9yRd11xd0ovbM9bJ4/Nk8+ckQUf+0G+9Mm3N3q0fjH7I2vz0IaX6r5+wvGjs+GBDQM4Ef3hub/vyJvm3lb39SOaq/nUFZdn5bLvDOBUAAAAcGTKjR4AABgcK1asyJYtW7Jly5bccMMN6ejoyMyZM5PsPfd26dKlmTFjRpYtW9bgSQ/t0vefnkqlvtXs1aZyFp8/eWAHot/tC4nP/HVHbv3Gu/LIrQsy56wTc+13N+RL1z/a6PH6xWXzTk9ztb5b8RHVci6bd8YAT0R/eOPE0Zn+lnF1X7+7q5ZF7+vLts0AAADD3Li2pFIZ/K9xbY3+zYc0ZyYCwDGoq6sro0aNyvr16zNr1qysXbs2ixYtyjnnnJMkOe6443L77bc3eM
pi3d21THzP6ry6dXdd1z+y+uLMmto+wFO9vqG+MrHph9cPiZWJ/x0SV3/9vFxywd5zJnfs6s685ffm1394MSs/MfOoX6G4Y1d3TjhvdXZ11ndO5ea1i3PGG8cM8FT0h5t/vjkfu/ahdPccemvlSrmUWVPH5+EfXzxIkwEAAMDhsTIRAI5BGzduTJJMmzYtSTJ//vx0dnZm3bp1Wbdu3ZANiUlSrZaz+uvnpfI6Z+02NZXyuSVTh0RIpD4HC4lJMrq1mrtWnT9sViiObq3mxq/MTvl17sYrlVK+9umzhMSjyGXzTs+7z554yJWnlXIpI1sq+eGX5wziZAAAAHB4xEQAOAZNnz493d3daWlpafQoh+X8d5ycu1adn7Gjq6k2HXg7M6K5nHK5lC98dHq+9fmzGzQhfXWokLjPcAuKH5p7WtZ8891pbWn6v/A0ormcpqZSvvGZWfniFTMaNCGHo6mpnJ99+71Z+N7JSfZuU7tPuVxKpVzKpAmtWf+ji/K209oaMyQAAAD0gW1OAYCjVufunvzk3mdy852b89KrnRk1sinz33lKln5wSk4c39ro8Q5gm9ODqyck/rfhtuXpzl3dWX13R265uyNbtu7OmFHVfPA9k/PRD7w5x7cdncGfvTb/5bWsWvNkfrPhpXR178nkk96Qjy+akgvOPTmVis91AgAAcHQQEwEABoGYWKyvIXGf4RYUAQAAAIYqH4cFAKBhnn5he17+R2efQmJy4Janv3vslfT01AZwSgAAAIBjl5WJAACDwMrEg3t1a+dhb+e5Y1d3miqltIxo6uepAAAAAEgS77oAANBQR3Iu4OjWaj9OAgAAAMD/ss0pAAAAAAAAUEhMBAAAAAAAAAqJiQAAAAAAAEAhMREAAAAAAAAoJCYCAAAAAAAAhcREAAAAAAAAoJCYCAAAAAAAABQSEwEAAAAAAIBCYiIAAAAAAABQSEwEABgMLS3JyJGNnqLYyJF75wMAAACA/1Hq7e3tbfQQAADHgt4dO5POzkaP8f9aWlIaParRUwAAAAAwBImJAAAAAAAAQCHbnAIAAAAAAACFxEQAAAAAAACgkJgIAAAAAAAAFBITAQAAAAAAgEJiIgAAAAAAAFBITAQAAAAAAAAKiYkAAAAAAABAITERAAAAAAAAKCQmAgAAAAAAAIXERAAAAAAAAKCQmAgAAAAAAAAUEhMBAAAAAACAQmIiAAAAAAAAUEhMBAAAAAAAAAqJiQAAAAAAAEAhMREAAAAAAAAoJCYCAAAAAAAAhcREAAAAAAAAoJCYCAAAAAAAABQSEwEAAAAAAIBCYiIAAAAAAABQSEwEAAAAAAAAComJAAAAAAAAQKF/A7TqLa0Q/b4zAAAAAElFTkSuQmCC", - "text/plain": [ - "
" - ] - }, - "metadata": {}, - "output_type": "display_data" - } - ], - "source": [ - "compile_and_plot(U, prompt)" - ] - }, - { - "cell_type": "markdown", - "id": "60d35449-67b2-4ce0-9b5f-ac60670538e5", - "metadata": {}, - "source": [ - "## Transpile and discover" - ] - }, - { - "cell_type": "markdown", - "id": "41449006-9c42-4109-bb54-95581f90679a", - "metadata": {}, - "source": [ - "Set an initial circuit we want to transpile, optimize or use for discovering sub-arrangements:" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "f1c82de5-3645-403b-a54e-5185860b0f7c", - "metadata": {}, - "outputs": [ - { - "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAAAbIAAADuCAYAAABcSIIkAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjkuMiwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8hTgPZAAAACXBIWXMAAA9hAAAPYQGoP6dpAAAd80lEQVR4nO3de1gU570H8O8sC8JyUREUEBQVUSGAFiQaK4rRNhY1GmO0MUaPtulpopLWSFJrEqPHIAkxaWJNtMdr21CMlyZGbW7eiEcNiEYFlGrAymXVVVRuIsvO+cNHKnXB3WV2h3f5fp4nTx53Zt73t+u4331n3pmRZFmWQUREJCiN2gUQERG1BoOMiIiExiAjIiKhMciIiEhoDDIiIhIag4yIiITGICMiIqExyIiISGgMMiIiEhqDjIiIhMYgIyIioTHIiIhIaAwyIiISGoOMiIiExiAjIiKhMciIiEhoDDIiIhIag4yIiITGICMiIqExyIiISGgMMiIiEhqDjIiIhMYgIyIioTHIiIhIaAwyIiISGoOMiIiExiAjIiKhMciIiEhoDDIiIhIag4yIiITGICMiIqExyIiISGgMMiIiEhqDjIiIhMYgIyIioTHIiIhIaAwyIiISGoOMiIiExiAjIiKhMciIiEhoDDIiIhIag4yIiITGICMiIqExyIiISGgMMiIiEhqDjIiIhMYgIyIioTHIiIhIaAwyIiISGoOMiIiExiAjIiKhMciIiEho7SLIDAYDUlJSEBYWBnd3d4SEhCA5ORnV1dWYM2cOJEnCqlWr1C7Tbqpq6rF26xk8/fI+TJj3FZ5+eR/Wbj2Dqpp6tUsjImo1SZZlWe0i7OnEiRMYO3Ys9Ho9PD09ER4ejrKyMly6dAlJSUm4du0aDh8+jKysLPz4xz9Wu1xFGY0mvPrHY1idWYCbVfeHlo+XK56fOgDLXoiFVtsuftMQkRNy6iAzGAwYNGgQSkpKsGDBArz++uvw9vYGALz11lt4+eWXodVq0dDQgOvXr8PHx0flipVTX2/C5N9+g50H/vXAdceP6IFtKx+FqyvDjIjE49TfXPPnz0dJSQnmzp2L9PT0xhADgJSUFMTExMBoNCI0NNSpQgwAFq78zqIQA4CdB/6FhSu/s3NFRET24bRBVlBQgMzMTPj5+SE1NdXsOrGxsQCAmJiYJq8XFRVhwoQJ8Pb2RufOnfHss8/i6tWrdq9ZKVeu1eLDLQVWbfPRJ2dgqLhlp4qIiOzHaYMsIyMDJpMJ06dPh5eXl9l1PDw8ADQNssrKSiQmJqKkpAQZGRlYu3YtsrKyMG7cOJhMJofU3lrr/16I2/XW1Vp3uwH
rdxTaqSIiIvvRql2AvezduxcAkJiY2Ow6JSUlAJoG2dq1a1FaWoqDBw+iR48eAIDg4GA88sgj+OyzzzBx4kT7Fa2QfxwqtWm7PYdKkDI7WuFqiIjsy2kne4SEhKCkpATHjx/HwIED71tuNBoRGBgIg8GA8+fPo3fv3gD+HXz79u1rsn6fPn0wcuRIrFu3zupa4uLioNfrrX8TNrrs8yvUa4Os3s7VWIauN9fYoSIiopYFBAQgJyfHpm2ddkRWXV0NAKitrTW7PDMzEwaDAd7e3ujVq1fj6/n5+ZgyZcp960dGRiI/P9+mWvR6PUpLbRsl2aRDtU1/s/V1VY6tk4hIAU4bZAEBAaioqEBubi6GDh3aZFl5eTkWLlwIAIiOjoYkSY3LKioq0KlTp/va8/X1xdmzZ22uxZGuu1xBNfpavZ2XxoCO3bvboSIiopa15nvSaYNs9OjRKCgoQFpaGsaMGYPw8HAAQHZ2NmbMmAGDwQAAZg87Ks3W4bKtCotvoN+ErVZvl/v1SvTt2dEOFRER2Y/TzlpMSUlBly5dcPHiRURGRiIqKgp9+/ZFfHw8evfujVGjRgG4f+p9586dcf369fvau3btGnx9fR1RequFh3ZEUkKIVduMSwhhiBGRkJw2yIKDg5GVlYWkpCS4u7ujuLgYvr6+WLNmDXbt2oXCwjtTzf8zyAYMGGD2XFh+fj4GDBjgkNqVsHFZAsItDKbwnh2xYVmCnSsiIrIPp5212JKqqir4+PhAkiRUVlZCp9M1LktPT8eiRYvwww8/IDg4GABw9OhRDBkyBNu3b8ekSZPUKttql6/WYmrKPuzPLm92nZGDA5H5ViK6dvFwYGVERMppl0F2N5j69euHM2fONFl28+ZNREVFwc/PD2+88QZu3bqFlJQU+Pv74/Dhw9BoxBvEZp++gg+3FGB/th4XyiphkgGduwv2r0/C4If81S6PiKhVnHayR0tOnToF4P7DigDg4+ODvXv3Ijk5GdOmTYNWq8W4cePw7rvvChliADD4If/GwAoenYHSyzXo7NOBIUZEToFBZkafPn3w+eefO7IkIiKykZhDjFZ6UJAREZE42uWI7O59GImISHztckRGRETOg0FGRERCY5AREZHQGGRERCQ0BhkREQmNQUZEREJjkBERkdAYZEREJDQGGRERCY1BRkREQmOQERGR0BhkREQkNAYZEREJjUFGRERCY5AREZHQGGRERCQ0BhkREQmNQUZEREJjkBERkdAYZEREJDQGGRERCY1BRkREQmOQERGR0BhkREQkNAYZEREJjUFGRERCY5AREZHQGGRERCQ0BhkREQmNQUZERELTql0AEd1PlmWgrk7tMqzToQMkSVKsOVmWUVNrVKw9R9B5aBX7DLgPWI5BRtQW1dXB+NRMtauwinbLJsDdXbH2amqN8BqyWbH2HKHqyLPw1Lkq0xj3AYvx0CIREQmNQUZEREJjkBERkdAYZEREJDRO9mhHjEYTTLKsdhmqMZlk1N1ugEYjwc1Vo8rsKiJSHoPMScmyjKxjenx5uBQ5eQYcK7gKQ8WtxuXlV2rw2H//A3GRfvjJ0O4YHhvgdF/sJfpqbP2qCDn5BhzLN+Bs8Q3czXE3Vw2iw30RG+GHodFdMXlMKLyUmm1GRA4lyXI7/onuhGpqjdjwaSFWZxYg//x1i7eL6NMJv35qAGZPDIfOQ+zfN/uzy/HBx/n4dP8FNDRYtnt7e7ri2fFhmPfzCPTr1cm+BVpAvnVLyKnXkoJTr6tr6tv19HvuA5bjOTInknVMj+gnt2Pum4etCjEAyD9/HfNSDyP6ye3IOqa3T4F2dvX6LUx/ZT8S5+zG9m+KLQ4xAKisrscf/1aAqMk7sHztCRiNJjtWSkRKYpA5gYYGExa+8x1GzN6F8xcrW9XW+YuVGDF7F15KP4qGBnG+zL85UobISdvx8e7zrWqn3mjC4lXHMOSZnSgqad1nSUSOwSATnNFowvTf7Uf6plNQ6iCxLAP
vbD6N6b/bL8TIZMc3xRj7/Be4dLVWsTaP5Rvw41mf40zRdcXaJCL7YJAJTJZlzHk9C5n/KLJL+5n/KMKc17PQlk+jfnGoBFMX7kO9HQK37HINRv9yD4pLOTIjassYZAJbt70Qm3ees2sfm3eew/odhXbtw1aXrtZi+u/22yXE7iq9XINnFh0Q6jArUXvDIBPUv8qr8Nv0o1Zvl50xARe/mobsjAkWb/Pb9KO4qK+yui97kmUZv/6fQ7h63bq7g9vy/g8dv4QPPs63tkQicpB2EWQGgwEpKSkICwuDu7s7QkJCkJycjOrqasyZMweSJGHVqlVql2mV5LQjqKyut3q7AD8dgrt5IsBPZ/E2N6vqkZx2xOq+7Gnn/n9hxzcXrN7OlvcPAIs+yEHZ5Wqr+yMi+3P6IDtx4gSioqLw9ttvQ6/XIyIiAvX19Xj//fcxdepUFBQUAAAGDhyobqFWKCqpxKf7rP8Sb42/773Qps4V/eGveQ7tr/ZWA/607axD+1TCAcNluO3cgpXnzzS7jtvOLZh4NMuBVTnW8vmxkE/OwX9N7Gt2+b51P8OtnFmIDOvs4Mocoz3sA04dZAaDAePHj4der8eCBQtQXl6O3Nxc6PV6pKWlYdeuXcjOzoYkSYiOjla7XIt99EmBYjMULSXLwEefNP8PwZEKfriOvd+VO7zftdvOor6e58pEs2T1cZz65zWsfOlhdO/WdCT+4jORGDk4EK+vzkXeuQqVKqTWcuogmz9/PkpKSjB37lykp6fD29u7cVlKSgpiYmJgNBoRGhoKHx8fFSu1nCzL+PPnrbtWylabd55rEzMY/7rLvhNcmlN2uQZ7vytTpW+yXb3RhJmLD8LTwxXrlgxvfD08tCOWz4vDkZOX8fbGUypWSK3ltEFWUFCAzMxM+Pn5ITU11ew6sbGxAICYmJjG1+4GX3x8PDqo9NjulpReqkH5lRpV+i6/UoOyy+r0fa/vTl9Rre/sPPX6JtsdL7iK1HXf46fDgvHLyf2g0UjYvDwBkgTMXHwQJpP6P9DIdmLfVK8FGRkZMJlMmD59Ory8vMyu4+HhAaBpkJ07dw7btm3D4MGD4ebmhkOHDjmkXksdKzCo2n9OvgHdu3mq1r8sy8jJU+8zULPv1qhpaIChzroZns5m2drjmDCyB9IXxGNg/y54OKorfvv2URQW31C7NIdw5n3AaYNs7969AIDExMRm1ykpKQHQNMgSEhJQXn7n/MuSJUvaXJCpfRw/71wFHk/sqVr/ekMtKm7eVq3/PCvvYdlWLD2bh6VnHTtBpq0xGmXMXHwQ2RkT8PzUAcjK1eO9v5xWuyyHceZ9wGmD7MKFO7P6evY0/6VrNBobQ+reINNolD/aGhcXB71emRvx3vB4FPBIMLssO2PCA6eVB/h5NP7/4lfTml1Pb6jB4J9/dt/rqW+9i9VvfGNFxcoyanyBTsnNLn/QZ9Da9/9DcSmCg4OtqNg2HhoN8gcOVay9X/TojclBIWaXjT1yQJE+wsPDUWtSbjKMCa6A72LF2gOAG1W3UXe7AW6uLtiddVHxSVN9w8OhgfWXxZjT3vaBgIAA5OTk2LSt0wZZdfWda35qa83ffy8zMxMGgwHe3t7o1auXXWvR6/UoLS1VprFulYCH+UV3r5GyhNZFY/G696qqvImqSwq9F1u41QOdml9s6Wdg6/s3NTQo93fZAp2LCzBQufbCvLzwqH835Ro0o6ysDDUNDco1KLkBvso1BwAblg6Hm6sL8s9XYPFzA7HliyL8oODNocvLygBZmSMG3Acs57RBFhAQgIqKCuTm5mLo0Ka/asrLy7Fw4UIAQHR0tN0ndAQEBCjWVqV7B9xsZpne8OCJGAF+HtC6aGBsMEFvaP4mu8215ePlDm9td0tKtYsGyQstjW0f9Bm09v27aBoQ0N3+79/DDkcG7C0oKEjxEZmSF1nMezoCifFBWPR+Dj7ddwG5mROxfulwjJy9W7E+AoOCFB2RiaY1+0Brvie
dNshGjx6NgoICpKWlYcyYMQgPDwcAZGdnY8aMGTAY7py0d8SF0LYOl835bN8FPJ78tdll5g6F/aeLX01DcDdP6A21CBnzN6v7//OfVmCCiufIZFmGX8Jfce2G+ZPWD/oMWvv+x4/5EXa8V2L1dtYS8aGKhYWFbfbBmmE9fJCaHIfvTl1B2vqTMJlkLPkwF6nJgzHv6QjFbkH2z8LCdv1gTaX3AUuJF/kWSklJQZcuXXDx4kVERkYiKioKffv2RXx8PHr37o1Ro0YBaHp+TASxEX7tun9JkhAb0UW1/tXsm2wjScDGZQlw0UiYufhA41T7tzacQvbpK0hNjkPvYO8HtEJtmdMGWXBwMLKyspCUlAR3d3cUFxfD19cXa9aswa5du1BYeOeO7qIFWVBXHYK6WnefQGfo+14PR3VVre/4h/xV65tss2BmFIYN6obXVufiTNG/p9qbTDJmvXoQWhcN1i8d3kIL1NY5bZABwIABA/D555+jsrISlZWVOHr0KJ577jlUV1ejuLgYGo0GDz30kNplWkWSJDw7PkyVvmeO79smLhB/JqmPKv1276rDqPggVfom2/Tv1RHLXvgRDn9/Ge9sun+qff7561jyYS5GxAVi3tMRKlRISnDac2QtycvLgyzLCA8Ph053/whj69atAID8/Pwmfw4NDUVcXJzjCm3Gr57sj7T1Jx16v0WNRsKvpvRzXIct6NerE0YPCcLXRxx7u6hfTekPrVas334j/Lri9vinWlznQctFdqboBjwGb2pxnRXrTmLFupMOqsjx2sM+0C6D7NSpO/dVa+6w4pQpU8z+eebMmdi4caNda7NEaHdvPPFoKLZ9XeywPieN6omeQW3nPMKLz0Q6NMh07lr84om2EeRE1JRYPy8V8qAgk2XZ7H9tIcTuei9lCDp6uzmkr47ebvjDy0Mc0pelkhJ6YMpP7Hv9371WvBiHQH/1zw8S0f0YZIIKDvDEuwsftno7vaEGJZeqLbrm7K73Uh5W9f6KzfnjoqHw62zdVF9b3v+IuAC8MI3nT4jaqnZ5aPHufRhFN+vxvvj2+CWs31Fo8TaWXGt2r9mTwjFzgvkHEqrN39cDf0tLxM9e+AK3LXxOmLXvPyTAE39+cwQ0GvUnuRCRee1yROYsJEnCmleHYbqdZvFNT+qDta8NaxMzFZvz6JAgfJI+Cm6uyu/Kwd088fXasQgJMP/0BCJqGxhkgtNqNdi8fARemROt2KhBo5HwypxobF4+Ai4ubX8XmZDYE1989Jii17jFP+SPbzclITy0o2JtEpF9tP1vKXogjUZCavJgfLspCf1a+cXbL7Qjvt2UhNTkwUIdThs5OBCntz+BWY+37jBoBzcXpL04GIc2j2tTszSJqHkMMicyNKYbjm+ZiDWvDUN0uHW3DY/p54s1rw3D8S0TMTTGvnfItpfOPh2wYVkCvt00DlMf6wWt1vIg7uTtht/MiETejieQMjtauOvFiNqzdjnZw5l5uGvx3JP98cvJ/XD4+8v46nApjuVfxbECA8qv1ECW79x7LtBfh9gBfoiN6IKfPNIdQ6K7tulzYdYYNqgbhg3qBr2hBtu+LkZOngHH8g04U3QD9cY7k0IkCRgS3RWxEX4YEu2PSaNCofPgPwciEfFfrpOSJAmPDOyGRwb+e3QlyzKMRhlareQ0odWSAD/dfdPmuz/6Mcqu1CLIX4f/+/N4lSojIiUxyNoRSZLg6ur8AdaS9hDgRO0NTwQQEZHQGGRERCQ0BhkREQmNQUZEREKTZNmRT7UiUlfw6AyUXq5B9646lHz9c7XLaZYsy0BdndplWKdDB0Un08iyjJpao2LtOYLOQ6vYZ8B9wHKctUjUBkmSBLhbd2d/ZyNJEjx1rmqXoRruA5bjoUUiIhIag4yIiITGICMiIqExyIiISGgMMiIiEhqDjIiIhMYgIyIioTHIiIhIaAwyIiISGoOMiIiExiAjIiKhMciIiEhoDDIiIhIag4yIiITGICMiIqE
xyIiISGgMMiIiEhqfEN1GCfeYc5Uece6sZFlGTa1R7TKsovPQch8gVTDI2qq6Ohifmql2FRbTbtnEx7IrqKbWCK8hm9UuwypVR56Fp85V7TKoHeKhRSIiEhqDjIiIhMYgIyIioTHIiIhIaAwyIiISGoOMiIiExiAjIiKhMciIiEhoDDIiIhIag4yIiITGICMiIqExyIiISGgMMiIiEhqDjIiIhNYugsxgMCAlJQVhYWFwd3dHSEgIkpOTUV1djTlz5kCSJKxatUrtMu3igOEy3HZuwcrzZ5pdx23nFkw8muXAqhzr0tVaLF97ApGTtqHsSg0A4Mq1W1i/o1C4Z37ZYvn8WMgn5+C/JvY1u3zfup/hVs4sRIZ1dnBlRMpw+ueRnThxAmPHjoVer4enpyciIiJQVlaG999/H+fPn8e1a9cAAAMHDlS3UFKcLMtY+tFxLP/T96g3mposu200Yc7rWXjpnaPYuCwBExJ7qlSl/S1ZfRzjR/TAypcexpeHS1F6qaZx2YvPRGLk4EC88l428s5VqFglke2cekRmMBgwfvx46PV6LFiwAOXl5cjNzYVer0daWhp27dqF7OxsSJKE6OhotcslBcmyjN+8dRRLPjx+X4jdq+LmbUz6zTf45MsiB1bnWPVGE2YuPghPD1esWzK88fXw0I5YPi8OR05extsbT6lYIVHrOHWQzZ8/HyUlJZg7dy7S09Ph7e3duCwlJQUxMTEwGo0IDQ2Fj4+PipWS0rZ+VYw//DXPonVNJhkzFu1HcWmlnatSz/GCq0hd9z1+OiwYv5zcDxqNhM3LEyBJwMzFB2EyyWqXSGQzpw2ygoICZGZmws/PD6mpqWbXiY2NBQDExMQ0vrZ161ZMnjwZPXv2hE6nQ//+/fH73/8eVVVVDqnbXmoaGmCoqzP7nzOyNMTuqrttwtqtZ+1UTduwbO1xnDhzFekL4vHB74bi4aiu+P0Hx1BYfEPt0ohaxWnPkWVkZMBkMmH69Onw8vIyu46HhweApkGWnp6OHj164M0330RwcDBOnDiBN954AwcOHMDBgweh0YiZ/UvP5mHpWeu+3EV1svAaDh2/ZPV2/7v9LF7/9SB0cHOxQ1XqMxplzFx8ENkZE/D81AHIytXjvb+cVrssolZz2iDbu3cvACAxMbHZdUpKSgA0DbKdO3fC39+/8c8jRoyAv78/pk+fjm+//RYJCQlW1xIXFwe9Xm/VNh4aDfIHDrW6r+b8okdvTA4KMbts7JEDrW4/PDwctabmz0U5UrXbIMBrotXbXam4hZ5hg6A1XVO+KCuZ4Ar4Lla83RtVt1F3uwFuri7YnXURsoJHFPuGh0ODeuUapHYlICAAOTk5Nm3rtEF24cIFAEDPnuZnoxmNRhw6dAhA0yC7N8TuiouLAwCUlpbaVIter7d6W52LCzDQpu7MCvPywqP+3ZRr8D+UlZWhpqHBbu1bpUs/wPwg/IEuXakAbtn296woyQ3wVb7ZDUuHw83VBfnnK7D4uYHY8kURfihR5txgeVkZIN9WpC0iazhtkFVXVwMAamtrzS7PzMyEwWCAt7c3evXq1WJb+/btAwAMGDDAploCAgKs3sZDsEOYQUFBbWZEVuPWAVZPJJdlQJLQzd8HWlN3e5RlFRNcUa5wm/OejkBifBAWvZ+DT/ddQG7mRKxfOhwjZ+9WpP3AoCCOyMhmtnxP3uW0QRYQEICKigrk5uZi6NCmh+jKy8uxcOFCAEB0dDQkSWq2ndLSUrz66qt47LHHbL7WzJbhsnzrFoxPzbSpPzUUFhZCcndXuwwAQPmVGvT4yd9gbLDiuJkkIaJPJ5zefrrF/cFRqmvq4TVks2LthfXwQWpyHL47dQVp60/CZJKx5MNcpCYPxrynI/DBx/mt7uOfhYXw1LkqUC2RdcT62W+F0aNHAwDS0tJQWFjY+Hp2djYSExNhMBgAtHwhdFVVFR5//HG4ublh/fr1dq2XlBPor8MTo0Ot3u75qQPaRIgpTZK
AjcsS4KKRMHPxgcap9m9tOIXs01eQmhyH3sHeD2iFqO1y2iBLSUlBly5dcPHiRURGRiIqKgp9+/ZFfHw8evfujVGjRgFoen7sXrW1tRg/fjyKiorw5ZdfIjAw0JHlUystnBUFN1fLd+8egZ6YMS7MjhWpZ8HMKAwb1A2vrc7FmaJ/T7U3mWTMevUgtC4arF86vIUWiNo2pw2y4OBgZGVlISkpCe7u7iguLoavry/WrFmDXbt2NY7SzAVZfX09nnzySeTk5GDPnj2IiIhwdPnUSnGR/vhL6khoXR48wurq6449q38KHy83B1TmWP17dcSyF36Ew99fxjub7p9qn3/+OpZ8mIsRcYGY9zT3cxKTJMtKTsAVQ1VVFXx8fCBJEiorK6HT6RqXmUwmTJs2DZ999hl2797dOHJzNNHOkWm3bGoz58ju9c2RMry08ihOnLl/Sr0kAY8NC8YfFz2CXm3s0JrS58gcoerIszxHRqpw2skeLcnLy4MsywgPD28SYgDwwgsv4JNPPsErr7wCnU6HI0eONC7r06eP2en51HY9OiQIuZkTceTkZXy8+wfoDTXQumjQJ8QbsyeFo3cwb01GJLp2GWSnTt25Qaq5w4p79uwBAKxYsQIrVqxosmzDhg2YNWuW3esjZUmShKEx3TA0xn7X0RGRehhk/6G4uNjB1RARUWs47WSPlrQUZEREJJZ2OSK7ex9GIiISX7sckRERkfNgkBERkdAYZEREJDQGGRERCY1BRkREQmOQERGR0BhkREQkNAYZEREJjUFGRERCY5AREZHQ2uXzyEQgyzJQV6d2GZbr0AGS9OCHWJJlZFlGTa1R7TKsovPQch8gVTDIiIhIaDy0SEREQmOQERGR0BhkREQkNAYZEREJjUFGRERCY5AREZHQGGRERCQ0BhkREQmNQUZEREJjkBERkdAYZEREJDQGGRERCY1BRkREQmOQERGR0BhkREQkNAYZEREJjUFGRERCY5AREZHQGGRERCQ0BhkREQmNQUZEREJjkBERkdAYZEREJDQGGRERCY1BRkREQvt/XYdQzPpWuqcAAAAASUVORK5CYII=", - "text/plain": [ - "
" - ] - }, - "execution_count": null, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "qc = QuantumCircuit(3)\n", - "qc.h(2)\n", - "qc.cx(0,1)\n", - "qc.cx(2,1)\n", - "qc.h(1)\n", - "qc.x(1)\n", - "qc.h(1)\n", - "qc.x(2)\n", - "\n", - "U = qi.Operator(qc).to_matrix() # the unitary of the circuit\n", - "\n", - "#-----------------------------------------\n", - "\n", - "fig = qc.draw(\"mpl\")\n", - "fig" - ] - }, - { - "cell_type": "markdown", - "id": "db8fb061-d950-4727-a1b1-56ea46e48f1b", - "metadata": {}, - "source": [ - "We set different gate pool targets to see what the model gives us:" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "8a35405a-d98d-460b-9b0c-92d167266ad0", - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "[(\"Compile using: ['h', 'cx', 'z', 'x', 'ccx', 'swap']\", 'all'),\n", - " (\"Compile using: ['h', 'cx', 'z', 'ccx']\", 'no x, no swap'),\n", - " (\"Compile using: ['h', 'cx', 'x', 'ccx']\", 'no z, no swap'),\n", - " (\"Compile using: ['h', 'x', 'ccx']\", 'no cx, no z, no swap'),\n", - " (\"Compile using: ['h', 'z', 'x', 'ccx']\", 'no cx, no swap')]" - ] - }, - "execution_count": null, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "cs_1 = f\"Compile using: {[x for x in pipeline.gate_pool]}\", \"all\"\n", - "\n", - "cs_2 = \"Compile using: ['h', 'cx', 'z', 'ccx']\" , \"no x, no swap\" \n", - "cs_3 = \"Compile using: ['h', 'cx', 'x', 'ccx']\" , \"no z, no swap\" \n", - "cs_4 = \"Compile using: ['h', 'x', 'ccx']\" , \"no cx, no z, no swap\" \n", - "cs_5 = \"Compile using: ['h', 'z', 'x', 'ccx']\" , \"no cx, no swap\" \n", - "\n", - "cs = [cs_1, cs_2, cs_3, cs_4, cs_5]\n", - "cs" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "67e3aa4e-e7e0-4f7a-9a5c-14a70dd6b77b", - "metadata": {}, - "outputs": [], - "source": [ - "samples = 512\n", - "num_of_qubits = 3\n", - "max_gates = 12" - ] - }, - { - "cell_type": "markdown", - "id": 
"adfbef8e-d91a-4ed3-b756-0e76c33674fa", - "metadata": {}, - "source": [ - "Compile with the different gate-sets and plot correct (exact) compiled circuits. Note, some of the circuits might look the same but the gate time-sequences are distinct. Qiskit reorders \"parallel\" gates to make smaller plots." - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "6268abb3-1965-4a0e-b5dc-0f3c8db8264a", - "metadata": {}, - "outputs": [ - { - "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAABxMAAAF8CAYAAAAEvpV0AAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjkuMiwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8hTgPZAAAACXBIWXMAAA9hAAAPYQGoP6dpAAB1hElEQVR4nO3dd3hU1dbH8d9k0gsECBB6D4SiVAHpFhDpKsWrICKiItixYAHxviI2rCA2UMQCFhAQ6VVBaVJDCy1EAgQIKaTnvH9wiUQmkDIzZ2by/TyPz72css/KzOSszF5n720xDMMQAAAAAAAAAAAAAPyLl9kBAAAAAAAAAAAAAHBNFBMBAAAAAAAAAAAA2EQxEQAAAAAAAAAAAIBNFBMBAAAAAAAAAAAA2EQxEQAAAAAAAAAAAIBNFBMBAAAAAAAAAAAA2EQxEQAAAAAAAAAAAIBNFBMBAAAAAAAAAAAA2EQxEQAAAAAAAAAAAIBNFBNNZrFYNH78+Nx/z5gxQxaLRYcPHzYtpiv5d7xmGzp0qCwWiywWixo3bnzFYy++tps2bXJSdAAkKSEhIff31GKx6M033zQ7JAAAAAAAAABAAXlEMTE6OloPPPCAateuLX9/f5UqVUrt2rXTu+++q9TUVLPDg4OFhYVp5syZeu211/Jsr1mzZpELn507d9bQoUOLdO748eNVs2bNIp1bEMX5uYYOHarOnTsX6pxVq1YVq8BtsVg0Y8aMQp9XnNexuDHbg7vHXxz//pwFBQVp5syZmjx5snlBAQAAAAAAAACKxNvsAIpr4cKF6t+/v/z8/DRkyBA1btxYGRkZWrduncaMGaNdu3bp448/NjvMfKWmpsrb233eBleMNygoSHfffbfZYQDIh4+Pj+6++24dPnxYjz/+uNnhAAAAAAAAAAAKwbWqQoV06NAhDRo0SDVq1NCKFStUqVKl3H0PP/ywDhw4oIULF5oY4dX5+/ubHUKhuFu8AAAAAAAAAAAAKDq3nub09ddfV3Jysj777LM8hcSL6tatq0cffTT331lZWXrllVdUp04d+fn5qWbNmho7dqzS09PznFezZk317NlTq1atUsuWLRUQEKAmTZpo1apVkqQff/xRTZo0kb+/v1q0aKGtW7fmOX/o0KEKDg7WwYMH1a1bNwUFBaly5cqaMGGCDMPIc2xB1yBctGiROnTooKCgIIWEhKhHjx7atWvXVc8bP368LBbLZdttrc24adMmdevWTWFhYQoICFCtWrU0bNiwK8Z7sf0DBw5o6NChCg0NVenSpXXvvffq/Pnzec5NTU3VI488orCwMIWEhKh3796KjY21+Rrs2bNHR48everPVxTp6el64oknVL58eQUFBalfv346deqUQ651qa+++krXXXedAgMDVaZMGXXs2FFLliyRJK1YsUJeXl566aWX8pzz9ddfy2KxaOrUqQ6JqXPnznnWsrv0v6JMTXo1qampatCggRo0aJBnCuIzZ86oUqVKuv7665WdnW336170xx9/6NZb
b1WZMmUUFBSka665Ru+++26eY/bs2aMBAwaofPnyCggIUP369fX888+bHv/+/ft1++23Kzw8XP7+/qpataoGDRqkc+fOSZJuu+02NW/ePM85vXr1ksVi0c8//5znNbBYLFq0aFFu7E899ZSaNGmi4OBglSpVSt27d9e2bdvytHVx6tXvvvtOY8eOVXh4uIKCgtS7d2/FxMQ45GcGAAAAAAAAAJjPrYuJ8+fPV+3atXX99dcX6Pjhw4frpZdeUvPmzTV58mR16tRJEydO1KBBgy479sCBA/rPf/6jXr16aeLEiTp79qx69eqlWbNm6fHHH9fdd9+tl19+WdHR0RowYIBycnLynJ+dna1bbrlFFStW1Ouvv64WLVpo3LhxGjduXKF/zpkzZ6pHjx4KDg7WpEmT9OKLL2r37t1q37693dZUO3nypLp27arDhw/r2Wef1fvvv6+77rpLGzZsKND5AwYMUFJSkiZOnKgBAwZoxowZevnll/McM3ToUL3//vu69dZbNWnSJAUEBKhHjx4224uMjNSQIUOK/XPZMnr0aG3btk3jxo3TQw89pPnz52vUqFEOudZFL7/8sgYPHiwfHx9NmDBBL7/8sqpVq6YVK1ZIkm644QaNHDlSEydO1JYtWyRJx48f1+jRo3XTTTfpwQcfdEhczz//vGbOnJnnv27dukmSKlSoYPfrBQQE6IsvvtCBAwdyC3TShZHE586d04wZM2S1Wu1+XUlaunSpOnbsqN27d+vRRx/VW2+9pS5dumjBggW5x2zfvl2tW7fWihUrdP/99+vdd99V3759NX/+fFPjz8jIULdu3bRhwwaNHj1aH374oUaMGKGDBw8qISFBktShQwdt27ZNiYmJkiTDMPTbb7/Jy8tLa9euzW1r7dq18vLyUrt27SRJBw8e1Ny5c9WzZ0+9/fbbGjNmjHbs2KFOnTrp77//viyW//u//9PChQv1zDPP6JFHHtHSpUt10003sT4tAAAAAAAAAHgqw02dO3fOkGT06dOnQMf/9ddfhiRj+PDhebY/9dRThiRjxYoVudtq1KhhSDJ+//333G2LFy82JBkBAQHGkSNHcrdPmzbNkGSsXLkyd9s999xjSDJGjx6duy0nJ8fo0aOH4evra5w6dSp3uyRj3Lhxuf+ePn26Ick4dOiQYRiGkZSUZISGhhr3339/nrjj4uKM0qVLX7b938aNG2fYepv/fZ2ffvrJkGRs3Ljxiu39O96L7Q8bNizPcf369TPKlSuX++/NmzcbkozHHnssz3FDhw69rM2L1+nUqdMVYzGMC691jRo1rnqcYfzzM990001GTk5O7vbHH3/csFqtRkJCQoHaKaz9+/cbXl5eRr9+/Yzs7Ow8+y6NIyUlxahbt67RqFEjIy0tzejRo4dRqlSpPJ83R/vtt98MHx+fy95Pe3vuuecMLy8vY82aNcacOXMMScY777zjsOtlZWUZtWrVMmrUqGGcPXs2z75L34OOHTsaISEhl73mlx5jRvxbt241JBlz5szJ95iNGzcakoxffvnFMAzD2L59uyHJ6N+/v9G6devc43r37m00a9Ys999paWmXfS4PHTpk+Pn5GRMmTMjdtnLlSkOSUaVKFSMxMTF3++zZsw1JxrvvvnvVn+PQoUOGJOONN964+g8NAAAAAAAAAHAJbjsy8eLom5CQkAId/8svv0iSnnjiiTzbn3zySUm6bG3Fhg0bqm3btrn/bt26taQLI8iqV69+2faDBw9eds1LR7tZLBaNGjVKGRkZWrZsWYFili6MpkpISNCdd96p+Pj43P+sVqtat26tlStXFritKwkNDZUkLViwQJmZmYU+/98j5zp06KDTp0/nvk+//vqrJGnkyJF5jhs9erTN9gzDyJ1W1t5GjBiRZ+rXDh06KDs7W0eOHHHI9ebOnaucnBy99NJL8vLK+yt3aRyBgYGaMWOGoqKi1LFjRy1cuFCTJ0/O83lzpLi4ON1xxx1q2rSppkyZ4tBr
jR8/Xo0aNdI999yjkSNHqlOnTnrkkUccdr2tW7fq0KFDeuyxx3I/6xddfA9OnTqlNWvWaNiwYZe95v+eKtjZ8ZcuXVqStHjx4sumD76oWbNmCg4O1po1ayRdGIFYtWpVDRkyRFu2bNH58+dlGIbWrVunDh065J7n5+eX+7nMzs7W6dOnFRwcrPr16+eOkr3UkCFD8tx377jjDlWqVCn3HgsAAAAAAAAA8CxuW0wsVaqUJCkpKalAxx85ckReXl6qW7dunu3h4eEKDQ29rJD072LCxc78atWq2dx+9uzZPNu9vLxUu3btPNsiIiIkqVBTk+7fv1/ShSJm+fLl8/y3ZMkSnTx5ssBtXUmnTp10++236+WXX1ZYWJj69Omj6dOnX7aeZH7+/XqVKVNG0j+vy8XXv1atWnmO+/f74QxXi9XeoqOj5eXlpYYNG1712Hbt2umhhx7Sn3/+qW7dul22ZqWjZGVlacCAAcrOztaPP/4oPz8/h17P19dXn3/+uQ4dOqSkpCRNnz7d5tqe9hIdHS1Jaty4cb7HXHwg4ErHXOTs+GvVqqUnnnhCn376qcLCwtStWzd9+OGHueslSpLValXbtm1zpzRdu3atOnTooPbt2ys7O1sbNmzQ7t27debMmTzFxJycHE2ePFn16tWTn5+fwsLCVL58eW3fvj1P+xfVq1cvz78tFovq1q1rtymXAQAAAAAAAACuxa2LiZUrV9bOnTsLdV5BO/zzW/csv+2GYRQqjoK6uBbjzJkztXTp0sv+mzdv3hXPz+/nzc7Ovuy477//XuvXr9eoUaMUGxurYcOGqUWLFkpOTr5qnM5+XYrDlWNNT0/PHZEZHR2d7yg0exszZozWr1+v2bNnq2rVqk655uLFiyVJaWlpuUVzd+Ls+N966y1t375dY8eOVWpqqh555BE1atRIx44dyz2mffv22rhxo9LS0nKLiaGhoWrcuLHWrl2bW2i8tJj46quv6oknnlDHjh311VdfafHixVq6dKkaNWp02VqwAAAAAAAAAICSx22LiZLUs2dPRUdHa/369Vc9tkaNGsrJybms0//EiRNKSEhQjRo17BpbTk7OZVOf7tu3T5JUs2bNArdTp04dSVKFChV00003XfZf586dr3j+xVF3CQkJebbnN6VnmzZt9H//93/atGmTZs2apV27dunbb78tcLz5ufj6Hzp0KM/2AwcOFLttV1enTh3l5ORo9+7dVz123LhxioqK0ptvvqlDhw7p2WefdXh83377rd555x29+eab6tSpk8OvJ0nbt2/XhAkTdO+996pZs2YaPny4zVFw9nLx9+hKDx9cHElckAcUnB3/RU2aNNELL7ygNWvWaO3atYqNjdVHH32Uu79Dhw7KyMjQN998o9jY2NyiYceOHXOLiREREapYsWLuOd9//726dOmizz77TIMGDVLXrl110003XXbPuOjf91DDMHTgwIFC3dcAAAAAAAAAAO7DrYuJTz/9tIKCgjR8+HCdOHHisv3R0dF69913JUm33nqrJOmdd97Jc8zbb78tSerRo4fd4/vggw9y/79hGPrggw/k4+OjG2+8scBtdOvWTaVKldKrr75qcy3DU6dOXfH8i0WUi+uoSVJKSoq++OKLPMedPXv2spF5TZs2laQCT3V6Jd26dZOky9bie//9920ev2fPHh09erTY13UFffv2lZeXlyZMmHDZSK9LX/M//vhDb775ph577DE9+eSTGjNmjD744AOtXr3aYbHt3LlTw4cP1913361HH33UYde5VGZmpoYOHarKlSvr3Xff1YwZM3TixAk9/vjjDrtm8+bNVatWLb3zzjuXFckuvgfly5dXx44d9fnnn1/22bv0fTIj/sTERGVlZeXZ1qRJE3l5eeX5/WzdurV8fHw0adIklS1bVo0aNZJ0oci4YcMGrV69Os+oROnCSN1//+7PmTNHsbGxNmP58ssv80wv/f333+v48ePq3r17sX5GAAAAAAAAAIBr
8jY7gOKoU6eOvv76aw0cOFCRkZEaMmSIGjdurIyMDP3++++aM2eOhg4dKkm69tprdc899+jjjz9WQkKCOnXqpD///FNffPGF+vbtqy5dutg1Nn9/f/3666+655571Lp1ay1atEgLFy7U2LFjVb58+QK3U6pUKU2dOlWDBw9W8+bNNWjQIJUvX15Hjx7VwoUL1a5duzxFy3/r2rWrqlevrvvuu09jxoyR1WrV559/ntvGRV988YWmTJmifv36qU6dOkpKStInn3yiUqVK5RZii6NFixa6/fbb9c477+j06dNq06aNVq9enTta89/TsUZGRqpTp065U366gs6dO2v16tWFng61bt26ev755/XKK6+oQ4cOuu222+Tn56eNGzeqcuXKmjhxotLS0nTPPfeoXr16+r//+z9J0ssvv6z58+fr3nvv1Y4dOxQUFJTvNS6OCivsunX33nuvJOVOcXmp66+//rJ1Py9atWqVunTponHjxmn8+PGFuuZ///tf/fXXX1q+fLlCQkJ0zTXX6KWXXtILL7ygO+6444qft/Hjx+vll1/WypUrrzoq91JeXl6aOnWqevXqpaZNm+ree+9VpUqVtGfPHu3atSt3ytL33ntP7du3V/PmzTVixAjVqlVLhw8f1sKFC/XXX3+ZFv+KFSs0atQo9e/fXxEREcrKytLMmTNltVp1++235x4XGBioFi1aaMOGDerVq1fu71XHjh2VkpKilJSUy4qJPXv2zB1lef3112vHjh2aNWtWvu992bJl1b59e9177706ceKE3nnnHdWtW1f3339/gX8eAAAAAAAAAID7cOtioiT17t1b27dv1xtvvKF58+Zp6tSp8vPz0zXXXKO33norTwf3p59+qtq1a2vGjBn66aefFB4erueee07jxo2ze1xWq1W//vqrHnroIY0ZM0YhISEaN26cXnrppUK39Z///EeVK1fWa6+9pjfeeEPp6emqUqWKOnTokFsMyo+Pj49++uknjRw5Ui+++KLCw8P12GOPqUyZMnnOvVhc/fbbb3XixAmVLl1a1113nWbNmqVatWoVOmZbvvzyS4WHh+ubb77RTz/9pJtuuknfffed6tevL39/f7tcw5GSk5MVHh5epHMnTJigWrVq6f3339fzzz+vwMBAXXPNNRo8eLAkaezYsTpw4IB+//333NfC19dXX3zxhdq0aaMxY8ZcNqrzUikpKapbt26h4zp16pRSUlI0YsSIy/ZNnz4934LSxXU0K1WqVKjrbdmyRa+++qpGjRqVp4D/7LPPat68ebr//vu1a9cuhYaG5ntdi8VSpPehW7duWrlypV5++WW99dZbysnJUZ06dfLcI6699lpt2LBBL774oqZOnaq0tDTVqFFDAwYMMDX+a6+9Vt26ddP8+fMVGxurwMBAXXvttVq0aJHatGmT59iLoxDbt2+fuy08PFx169bVgQMHLismjh07VikpKfr666/13XffqXnz5lq4cGG+U+yOHTtW27dv18SJE5WUlKQbb7xRU6ZMUWBgYKF+JgAAAAAAAACAe7AYhR1mhasaOnSovv/++9yCC/L3119/qVmzZvrqq6901113Ffr8oUOHasWKFdqyZYu8vb3zLeIUV1JSksqWLat33nlHDz/8sEOuUVS7d+9Wo0aNtGDBAodM12vL008/rW+++UYHDhyQn5+fU64pSdddd51q1KihOXPmOO2a9uTO8V8cjTpnzhzdcccdhTrXMAydPn1aMTExat68ud544w099dRTDooUAAAAAAAAAGBPbj8yEe4jNTVVAQEBeba988478vLyUseOHYvcbkxMjMqXL69GjRpp586dxQ3TpjVr1qhKlSouOZXjypUr1bZtW6cVEi9e88UXX3RqITExMVHbtm27bL1Pd+Hu8RfHuXPnCjW9MwAAAAAAAADAdVBMhNO8/vrr2rx5s7p06SJvb28tWrRIixYt0ogRI1StWrUitfn000/r7rvvliQFBwfbM9w8evTo
4dRiXWE8/PDDTh8tuXHjRqdeT7qwfmh6errTr2sv7h5/cQQHB2vp0qW5/46IiDAxGgAAAAAAAABAYVBMhNNcf/31Wrp0qV555RUlJyerevXqGj9+vJ5//vkit9mwYUM1bNjQjlECsDdvb2/ddNNNZocBAAAAAAAAACgC1kwEAAAAAAAAAAAAYJOX2QEAAAAAAAAAAAAAcE0UEwEAAAAAAAAAAADYRDERAAAAAAAAAAAAgE0UEwEAAAAAAAAAAADYRDERAAAAAAAAAAAAgE0UEwEAAAAAAAAAAADYRDERAAAAAAAAAAAAgE0UEwEAAAAAAAAAAADYRDERAAAAAAAAAAAAgE0UEwEAAAAAAAAAAADYRDERAAAAAAAAAAAAgE0UEwEAAAAAAAAAAADYRDERAAAAAAAAAAAAgE0UEwEAAAAAAAAAAADYRDERAAAAAAAAAAAAgE0UEwEAAAAAAAAAAADYRDERAAAAAAAAAAAAgE0UEwEAAAAAAAAAAADYRDERAAAAAAAAAAAAgE0UEwEAAAAAAAAAAADYRDERAAAAAAAAAAAAgE0UEwEAAAAAAAAAAADYRDERAAAAAAAAAAAAgE0UEwEAAAAAAAAAAADYRDERAAAAAAAAAAAAgE0UEwEAAAAAAAAAAADYRDERAAAAAAAAAAAAgE0UEwEAAAAAAAAAAADYRDERAAAAAAAAAAAAgE0UEwEAAAAAAAAAAADYRDERAAAAAAAAAAAAgE0UEwEAAAAAAAAAAADYRDERAAAAAAAAAAAAgE0UEwEAAAAAAAAAAADYRDERAAAAAAAAAAAAgE0UEwEAAAAAAAAAAADYRDERAAAAAAAAAAAAgE0UEwEAAAAAAAAAAADYRDERAAAAAAAAAAAAgE0UEwEAAAAAAAAAAADYRDERAAAAAAAAAAAAgE0UEwEAAAAAAAAAAADYRDERAAAAAAAAAAAAgE0UEwEAAAAAAAAAAADYRDERAAAAAAAAAAAAgE0UEwEAAAAAAAAAAADYRDERAAAAAAAAAAAAgE0UEwEAAAAAAAAAAADYRDERAAAAAAAAAAAAgE0UEwEAAAAAAAAAAADYRDERAAAAAAAAAAAAgE0UEwEAAAAAAAAAAADYRDERAAAAAAAAAAAAgE0UEwEAAAAAAAAAAADYRDERAAAAAAAAAAAAgE0UEwEAAAAAAAAAAADYRDERAAAAAAAAAAAAgE0UEwEAAAAAAAAAAADYRDERAAAAAAAAAAAAgE0UEwEAAAAAAAAAAADYRDERAAAAAAAAAAAAgE0UEwEAAAAAAAAAAADYRDERAAAAAAAAAAAAgE0UEwEAAAAAAAAAAADYRDERAAAAAAAAAAAAgE0UEwEAAAAAAAAAAADYRDERAAAAAAAAAAAAgE0UEwEAAAAAAAAAAADYRDERAAAAAAAAAAAAgE0UEwEAAAAAAAAAAADYRDERAAAAAAAAAAAAgE3eZgcAOJJhGEpMzlRCUrr8fK0qU8pPfr5Ws8MCAMApDMNQSmqWziamy8tiUZlSfgrwt8pisZgdGgAAAAAAANwExUR4pEPHkvTRnCh9Pne/4s+m5dnXoXlFjRwYqdtuqilfHwqLAADPc+pMqj6fu08fzd6jw38n59nXqE6oRg6M1N0966pUsK9JEQIAAAAAAMBdWAzDMMwOArCXv/ac1vPvb9Kidcd0tU92hbL+emhApJ6/v6l8fJjxFwDg/o7Fpei59zZq9uJDysjMueKxwYHeGtyzrl59pKVCS/k5KUIAAAAAAAC4G4qJ8BiL1sao/1MrlJKaVajzbm5bWd+/dSOjMwAAbm37vjO6deRixZ48X6jzGtYJ1S8fdlWNyiEOigwAAAAAAADujOFYJomKitKQIUNUuXJl+fv7KyIiQq+//roMw1C5cuVktVqVnJx89YYgSVq2IVa9H1la6EKiJC1d/7d6PLxEaemFPxcAUDTkQfvaeyhBXe77pdCFREnaHZ2gLvct0onTqQ6IDAAAAAAAAO6O
YqIJZs+erWbNmmnmzJmqXLmy+vTpIz8/Pz3zzDMaNWqUzpw5o/r16ys4ONjsUN3C/iPndNvjy5WVXfRBtuu2ntADE36zY1QAgPyQB+0rMTlD3Ucu0Zlz6UVu41BsknqNXqLs7CtPjQoAAAAAAICSx9vsAEqa9evXa/DgwQoJCdGvv/6qzp07S5IMw9CYMWP01ltvSZJatGhhYpTuZfLMnUpKySx2O1/OP6CXHmymOtVK2SEqAIAt5EH7m7nggA7FJhW7nY0747Vo3TH17FTdDlEBAAAAAADAUzAy0Ymys7M1bNgwZWRkaM6cObkdqJJksVg0YcIEeXtfqO+2bNkyd9/+/ft1yy23KDg4WOXLl9fo0aN1/nzhpzHzRInJGZq54IDd2pv6XZTd2gIA5EUetD/DMPTht/bLXVPIgwAAAAAAAPgXiolONHv2bO3Zs0e9evVSly5dLtsfGBioKlWqSPpnREZCQoK6dOmipKQkff/993rrrbf0zTffaNiwYU6N3VV9teCAks/bb63Dz+fuU2oaaycCgCOQB+1v9aY4RR1MsFt7v/52TNExiXZrDwAAAAAAAO6PaU6d6IcffpAk3XXXXfkek5qaKi8vLzVr1kySNG3aNJ09e1Z//fWXwsLCJEne3t6666679OKLL6pRo0aOD9yFzfol2q7tnU3M0KJ1x3TbTTXt2i4AgDzoCLMW2m90viQZhvTNomi9MKKZXdsFAAAAAACA+2JkohNt2rRJUt6p2y51/PhxnTx5Ug0aNFBQUJAk6ZdfftGNN96Y24EqSbfffrv8/Py0aNEixwft4o6fSrV7m3HxTJ0HAI5AHrS/4/GOyIP2bxMAAAAAAADui5GJTnTy5ElJyu0g/bevvvpKUt5O1qioqMumcvPz81OdOnW0Z88eu8d4ww036OjRo3Zv11GOBDwqWQLt2uaL41/V2y9ssGubAMxTvXp1rVixwuwwIPKgI8T63SVZq9u1zS++mqNfPx9i1zYBmIc8CAAAAAAoLoqJThQcHKzU1FRFR0crPDw8z76YmBi9+uqrkv5ZJ0qSzp49q9DQ0MvaKlOmjM6cOWP3GI8eParoaPtOHepQ9VMlX/sWE8/E/60zp93oNQAAN0EedICa56QQ+zaZfC5eyX+70WsAAAAAAAAAh6KY6ETt2rXT3LlzNXHiRP3444/y9fWVdGHURb9+/ZSQkCAp/+nfnKF6dfuObnC0Y9YMpdu5zQrlghUSWsfOrQIwi7vd1zwZedD+TvhKyXZus0wpH5UNIA8CnsLd7msAAAAAANdDMdGJxo4dq4ULF2rhwoWqX7++WrVqpfj4eK1Zs0aDBw/W8ePHlZKSoqZNm+aeU6ZMmdzO1UudPXtWDRo0sHuM7jYF0sRPt2nse5vs1l6Av1V7181RaCk/u7UJALiAPGh/81YeUd9Hl9m1zRU/vaGmDcrZtU0AAAAAAAC4Ly+zAyhJWrVqpaVLl6pdu3aKi4vT4sWLlZOTo1mzZmns2LFKTExUZGSkAgP/mbYzMjJSUVFRedpJT09XdHS0QzpR3c19/SLk422/j/Fdt9ahkAgADkIetL8eHaqpWrjtNSiL4vqmFSgkAgAAAAAAIA+KiU7WqVMnrVu3TqmpqTp37pxWrVqlgQMHatOmC6PrLl0nSpJuvfVWLV++XKdPn87d9tNPPyk9PV233nqrU2N3RRXKBah/11p2a+/hQQ3t1hYA4HLkQfvy9vbSg/3tV1QdOTDSbm0BAAAAAADAM1BMdBEbN26UdPk6UQ888IBCQ0PVp08fLV68WDNnztTo0aM1cOBANWxI4UuSnhjSWN5WS7Hb6Xp9FUZjAIBJyINFN/y2+ipbuvij6mtVCdEdN9vvAR0AAAAAAAB4BoqJLiK/ERmhoaFasWKFgoODddttt+nxxx/XwIED9fnnn5sRpktq0TBMn47vUKw26tcsrW8mdbFTRACAwiIPFl2FcgGa9+5N8vMt+p91ZUv7adGUrvLztdoxMgAA
AAAAAHgCi2EYhtlBlHQ5OTkqXbq0UlNTlZSUpICAALNDcksfzY7Sw6+uV05O4T7SjeqEatHUbqoWHuygyAAAV0IetI/Fvx3THU8uV/L5rEKdV7FcgBZ8cLNaNirvoMgAAADMkZaepdmLD2nZhr91NjFd3t5eqhQWoLt61NX1TSvIYin+LEcAALiq7OwcLVp3TD+tOKL4s2kyDCmsjJ/63VBTt3aoKquVsWYoOIqJ8Ci/rI3RE2/8ob2Hz131WG+rRQNvqa0Pnmur0FLFnx4OAACzbY2K18Ovrtf6bScLdHzX66vooxfaqVbVEAdHBgAA4DxnE9P12mfb9OmP+3TmXLrNY5rUK6PH7m6se/vWo6gIAPAoGZnZmjxzp6Z8F6Wjx1NsHlMtPEgjB0bq8cGNmaUIBUIxER7HMAyt/PO4pnwXpbkrjyg7O+9HvGrFID3Qv76G31Zf4WGBJkUJAIDjbI2K19TZezRrYbTOp+UdqRga4qt7+9bTQwMiVa9GaZMiBAD3tm3bNr300ktatWqVDMPQDTfcoKlTpyoiIkI9evTQt99+a3aIQIl19Hiyuj34q/YcOieLpPw6vSwWyTCkoX3q6ZNx7eXtzegMoKDIg4DrSkzO0G2PL9PyP47n5jpbLu7r0qqSfnrnJpUO8XVuoHA7FBPh0U6dSdXRuBT1Gr1Ex0+lqlp4oA7+MpAvCQCAEiExOUMHjyWpx8OL9fepVFWtGKi9P/dXYIC32aEBgNtavny5evbsqRo1aui+++5TQECAZsyYoaysLG3btk0TJ07Us88+a3aYQIl0OiFNbe+er/1HEwt13v2319e0l9oxQhEoAPIg4LoyMrN168jFWv7H8UKd17lluH796BZGKOKK6EmCRytfNkDlywYo0P/CR93Xx0ohEQBQYpQK9lXTBuUU8L886OdrpZAIAMVw6tQpDRw4UM2bN9eyZcty1/kdPHiwatWqJUlq2rSpiRECJdvjr/9R6EKiJH3yw151b19V/W6saf+gAA9CHgRc29tf7ix0IVGSVm2K0xsztuuFEc0cEBU8BVUVAAAAAAAKYNKkSTp79qymT5+e24EqSaVLl1bz5s0l0YkKmOXk6VR9++vBIp8/5bsoO0YDeCbyIOC6srNzNOW7KBVlkL3FIn00e4+ysnLsHxg8BsVEAAAAAAAK4Ntvv1WHDh0UERFhc3/FihUVHh4uScrKytKjjz6qsmXLKjQ0VPfdd5/S0tKcGS5Qonw+d58yi9EJumzD39p7KMF+AQEeiDwIuK5f1h5TTFxKvmskXolhSLEnz2vBmqP2Dwweg3muAAAAAAC4iri4OMXGxmrgwIGX7cvJydGOHTvUrNk/U0O9+uqrWrlypXbs2CFfX1/17t1bTz/9tN57771ixZGVlaW4uLhitQF4om9+2SeLpCL0oeaa8dN2PTygtr1CggsJDw+XtzfdoMVBHgRc25c/7yp2GzPn7VbLCO6Vnqq4uZBPBgAAAAAAV5GSkiJJstiYO2revHk6efJknqndPv30U73++uuqUqWKJGn8+PHq37+/Jk+eLKvVWuQ44uLiVK1atSKfD3is+q9JvmHFauK1Nz7Qa0/OsVNAcCUxMTGqWrWq2WG4NfIg4OJqjJZCrlGR5jmVJMPQjz8v0Y/v9bBvXHAZxc2FTHMKAAAAAMBVVKtWTVarVatXr86z/ciRIxo9erSkf9aJSkhIUExMTJ5O1ebNmyspKUmHDx92UsQACq+IHbBACUAeBFydPXIYeRD5Y2QiAAAAAABX4evrqyFDhmj69Onq06ePevTooZiYGH3yySeqWLGiYmNjcztNk5KSJEmhoaG551/8/xf3FVV4eLhiYmKK1QbgiW59ZL12Hkgs1jSnTz/1sEYPfMtuMcF1XFzHD0VHHgRc2+Nv7dD3y/8uegMWi/r2ulnvP/2k/YKCSyluLqSYCAAAAABAAbz33nvy8fHRvHnztGLFCrVt21Y//fSTJkyYoAMHDigiIkKSFBIS
Ikk6d+5c7pf2hISEPPuKytvbm6n6ABsG3FJXOz7YUqw2hvRpoqpVy9gpIsDzkAcB13Vnj6ziFRMl/adnJL9fyBfTnAIAAAAAUADBwcGaNm2a4uLilJSUpCVLlqht27bauXOnmjRpIi+vC1+xQ0NDVa1aNf3111+5527dulUhISGqWbOmOcEDHm74bfXlbS3a9GwWSZ1ahqtRXQqJwJWQBwHX1btzdVUKCyjSkokWi1SxXID6dqlh/8DgMSgmAgAAAABQRAkJCTp27FiedaEkafjw4Zo4caL+/vtvnTp1SuPHj9fQoUNltVrNCRTwcOFhgbrj5lpFOteQNHJgpH0DAkoI8iDgGry9vfTQwEgZRZjv2zCkB+5oIB8fykXIH58OAAAAAACKaMeOHZJ0WSfq2LFj1bFjRzVq1Eh169ZVZGSkJk2aZEKEQMnxzjNtVLNycKHPG9yzrvp3LVohEijpyIOA6xgztInaN6tY6PPaXlNBz953jQMigiehmAgAAAAAQBHl14nq7e2t9957T2fPntW5c+f02WefKSAgwIQIgZKjYrkALf24u2pXvfqabBengRt4Sy19+nJ7WYoyLxwA8iDgQvz9vPXz+zerXdMKki5M452fi2mv7TUVNP+DmxXg7+34AOHWLIZRlIGvgHup22O2omOSVKdaiA4sHGB2OAAAOBV5EAAAlCSnzqTqlY//0oy5+5V0PtPmMXWrl9KjdzXSyIGR8vKikAgA8Bxp6Vl67bPt+mjOHp04nWrzmApl/fVA/wZ67r5rKSSiQCgmokSgExUAUJKRBwEAQEmUfD5TsxZG6+eVR/TLumOSpDturqkH+jfQDddVpogIAPBomZk5mrvyiL5acEA/rzoqSerdubru6lFHfW+oIV8f1jBFwVFyBgAAAAAAgMcJDvTRA/0bqEeHaqrW9VtJ0uQxbVQ1PMjkyAAAcDwfHy/171pLba+pkFtM/HDs9eRBFAlrJgIAAAAAAAAAAACwiWIiAAAAAAAAAAAAAJsoJgIAAAAAAAAAAACwiWIiAAAAAAAAAAAAAJsoJgIAAAAAAAAAAACwiWIiAAAAAAAAAAAAAJsoJgIAAAAAAAAAAACwiWIiAAAAAAAAAAAAAJsoJgIAAAAAAAAAAACwiWIiAAAAAAAAAAAAAJsoJgIAAAAAAAAAAACwiWIiAAAAAAAAAAAAAJsoJgIAAAAAAAAAAACwiWIiAAAAAAAAAAAAAJsoJgIAAAAAAAAAAACwiWIiAAAAAAAAAAAAAJsoJgIAAAAAAAAAAACwiWIiAAAAAAAAAAAAAJsoJgIAAAAAAAAAAACwiWIiAAAAAAAAAAAAAJsoJgIAAAAAAAAAAACwiWIiAAAAAAAAAAAAAJsoJgIAAAAAAAAAAACwiWIiAAAAAAAAAAAAAJsoJgIAAAAAAAAAAACwiWIiAAAAAAAAAAAAAJsoJgIAAAAAAAAAAACwiWIiAAAAAAAAAAAAAJsoJgIAAAAAAAAAAACwiWIiAAAAAAAAAAAAAJsoJgIAAAAAAAAAAACwiWIiAAAAAAAAAAAAAJsoJgIAAAAAAAAAAACwiWIiAAAAAAAAAAAAAJsoJgIAAAAAAAAAAACwiWIiAAAAAAAAAAAAAJsoJgIAAAAAAAAAAACwiWIiAAAAAAAAAAAAAJsoJgIAAAAAAAAAAACwydvsAACgJDIMKdswOwrXYbVIFovZUQAAnIU8mBd5EAAAAADgyigmAoAJsg2pzQKzo3AdG3pK3nSiAkCJQR7MizwIAAAAAHBlTHMKAAAAAAAAAAAAwCaKiQAAAAAAAAAAAABsopgIAAAAAAAAAAAAwCbWTAQAAAAAAADc1ON/SLHnzY7CvqoESpNbmx0FAMAdkAedg2IiAAAAAAAA4KZiz0sHk8yOAgAAc5AHnYNpTgEAAAAAAAAAAADYRDERAAAAAAAAAAAAgE0UE1EiGIZhdgi4BO8HADgX913XwvsBAAAAAADcCWsmwmPk5Bjaf+Sc
Nu2O1+bd8dq8+7R2RZ9VSmqW0tKzJUkHjyWpbo/Zah4ZphYNy6llw/Jq3rCcypTyMzl6z3M6IS33fdi0K15bouJ16mya0jKylZ1tyN/PqpBAHzWuW0YtGv7v/WhUXnWqhchisZgdPgC4HcMwdPR4sjbvPv2/+2+8tu07o3PJGUpN+ycPVrv5WzVrUE4tGpb73/03TJXKB5ocvedJSsnQ1qjT2hz1z/sRe/K8UtOzlJVlyM/XqkB/q+rXLK2WjcpfeD8iwxRZO1Te3jzvBwAAAAAAXAfFRLi9E6dT9dmPezXt+z06ejzliscahhQdk6TomCTNWXJIkmSxSD06VNPIgZHq1q6qvLwoZBVVVlaOFqw5qinfRWnp+r+veGxaerbS0rO1cuNxrdx4PHd7nWohemhApIb2qadyof6ODhkA3F5SSoZmLYzWlO+itGP/2SseaxjSsRMpOnYiRfNXH83dfn3TCho5MFJ33FxLfr5WR4fssQzD0NrNcZoyO0o/LDusrKz8RyCmZ2QrPSNbG7af0obtp3K3Vyjrr+G31dcD/RuoeqVgZ4QNAAAAAABwRRQT4bb+3HFK787apTlLDikzK6fI7RiGtGBNjBasiVGtKiF6aEADPdC/gUoF+9oxWs925ly6pn4XpWnf71FM3JULulcTHZOkp976Uy98sFmDbqmtR+9qpKYNytkpUgDwHPuPnNO7s3bpy/kHlJSSWay2fv/rpH7/66Qef/0P3XdbhEbf2VCVKwTZKVLPl5aepelz92vKd1HaeeDKBd2rOXkmTa9+uk2vfb5dPTtW0yP/aaQb21S2U6QAAHdhGIZiT5zPneElPSNb3t5eCvT3VmTtUDWpV0a+PjwAdDUnT6dq8+547YpOyN32/dJDan1NeV0bUU6BAXSLAYCrij97Ydazv0+dV1p6tiwWKcDPW3Wrl1LT+mUVFOhjdogu71xShrZExWvrntO522YvOahmDcqpeWSYSofQ/42Csxgs2gI3k3I+U8++u0kffLPbYdeoFh6kT8e3V9frqzrsGp5i3sojemDCbzpxOtUh7Xt5WfTUPY318sjm8vfznC96WTlSmwVmR+E6NvSUmNUPKJisrBy99eUOjZuyVekZ2Q65RukQX00e01pD+9Rj6umr+HPHKQ19cY2iDiY47Br/ubWO3nu2jUeN2CcP5kUeBCBd+K779S/RmrvyiDbtitfJM2n5Huvr46Um9cqqfbOKuv/2+mpUt4wTI3VdGZnZ+mHpYc1eckibdsXr2In8H3a1Wi1qWDtUba+toGF9I3Rdk/Ju+3fPgJXSwSSzo7Cv2iHS7C5mRwHAmTIzczRv5RF9++tBbdodryN/J+d7rJeXRQ1qlVabayro3j711K5ZRbe9h9tTTo6hpetjNXPBAf2585T2H0m84vF1q5fSdY3La3DPuup6fRW3nbGPPOgcFBPhVlZvOq5hL63VwWPOuTsMvy1Cbz3VmlGKNpxOSNMjr23Q179EO+V6kbVDNX1CB7W+poJTrudodKLmRScqUDBRBxM09IU1+nPnqasfbAe3tKuqT8a1V9VwRin+W1p6lsZP3ao3ZuxQTo7j/5yuWC5AH714vfreUNPh13IG8mBe5EGgZNt/5Jw++Ga3Zvy8X4nJRZttoFPLcI0cGKnbb6opq7Xk3VCOxaVo6uwoffrj3isWYa+keWQ5jRwYqbt61HG7B1npRAXgzuLiz+uj2Xv08Q97dfzU+SK10aReGY0cGKkhveqVyFHnCYnp+uynfZo6O0rRMUVLCLWrXlh66r7bIlSmlJ+dI3Qs8qBzUEyEWzAMQ2Pf26TXPtvu9GtXCw/SvHdvUrPIMKdf21X9/tcJ3fb4coeNRsyPl5dFE0Y219j7r3X7p43oRM2LTlTg6j79Ya9GTVzvsNGI+SkV7KOvXu2sXp2rO/W6ruzgsUT1HLXUoaMR83Nv33qa9mJ7+fi4902TPJgXeRAomTIzc/Ta59v0yrS/irV0x6XaXltB0yd0
UP1aoXZpz9Xl5BiaNmePxrz9p1JSs+zSZmTtUM14paOua1LeLu05A52oANyRYRiaMW+/Hn/jD51LyrBLm3WqhWj6hI7q0CLcLu25gwWrj2rEhN+KXIj9t4rlAjTtpXbq06WGXdpzBvKgc/CVFS4vKytH941ba0ohUZJi4lLU+b5ftG5LnCnXdzWLfzumm0YscnohUbrwRfGFDzbrsUkbnDIKBABcxaTPt+n+l9c5vZAoSYnJmer3+DJ9teCA06/tinbuP6P29yw0pZAoSdPn7le/x5cpNc0+HaYAAHPs2HdGbe7+WS99uMVuhURJWr/tpJoOmKu3vtih7Gz7teuKDscm6eYRizTy/363WyFRujATRNvB8/XsOxuVlk6+BQBHiD2Rop6jlmjYS2vtVkiUpOiYJHUatlCPTdqg83bMDa7obGK67nl+tXqNXmq3QqIknTidqr6PLtPdz63SmXPpdmsX7o9iIlxaTo6h4ePXafrc/abGkZicqW4PLtb6bSdMjcNsyzbEqvcjS5Wa5vzO7Eu99/VuPTppgxhYDaAkmPT5Nj37ziZTY8jONjTk+dX6eqFzprZ2VXsOJajL8EV2/aJWFAvXxKjfY8uUkWluPgYAFM0va2N03V0/a0vUaYe0n5aerafe+lMDx6w05UEkZ/hzxym1GDRPK/487pD2c3IMTfp8u7o+8KtdO7kBANK2vafVYtA8/bL2mEPaNwzp3Vm71Pm+hTqdULSpr13d0ePJan3Xz/pyvuMe+p21MFqt7/pZh2M9bMgfioxiIlzak2/+oS9+NreQeNH5tCzdOnKJduw7Y3Yopvhzxyn1fXSZMjJd4+nWD77ZrZenbjU7DABwqE++32N6IfEiw5CGvLBaC9ccNTsUU8TEJevmEb8q/qxrfBld/Hus7n5uNSP1AcDNzF1xWH0eWaq0dMcX+X5YdtgjHz5Zv+2Ebrz/F6eMlli75YRuvH8RBUUAsJMtu+PVedgvTpnxbOPOeHW61/MKiodjk9T+ngXafyTR4dc6cDRR7e9ZoEPHKCiCYiJc2LyVR/TOV7vMDiOPhKQM3fmM5z7dmZ/k85ka9PRKu04dYw8vf7RVK//82+wwAMAhdh04q1ET15sdRh4XRiiuUVy8uSPznC0nx9A9z6/RsRMpZoeSx5wlh/TR7CizwwAAFNDKP//WwDErlZXtvAdBFq07prufW+0xs7rs3H9Gt45couTzzvtuunl3vHqNXlLi+gEAwN72HzmnWx5arAQnPqCxKzpBtzy0WCnnM512TUc6dSZVNz/wq2LinPfdNPbkedOWvIJroZgIl3TmXLoefOU3u7W3/NPu2r+gv5Z/2r3Ybe2KTtAr00rWiLhn39moQ3Ya0m7P90KShr20Vske8gcBAFyUlZWjoS+usdtocHveey/maE/plCyIaXP2aOVG+0yjZu88+PTkjTwlCgBu4Gxiuu56bnWRcrvValGVioGqUjFQVqul0OfPWXJI0+bsKfR5riYjM1v/eXZVkTqhi/sart1ygplxAKAYsrJydPdzq3WqCDO9FPcevmlXvJ57zzVm/CkOwzD0wCu/6cDRwo9ILO5rePBYkka8vK5E9QPgchQT4ZIeeW294uLt97RD9fBg1a1eStXDg+3S3mufb9emXafs0parW/nn3/rwW/uNerD3e3H472Q9M3mjXdoCAFfxxowd2rQr3m7t2fveO2/lUX3zy0G7tOXqDh1L0pi3/7Rbe/Z+L1JSs3TfuLVMdwoALu6xSRuKvOZueFiAji29U8eW3qnwsIAitTHm7Y1uv+bRfz/+Szv2ny3SufZ4DSdN366NO0tGPwAA2NvbX+7Un0W8h9rjHv7+17u1epNj1tl1lu9+Paiflh8p0rn2eA1/XnVUX/8SXaRz4RkoJsLlLFxzVLMWuvaNKTvb0L0vrlV2tmusH+goaelZum/cOrPDuKop30VpjZv/QQAAF+09lKDxU7eYHcZVjX5tvcusH+hIIyasc7lpvv9t
5cbj+vTHvWaHAQDIx4LVR/Xl/AOmxpB8PlPDx7vviIKtUfF69dNtpsaQk2No6ItrmO4UAAop6mCCXppi/nfcYS+tddvpTk+cTnWJZVBGT1xf5Iej4P4oJsLlTPp8u9khFMjOA2e1cE2M2WE41Jwlh+w2vamjvTFjh9khAIBdTP5ql92mN3WkM+fSPb6AtXl3vJZtcI+1eSd9vp3RiQDgggzD0PPvbzY7DEnS8j/+1io7TdvtbOOnblW2E9eazM/u6AR9u6hkzM4AAPby34//cokHMQ4eS9KMn/ebHUaRvPPVTp1OSDc7DJ1NzNDkmTvNDgMmoZgIl7Jj3xmt3XLC7DAKbMp39pv+0xW508+3cG2M20/bAwDnkjL01QJzRy4Uxkdz9nj0KP2pbpQHDx5L0pLfY80OAwDwL+u3ndT2fWfMDiOXO33Hu+jI30la4EIP8rrjawgAZjl5OlVzlhwyO4xcU76LcrtR+ukZ2fr0x31mh5Hrs5/2KTXNtWfvgWNQTIRLmTrbvf4oX/x7bJEWvXUHW3bHa8N291kPwjCkad/vMTsMACiWmQsOuPyUmpc68neyFq07ZnYYDnE2MV1fL3Ltadf/jc5NwHm2bdumPn36qHTp0ipVqpT69u2r48ePKyQkRIMGDTI7PLgQV7s3/7TiiP4+mWJ2GIUybc5elxp9/+fOU9q0y32+KwOOQB5EQX320z5lZrnOA6i7oxO0ZnOc2WEUyvdLD7nUEiNnzqW7VIEYzuNtdgAlVVRUlCZOnKhly5bpzJkzql69uoYPH64xY8YoLCxMCQkJOnfunIKDg80O1WkSkzM0c4F7ddpJFwqgbz3V2uww7M7dCruS9OmP+zTuwWby9+PWZkv6icPaOaJWgY5tMc91vqzDM5EHL2cYhst1OBbEh99GqWen6maHYXdfzNuv1DTzp+IpjAVrjupwbJJqVgkxOxSXRB6EvSxfvlw9e/ZUjRo19MILLyggIEAzZsxQ9+7dlZycrKZNm5odIlxEUkqGy3W2ZWcbmrnggJ4Zdq3ZoRSIYRiaPs91RmNcNH3ufrVsVN7sMJwiaccq7XuhyxWPIW+WLORBFIYr3sM//2mfOrWsZHYYBTZ9rutNzfr53H0a0rue2WE4BXnwH/S4m2D27NkaMmSI0tPT1aJFC3Xo0EG7d+/WM888oyNHjujMmTOKjIwsUR2okrRq43Elu+EiuPNXH/W4YqJhGPp51VGzwyi0+LNp+mPHKbf6g8CZvEuXV83HZ+a7P/GvpTqz8ksFNbjeiVGhJCIP2hYdk6Sogwlmh1FoSzfEKjUtSwH+nvVnpTvmQcOQflkbo5GDGpodiksiD8IeTp06pYEDB6p58+ZatmyZAgICJEmDBw9WrVoXitV0ouKirVGnXXIdZHeagebI38mKi081O4zLbNh+0uwQnCaowfW6Zsbla22mxURp/yu3qnzXESZEBbOQB1EYpxPStP+I683o5k55MCfH0B87XC/ejTvjlZ2dI6vV8ye+JA/+w7N6fdzA+vXrNXjwYIWEhOjXX39V586dJV0o3owZM0ZvvfWWJKlFixYmRmmOTbvjzQ6hSPYfSdS5pAyVDvE1OxS7OXYiRSfPuM7w+cLYtCueYmI+rP5BKtf5bpv70k8cUswnj8g7tKJqPz3HyZGhJCEP5m+zm+bB7GxD2/aeUZtrK5gdit3k5BjaHOWe78fmqNNmh+CyyIOwh0mTJuns2bOaPn16bgeqJJUuXVrNmzfX8uXL6URFLle9J7vT3xyuGuuO/WeVnpEtP1+r2aE4nJePr7zKhOfZlpV4Woc/HK6Qxp1VddjbJkUGM5AHURiueg/fd+ScEpMzVCrY9fty9x0555KDb86nZWnPoXNqVLeM2aE4HHnwH55fOnYh2dnZGjZsmDIyMjRnzpzcDlRJslgsmjBhgry9L9R3W7ZsKUk6cOCAHnzwQTVt2lTe3t5q3LixGaE7hasmmILYusc1vyQWlTu/F+4cu1lyMtJ0
cNIdyk5NUu2nZ8u3XGWzQ4KHIg9emTvfv9y18Jaf6JhEJSa73he2gnDnz5FZyIMojG+//VYdOnRQRESEzf0VK1ZUePiFzobZs2erffv2Cg4OVs2aNZ0YJVzFpl2ueU+OiUvRqTOuN9rPls27XfO7dmZWjnbsP2N2GKYwsjIVPel2efn4q/aY72Sxen5BFf8gD6IwXDUPStIWF33g599c+fudK7+/jlSS8yAjE51o9uzZ2rNnj3r16qUuXS6fZzcwMFBVqlTRkSNHckdk7Nq1SwsXLlTr1q2Vk5OjnBzXmyLFHgzDcNkvCQWxeXe8OrfynNFw7v1euG/sZjk6bZTOR29R1WFvK6RRR7PDgQcjD16ZK39JuBp3jt0Wd/55dkWfVVp6FusHFwJ5EAUVFxen2NhYDRw48LJ9OTk52rFjh5o1a5a7rUyZMho1apROnDihyZMn2y2OrKwsxcXF2a09OM6uA/lPS2a1WhQeFpDv/ktVuuS4SgU8Jy4+VdnZ+a/f89umA2rZ0PVHE2zfm/9n3ezXcMPWwwovnV6gthwtM7OiJB+nXOvoRyOVdnSXGrz5p6yBpRx2nczMTB07dsJu7YWHh+c+uIiiIQ+isP6KunxqyIvMvof/ue2w6lbKLlBbZtq841i++xz9GkpXfh037zymG1sUvC1HIg8WTHFzIVnUiX744QdJ0l133ZXvMampqfLy8spNvr169VKfPn0kSUOHDtWmTZscH6gJTp5J04nT7vFkpC1/7fWsAta2ve77hOW+I+d0PjVLgQHc3goifulnOr3sM5VpP1AV+zxudjjwcOTBK9u2z33vvX/tcd/YbXHn9yIry9Cu6AS1aBhmdihugTyIwkhJSZF0YTT9v82bN08nT57MM7XbzTffLEmaO3euXeOIi4tTtWrV7NomHKTueCmgqs1d4WEBOrb0zkI3ufGbvgU6rurN3yj2xPl89/e7bYCUsqfQ13e6GqOlUtfa3GX2azj6kcc1+uxvhb6+IzR8f6cCqjdy+HVOzJus0yu/VL0Jy+RXsZZDr7Vv3z5V62a/WUliYmJUtart30cUDHkQhVZthBR6nc1dZt/Dn3n2RT1z//JCX9/pwm+Xyne3vcvBr6F05dfx/Q+m6v0XXGOJCPJgwRQ3FzLNqRNd7AC9OHXbvx0/flwnT55UgwYNFBQUJEny8ioZb9G5pAyzQyiWc0nuORVafs4lu/f7keSCc4m7ovPRW3T041Hyr9ZQNUZ/ZnY4KAHIg1eW4Ma5MDHFfWO3xf3/LnHv+J2FPIjCqlatmqxWq1avXp1n+5EjRzR69GhJYp0o5GWjw91lWNzkbyyXjtOVY7O/c5sX6diMMao+cppCGnUwOxyYgDyIwnPhPOg293AXfg1dOkfbH3mQkYlOdfLkSUnK7SD9t6+++kpS/p2sznDDDTfo6NGjTr9uuqWCFHBfgY5d/ml3VQ8PLlT71SsF5/7v/gX9C3Xu0bhk3Th80RWPWbZiterWfahQ7bqyY373SNarrxfkiu+FJF3Xpr18jHOFatvpvLwV+qZ5TwJnJZ9V9KQ7ZPH2VZ1nf5TV3/Z9yVkaNGgg5WQ5pO3q1atrxYoVDmkbhUMezJ8hL2UHPlOgY13x3nv4yN+qW7duodp1ZSd9e0je11z1OFd8LyTprsH3KignulBtOx15MA/yoHvw9fXVkCFDNH36dPXp00c9evRQTEyMPvnkE1WsWFGxsbFO6UQNDw9XTEyMw6+D4rv1kfXacSDR5r64+FRVvfmbArVTKSwgdxRBqzvn6nj81Wf1ibvKMd/P+VatG7v+NKf3/3erfv39pM19Zr+G777zpm67wTXW2R29u6Ji0hzXfurRXTr45iBV7Pukwm4c6rgLXSIiIkKL7Xivu7iOH4qOPIjCeuzNHfphxd8295l9D3/1/17W4B6fF+j6Znrjy/1679uDNvc5+jW8eI38PDxyhJ4d+maB2nE08mDBFDcXUkx0ouDgYKWmpio6OvqyNy4m
JkavvvqqJOWuE2WGo0ePKjrahM4n/3SpXsEOrR4erLrVizYfsa+PV5HPvZLU8+cVfdjFO+0Ko06aFHj1w1zxvZCko0cOS5kuPkWd1Vtm/aYbhqFDb9+tjBOHVPvZH+Vftb5Jkfwj+mC0lO2YTlS4DvLglXhJTQp2pCvee7OzM0163RykSqJU9uqHueJ7IUlxccelJBd/P8iDeZAH3cd7770nHx8fzZs3TytWrFDbtm31008/acKECTpw4IAiIiIcHoO3tzdT9bmJGpVL51tMzM42rjj9Wn6Ox6cW6bx/a9qohqpWddw6P/ZSp/pRKZ9iotmvYeP6VVW1qmsUE332S3JQJ2pWYrwO/LeXAms1VYWejyrz7OVr1XmXKi+L1WrX6/r4+HCvc0HkQRRG3ZrHJdkuJpp9D28UUdktPkcN6iRJsl1MNPs1rF+7osu8huRB56CY6ETt2rXT3LlzNXHiRP3444/y9fWVJEVFRalfv35KSEiQZO6IjOrVq5ty3QxLWRW0zn40LrnQ7VevFCxfHy9lZObo6PHCnV+Q6wUG+KhSnTqFjstVxfp5F+j+64rvhSTVqF5F3nLxp2y9zLv9Hv/uFSVu/kUVb3tGZdr2My2OS9WpXcehIzLgGsiDVxZtZEuWq//x6Yr3Xm+rRTU8KA+e8gmU7a7fvFzxvZCkyuFhCqjg4u8HeTAP8qD7CA4O1rRp0zRt2rQ823fu3KkmTZqUqOm5cXUtGoZpwRrXGz0TGuKr2lVDzA6jQFx5DeDmkeXMDsEpzm1aqIwTh5Rx4pB2DKti85jGHx+SX8Wazg0MpiAPojBc+R7uyrFdypXjdOXY7Ik8+A+KiU40duxYLVy4UAsXLlT9+vXVqlUrxcfHa82aNRo8eLCOHz+ulJQUU+cXN2sKpFNnUlWh89cFOrYg03v92/4F/VW3eikdPZ6sej3tvzDsHf1u1Rf/N8nu7Zql76NLNW/l1af5c8X3wmKRonZuUYC/a9/esnKkNgucf91zWxbr+HcvK+SaG1Tl7v9zfgD52LNnj7z5zuHxyINXVqHTLJ06e/VHOVzx3ntt43ra9O0Bu7drluff26RXP9121eNc8b2QpAXzZqtZpGt/sSMP5kUedG8JCQk6duyYevTokWd7dna2MjMzlZmZKcMwlJaWJovFIj8/P5MihbO5aidb88hysrjyeo6XcNXXsE61EIWWKhm/y+VuuEflbrjH7DDgwsiDyE+Lhq750EXFcgGqXKEAU7K5gGsiysjb26KsLMPsUPLw8rKoaX3XfH/tjTz4D9fubfcwrVq10tKlS/X8889r8+bNWrx4sZo1a6ZZs2apefPmmjFjhho3bqzAQPe4mdlT+bIXbuJ/nyz+EGszNGvgWTfPpvXLFaiY6Ioa1Ap1+UKiWTLPHNeht++SZFHplj11Zm3+86qXanqzfEIrOi84lAjkwStr2qCslq63PQWMq2vWoABzgrqRpm6c1328vdSwjouPzjcJeRCOsmPHDkm67GGYmTNn6t577839d0BAgGrUqKHDhw87MTqYqWUj1yyEuWqBzpaGtUMV4G9Valq22aHk4arvLWAG8iDyU6tKiMqW9tOZc+lmh5JHy0ZhbvNQjb+ftxrXLaO/9rjWck4Na4cqMID+15KGd9zJOnXqpHXr1l22/ZtvLnRmmLlOlNlaNgzTzyfds4DlTl/GCsKdvxi56lNPriAtdq+yk05Lko59/sQVj43470o6UeEQ5MH8tWgY5rbFRI/Lg2788zSpV0Z+vvZdq8FTkAfhKPl1og4dOlRDhw51fkBwGZXKB6pD84pau+WE2aHkMfCW2maHUGDe3l66/aaa+mqBa60FPLCb+7yGgKORB5Efi8WiAV1r6aM5e8wOJY+B3WqZHUKhDOha2+WKiQNvca/XEPZBMdFFbNy4UdLl60SdP39ev/zyiyTpyJEjSkxM1Pfffy/pwgiPGjVqODdQB2rRMEw/r3K/YqLFIjXzsLUS3LlT2J07gB0t
pElntZjnWtMiABeRB6UWLj4t5ZW480MottSsEqwypXx1NjHD7FAKzZ1zuKORB+EoI0eO1MiRI80OAy7q4UENXaqY2LpJebfLFSMHRrpUMbFqxSD16sR6tMBF5EFcyciBkS5VTAwr46/+Xd2rEHZfvwiNn7pFGZk5Zoci6cJsOMNvq292GDABxUQXsWnTJkmXj8g4efKk+vfvn2fbxX9Pnz7do57wcdcRZfVrllZwoI/ZYdhVpfKBqlQ+UMdPud+0s+72xRjABeRB971/eXtb1KSeZ01zarFY1KJhmJZtcL+Rou76OQIAT9XvxhqqWC5AJ06nmh2KpAuduu6mzTUV1LRBWZcZlfFA//ryZqFbACiQJhFlXWqU/n39IuTv514lkQrlAtS/ay3NWugaD9bcflNNhYeVzOVpSjr++nEBOTk52rp1q6xW62VTAtSsWVOGYdj8z5M6UCWpc8tKKh3ia3YYhdbvhppmh+AQ/W5wv9E+4WEBat2kgtlhACgk8uAFNasE69r67leU69GhmkdOq+mOedBqtahnx2pmhwEAuISvj1VPDG5sdhiSpBqVg91uNIZ04SGfZ+69xuwwJEmlQ3x1/+0NzA4DANzK0y5yDw/093bLh2ok6ckhjeXlZf46j15eFj15j2v8XQPno5joAry8vJSUlKSsrCwFBASYHY5pggJ9NLR3PbPDKBQvL4se6O+Zw7ofGuB+X5BG3N5APj7c1gB3Qx68wGKx6GE3/GLjrl/GrubunnXdbuaBfjfUUJWKQWaHAQD4lyeGNHaJkeOfje+gAH/3Go1x0cBbaqt3Z/OnFn3n6daqWK7k/r0KAEXRs1N1/efWOmaHoUmPt1L1SsFmh1EkzSLD9PS9TcwOQ08MbqyWjcqbHQZMQq87XIq7FbB6dqymGpVDzA7DIRrXK6uOLcLNDqPArFaLRtzhmYVdACXHf26to1LB7lPAqlu9lG5qU8XsMByiVLCvBvc0/wtvYXhqYRcA3J23t5emT+ggHxOnxnywfwPd2KayadcvLovFoo9ebKcypcybzahHx2q6x80egAYAV/Hes21MfRijc6tKbv99adyDzdSwTqhp169fs7QmPNzctOvDfBQT4VLq1wrVTW70Bcfdk9DVPDzIfX6+vl0YjQHA/bnbKP2RAyNdYqoVR3GnPN+gVml1blXJ7DAAAPloElFWrz7SskjnxsWnqurN36jqzd8oLr7way9G1Cit159oVaRru5JK5QP10YvtinRucV/DCmX9Ne3FdrJYPPfvHgBwpHKh/vp0fHsV5TZa3Ht4mVK++uzl9m7/3dXfz1tf/Lej/P0Kv8xIcV9DP1+rvvhvR7ed4QD2QTERLmfs8GvNDqFAWjYK081tPXM0xkV9b6ihyNqhZodxVV5eFj0zzDXmXweA4nrs7sYKdIM/0CuWC9C9fdyn8FkUjeuVVZ8u5k+pVhDP39+UDk4AcHFP3tNYjw9uVOjzsrMNxZ44r9gT55WdbRTq3GrhQVr68S0KCTJvRJ89DehWW+883brQ5xXnNSwd4qsl027h4VUAKKaenaprWhEeCinOPTwowFuLpnRT7aqlCn1dV9SyUXl9/9YN8rYW7rtfcV5Dq9WiOW/eoNbXVCjUefA8FBPhcrpcV1kP9Hft6U59fbw045WObv9Ey9X4+ljd4ud86p7GatWY+boBeIZaVUP02mNFG7ngTNNeaqfQUn5mh+FwU56/XqEhrt0B26NjNd3Vw72mZAWAkshiseitp1rrqXucs+ZR7aohWv15D7ddHyo/j97duEgFxaIIK+OvFZ9017X1yznlegDg6e6/o4E+fqldkUYoFlbo/x4G8bQiWI+O1fXTOzcVaYRiYfn5WvXD2zeqlwusWwzzUUyES3r98VaqXsl1n/ob/1BzNapbxuwwnOK6JuU1Zqj5C/zmJ7J2qF4eyXzdADzLw4MauvS6tXf1qKM+XWqYHYZTVK4QpHefaWN2GPkKDfH935dx137wBwBwgcVi
0etPtNIHY9sqwN9xnYBdr6+idV/0VK2qIQ67hpkevbuxZr95g8qWdtyDTS0bhem3L3qqecMwh10DAEqi++9ooHnv3qwKZf0ddo1rIspq7Yweur5pRYddw0w9O1XX8k+6q7YD83zNysFa9vEtJea7P66OYiJcUqlgX332cge7tXc0LlkHjibqaFxysdtq2SjMpYtrjjD+oWZ2m+7Unu+Fl5dF0yd0kL+f608HCACF4eVl0ecTOthtulN73nsrlgvQe8+2tUNU7mNwr7rq2bGaXdqy53shSe8+00aVK7juA1gAgMtZLBY9PKihtn9/mzo0t28nZ0iQjz4Z116/Tu2mSuUD7dq2q+nftZZ2/XSb3ack9/Xx0quPtNT6mb0UUbO0XdsGAFzQq3N17frpdg28pZZd2/W2WvTSA8208ZvealyvrF3bdjXXN62o7d/306g7G9q97ZEDI7Xjx9vUvrnrPuQM57MYhlG4SXIBJxo/ZYte/mir2WHkqlguQOu+6Km61T1jnu3C2Ln/jDoMXaiEpAyzQ8n19pjWenxwY7PDKJKsHKnNArOjcB0bekrePN4CXOa7Xw/qzmdWylX+WvPztWrxR93UqWUls0NxupOnU9V+6ALtP5Jodii57usXoU/Gt3fLUYnkwbzIg0DJlZNj6JMf9mryzJ3ae/hckdvx97Pqzu61Nf6h5h43renVGIahOUsOadLn27Ul6nSR2/H2tqjfDTX10gNN3a4DesBK6WCS2VHYV+0QaXYXs6MA4AzzVh7Ra59t04btp4rchpeXRb07V9dLDzRVs8iSN6J89abj+r9P/tLS9X8Xq52b2lTW8/c3VedW7vWdnzzoHBQT4dIMw9Bjkzbova93mx2KQkN8tXp6D10T4V5fKuxp/bYTunnEr0pJzTI7FL0woqleGdXC7DCKjE7UvOhEBfI3bc4ePfjKb2aHIavVop8m31Si10o48neS2t+zUMdOpJgdivp3raVvJnWW1eqeN0/yYF7kQQCGYWjln8c15bsoLVgTo/SM7AKd16BWaY24o4Hu6V3PoVN+ugPDMLRxZ7ymfBel75ceKvD31uqVgnRfv/oafluE2472pxMVgCfYsjteU2dH6bvFh5SUklmgc6pUCNS9fSM04o76qhZesh6msWXf4XP6aE6UZi6IVvzZtAKdUy7UT4N71tWD/Ruofq1QxwboIORB56CYCJdnGIaeeutPvf3lTtNiKF/GX0um3aKmDVh0/fe/TujWh5fonIkjFMc/1EwvPdjMLUdiXEQnal50ogJX9vH3FwqKZv3V5udr1Zw3byjRhcSLDh5L1M0jftXBY+Z9U/nPrXU045WO8vFx3xsneTAv8iCAS2Vm5mj3wbPavPu0tkTF68jxZC1YHSNJuv2mmmrRMEwtGpZT88gwhZVx3HpT7iw7O0f7jiRq8+54bd4dr+Px55Wali2r1aIAP29F1Cj1v9cxzCOmg6UTFYAnyckxtP/IOW3efVqbo+K1/0ii5q8+Kknq06W6rokoqxYNw9SyYZgqVwh06/5BRzEMQ4djky/kwah4HY5NVlpGtgzDUICft2pUDs59DWtWCXb715A86BwUE+EWDMPQG9N36PkPNikry7kf2Qa1SmveuzezVsIltu09rdseX+70jlRfHy9NHtNaIwfZfy5wZ6MTNS86UYGrm7PkkO4bt7bAT2jaS4Wy/vrujRvcbpoTRzp+6rz6PrpMf+4s+jQ8RfXEkMZ644nr5OXl3l/2yIN5kQcBXMmxuBRV6/qtJClmySBVDXfP0XNwHDpRAXgy8iCuhjzoHHxlhVuwWCx6etg1+nNWb6dNM+rlZdHT9zbR1tl9KST+y7X1y2mbgxb4zU+rxmHaOruvRxQSAaAo+netpZ0/3qab21Z22jXv7F5bu+feTiHxXyqVD9RvX/bUa4+1lK+TRgfWqhKiFZ9211tPtXb7QiIAAAAAAHAvFBPhVppFhmnjN7314gNNZbU6riOtQa3S+u2Lnpr0
+HXy9/N22HXcWXCgj95/rq1WfnaralUJcdh1fH28NPHRlvr9y15qWKeMw64DAO6geqVgLf7oFk17qZ2CA30cdp0KZf314+Qb9fWkLioXyvRptnh7e+mZYddq6+y+atU4zKHXenhQpLb/0E9drnNeIRkAAAAAAOAiqiRwO74+Vk14uIXu7F5HH367W1/OP2C3Kd+aNiirhwc21N0961BELKDOrSppxw/99MXP+zXluyjtik6wS7uhIb66t289PTyooepUK2WXNgHAE1gsFo24o4F6dqymj+bs0cff79WJ06l2abtWlRA92L+Bht9eX2VL+9mlTU/XsE4Z/f5lL81ZckhTZ0dp7ZYTdmnXz9eqQbfU1qg7I9WyUXm7tAkAAAAAAFAUVEvgtiJrh+qDsddr4qMtNWthtKbO3qPt+84Uuh0/X6sGdK2lkQMj1fqa8m6/4KwZggJ9NHJQQz00MFJrN8dpyuwo/bjsiDKzcgrdVouGYRo5MFKDbqmtwABuUQCQn8oVgjTh4RZ6YURTzV1xRB9+G6U1m+MK3Y6Xl0Xd21fVwwMj1a1dVabQLAJvby/deWsd3XlrHe3Yd0ZTZ0fpq4XRRXrYqW71UnqwfwMN7VOPUaEAAAAAAMAl0FMPtxcS5KsHB0TqwQGROnk6VZt3x2tzVLw27z6tXdFnlZKapdS0LHlbvRTgb1WFsgFq0TBMLSLLqWWjMDWqW0a+PlazfwyPYLFY1LFlJXVsWUnpGdnasf+MNu2K1+bd8doSdVqnzqYp9uR55eQYslotqlO1lJrUK3Ph/WhYTi0ahtFxCgCF5Otj1YButTWgW22dS8rQ1j2ntXl3vDbtite2fWeUmJyh1PRseXlZ5O9rVZlSvmoe+c9999qIsgpy4JSpJU2TiLKa8kI7vfdsW+05lKDNu09rc9SF9+PvU+d1LC5F2TmGrF4W1agcrAa1Sqtlw/K570flCoE82AQAAAAAAFwKxUR4lArlAtS9QzV171DN7FBKPD9fq1o2Kn/Z1Gx1e8xWdEySalYO1t75d5gUHQB4ptIhvurcqpI6t6pkdiglnre3lxrXK6vG9crqnj71crfn5sEqwTqwcICJEQIAAAAAABQMxUQAAAAAAADATVUJNDsC+/PEnwkA4BiemDNc8WeimAgAAAAAAAC4qcmtzY4AAADzkAedw8vsAAAAAAAAAAAAAAC4JoqJAAAAAAAAAAAAAGyimAgAAAAAAAAAAADAJoqJAAAAAAAAAAAAAGyimAgAAAAAAAAAAADAJoqJAAAAAAAAAAAAAGzyNjsAACiJrBZpQ0+zo3AdVovZEQAAnIk8mBd5EAAAAADgyigmAoAJLBbJm45DAEAJRR4EAAAAAMB9MM0pAAAAAAAAAAAAAJsoJgIAAAAAAAAAAACwiWIiAAAAAAAAAAAAAJsoJgIAAAAAAAAAAACwiWIiAAAAAAAAAAAAAJsoJgIAAAAAAAAAAACwiWIiAAAAAAAAAAAAAJsoJgIAAAAAAAAAAACwiWIiAAAAAAAAAAAAAJsoJgIAAAAAAAAAAACwiWIiAAAAAAAAAAAAAJsoJgIAAAAAAAAAAACwiWIiAAAAAAAAAAAAAJsoJgIAAAAAAAAAAACwiWIiAAAAAAAAAAAAAJsoJgIAAAAAAAAAAACwiWIiAAAAAAAAAAAAAJu8zQ4AAAAAAAA4V9Z/X5dx/ITZYdidpVJFeb/wtNlhoIB6j16q6GOJZodhd3WqltLP799sdhgAroA8CFfhibmQPOiZKCYCAAAAAFDCGMdPSDHHzA7D7gyzA0ChRB9L1O7oBLPDAFACkQfhKsiFcBdMcwoAAAAAAAAAAADAJoqJAAAAAAAAAAAAAGyimAgAAAAAAAAAAADAJoqJAAAAAAAAAAAAAGyimAgAAAAAAAAAAADAJoqJAAAAAAAAAAAAAGyimAgAAAAAAAAAAADAJoqJAAAAAAAAAAAAAGyimAgAAAAAAAAAAADAJoqJAAAAAAAAAAAA
AGyimAgAAAAAAAAAAADAJm+zA4BnMwxDys42OwzXYbXKYrGYHQVcgGEYysoyzA7DZXh7W/jdgMciF16CPIj/IQ/mRR4EAAAAALgyiolwrOxsZd12l9lRuAzvH2dJ3vzaQcrKMuTbYrrZYbiMjM33yseHTlR4KHJhLvIgLiIP5kUeBAAAAAC4MqY5BQAAAAAAAAAAAGATxUQAAAAAAAAAAAAANlFMBAAAAAAAAAAAAGATxUQAAAAAAAAXtGPfGc1ccCD334vWxSgtPcvEiAAAcJ4jfyfpm0XRuf/+cdlhJSSmmxgRUHJ5mx0AAAAAAAAALsjMzNH3Sw9pyndRWrf1RJ59Iyb8pufe3aTht9fXg/0bqGaVEJOiBADAMQzD0JLfYzXluygtWH1UOcY/+x59fYOefXej7u5ZVw8PitS19cuZFyhQwjAyEQAAAAAAwAUkJKar24O/6j/PrtJv/yokXnQmMV2TPt+uhn1/0MI1R50cIQAAjpOZmaNhL63VLQ8t1s+r8hYSL0pNz9YnP+xVswFz9cE3u50fJFBCUUwEAAAAAKAQtm3bpj59+qh06dIqVaqU+vbtq+PHjyskJESDBg0yOzy4qZTzmbp5xK9aufG4JMlG/+mF7f/bkZaRrT6PLNMva2OcEyAAXIJcCHvLyTF099hVmjFvf4HPGT1xvSbP3OnAqABcRDERAAAAAIACWr58udq0aaO9e/fqhRde0Kuvvqpjx46pe/fuSk5OVtOmTc0O0SXct/VP3bJ+lc19vvNna9axI84NyA08/Op6bdodX+DjDUPKMQzd8cRyHT2e7MDI3NeyT7pr9fQesljybp/77k3a+E1veXtbbJ8I4IrIhVdHHiy8t77YodmLDxX4eMOQLBbpyTf/0JpNxx0YmfsiD8KeKCYCAAAAAFAAp06d0sCBA9W8eXNt3bpVY8aM0ahRo7R8+XIdPXphukk6UFEUx0+d16yFBwp9nmFcmO7t4+/3OCAq93fPC6vVuG4ZPTPsmtxtI+6or5vbVNHdz61WVlZ+4z8B5IdcCEfIzMzR5Jk7Lyt6XY1hXPhv8le7HBOYmyMPwp4oJgIAAAAAUACTJk3S2bNnNX36dAUEBORuL126tJo3by6JDlQUzac/7lVWdtE69CwW6ePv9yojM9vOUbm/2BPn9dB/f9PLI5uraYNyiqhZWm+Paa0xb/+pvYfPmR0e4JbIhXCE+auP6nh8au5U3oX188ojioljlP6/kQdhT95mBwAAAAAAgDv49ttv1aFDB0VERNjcX7FiRYWHhys9PT13lMapU6dUqVIljR49WqNHjy52DFlZWYqLiyt2O+WyMj2yQyArK1Mnjh0zO4xC+/SHKFksKlInqmFIp86m6at529S1TQX7B+dAWZmZDr/G7MWH1KtTdc2a2Enn07K0ZnOcpnwX5dBrZmVm6pgbfg49WXh4uLy9PfGu53xm50Ly4JW5ax78aPb2Yp2fY0gfztqiUQNr2yki53F0LiQP4qLi5kJPvGcCAAAAAGBXcXFxio2N1cCBAy/bl5OTox07dqhZs2aSLnR0hoeHa8mSJapdu7a2b9+ubt26qWLFihowYECx46hWrVqx2pCkvzp3U8OQ0sVu50pWnz6lMr/86NBr/Nu+ffvU1A6vj9M1+kjyKl4XzX0PPCmdWWWfeJyl3suSfxWHX2bUxPWKXTZIOTmGeo5a6vDr7du3T9Wq3enw66DgYmJiVLVqVbPDcHuukAvJg1fmtnmw7ouSf3UVep7TS0x6e5omPfW1HYNyEifkQvIgpOLnQoqJAAAAAABcRUpKiiTJYqOTa968eTp58mTutG5BQUF65ZVXcvc3bdpUvXv31rp164pdTHQn14WW1WfNrrtse8MVi0yIxpVZil1IlCR5+RS/DQ91d486ssiiQH+rWjQM0y9rY8wOCXBL5MLCIQ8WgsUOOcwebXgo8iDsgWIiAAAAAABXUa1aNVmtVq1evTrP9iNHjuRO2ZbfGlGZmZla
u3atnnrqqWLHER4erpiY4ncAlXvlden4iWK3cyUBVqvqBoU49Br/FhERoZiZnzr1mvbQ4PZlSkkt3pqHb0yaoEFdP7dTRM5x44PrtO9oikOv0aBWab3++HV69PUNalg7VJ+Ob68mt/+o0wnpDrtmRESElq+go9aVhIeHmx2CR3CFXEgevDJ3zYN9n/xDW/YkFHnNREkaMXywXhz+X/sF5SSOzoXkQVxU3FxIMREAAAAAgKvw9fXVkCFDNH36dPXp00c9evRQTEyMPvnkE1WsWFGxsbH5dqCOGjVKISEhGjJkSLHj8Pb2tstUfZnenvn0vre3j1tOZdi5VWUtXFO8TrdeXRqoatVQ+wTkJN4+jv0centb9NXEzlr2R6w+/WGv/HyturltFU17qZ3ueGKF467r456fQ+BqXCEXkgevzF3z4I1t/tbmqIRitdG9Yx23/NkdmQvJg7AnL7MDAAAAAADAHbz33nsaMWKE/vjjDz355JP6448/9NNPP6ly5coKDAxURETEZec88cQTWr9+vRYtWiRfX18TooY7GDkwsljn39SmsurXCrVPMB5kwsgWqloxSMPHr5MkpWdk6+7nVqlXp+oa3KuuydEB7olcCEd4oH+DIi+XaLFIVSsGqmfH6vYNygOQB2FPFBMBAAAAACiA4OBgTZs2TXFxcUpKStKSJUvUtm1b7dy5U02aNJGXV96v2I899piWLl2q5cuXKywszKSo4Q66XV9FNSsHF7kjtbjFSE/UrllFjRnaRMPHr9WpM2m527ftPaNxU7bovWfaqFp4kIkRAu6JXAhHqF21lLq3L9pINsOQHuwfKW9vSh2XIg/C3pjmFAAAAACAIkpISNCxY8fUo0ePPNsfeeQRrVixQitXrlT58uVNis48nzW7Lt99Gb0GODES92C1eun959qqzyNLJYsKtWbULe2qqlcnRmP8229bT8in+XSb+177bLte+2y7kyMCPBe58HLkwcKb9Fgrrd18QsmpmYXKg5G1QzXqzoaOC8xNkQdhb5TrAQAAAAAooh07dkhSnjWijhw5ovfff18HDhxQrVq1FBwcrODgYHXv3t2kKOEOenaqrk/Gd5BFKvAIxfbNKmr2m10YjQHAVORC2EPjemU1772bFOjvrYIO1K9dNUSLpnRV6RCmzwUcjZGJAAAAAAAUka0O1Bo1asgozCP1wP8M6xehiuX89eikDYqOSbpsv+V/oxZ9fbw0rG+EJj/dWv5+dO0AMBe5EPbS5brKWvdFT90/fp027Y6/bL9FkqEL+bB35+r6ZFx7lS8b4PQ4gZKIvzgBAAAAACiikSNHauTIkWaHAQ/So2N1dW9fTcs2xOrDb6P029YTOpecIT9fq6qFB2lY3wjd2zdCYWX8zQ4VACSRC2FfTRuU08Zv+2jjzlOaOjtKC9fE6GxihqxWiyqWDdCdt9bWA3c0UM0qIWaHCpQoFBMBOEVSSoa2Rp1WSmqmJOl8WpYOxyaR+AEAJUJGZrY2745XSmqWpAt5MOpggiJrh5obGADAJXl5WdT1+qrqen1Vs0MBAMAUrRqXV6vGJWutTcCVUUwE4FC7DpzV1NlR+nL+ASWlZOZuP34qVbW6z1a366to5MBI9ehYTVYr63wAADzLsbgUffzDHn3yw17Fxafmbj9+KlUN+/6g6xqX18iBkRrQrZYC/PnTHAAAAAAAuB56LAA4RExcsoa+sEYr/jx+xeMW/x6rxb/HqnqlIL33bFv16VLDSRECAOA4SSkZemDCb5q95JCys/NfK+bPnaf0585TeuLNPzT+oWYadWdDWSwWJ0YKAAAAAABwZQwDAmB32/edUZu75l+1kHipo8dT1O+xZfrgm90OjAwAAMc7fuq8OgxdqG8WHbxiIfFSZ86l65HXNujRSRuUk1OwcwAAAAAAAJyBYqJJoqKiNGTIEFWuXFn+/v6KiIjQ66+/LsMwVK5cOVmtViUnJ5sdpttaHX9SvvNna8qh/Tb370w8J9/5szVh
704nR+b5DscmqesDv+rvU+cLfa5hSKMnrtcX82y/byi+yU+3lrH9PpsjQEuH+Cpm6SAdXDRAQQEMXIdjkQcdizxonsTkDHUfuVjb9p4p0vnvf71bY9/bZOeocBF5EAAAAACAwqOYaILZs2erWbNmmjlzpipXrqw+ffrIz89PzzzzjEaNGqUzZ86ofv36Cg4ONjtUoFBycgz1fmSpTpxOvfrBVzB8/Fpt23vaTlHhUmPf26QDRxM19YXrFRrim2ff5DGtVbVikIaPX6uU1CyTIkRJQB6EJxsxYV2RC4kXTfp8u75fcshOEeFS5EEAAAAAAAqPYqKTrV+/XoMHD1ZwcLBWrlypTZs26bvvvtP27dv15JNPasqUKZKkFi1amBwpUHhL18dqx/6zxW4nK9vQe18z3akjpKZla/j4tapYLkDvPtsmd/st7avq3r4RmjZnj1b8UfDpaYHCIg/Ckx2OTdLsxfYpAr715Q67tIO8yIMAAAAAABQexUQnys7O1rBhw5SRkaE5c+aoc+fOufssFosmTJggb+8LUyq1bNlSkjRnzhz17dtX1apVU1BQkK655hpNnTpVOTk5ZvwIwBVN+S7Kbm19/Uu0ziam2609/GP1pjhNnR2lIb3q6dYO1VQq2Ecfv9ROR48na8zbf5odHjwYeRCe7uPv98qw03KHG7af0pbd8fZpDHmQBwEAAAAAKByKiU40e/Zs7dmzR7169VKXLl0u2x8YGKgqVapI+mdExltvvSU/Pz+98cYbWrBggfr27atHHnlEzzzzjFNjd1cp2VmKT0+/7L9zmRlmh+ZxjvydpAVrYuzWXlp6tmawdqLDPDN5ow7HJmnaS+300YvtVC08WCMmrFNSSqbZocGDkQedjzzoPOkZ2fr0x712bdOeD+kgL/IgAAAAAAAF5212ACXJDz/8IEm666678j0mNTVVXl5eatasmSRp/vz5Kl++fO7+Ll26KDk5WR988IH++9//ys/Pz7FBu7nno3bo+SimCXOGhWtilJNjp+EY/zNv5RE9PrixXdvEBSmpWRo+fp2WfdJdd3avo89/2qfFv8WaHRY8HHnQ+ciDzrNh+0mdOptm1zbnrTqqT+3aIi4iDwIAAAAAUHAUE51o06ZNkv6Zuu3fjh8/rpMnT6phw4YKCgqSpDwdqBc1a9ZMaWlpOnPmjCpVqmTXGG+44QYdPXrUbu15S9rR0Lx1r0bWrKte4VUu237kfIoe3L7J6fFENmigLKdf1TnOeLeTfDvatc31f25X3bqP2rVNV2HISwo0d2TVmcR0ZWXlyNvbS7+ss9+o0qJoENlAFjlm2srq1atrxYoVDmkbhVMS86Bkbi4kDzpPsjVC8rvdrm3GnzmvOnXrymLXVl0DeTAv8iAAAAAAwJVRTHSikydPSlJuB+m/ffXVV5Ly72S9aO3atSpbtqwqVKhg3wAlHT16VNHR0XZrz9tikUwsJkYEh+jG8hUv274z8ZwJ0UjRBw8qy16LKbmaitdKdv5IZmRZ7Pp5dC1WqYl5V/f2tmj6hA5KTMlU8vlMvf1Uay3+LVbJ582Z3u1g9EFJ2aZcG85TEvOgZG4uJA86UWh5qZqd27R46eDBI5LhiSVY8uClyIMAAAAAAFdGMdGJgoODlZqaqujoaIWHh+fZFxMTo1dffVXSP+tE2bJp0yZNnz5d48aNk9VqtXuM1atXt2t7fMDyqlO7tseOyDjrHaAzdm7T1ztb1erUsXOrrsGQlw6aeP3n72+qa+uX0+CxqxWfkKZFU7rp9SdaaeR/fzclntp1ajt0RAZcQ0nMgxK58FKenAdTrKUVZ+9GjSzVrl3DY0cmkgf/QR4EAAAAALgy+recqF27dpo7d64mTpyoH3/8Ub6+vpKkqKgo9evXTwkJCZLyH5ERFxen22+/Xdddd52eecYx00LZewokIytLWbflvzZWSRO1Z48s3p75a/fVggMaPHa1Xdvs2rmV5n/wvF3bdBWZmTnybTHdlGs3qVdG
Y4dfqwWrj+qrBQckSV/8vF8P3NFA3y46qDWb7d4dflV7ovbIx8fL6deFc5XEPCiRCy/lyXlwa1S8mg+cZ9c2a1Uto+hFB+zapqsgD+ZFHgQAAAAAuDK+sTrR2LFj5ePjo4ULF6p+/foaMGCAbrjhBjVp0kRt27ZVqVKlZLVa1bRp08vOPXfunLp3767AwED9/PPP8vHxcf4PAFxBny7VFRJk38/lPb3r2bU9SFarRdNf6ajzqVl64JXfcrc//voGnTyTqk/Ht5e/n/1HewESeRCerWmDcmpct4xd27ynd127tgfyIAAAAAAARUEx0YlatWqlpUuXql27doqLi9PixYuVk5OjWbNmaezYsUpMTFRkZKQCAwPznJeWlqbevXvr5MmT+vXXX1WuXDmTfgIgfyFBvhrSy36dnpXKB6pPlxp2aw8XPDPsGrVoGKan3v5Tf588n7v9bGKGHn51verVKK1XRpm3zik8G3kQnsxisWjkwEi7tedttej+2+vbrT1cQB4EAAAAAKDwKCY6WadOnbRu3Tqlpqbq3LlzWrVqlQYOHKhNmzZJunydqKysLA0YMEDbt2/XokWLVKMGxRW4rocG2K8TdcTt9Znuy84ia4fqpQeaaen6WH32477L9v+47LC+X3pIj93VSK0ah5kQIUoC8iA82d096yg40D7TuPa7saYqVwiyS1u4gDwIAAAAAEDReOaiNW5o48aNki5fJ+rhhx/W/Pnz9frrr+v8+fPasGFD7r6GDRuqVKlSTo3TXXQKq6CMXgPy3d+4VOkr7kfRNKpbRv+5tY6+/iW6WO1UqRCoh+w4ugMXRB1MkH/LGVc8pv+T9l8vDigI8qB9kQfNERLkq+fuu1bPv7+5WO0E+Fk1dvi1dooKF5EHAQAAAAAoGoqJLiK/ERmLFy+WJD399NOXnbNy5Up17tzZ4bEBhfHZy+0VE5estVtOFOn8kCAfLfywqyqWC7BzZABcGXkQnuK54ddq/9FEzZi3v0jne3lZ9O3rXdS0AdP5AnAsS6WKMswOwgEslSqaHQIKoU5Vz3wwzFN/LsCTkAfhKjwxZ3jizwTJYhiGJ9433UpOTo5Kly6t1NRUJSUlKSDAc4ooRlaWsm67y+wwXIb3j7Nk8fb8Gn5SSoYGPLVSv/52rFDnhYcFaOEHXdW8oedPLZaZmSPfFtPNDsNlZGy+l2ltSzBPzoMSufBSJSUPZmXl6JHX1mvq7D2FOi/Az6pvXu9SItYMJg/mRR4EAAAAALgyvrG6AC8vLyUlJSkrK8vjOlBRMoUE+ern927WpMdaqUbl4KseH+Bn1X39IrThq14lopAIIC/yIDyNt7eXPnz+es14paOa1Ctz1eO9vCzqd2MNrfuiZ4koJAIAAAAAAPfi+Y+GAzCFj4+Xnh52jZ68p7F+WXtMU2dHadWm40pNy5YkWa0WRdQorRG319c9feqpTCk/kyMGAMB+LBaL7ulTT0N619Xvf53UlO+iNH/1USWlZP5vv1S1YpCG9qmn+2+vr2rhV3/4BgAAAAAAwAwUEwE4lNXqpV6dq6tX5+qSpLT0LGVm5Sg40EcWi8Xk6AAAcCyLxaJ2zSqqXbMLa5dkZGYrNS1bIUE+8vIiDwIAAAAAANdHMRGAU/n7ecufQYgAgBLK18cqXx+r2WEAAAAAAAAUGGsmAgAAAAAAAAAAALCJYiIAAAAAAAAAAAAAmygmAgAAAAAAAAAAALCJYiIAAAAAAAAAAAAAmygmAgAAAAAAAAAAALCJYiIAAAAAAAAAAAAAmygmAgAAAAAAAAAAALCJYiIAAAAAAAAAAAAAmygmAgAAAAAAAAAAALCJYiIAAAAAAAAAAAAAmygmAgAAAAAAAAAAALCJYiIAAAAAAAAAAAAAmygmAgAAAAAAAAAAALCJYiIAAAAAAAAAAAAAmygmAgAAAAAAAAAAALCJYiIAAAAAAAAAAAAAmygmAgAAAAAAAAAAALCJYiIAAAAAAAAAAAAAmygmAgAA
AAAAAAAAALCJYiIAAAAAAAAAAAAAmygmAgAAAAAAAAAAALDJYhiGYXYQ8FyGYUjZ2WaH4TqsVlksFrOjgAswDENZWdx+L/L2tvC7AY9FLrwEeRD/Qx7MizwIAAAAAHBlFBMBAAAAAAAAAAAA2MQ0pwAAAAAAAAAAAABsopgIAAAAAAAAAAAAwCaKiQAAAAAAAAAAAABsopgIAAAAAAAAAAAAwCaKiQAAAAAAAAAAAABsopgIAAAAAAAAAAAAwCaKiQAAAAAAAAAAAABsopgIAAAAAAAAAAAAwCaKiQAAAAAAAAAAAABsopgIAAAAAAAAAAAAwCaKiQAAAAAAAAAAAABsopgIAAAAAAAAAAAAwCaKiQAAAAAAAAAAAABsopgIAAAAAAAAAAAAwCaKiQAAAAAAAAAAAABsopgIAAAAAAAAAAAAwCaKiQAAAAAAAAAAAABsopgIAAAAAAAAAAAAwCaKiQAAAAAAAAAAAABsopgIAAAAAAAAAAAAwCaKiQAAAAAAAAAAAABsopgIAAAAAAAAAAAAwCaKiQAAAAAAAAAAAABsopgIAAAAAAAAAAAAwCaKiQAAAAAAAAAAAABsopgIAAAAAAAAAAAAwCaKiQAAAAAAAAAAAABsopgIAAAAAAAAAAAAwCaKiQAAAAAAAAAAAABsopgIAAAAAAAAAAAAwCaKiQAAAAAAAAAAAABsopgIAAAAAAAAAAAAwCaKiQAAAAAAAAAAAABsopgIAAAAAAAAAAAAwKb/BzE5rB+bbDfuAAAAAElFTkSuQmCC", - "text/plain": [ - "
" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAABxMAAAETCAYAAAD9HCj7AAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjkuMiwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8hTgPZAAAACXBIWXMAAA9hAAAPYQGoP6dpAABT+klEQVR4nO3dd3gUVdvH8d/uppOEUBIglID0AFJFBKkiqKggKFghoiJilxcLKiD2Do88YBcUVBRBBQRRAVFRRIq00EIVkBJISCF15/0DyUPMAtlkd2fL93NdXBfZ3TPnZnOYe2bumXMshmEYAgAAAAAAAAAAAIB/sZodAAAAAAAAAAAAAADvRDERAAAAAAAAAAAAgEMUEwEAAAAAAAAAAAA4RDERAAAAAAAAAAAAgEMUEwEAAAAAAAAAAAA4RDERAAAAAAAAAAAAgEMUEwEAAAAAAAAAAAA4RDERAAAAAAAAAAAAgEMUEwEAAAAAAAAAAAA4RDERZWKxWDRu3Liin6dOnSqLxaJdu3aZFtPZ/DtesyUlJclischisah58+Zn/eyp7/aPP/7wUHTwVv369Sv1uAEAAAAAAAAAwBUoJpooJSVFd955p8477zyFhYUpOjpanTp10sSJE3XixAmzw4ObVa1aVR999JFeeOGFYq/XrVu3zIXPbt26KSkpqUxtx40bp7p165apbWmU59+VlJSkbt26OdVm6dKl5SpwWywWTZ06tUxtXcFR/A8++KA++ugjNWnSxLS4AAAAAAAAAACBJcjsAALV/Pnzdd111yk0NFSDBw9W8+bNlZeXp59//lmjRo3Sxo0b9fbbb5sd5hmdOHFCQUG+M3y8Md4KFSro5ptvNjsM+JCuXbtKkt59910dOXLE5GgAAAAAAAAAAIHAu6orAWLnzp26/vrrlZCQoMWLF6tGjRpF7919993avn275s+fb2KE5xYWFmZ2CE7xtXgBAAAAAAAAAAC8AdOcmuCll15SZmam3nvvvWKFxFMaNGig+++/v+jngoICPf3006pfv75CQ0NVt25djR49Wrm5ucXa1a1bV1deeaWWLl2qdu3aKTw8XC1atNDSpUslSbNnz1aLFi0UFhamtm3bas2aNcXaJyUlKTIyUjt27FDv3r1VoUIFxcfHa/z48TIMo9hnS7sG4YIFC9S5c2dVqFBBUVFR6tOnjzZu3HjOduPGjZPFYinxuqO1Gf/44w/17t1bVatWVXh4uOrVq6ehQ4eeNd5T29++fbuSkpIUExOjihUr6tZbb1V2dnaxtidOnNB9992nqlWrKioqSldffbX27dvn8DvYvHmz9uzZc85/X1nk5ubqoYceUmxsrCpUqKBrrrlGhw8fdktfp5s+fbrat2+viIgIVapUSV26dNGiRYskSYsXL5bVatWYMWOKtfn4449lsVg0ZcoUt8TUrVu3orUD//3HnVOT7tu3T7fddpvi4+MVGhqqevXq6a677lJeXl7RZ9LS0vTggw+qbt26Cg0NVa1atTR48OCiJwmHDBmisLAwJScnF9t27969ValSJe3fv99t8QMAAAAAAAAA4CyeTDTB3Llzdd5556ljx46l+vztt9+uadOm6dprr9XIkSO1YsUKPf/880pOTtacOXOKfXb79u268cYbdeedd+rmm2/WK6+8oquuukpvvvmmRo8erREjRkiSnn/+eQ0cOFBbtmyR1fq/mnJhYaEuu+wydejQQS+99JIWLlyosWPHqqCgQOPHj3fq3/nRRx9pyJAh6t27t1588UVlZ2drypQpuvjii7VmzRqXrM936NAh9erVS7GxsXr00UcVExOjXbt2afbs2aVqP3DgQNWrV0/PP/+8Vq9erXfffVdxcXF68cUXiz6TlJSkzz77TLfccos6dOigH3/8UX369HG4va
ZNm6pr165FBVxXuvfee1WpUiWNHTtWu3bt0oQJE3TPPfdo5syZLu/rlKeeekrjxo1Tx44dNX78eIWEhGjFihVavHixevXqpR49emjEiBF6/vnn1a9fP7Vp00YHDhzQvffeq549e2r48OFuievxxx/X7bffXuy16dOn69tvv1VcXJxb+ty/f7/at2+vtLQ0DRs2TE2aNNG+ffs0a9YsZWdnKyQkRJmZmercubOSk5M1dOhQtWnTRkeOHNHXX3+tv/76S1WrVtXEiRO1ePFiDRkyRL/++qtsNpveeustLVq0SB999JHi4+PdEj8AAAAAAAAAAGViwKPS09MNSUbfvn1L9fm1a9cakozbb7+92Ov/93//Z0gyFi9eXPRaQkKCIclYvnx50WvffvutIckIDw83du/eXfT6W2+9ZUgylixZUvTakCFDDEnGvffeW/Sa3W43+vTpY4SEhBiHDx8uel2SMXbs2KKfP/jgA0OSsXPnTsMwDCMjI8OIiYkx7rjjjmJx//3330bFihVLvP5vY8eONRwNz3/3M2fOHEOSsXLlyrNu79/xntr+0KFDi33ummuuMapUqVL086pVqwxJxgMPPFDsc0lJSSW2eaqfrl27njUWwzj5XSckJJzzc4bxv39zz549DbvdXvT6gw8+aNhsNiMtLa1U23HWtm3bDKvValxzzTVGYWFhsfdOjyMrK8to0KCB0axZMyMnJ8fo06ePER0dXWy8udsvv/xiBAcHl/h9utLgwYMNq9XqcKyd+j7GjBljSDJmz559xs8Yxv/+Xz7zzDPGjh07jMjISKNfv36ljqVr165Gs2bNyvCvAAAAAAAAAADAOUxz6mHHjx+XJEVFRZXq8998840k6aGHHir2+siRIyWpxNqKiYmJuuiii4p+vvDCCyVJPXr0UJ06dUq8vmPHjhJ93nPPPUV/t1gsuueee5SXl6fvv/++VDFL0nfffae0tDTdcMMNOnLkSNEfm82mCy+8UEuWLCn1ts4mJiZGkjRv3jzl5+c73f7fT8517txZqampRb+nhQsXSlLRE52n3HvvvQ63ZxiGW55KlKRhw4YVm/q1c+fOKiws1O7du93S35dffim73a4xY8YUe3pVUrE4IiIiNHXqVCUnJ6tLly6aP3++Xn/99WLjzZ3+/vtvXXvttWrVqpUmT57slj7sdru+/PJLXXXVVWrXrl2J9099H1988YVatmypa6655oyfkaRevXrpzjvv1Pjx49W/f3+FhYXprbfeckvsAAAAAAAAAACUB8VED4uOjpYkZWRklOrzu3fvltVqVYMGDYq9Xr16dcXExJQoJP27gFOxYkVJUu3atR2+fuzYsWKvW61WnXfeecVea9SokSQVW6fwXLZt2ybpZBEzNja22J9Fixbp0KFDpd7W2XTt2lUDBgzQU089papVq6pv37764IMPSqwneSb//r4qVaok6X/fy6nvv169esU+9+/fhyecK1ZXS0lJkdVqVWJi4jk/26lTJ9111136/fff1bt37xJrVrpLQUGBBg4cqMLCQs2ePVuhoaFu6efw4cM6fvy4mjdvftbPpaSknPMzp7zyyiuqXLmy1q5dq//85z9um54VAAAAAAAAAIDyYM1ED4uOjlZ8fLw2bNjgVLvTn2o6G5vN5tTrhmE4FUdp2e12SSfXTaxevXqJ94OCzj70zvTvLSwsLPG5WbNm6bffftPcuXP17bffaujQoXr11Vf122+/KTIy8qz9ePp7KQ9vjjU3N7foicyUlBRlZ2crIiLC7f2OGjVKv/76q77//nvVqlXL7f250po1a4qK6uvXr9cNN9xgckQAAAAAAAAAAJTEk4kmuPLKK5WSkqJff/31nJ9NSEiQ3W4vetLvlIMHDyotLU0JCQkujc1ut5eY+nTr1q2SpLp165Z6O/Xr15ckxcXFqWfPniX+dOvW7aztTz11l5aWVuz1M03p2aFDBz377LP6448/NGPGDG3cuFGffvppqeM9k1Pf/8
6dO4u9vn379nJv29vVr19fdrtdmzZtOudnx44dq+TkZL3yyivauXOnHn30UbfH9+mnn2rChAl65ZVX1LVrV7f2FRsbq+jo6HPeBFC/fv1S3SiQlZWlW2+9VYmJiRo2bJheeuklrVy50lXhAgAAAAAAAADgMhQTTfDwww+rQoUKuv3223Xw4MES76ekpGjixImSpCuuuEKSNGHChGKfee211yRJffr0cXl8kyZNKvq7YRiaNGmSgoODdckll5R6G71791Z0dLSee+45h2sZHj58+KztTxUjly1bVvRaVlaWpk2bVuxzx44dK/FkXqtWrSSp1FOdnk3v3r0lqcRafG+88YbDz2/evFl79uwpd7/eoF+/frJarRo/fnzRk6annP6dr1ixQq+88ooeeOABjRw5UqNGjdKkSZP0448/ui22DRs26Pbbb9fNN9+s+++/3239nGK1WtWvXz/NnTtXf/zxR4n3T30fAwYM0J9//qk5c+ac8TOS9Mgjj2jPnj2aNm2aXnvtNdWtW1dDhgxxyZgFAAAAAAAAAMCVmObUBPXr19fHH3+sQYMGqWnTpho8eLCaN2+uvLw8LV++XJ9//rmSkpIkSS1bttSQIUP09ttvKy0tTV27dtXvv/+uadOmqV+/furevbtLYwsLC9PChQs1ZMgQXXjhhVqwYIHmz5+v0aNHKzY2ttTbiY6O1pQpU3TLLbeoTZs2uv766xUbG6s9e/Zo/vz56tSpU7Gi5b/16tVLderU0W233aZRo0bJZrPp/fffL9rGKdOmTdPkyZN1zTXXqH79+srIyNA777yj6OjookJsebRt21YDBgzQhAkTlJqaqg4dOujHH38selrz39OxNm3aVF27di2a8tMbdOvWTT/++KPT06E2aNBAjz/+uJ5++ml17txZ/fv3V2hoqFauXKn4+Hg9//zzysnJ0ZAhQ9SwYUM9++yzkqSnnnpKc+fO1a233qr169erQoUKZ+zj1NOuzqzHKUm33nqrJKlLly6aPn16sfc6duxYYt3PU5YuXaru3btr7NixGjdunFN9Pvfcc1q0aJG6du2qYcOGqWnTpjpw4IA+//xz/fzzz4qJidGoUaM0a9YsXXfddRo6dKjatm2ro0eP6uuvv9abb76pli1bavHixZo8ebLGjh2rNm3aSJI++OADdevWTU8++aReeuklp+ICAAAAAAAAAMCdKCaa5Oqrr9a6dev08ssv66uvvtKUKVMUGhqq888/X6+++qruuOOOos++++67Ou+88zR16lTNmTNH1atX12OPPaaxY8e6PC6bzaaFCxfqrrvu0qhRoxQVFaWxY8dqzJgxTm/rxhtvVHx8vF544QW9/PLLys3NVc2aNdW5c+eiYtCZBAcHa86cORoxYoSefPJJVa9eXQ888IAqVapUrO2p4uqnn36qgwcPqmLFimrfvr1mzJihevXqOR2zIx9++KGqV6+uTz75RHPmzFHPnj01c+ZMNW7cWGFhYS7pw50yMzMdrltZGuPHj1e9evX0xhtv6PHHH1dERITOP/983XLLLZKk0aNHa/v27Vq+fHnRdxESEqJp06apQ4cOGjVqVImnOk+XlZWlBg0aOB3X4cOHlZWVpWHDhpV474MPPjhjMTEzM1OSVKNGDaf7rFmzplasWKEnn3xSM2bM0PHjx1WzZk1dfvnlRetDRkZG6qefftLYsWM1Z84cTZs2TXFxcbrkkktUq1YtZWRkaOjQoWrdurUef/zxom137txZ999/v1599VX1799fHTp0cDo+AAAAAAAAAADcwWI4+7gS/FZSUpJmzZpVVHDBma1du1atW7fW9OnTddNNNzndPikpSYsXL9bq1asVFBSkmJgY1wcpKSMjQ5UrV9aECRN09913u6WPstq0aZOaNWumefPmuWW6XkcefvhhffLJJ9q+fbtCQ0M90qcrZWRkKDc3V3379lV6enqp1mcEAAAAAAAAAKA8WDMROIcTJ06UeG3ChAmyWq3q0qVLmb
e7d+9excbG6uKLLy5PeGe1bNky1axZs9iTrt5iyZIluuiiizxWSDzV55NPPumThURJuuWWWxQbG6vly5ebHQoAAAAAAAAAIEDwZCKK8GSiY0899ZRWrVql7t27KygoSAsWLNCCBQs0bNgwvfXWW2Xa5qZNm7R//35JJ6fGZFpLlMa6det06NAhSYwbAAAAAAAAAIBnsGYicA4dO3bUd999p6efflqZmZmqU6eOxo0bV2zNO2clJiYqMTHRhVEiEJx//vlmhwAAAAAAAAAACDA8mQgAAAAAAAAAAADAIdZMBAAAAAAAAAAAAOAQxUQAAAAAAAAAAAAADlFMBAAAAAAAAAAAAOAQxUQAAAAAAAAAAAAADlFMBAAAAAAAAAAAAOAQxUQAAAAAAAAAAAAADlFMBAAAAAAAAAAAAOAQxUQAAAAAAAAAAAAADlFMBAAAAAAAAAAAAOAQxUQAAAAAAAAAAAAADlFMBAAAAAAAAAAAAOAQxUQAAAAAAAAAAAAADlFMBAAAAAAAAAAAAOAQxUQAAAAAAAAAAAAADlFMBAAAAAAAAAAAAOAQxUQAAAAAAAAAAAAADlFMBAAAAAAAAAAAAOAQxUQAAAAAAAAAAAAADlFMBAAAAAAAAAAAAOAQxUQAAAAAAAAAAAAADlFMBAAAAAAAAAAAAOAQxUQAAAAAAAAAAAAADlFMBAAAAAAAAAAAAOAQxUQAAAAAAAAAAAAADlFMBAAAAAAAAAAAAOAQxUQAAAAAAAAAAAAADlFMBAAAAAAAAAAAAOAQxUQAAAAAAAAAAAAADlFMBAAAAAAAAAAAAOAQxUQAAAAAAAAAAAAADlFMBAAAAAAAAAAAAOAQxUQAAAAAAAAAAAAADlFMBAAAAAAAAAAAAOAQxUQAAAAAAAAAAAAADlFMBAAAAAAAAAAAAOAQxUQAAAAAAAAAAAAADlFMBAAAAAAAAAAAAOAQxUQAAAAAAAAAAAAADlFMBAAAAAAAAAAAAOAQxUQAAAAAAAAAAAAADlFMBAAAAAAAAAAAAOAQxUQAAAAAAAAAAAAADlFMBAAAAAAAAAAAAOAQxUQAAAAAAAAAAAAADlFMBAAAAAAAAAAAAOAQxUQAAAAAAAAAAAAADlFMBAAAAAAAAAAAAOAQxUQAAAAAAAAAAAAADlFMBAAAAAAAAAAAAOAQxUQAAAAAAAAAAAAADgWZHQDMd+Bwtr74fpcOpp5QkM2iBnWi1f+SugoPY3gAAPxfekaeZn23U3sOZMmQoVrVKujaS+upcsVQs0MDAMDtcvMK9dWS3dq8M115+YWKrRSm/j3rqnb1SLNDAwAAMJ3dbmjx7/u1Yt1hZecUqGJkiHp3qqmWjauYHRrgURbDMAyzg4A5Nmw7qvFvrdHs73fLbhiy2SySIRUUGoquEKw7BjTWE8NaKSaai6kAAP+z/1CWxr+1VtO+3qac3EIF2SySpMJCQ8HBVt14RX2Nu6u1EuKjTI4UAADXy8rO17Pv/Kk3P0/WseN5CrJZZLGcPB+UpKu61tHY4a3VJrGqyZECAAB4nt1uaMpnyXrtww3a8VeGbFaLrFap0G7IbpcubBGr0be31NXdE8wOFfAIpjn9R1ZWlu6//37FxcUpKipKSUlJmjp1qoKDg5WTk2N2eC635Pf9uvCmuZr9w24V2g0ZhlRQYBSdOB7PyteE6RvV4ea5OnA42+RoAQCeEEi5cMvONLW74Su9O3uLcnILJZ28eFpQaMiQlJdv1/R529X2+q/055ZUc4MFAHhEIOXB1LQcXTxknl78YJ2OHc+TdDIP5hecPDc0DGn+T3t10S1zNe/HPSZHCwDwhEDKg8C5FBTYddOjS3XPc79q574MSSeLiPkFJwuJkrRy4xH1vf97vfDenyZGCngOTyZKKigo0CWXXKL9+/dr3Lhxqlq1qp577jlt27ZNlStX1oYNG8wO0aU2bj+m9jd+rZy8gqKd35nYbBY1Oy9Gv824mmlPAcCPBVIuPHIsR60Hfq
kDR7JVWHj2wyCb1aLKFUO19vN+io+r4KEIAQCeFkh5sKDArs5J87RywxEV2s+eBy0WKTjIqp+nXakLmsd6KEIAgKcFUh4ESuOe55Zr8qfJKm3h5P3xnXVrv0ZujQkwG9UhSRMnTtTatWu1ZcsWVa9eXZLUpEkT1a1bVz169DA5Otd7+u01ys0vPGchUTo51du6bcc089sdSurLDhEA/FUg5cIpnyVr/+Gs0uVBu6Gj6bmaMH2jXnqovfuDAwCYIpDy4NdL9+i3dYdL9VnDOHlOOOa/q7VgSm83RwYAMEsg5UHgXHbty9DkmaUvJErSI6+v1M19Gig4mIkg4b8CfnQbhqHXXntNd9xxR1GylKSEhAQFBQWpZcuWkqTk5GRdcMEFatSokXr06KEDBw6YFXK5/H0kW198t+ucT2KczmqV3vh4kxujAgCYKZByYUGBXf/9NLlUhcRTCu2G3p61WSdyCtwXGADANIGUByXpjU82yWa1lPrzhXZD3y7/Szv+Ou7GqAAAZgm0PAicy1uzNstiKf2xkiQdPpajr5bsdlNEgHcI+GJicnKy9u/fr379+hV7/cCBAyooKFCrVq0kScOHD9cTTzyhrVu3qm/fvnr00Uc9H6wLzPlnjURn2O3S6uRUpezl5BEA/FEg5cJf1h7UwdQTTrdLz8zX97/td0NEAACzBVIePHz0hJauPOD0OaHFYtFn3+50U1QAADMFUh4ESmP6vO2yO3msZLNa9MmCFDdFBHiHgF8z8bvvvlOvXr20ZcsWNWr0v2k8P/zwQw0ZMkSHDh2S3W5X69attX//yYuImZmZio+P1/HjZS+uJSQkKD09vdzxOysnupdyK/aULDan21b4+z8KyuMOCwC+qWLFitq9m32YI2bkQrPyYH74+cqOHVKmtuGpnykka4WLIwIAzyAPnlkg5cHCoGrKjH/Y+YZGgUIyflF42teuDwoAPIRc6Fgg5UGgNNJrvyBZgp1uZ8vdpciDb7ghIsA1ypsHA/7JxCpVqkiSUlL+d+dAVlaWnnnmGdWoUUOxsbH666+/VLt27aL3IyMjFRYWptTUVI/HW36FJrUFAHirwMqFTsxvWgJ5EAD8UWDlQc4HAQDFBVYeBErBKON1A4OlUeDfgswOwGzNmzdXQkKCRo4cqYKCAhUUFOjFF19URkaGWrdu7bZ+zboT6rNvd2jQqCVOt7NZLdq15Q9VrRTmhqgAAGYyIxealQc3bDuqFgPmlKnt9/NnqGOrai6OCABgtkDKg5nZ+araZbpy85y7SGaxBOm/r47R7QNmuCkyAIBZAikPAqXR6ro5Wr/1qJyZ6TTIZtHQm/vorTHPuy8wwGQB/2RiSEiIZs2apfDwcA0aNEjjx4/XE088oZiYmKI5wWvVqqW9e/cWtcnMzFROTk7RnTu+pG/3BFWKDnGqTZDNov49EygkAoCfCqRc2LxhZbVvHiurtfSLqVssUuO6FXVRyzg3RgYAMEsg5cHIiGDdcmUDBdlKnwclKTwsSIMuq+emqAAAZgqkPAiUxohBTZ0qJEpSQaGhYdc2cU9AgJcI+GKiJLVr106rVq1Sdna21qxZox49emjr1q1q2bKlJKlatWpq0KCBvvrqK0nSe++9V2JRYl8RGmLT8OuaOnURtaDQ0IhBiW6MCgBgtkDKhffemOjcYuqGdN+NibJYnLvwCgDwHYGUB+8a2FQFhaXPgzarRUl9GyqqgnM3pQIAfEcg5UHgXG68or4qhAeptJcAbFaL2jStoraJVd0bGGAyi2EYTtbZ/d9vv/2miy66SJs3b1bjxo0lSRs3btSQIUOUnp6uWrVqacaMGYqPjzc50rLJyMpTp8HztGlHmgrPcRJpkTR8UBP9d3RHLqICQADx51xYWGhX/wd/0Lwf95zzbkOr1aLuF1TXgsmXKTiYe7AAIFD4cx6UpMcmrtQL76075+eCbBYlxEfq94/7qnLFUA9EBgDwBv6eB4FzmbVopwaOWixJOlv1xGa1KD
TEpl+nX6XzG1X2UHSAOSgmOvDmm29q5MiRysjIkNXqnxcOD6We0BV3f6tVm1JltVpKPKERZLP880RiU/3n0Q6y2fzzewAAOObvufBEToFuGf2jvvh+l2w2S4mba0691rtjTc167RJFRgSbFCkAwAz+ngftdkOj//OHXnx/XdG53+msVslul5rVj9HCKZepVvUKJkUKADCDv+dBoDRmzN+upCeWyTCkQgd3IlssUkxUiL75b291YFkUBACKiQEsN69QX3y3S298slG/rTtc9HqQzaIBPetqxKCm6ty2Ok8kAgD8kt1u6Ntf/tKkTzdpwc9/FbvbsGeHeN1zQ6Ku7FKbG2oAAH5rxbpDmjwzWZ8s2KH8AnvR622aVtG9NyZqUO/zFB4WZGKEAAAA5knZe1xvfrZZ73yxWemZ+UWv16pWQffekKih1zRS1UphJkYIeA7FREiS9hzIVOI1X8giafe31zOFDQAgoBxMPaEGfT6TJG2de51qxEaYHBEAAJ6TdjxXtXt9KkPS+ln9Va9WlNkhAQAAeI0TOQWK6zZDhqQ/PumrhnWiufEYAYdbDCFJqlMjUkH/7AApJAIAAk21KuFFJwIUEgEAgSYmOrQoD1JIBAAAKC48LKjoWKlJvRhzgwFMQvkcAAAAAAAAAAAAgEMUEwEAAAAAAAAAAAA4RDERAAAAAAAAAAAAgEMUEwEAAAAAAAAAAAA4RDERAAAAAAAAAAAAgEMUEwEAAAAAAAAAAAA4RDERAAAAAAAAAAAAgEMUEwEAAAAAAAAAAAA4RDERAAAAAAAAAAAAgEMUEwEAAAAAAAAAAAA4RDERAAAAAAAAAAAAgEMUEwEAAAAAAAAAAAA4RDERAAAAAAAAAAAAgEMUEwEAAAAAAAAAAAA4RDERAAAAAAAAAAAAgEMUEwEAAAAAAAAAAAA4RDERAAAAAAAAAAAAgEMUEwEAAAAAAAAAAAA4RDERAAAAAAAAAAAAgEMUEwEAAAAAAAAAAAA4RDERAAAAAAAAAAAAgEMUEwEAAAAAAAAAAAA4RDERAAAAAAAAAAAAgEMUEwEAAAAAAAAAAAA4RDERAAAAAAAAAAAAgEMUEwEAAAAAAAAAAAA4RDERAAAAAAAAAAAAgEMUEwEAAAAAAAAAAAA4RDERAAAAAAAAAAAAgEMUEwEAAAAAAAAAAAA4RDERAAAAAAAAAAAAgEMUEwEAAAAAAAAAAAA4RDERAAAAAAAAAAAAgEMUEwEAAAAAAAAAAAA4RDERAAAAAAAAAAAAgEMUEwEAAAAAAAAAAAA4FGR2AADcY3+2lJZndhSlFxMixUeYHQUA+Cbj4CEZxzPMDsMplugoWarFmR1GwGCMlMSxEgB/5mv7OIn9HAAA7uRrxwYcF3gfiomAH9qfLV27WMqzmx1J6YVYpVk9SBIA4Czj4CEVjHhIys83OxTnBAcraPJrFBQ9gDFSEsdKAPyZL+7jJPZzAAC4iy8eG3Bc4H2Y5hTwQ2l5vpUcpJPx+tLdMQDgLYzjGb5XJJKk/Hyfe1LOVzFGSuJYCYA/88V9nMR+DgAAd/HFYwOOC7wPxUQAAAAAAAAAAAAADlFMBAAAAAAAAAAAAOAQxUQAAAAAAAAAAAAADgWZHQDMV1ho14HDJ1RoNyRJh1JPKLZymCwWi8mRmSc9I09H03OVX2BXaIhV1aqEKyyU/y4A4I8Mw9DfR/6XB/cfylKN2IiAzoMAgMBy5FhOUR7c+3emalSNUFBQ4N57nJdfqL+PnFBObqGCgiyqFB2qStGhZocFAIBHnLpWfCK3QBaLRdEVggP+WvHxzDzZ7YYMSbv3ZyiucrjCw7hWjMDCiA9AhmFo2aq/NeeH3Vq16YjWbE5V1omCoverdf9Y1aqEq21iFbVvHqubr2yg+rWjTYzY/Y4dz9WM+Sn6afXfWrXpiFL2ZhR7P8hmUbMGldQ2saou7RCv/j3rKiTYZlK0AIDyWr/1qD5ZsEMrNx7W6uRUHU3PLXqvZs9PVT
EyRG2aVlG7ZlU16LLz1DaxqonRAgDgWnsOZOqjudu1Yv1hrUo+ov2Hsoveq9NrpsJDbWrVpIraJlbV1d3q6JIL42W1+u8FxMJCu+Yv26tvfv5LqzYd0bqtR5WXby/2mTo1KqhtYlV1bFlNN19ZX9WrRpgULQAArmUYhn5efVCzf9hVdK04M7ug2GfiKoepbWJVtW8eq5v61FfDhIomResZacdzNeObU9eKU7V9z/Gi9+pe9plsNoua1a+ktolVdGmHmurfs65CQ7hWDP9mMQzDMDsIeEZuXqHenb1Fk2cma1NKmlNtL+tUS/fdmKjLLq7lV3ehrN96VBNmbNTH81OUk1dY6nZxlcN0x4DGuvfGZqpWJdyNEZbNpjRp8DKzo3Deh12kxBizowDgr+x2Q58v2qlJn2zSz2sOOtW2ffNY3X19U93Up75sNu96UsO+LUWFI0ebHUaZ2F59TtaG9c0Ow+8xRkriWAmBavGK/Zo4Y6PmLdsru730lwIaJVTUXQObaNi1TRQR7j/3JKcdz9Xkmcl6a9Zm7TmQVep2QTaLBlxaVw/c1FwdWsa5McKy8dV9nMR+DgA8KS+/UO/P2arJM5O1ftsxp9r26lhT996QqD5davvVteKN249pwvQNmjE/RSdyS3+tOLZSmG7v31j33pioGrHed8ORrx4bcFzgXbzrahjcZuWGw2oz6Evd89yvThcSJWnhL3/pirsX6dqHFutg6gnXB+hhefmFenLSKrUe+KXen7PVqUKiJB06mqNn3/lTTfvO0oz520VNHgC8286/MtTzjgW6/uElThcSJen3DYc15IllunjIPG3emeb6AAEAcKOj6bm6ZfRSXXLHAn29dI9ThURJ2ro7XQ++vELnXztbP636201RetY3P+1Vs2tm6/E3VjlVSJSkgkJDMxfu1EW3zNW9z/+qzOx8N0UJAIB7rN50RO2u/0p3PbPc6UKiJC1avk9X3fudrnnge/19JPvcDbxcXn6hnpqyWq2um6N3Z291qpAoSYeP5ej59/5UYr8v9OHX27hWDL9EMdHPGYahsf9drQ43zy1TEfHfZv+wS836faF5P+4pf3Am2bIzTe2u/0rPvL22aF2Qsjp2PE83P/aj+j/4g45n5rkoQgCAK039aqtaDJitJSsPlHtbv607rFbXzdF/P93kgsgAAHC/xSv2q1m/LzR9Xkq5t5WyN0Ndh87X/72yQoWF9nM38EK5eYW6bexP6nP3Iu0/XP6Ln5M+2aTzB8zWyg2HXRAdAADuZRiGnn5rjdrf+HWZioj/9tWSPUrs94W+XLyr/MGZZNvudLW/8WuNm7JGBYXlu1aclpGnIU8sU9/7vld6BteK4V8oJvoxu93Q7eN+1vi31jh95+nZpKbnqt/932v6vO0u26anrN2cqouHzHNJsjzdl4t365I7FhRbcwsAYL6X3l+nW5/8qdjawOWVm2fXPc/9qicnreJuQwCAV5vzwy5dPuJb/e3C2WUMQ3r1ww268dGlys/3rYJi9okCXXnPIr0/Z6tLt7tzX6Z63PaNlrrgxiUAANzFbjd01zPLNea/q8v9gMXpjh3P04CHftDUr1ybXz1h/daj6pw0T39uOerS7c79cY+63TZfR47luHS7gJkoJvopwzB097PLXX6SdEqh3dCQJ5bpi+92umX77rBlZ5ouHbZAR9LcU/D7Y+MRXX7Xt8rI4q4TAPAGkz7ZpEcmrHTb9p95e62ef/dPt20fAIDy+PaXvzRo1BLluang99m3O3XrmGUuvXHVnfLyCzXgoR/0/W/73bL9zH8KlSvWHXLL9gEAKA/DMPTAS7/prc83u2X7drs0dMxPmrlwh1u27w7bdqer57AFOpjqnoLf2s1H1Xv4Qmazg9+gmOinps/brjedTA67Fg7SroWDSv15+z8FxZ1/ZTgbnsfl5Rfquv9b7FQh0dnvQzq5ptbIV353NjwAgIv9vv6w7n/xN6falGW///gbq7R4hXsuSgIAUFYHDm
frhkeWKL+g9IXEsuTBGfNTNOWzZGfDM8Uzb6/Vwl/+cqqNs99J1okCXfd/i5nWDADgdWYu3KE3PnZuuQ5n86BhSLc+uUzbdqc7G57H5efbNWjUEh06WvpCYlmOlVYnp+q+F5y7NgF4K4qJfujA4ewy7aQqRgarYmSwU22yThRo6NifvP5u1GfeXuv01KZl+T4k6Z0vtmjRcudOUgEArpOTW1CmJyXKut+/bexPyszOd7odAADuYBiG7hz/i44dd66gVdY8+PBrv2vHX8edbudJqzcd0XPvOD+bQFm+k71/Z+n/Xl3hdF8AALjLwdQTuue5X51uV5Y8eCK3UEPHeP+14hfe/1NrNqc61aasx0rTvt6m+cv2ON0O8DYUE/3Qvc//qjQP3gm5dOUBvTd7i8f6c9bG7cfKdOJYHreP+1kncly3PhcAoPSef3edNqWkeay/Xfsz9cQbqzzWH8xjGAbrZALwep8v2qm5P3ruglV2TqHuHP+Lx/pzlt1uaOjYn1y6NtS5vDt7q5b8zswFAADv8MCLvyk13T3LPjny85qDetOLZy7YvDNNT7+11qN9DnvqF2VxEzJ8HMVEP5Oy97i++H6Xx/t9ZdoGr724NmH6Bo+eOEon70b9fJHvrCcJAP7iRE6B3vhko8f7feeLzUo77rmTM3jOodQTev7dP1XvspkKbv2Bglq9r+rdP9ZjE1dq1z7vn+odQOB5eep6j/f5/W/7tdbJu/s95fvf9unPLUc93u+rH27weJ8AAPzbrn0Zmvmt59cxfPXDDV77dOJ/Zmx0aip4V9h/OFufLPCd9SQBRygm+pk3P3PPIrrnsnV3uhavOGBK32eTdjxXM+anmNL35JneewcOAPirmd/ucHpaN1fIzinUh3O3e7xfuNd/ZmxUrUs/0eNv/KFd+zNVaDdkN05OE/TSB+t13hWfadSrv3vtSTKAwLNyw2H9sfGIKX1769qJZp2XffPTXu38i5tOAADmenvWFpnx/MeOvzK0aPk+z3d8Dscz8/TRPHPO3SfPTPbah3GA0qCYeJqsrCzdf//9iouLU1RUlJKSkjR16lQFBwcrJ6f0i7GapbDQrve/3Gpa/+/MNqeQeTYff5OiE7mFpvS9Yv1hrd/q+TtgXS116QytGRRZ4s+qflbteuM2s8MD4EK+ngelk+vWBmLfrtRz+RK9vav4ydX2rAyFzP3MpIjM8dL763T/i78pv8BwePJtt598/ZVp6zX86V8C6qSQMVIcx0r+gzxYPtPnpSj7hHct9fD3kWzN/XGvKX0bhkw9P3cl9nNAYPCHPIji7HZD783hHPl0MxfuUGa2Occrazanak2yd87k4AyOCwJXkNkBeIuCggJdccUV2r9/v15//XVVrVpVzz33nBYtWqTGjRsrLCzM7BDPafPOdB314PzX//bLmkOm9X0mv6w1N6Zf1h5Ui0aVTY2hvKp0u0lVut1U7LW0FV9r5+s3q1rfkSZFBcDV/CEP5uYVauUGc57GkKQN248p7XiuYqJDTYsBrvHbn4f0yISVpf78O19sUbd2NXRjn/pujAreimMl/+APeVA6ef5hluycAv25NVUXtaxmWgz/9tu6Q6Y+Pf7LGvN+H67Efg7wf/6SB1Hc9j3HdeioeYXgX9YelGEYslgspsXwb95wrbhNYlVTYygvjgsCF8XEf0ycOFFr167Vli1bVL16dUlSkyZNVLduXfXo0cPk6Epn1SbzLqBK0l8Hs3Qw9YSqVQk3NY7TmTXFj7f07w45+7dp18TBSrj7HYXXSTQ7HAAu4g95cP22ox5f9+DfVienqseF8abGgPJ745NNstksKiws3QVoq9WiCdM3UEyEJI6VfJU/5MHM7Hwl70gzNYZVm7yrmGj2+djq5FSvu4jqCuznAP/jD3kQJZl9rfhg6gntO5itWtUrmBrH6f7YeNjk/rlWDN/FNKeSDMPQa6+9pjvuuKMoYUpSQkKCgoKC1LJlS0nSnXfeqZo1a3rticAaL1
jwfrXJSep0mdn52ro73dQYVvvBo+unK8zJUsoL/VXlkqGq3HmQ2eEAcBG/yYNesM/1t/1+IDp89IQ++3ZHqQuJ0snpg1ZuPOJVx0EwB8dKvslf8uC6rUdNWRPpdKuTvWs/aPY5cnpmnnb42bqJ7OcA/+MveRAlmZ0HJe86NjiRU6DknVwrdiWOCwKLxQikBV7OYNOmTWrWrJl++uknXXzxxUWvHzhwQPHx8Vq0aJEuvfRSLVu2TI0bN1b16tXLvS5OQkKC0tNdu/PKrnKD8iu0c/jeroWDVDEy+KztK0aFSJLSM/LO+rn0zHzVvWymw/fCj8xQSPbqUkTrfnZbjDJqPunwPU99H5aCo4re/2wponWt0HqtVPuppS7f7o5Xb1R+6j41evoHWWyuf7B579huyt251uXbBSSpYsWK2r17t9lheCV/yYM50T2UG9PH4Xue2u+Hpn+vsPQFpYjWdVpGRuv7Nhe5bHs9ly/RymNHFWL93z1ndhnKKChQ3lUDXdaPJPVc/av+zDzu0m2WV0FoA2VVu6tMbcNTP1NI1goXR1R+jJGSAvFYiTx4Zv6SB/PDE5Ud63idGk/lwaDsDapw5INSROsZmdXuU2FogsP3PPWdVPh7goLyPLtuo7v2cZJ37+eAcyEXOuYveRAlZVceqPzICx2+57FrxamfKiSr9EtIuJPdGqWMWuMcvueR78MwZCk8ruj940sVrysF4vkPSipvHmSaU0n79u2TJMXFxRV7/bvvvpMktWrVSpLUpUsXj8blPJvZAUgWL4ihiMkP3hqGvOJ34iIH505U5vqlavr6arckBwDm8Z88aP6EC4bFP/aPLzdrqWF1GxT9vD0rQ4mLPVskNYthDSljQ3vZ2/qgQB4jZ8Kxku/ynzzoBeceXnU+KBkW848N5CfHBhL7OcBf+U8eRAlekZe9IYZ/eMP34Q0xuAjHBYGH37KkKlWqSJJSUlLUqFEjSVJWVpaeeeYZ1ahRQ7GxsS7v0x13QiU9sUzTvt7m8L0z3R1yumM/3yxJqnTx9DLH8N47b+qGK7xjvaB9B7NU69JPHb7nke/DYlHdhFrasSmtbO3LYVOaNHiZ67aXueln7f9otBo+tUjBlaqfu0EZLVmyVIkxbts8gDPwlzz40vvr9MgEx3c8eioPjhp5v56596Myty8L+7YUFY4c7dE+XWXJkiWyNvSO44ZTflr1t7rcOt/5hharpkx6VUl9G7k+qHJijJTEsRJO5y95cP6yPbrynu8cvuepPHjVlZdp9uuvlLm9q3W8Za5+/fOQw/c89Z0sXbxI7Zq5fgydjav3cRL7OcCf+UseREl3jPtJ787e6vA9T+XBN6e8ocFXNyxze1c6lHpC1bp/7PA9T10rrhlfTXs3ppWtfTlw/gNXoJgoqXnz5kpISNDIkSNVUFCggoICvfjii8rIyFDr1q3NDq/UanvBYrbeEMMpsZXDFBJsVV6+3bQYvOn7KKv8oweU8tJ1qjn4BUU27WR2OADcgDzoyhgizQ4B5XRB86qKiQpR2jmmrvk3q9WiXhfVdFNU8GYcK/k+/8mD5uegWtXMz8Wnq129gn790+wYzP+9lBf7OcC/+UseREnekIO84Tz9lMoVQxUeatOJ3ELTYvCm76OsOC4IXF4w54f5QkJCNGvWLIWHh2vQoEEaP368nnjiCcXExBQ9yu8L2jStYmr/FovUqom5MZwuJNim8xtVNjWGtone832U1eFF76jg2N/a99FjWjMostifbU9dbnZ4AFzAX/Jg28SqZofgF/v9QBcWGqRh1zaWzWopdRubzaJ+3esoPs73TwzhPI6VfJ+/5MHE82IUFmru1Fltm5qfi09ndjw14yJUrUq4qTG4Avs5wL/5Sx5ESWZfKz4Zg/ccGwQFWU2/du0N1y3Ki+OCwMWTif9o166dVq1aVfRzdna2tm7dqpYtW5oYlXPM3hk1qRejyIizL1TraW0Tq+qPjUdM7d/XxV8/RvHXjzE7DABu5g
95sEGdaEVVCFZGVr4p/QcHWdWiobk3sbjC9x27l3itQYUo5V010IRozDFiUFNNnpms7JxC2e3GuRsY0sO3nu/+wLwEY6Q4jpX8gz/kwaAgq1o2qqwV6w+bFoO33VRj9vmY2f27Cvs5wP/5Qx5ESWbnoQZ1olUxyrvWlW+bWPWMU6B7pn/vOlYqC44LAhdPJp7BunXrZLfbi92Bk5SUpFq1akmSatWqpVtuucWk6ByrXb2CGtetaFr/l3aIN63vMzFzujGb1aLuF9QwrX8AKA9fzINWq0WXdjBvv9+lbXWFhvjPYuqBLCE+Sl//51KFBFnP+oSixXLyz9RnuujC8+M8GCEAd/PFPChJvTqalwdrxkWo6XkxpvXvyIXnxyrKxBteL2X6awA+ylfzIIqrERuh5g0qmdY/14qLs1ot6tHe+74ToLR4MvEM1q5dq4iICDVs+L8FYqdOnWpeQKVgsVh018AmeuClFab0f9egpqb0ezZXda2j+NgI7T+c7fG++/VIYLozAD7LF/OgJN01qIlm/7DLlL5HeGEeRNl1bx+vZVP7aPjTv2h1cqqCbBYVFJ58SvHU38+rFaX/PHqRruhc2+RoAbiar+bBOwY01rPv/Fm6p6pd7M7rmshm8677lSMjgjX46gb676fJHu+7QniQbrmygcf7BQBX8NU8iJLuGthEdz/3q0l9e9858hWda6lOjQracyDL43336VxbdWqYv44lUFbedaTvRYYPH66srCxZrb71FQ25uqHCTVgno0f7GmpSL8bj/Z5LcLBVw65tbErfXFQG4Mt8NQ/2aB+vRgmef0o/PjZCV3er4/F+4V4XNI/Vqpn9tPKTq3XHgMYKsllks1l085UN9MM7l2vbvOsoJAJ+ylfzYO3qkabkoyCbRbf3N+e861zMupB585UNvG5qNwAoLV/Ngyjp5isbKDLC888TdW5TTS0aed8yIDabVXde28SUvrlWDF9HRvAzMdGhevCW5h7vd8zw1h7vs7Tuvj5RVWNCPdpnl7bV1b09U5wCgKdZrRaNNSEnPXlnKwUFcVjlr9o1i9XkJzqpQkSwIiOC9cHTXdTjwnhZLGeeAhUAzPLEsFZnnaLZHYYPbKoasREe7bO0mjWopOt61fNon+GhNo1KauHRPgEAcCQ6MkQjB3s+J40d3sbjfZbW8IFNVa1KmEf7vKhlnKnT0QOuwFUvPzRmeGs1qx/jdLv0zHylZ+Y73e6eGxLVtZ33Fs6qVgrT5Cc6Od2urN9HRJhN74/vzAVGADDJDVecp77dnX8qo6z7/R7ta2iYSXc2AgDwb20Tq+qRoec73a6sebBezUg9f387p9t50qTHLirTDaZl/U5eeOAC1a8d7XQ7AADcYfQdLdWysfNPCZY1D955XRNd4oXrJZ5SuWKo3nzSc9eKw0Js+mB8Z1k9fLMX4GoUE/1QaIhNU5/poiCbczuoupfNVN3LZjrVpn7tKL3g5SeOknRdr3oa2Nu5u1HL8n1InDgCgNksFovefLKTqlR07qJhWfb7kRFBeu8pTgoAAN5lzPDWatGwklNtypIHLRbp/fFdFBkR7FQ7T4urEq4pZbjBtCzfSZe21XXPDYlO9wUAgLuEBNs09ekuCnZyNp2y5MGEGpF6+aELnGpjhn496uqmPvWdalPWa8XP3tdWjb1weTDAWRQT/VS7ZrH68NmucufDcbGVwvTNf3urgpefOJ7y3lOd1eH8WLf2MWJQU04cAcALVK8aobmTLlVEmPvWEQ4JturLCZeqbs0ot/UBAEBZhIbYNPeNS1WrmnunHn3zyU7qdoH3zlJzumt71dP4u9075VqTehX1xWuXcJMRAMDrtGpSRTNe6CZ3LoNZpWKoFkzpragKvrFm8NtjLtbFrau5tY87BjQ2ZUkywB0oJvqxG66orw+f7eqW9TJqVA3X4ncvV6O6FV2+bXeJjAjWgsm91clNSWLEoKZ647GLmN4UALzERS2raeGUyxTlhptewkNt+vo/l3
r11C0AgMCWEB+lJe/1UUKNSJdv22KRJj/e0eem+X5iWCs9NcI9BcXmDSpp8btXqGolz67BBABAaV3Xq56mP9fN6dnsSiOucph+ePdyNT0vxuXbdpeI8CDN/28vdWlb3S3bH3ZtY015oiPXiuE3KCb6uZuvbKBFb12mOtUruGybPdrX0K/Tr1Lzhs7PtW22mOhQLXrzMt19fVOXbTMizKY3HrtIk0ZfxB2oAOBlOretrl+nX6U2Tau4bJvN6sfop2lXqnenWi7bJgAA7tCgTrR+nX6VrujsupxVo2q45r5xqe4a5LpzKk+xWCwaM7y1ZjzfTZWiXffUxI1X1NeyqX1UI9a9T4ICAFBeN1xRX9+/c7nqxrvuZqOu7arrt+lXq2Vj1513e0p0ZIgWTumt+2503Uxz4aE2TXj4Qr35ZCfZbJRf4D8YzQGgx4Xx2jCnv+68rnx3jVYID9Lkxzvqu7cvV0K8707pFhEepEmjO2rxu5erXs3yJc6u7apr3Rf9dc8NidxlAgBeqlmDSvpt+tV6+p42Tq8RcTqb1aLRt7fUqpn91DaxqgsjBADAfWrERmjepF6a+nQXVYws39P6g69qoI1fDlCfLnVcFJ05buxTXxvnDNDV3cr376hWJUxzJlyiGS90U6Vo59ZqBgDALF3b1dD62f3L/bDFqQcsFr97herV8t1rxeFhQZr46EX68f0rVL92+f4dF7eupj9nXaP7b27OtWL4nSCzA4BnRFUI0ZtPdtL9NzXTm58la+pX23Q8K79UbevXjtJdA5sqqW9DVYnxnylburePV/JX12r297s0eWayfl5zsFTtgmwWXXNJXY0Y1FRd21UnMQCADwgOtuqJYa11a99Genf2Fr09a4v2H84uVdu4ymG6vX9jDbu2sU/fTAMACFwWi0VD+jZUvx4J+nDuNk2emazNO9NL1TYyIki3XNlAdw1sqhaNfG92mjOpERuhLyf21MoNRzR5ZrI+XZii3Dx7qdq2blJFIwY11Q2Xn6cKbphOHQAAd4uMCNak0R11343N9Obnyfrgy21Ky8grVdt6NSN118CmurVfI7+a3rtLuxra9OUAzflhtybPTNayVX+Xqp3NalG/HgkaMaipurevwbVi+C2LYRiG2UHA87Ky87Vk5QGt2nREqzalauvudJ3ILZDNalVUhWC1aFhJbROrqn3zWF3UMi4gpu9M3pGmX9Yc1KpNR7Q6OVV/bDwsQyenBqoVV0FtE6uqTdMq6nZBDa+fvmZTmjR4mdlROO/DLlJijNlRAAgE+fl2LVv198k8mHxEG7cfU9aJAhmSIsKClHhejNomVlXbxCrq2q6GQkNsZod8RvZtKSocOdrsMMrE9upzsjasb3YYpRbT6SNJUtovt5gciXMYIyVxrIRAZxiGfl9/WCvWH9aqTUf059ajOp6Zr0K7XWEhQWpQJ+qfPFhV3S+ooehI100J6q1S03K0dOUBrdqUqlXJR7RzX4a27zkui6RWTaqodZMqaptYVR3Oj1WrJlW8+kKhr+7jJPZzAGCW7BMFWrJy/8k8uOmItu5O15ZdJ288at6gklo0rKy2iVV0QbNYdWpdLSCuFW/eWfxa8ZG0HOXl2xUWalPNuApq27SK2jStqm4XVFd8nOuWGHMHXz024LjAu/BkYoCqEBGsK7vW0ZVdfXt6Gldqel6Mmp4Xo9sHNJb0vwuGW+deZ2ZYAAA3CA626pIO8bqkQ7zZoQAA4HEWi0UXnh+nC8+PMzsUr1ElJkwDLq2nAZfWK3rt1Dnhqpn9TIoKAADPiAgPUp8udYpNZX4qD677or9ZYZmqSb0YNakXo9v6NzY7FMArsGYiAAAAAAAAAAAAAIcoJgIAAAAAAAAAAABwiGIiAAAAAAAAAAAAAIcoJgIAAAAAAAAAAABwiGIiAAAAAAAAAAAAAIcoJgJ+KCZECvGx/90h1pNxAwCcY4mOkoKDzQ7DecHBJ2OH2zFGSuJYCYA/88V9nMR+DgAAd/HFYw
OOC7xPkNkBAHC9+AhpVg8pLc/sSEovJuRk3AAA51iqxSlo8msyjmeYHYpTLNFRslSLMzuMgMAYKYljJQD+zBf3cRL7OQAA3MUXjw04LvA+FBMBPxUfwQ4XAAKFpVochTmcFWOkJI6VAPgz9nEAAOB0HBugvHzs4VYAAAAAAAAAAAAAnkIxEQAAAAAAAAAAAIBDFBMBAAAAAAAAAAAAOEQxEQAAAAAAAAAAAIBDFBMBAAAAAAAAAAAAOEQxEQAAAAAAAAAAAIBDFBMBAAAAAAAAAAAAOEQxEQAAAAAAAAAAAIBDFBMBAAAAAAAAAAAAOEQxEQAAAAAAAAAAAIBDFBMBAAAAAAAAAAAAOEQxEQAAAAAAAAAAAIBDFBMBAAAAAAAAAAAAOEQxEQAAAAAAAAAAAIBDFBMBAAAAAAAAAAAAOEQxEQAAAAAAAAAAAIBDFBMBAAAAAAAAAAAAOEQxEQAAAAAAAAAAAIBDFBMBAAAAAAAAAAAAOEQxEQAAAAAAAAAAAIBDQWYHgLIxDh6ScTzD7DBKzRIdJUu1OLPDCCiMkeL2Z0tpeW7bvFvEhEjxEe7bPmMEAAIHebAk8iB8GeMX5+JrY0Ry7zghD5bEGIEvY/ziXBgjJflaLiQPlmT2foRiog8yDh5SwYiHpPx8s0MpveBgBU1+jaTpIYyR4vZnS9culvLsLt+0W4VYpVk93JM4GSMAEDjIgyWRB+HLGL84F58cI5Lbxgl5sCTGCHwZ4xfnwhgpyRdzIXnQAZP3I0xz6oOM4xm+N9Dz832u0u/LGCPFpeX5VrI8Jc/uvjuGGCMAEDjIgyWRB+HLGL84F58cI5Lbxgl5sCTGCHwZ4xfnwhgpyRdzIXnQAZP3IxQTAQAAAAAAAAAAADhEMREAAAAAAAAAAACAQxQTAQAAAAAAAAAAADgUZHYAAAAAgDfKzSvU+m1HtWpTqjZsP6bsEwWSpBHP/KIm9WLUNrGqWjWurAoRwSZHCgAAAAAA4D4UEwEAAIB/GIahX/88pMkzkzXru53KdbBK/ZTPNhf93Wq1qE/n2hoxqKl6dawpq9XiyXABAAAAAADcjmIiAAAAIGnFukMa8exyrU5OLXUbu93Q3B/3aO6Pe1S/dpQmPtJBfbrUcWOUAAAAAAAAnsWaiQAAAAhoObkFeuT139Vx8DynCon/lrI3Q1fe852SnlimY8dzXRghAAAAAACAeSgmAgAAIGAdOZajLrfO10sfrJfdbrhkm9O+3qa2g75Uyt7jLtkeAAAAAACAmSgmAgAAICAdTc9V99u+0coNR1y+7Z37MtV5yDzt+IuCIgAAAAAA8G0UEwEAABBwCgrsuureRdqw/Zjb+jhw5IQuHbZQ6Rl5busDAAAAAADA3SgmAgAAIOC89ME6LV97yKk2uxYO0q6Fg5xqs+OvDI18ZYVTbQAAAAAAALwJxUQAAAAElA3bjmrclDVOt6sYGayKkcFOt3tvzlYt/Pkvp9sBAAAAAAB4A4qJwL8YhqGfV/+t3LxC5eQWatInm7Rtd7rZYQEAABd5ZMJK5RfYPdrngy//JsMwPNonAKBs9v6dqSkzk5WTW6jcvEJ99+s+2e3swwEAgeHY8Vx98OXWk3kwt1Czv9+lvPxCs8MCYLIgswMAvIVhGHpv9la99tEGJe9IK3r9vud/lSGpZ4d4PXZbS/W4MN60GAEAQPns+Ou4FpjwlODmnelauvKAurfnOAIAvNXv6w/r2XfWau6Pe3T6/R+97lyouvGRuu+mZrr3hkQFBXFfNgDA/6TsPa5n31mrj+enKDf/fzdfDnjoB1WuGKq7BjbRw7eer+jIEBOjBGAWjoBPk5WVpfvvv19xcXGKiopSUlKSpk6dquDgYOXk5JgdHtyosNCuIY8v0x1P/azNO9OKvXfqHHLJ7wfUc9gCTZmZ7PH4AMATyIMIBG99vllmPSA4mWMIwKuRBwPb54t2qtOQuZr/016HeWL3gUyNfHmFrr7vO+XkFng+QABwM/
JgYFux7pDaDvpSH87dXqyQeMrR9Fw9/946dRo8TwdTT5gQIQCzUUz8R0FBga644gp98803ev311zVr1izt3LlTo0ePVuPGjRUWFmZ2iG7Rc/kSvb1re7HXtmdlKGTuZyZFZI7/e/V3TZ938ns40wXGQrshw5BGPLtcny/a6cHozMc4KS516QytGRRZ4s+qflbteuM2s8MzBWPE9wVqHkTgmf3DbtP6/nrpHr+YHog8WBJ50PcFch5k/EpLVx7QDQ8vUWGhocJCxyeEhnHyRtNvf/lLQx5f5tkATcYYKY48WBJjxPeRBwN7/KbsPa7ewxcqM7vgjHlQkux2Q8k703T5XQsD6sYaxkhx5MGSAmWMMM3pPyZOnKi1a9dqy5Ytql69uiSpSZMmqlu3rnr06GFydHCnXfsyNHHGRpX2IQWLRRr5ygr1vyRBNhv1+EBUpdtNqtLtpmKvpa34Wjtfv1nV+o40KSqgfMiDCARpx3O1fc9x0/rPy7dr4/Zjat20qmkxuAJ5EP6IPBjYRr36uwyd+cbS09kN6bNFO/V/Gw7rguaxbo8N3oc8CH9EHgxsz76zVlknClRYivWBCwsNrdl8VJ8u3KGkvo08EB28DXkwcFEJ0cm18l577TXdcccdRQlTkhISEhQUFKSWLVsqNTVVl19+uRo3bqwWLVpo6NChys3NNTFquMpbszbLarGU+vOGIe39O0vfLt/nxqjgS3L2b9OuiYOVcPc7Cq+TaHY4gNPIgwgUq5NTzQ5BqzaZH4OrkQfh68iDgW3VpiP6Y9MR2UtxAfWUIJtFkz9l6mqcRB6EryMPBrZjx3M1Y36KCs7yROK/Wa3SGx9vcmNU8CXkwcBBMVFScnKy9u/fr379+hV7/cCBAyooKFCrVq1ksVj02GOPacuWLfrzzz914sQJTZo0yZyA4VLTvtpWqjtvTmezWoqmRUVgK8zJUsoL/VXlkqGq3HmQ2eEAZUIeRKDYujvd7BC0ZZf5MbgSeRD+gDwY2D7+JkVBttLfXCpJBYWGZnyTosLCkmtKIbCQB+EPyIOBbc4Pu5XnYI3Es7HbT96oudXPzm3gPPJgYGGaU0n79p18wiwuLq7Y6999950kqVWrVqpcubK6dOkiSbJarWrXrp327NlT5j4TEhKUnl62HW7LyGh93+aiMvf9b6M2/qnHk9cX/Wwv9YSfzunevbv+zDRvarEzSa/9kmSxOdWm0G7oszmL9M1b17gpqvJx9RiRPDNO3DVGQuu1Uu2nlrp8u5K0+793KCiysmolveSW7Xfv3k25O9e6fLuBOEYqVqyo3bvNWyvNm/laHgTKKjeqi1Spr8P3di0cpIqRwWdtXzEqRJJ07Oebz/q59Mx81b1spsP3/jPpTb3zdK9SROs65MGSyIM4na/lwUAcv+6UXeUmFUS0dPqcML/ArkqxtWS1Z7spsrLz1TEiuWeckAdLCtQxQi50jDzoG+PXXXKiL5Eq9nY6D0pSmwu7Kyh3hxuiKh/GSEnuyoXkwf/xhTFS3jxIMVFSlSpVJEkpKSlq1OjkXM9ZWVl65plnVKNGDcXGFl8HIScnR1OnTtXLL7/s8Vjd4eVmLTWsboOin7dnZShx8QITI/K0Mv7HNgLrLlTGSUkH505U5vqlavr6alls7E4ZI74r0PMgAol7Duad4w0xuAZ5sDjyoO8iDwb6+C37eZ0lgM4JA3uMOEYeLI4x4rvIg4E9fk/mMuee0C9CHjQxIvORB4sLhDHCb1lS8+bNlZCQoJEjR6qgoEAFBQV68cUXlZGRodatWxf7rN1u15AhQ9S9e3dddtllZe6zPBVg+7YUFY4cXeb2ZlmyZImsDeubHUYJTa6epa2702U4cW0vyGbRbYOv1ptPvui+wMqBMVLcpjRp8DLXbjNz08/a/9FoNXxqkYIrVT93gzJasmSpEmNcv13GCE7na3kQKKv3Zm/R7eN+dvjemZ4kPN2pJxIrXTy9zDGMfOBuPXf/B2VuXxbkwZLIgz
idr+VBxq9rPTVltca/tdapNRMlqVJ0iI6kHpDVWsYLsG7kq2NEcs84IQ+WxBjB6ciDnuON43fWop267v8WO93OYpG2bvhF8XEV3BBV+TBGSnJ1LiQPmsfM/QhrJkoKCQnRrFmzFB4erkGDBmn8+PF64oknFBMTo1atWhX77N133y2r1aoJEyaYEitcb/h1TZxuU1Bo6I4Bjd0QDXxB/tEDSnnpOtUc/IIim3YyOxyg3MiDCBQtGlY2OwS1aFjJ7BDKjTwIf0MeDGxJfRvKcObOUkk2q0XDrm3ilYVEuB95EP6GPBjYrupWR5WiQ5xqY7NZdPnFtbyykAj3Iw8GLp5M/Ee7du20atWqop+zs7O1detWtWzZsui1hx9+WHv37tWcOXNktVKH9RdD+jbUoxNXKjevdI/m26wWtWxcWW0Tq7o5Mnirw4veUcGxv7Xvo8e076PHir0XmdhZDcf61yPsCAzkQQSC8xtVUpDNooJC86Ya9YfjB/Ig/BF5MHAlxEepT5faWvDzXyosZX6wG4buvNb5m1LhH8iD8EfkwcAVGmLT8Oua6sUP1pX6Kf3CQkN3X5/o5sjgrciDgYti4hmsW7dOdru96A6cjRs36uWXX1aTJk10wQUXSJIuvfRSn58f/PuO3Uu81qBClPKuGmhCNOaoFB2qKU900tAxP53zs1arRSEhVr077mIPROY9GCfFxV8/RvHXjzE7DK/CGPE/gZIHEVjCQoPUomFlrdmcakr/MVEhalAn2pS+XYk8WBJ50P8EUh5k/EoTH+mg5Wu/VnpGngpLcSH1ufvaqV6tKA9E5h0YI8WRB0tijPgf8mBgjd9Hhp6vr5bs1pbd6ee8scZikW7qU1+XX1zLQ9GZjzFSHHmwpEAZIxQTz2Dt2rWKiIhQw4YNJUnNmjVzeuoT+I5b+zVSbl6h7n52uaxWx08sWK0WRUUEad6kXmrd1PefKgCAsyEPwl/dclUD04qJN1/ZgCnxAB9BHgws59WK1o/vX6FL71yog6kn5OhXferJ9qdGtNEjQ8/3fJAA4EHkwcBSMSpEP7x7uS4b/q3+3HpUVov073trbDaLCgsN3dSnvt57qrMsFs5rgEDDM+lnMHz4cGVlZfHYfgAZPrCp1n5+jW7v31hhobZi78VVDtPY4a2V/NW1uriN+xaVBQBvQR6Ev0rq21Dh/8rznnLXQKbEA3wFeTDwNG9YWRvnDNBLD7ZXnRrF14AKDrLqxivq67fpV2nM8NZcQAXg98iDgad61Qj9Ov0qvfdU5xJrzVssUu+ONTX/v7304bNdFRJszvkUAHPxZCJwmhaNKmvKk5304oMXaPPOdGVm5ysmKkQtGlZWcDAHUAAA+LpK0aG6+coGeueLLR7t95IL45VYv5JH+wQAOKdyxVD9X1ILPTS4udZvO6qj6bkKDw1Sw4RoVYkJMzs8AADcKjwsSEOvaaRb+zXU1l3p+jv1hIJsVtWNj1TNahXOvQEAfo1iIuBAdGSI2reINTsMAADgBs/e21ZfLt6tw8dyPNJfaLBVbzx2kUf6AgCUn9VqUcvGVcwOAwAAU1gsFjWuF6PG9WLMDgWAF+FRKwAAAASU2Mrhmvx4R6fbpWfmKz0z3+l24+9uq6bnxTjdDgAAAAAAwBtQTAQAAEDAubZXPd3ev5FTbepeNlN1L5vpVJtLL4rXyCHNnWoDAAAAAADgTSgmAgAAICBNeaKTBvau57btd2pdTbNf7ymbjUNuAAAAAADgu7iyAQAAgIAUFGTVjOe76c7rmrh823261Na3U3orMiLY5dsGAAAAAADwJIqJAAAACFhBQVa9+WQnzZlwiapVCSv39iIjgvTmk500941LVYFCIgAAAAAA8AMUEwEAABDw+vWoq41zBuju65sqMiLI6fYhwVbd1Ke+NszurzuvayKLxeKGKAEAAAAAADzP+SslAAAAgB+qEhOmSaM76rn72mn6vO2a8U2KVienKie30OHng4Osat6gkgb2rqeh/R
oprkq4hyMGAAAAAABwP4qJAAAAwGmiI0M04vpEjbg+UQUFdiXvSNOG7ceUmZ0vuyFFhNnU9LwYtWhYWaEhNrPDBQAAAAAAcCuKiQAAAMAZBAVZ1aJRZbVoVNnsUAAAAAAAAEzBmokAAAAAAAAAAAAAHKKYCAAAAAAAAAAAAMAhiokAAAAAAAAAAAAAHKKYCAAAAAAAAAAAAMAhiok+yBIdJQUHmx2Gc4KDT8YNj2CMFBcTIoX44N4uxHoydndgjABA4CAPlkQehC9j/OJcfHKMSG4bJ+TBkhgj8GWMX5wLY6QkX8yF5EEHTN6PWAzDMEzrHWVmHDwk43iG2WGUmiU6SpZqcWaHEVAYI8Xtz5bS8ty2ebeICZHiI9y3fcYIAAQO8mBJ5EH4MsYvzsXXxojk3nFCHiyJMQJfxvjFuTBGSvK1XEgeLMns/QjFRAAAAAAAAAAAAAAO+djDrQAAAAAAAAAAAAA8hWIiAAAAAAAAAAAAAIcoJgIAAAAAAAAAAABwiGIiAAAAAAAAAAAAAIcoJgIAAAAAAAAAAABwiGIiAAAAAAAAAAAAAIcoJgIAAAAAAAAAAABwiGIiAAAAAAAAAAAAAIcoJgIAAAAAAAAAAABwiGIiAAAAAAAAAAAAAIcoJgIAAAAAAAAAAABwiGIiAAAAAAAAAAAAAIcoJgIAAAAAAAAAAABwiGIiAAAAAAAAAAAAAIcoJgIAAAAAAAAAAABwiGIiAAAAAAAAAAAAAIcoJgIAAAAAAAAAAABwiGIiAAAAAAAAAAAAAIcoJgIAAAAAAAAAAABwiGIiAAAAAAAAAAAAAIcoJgIAAAAAAAAAAABwiGIiAAAAAAAAAAAAAIcoJgIAAAAAAAAAAABwiGIiAAAAAAAAAAAAAIcoJgIAAAAAAAAAAABwiGIiAAAAAAAAAAAAAIf+H8OUAZTnzaTyAAAAAElFTkSuQmCC", - "text/plain": [ - "
" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAABxMAAAETCAYAAAD9HCj7AAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjkuMiwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8hTgPZAAAACXBIWXMAAA9hAAAPYQGoP6dpAABcE0lEQVR4nO3dd3gUVdvH8d/uppJCgNDB0HsLIKJIFQEFBEWKhaIiIhZEH3wsdAtWxBcE7IjYKSogICgWRKWDQAgQCUgRCBBIQvrO+weSh5gFsmF3J7v7/VyXl2R2Zs6d3czeM3PPOcdiGIYhAAAAAAAAAAAAAPgXq9kBAAAAAAAAAAAAACieKCYCAAAAAAAAAAAAcIhiIgAAAAAAAAAAAACHKCYCAAAAAAAAAAAAcIhiIgAAAAAAAAAAAACHKCYCAAAAAAAAAAAAcIhiIgAAAAAAAAAAAACHKCYCAAAAAAAAAAAAcIhiIgAAAAAAAAAAAACHKCbCaRaLRRMmTMj7efbs2bJYLEpMTDQtpov5d7xmGzJkiCwWiywWixo1anTRdc+9t+vXr/dQdCjOHnnkkby/nfDwcLPDAQAAAAAAAAD4AYqJJklISNB9992nGjVqKCQkRJGRkWrTpo1ef/11paenmx0e3Cw6OloffvihXnjhhXzLq1WrVuTCZ4cOHTRkyJAibTthwgRVq1atSNsWxuX8XkOGDFGHDh2c3i4xMVEWi0U//PBDkdq9nJhdwVH8AwcO1Icffqi2bduaFhcAAAAAAAAAwL8EmB2AP1qyZIn69u2r4OBgDRo0SI0aNVJWVpZWr16t0aNHa/v27XrrrbfMDvOC0tPTFRDgPX86xTHesLAw3XnnnWaHAS/TokULtWjRQitXrtTGjRvNDgcAAAAAAAAA4AeKV4XFD+zdu1cDBgxQTEyMvv/+e1WsWDHvtQceeEB79uzRkiVLTIzw0kJCQswOwSneFi8AAAAAAAAAAEBxwTCnHvbSSy8pNTVV7777br5C4jm1atXSyJEj837OycnRM888o5o1ayo4OFjVqlXTU089pczMzHzbVatWTT169NAPP/ygli1bKjQ0VI0bN84bInHBggVq3LixQkJC1KJFC23atCnf9kOGDFF4eLj+/PNPde3aVWFhYapUqZImTZokwzDyrVvYOQiXLl2qtm3bKiwsTBEREerevbu2b99+ye0mTJggi8VSYLmjuRnXr1+vrl27Kjo6WqGhoapevbruvvvui8Z7bv979uzRkCFDFBUVpZIlS+quu+7SmTNn8m2bnp6uhx9+WNHR0YqIiNBNN92kgwcPOnwPdu7cqf3791/y9yuKzMxMPfrooypbtqzCwsJ0880369ixY25p63xz585Vq1atVKJECZUqVUrt2rXTt99+K0n6/vvvZbVaNW7cuHzbfPzxx7JYLJo5c6ZbYho/frysVqu+++67fMuHDRumoKAgbdmyxS3tSlJycrJGjRqlatWqKTg4WFWqVNGgQYOUlJSUt05GRoYmTJigOnXqKCQkRBUrVtQtt9yihIQE0+MHAAAAAAAAAMBZFBM9bNGiRapRo4auueaaQq0/dOhQjRs3Ts2bN9drr72m9u3ba/LkyRowYECBdffs2aPbb79dPXv21OTJk3Xy5En17NlTH330kUaNGqU777xTEydOVEJCgvr16ye73Z5v+9zcXHXr1k3ly5fXSy+9pBYtWmj8+PEaP36807/nhx9+qO7duys8PFwvvviixo4dqx07dujaa6/NVwy8HEePHlWXLl2UmJioJ554QtOmTdMdd9yh3377rVDb9+vXTykpKZo8ebL69eun2bNna+LEifnWGTJkiKZNm6Ybb7xRL774okJDQ9W9e3eH+6tfv74GDRp02b+XIw899JC2bNmi8ePH6/
7779eiRYv04IMPuqWtcyZOnKiBAwcqMDBQkyZN0sSJE1W1alV9//33kqROnTppxIgRmjx5ct6Qm4cPH9ZDDz2kzp07a/jw4W6Ja8yYMWrWrJnuuecepaSkSJKWL1+ut99+W+PGjVPTpk3d0m5qaqratm2radOmqUuXLnr99dc1fPhw7dy5UwcOHJB09hjq0aOHJk6cqBYtWujVV1/VyJEjderUKW3bts3U+AEAAAAAAAAAKBIDHnPq1ClDktGrV69Crb9582ZDkjF06NB8y//zn/8Ykozvv/8+b1lMTIwhyVizZk3esuXLlxuSjNDQUGPfvn15y998801DkrFq1aq8ZYMHDzYkGQ899FDeMrvdbnTv3t0ICgoyjh07lrdckjF+/Pi8n99//31DkrF3717DMAwjJSXFiIqKMu699958cf/9999GyZIlCyz/t/HjxxuO/jT/3c7ChQsNSca6desuur9/x3tu/3fffXe+9W6++WajTJkyeT9v2LDBkGQ88sgj+dYbMmRIgX2ea6d9+/YXjcUwzr7XMTExl1zPMP73O3fu3Nmw2+15y0eNGmXYbDYjOTm5UPtx1u7duw2r1WrcfPPNRm5ubr7Xzo8jLS3NqFWrltGwYUMjIyPD6N69uxEZGZnv780d/vjjDyMoKMgYOnSocfLkSaNy5cpGy5YtjezsbLe1OW7cOEOSsWDBggKvnXtP3nvvPUOSMWXKlAuu44r4Bw8ebISFhRXxNwEAAAAAAAAAoPDomehBp0+fliRFREQUav1vvvlGkvToo4/mW/7YY49JUoG5FRs0aKCrr7467+errrpK0tkeZFdccUWB5X/++WeBNs/v7WaxWPTggw8qKytLK1euLFTMkrRixQolJyfrtttuU1JSUt5/NptNV111lVatWlXofV1MVFSUJGnx4sXKzs52evt/95xr27atjh8/nvc5LVu2TJI0YsSIfOs99NBDDvdnGEbesLKuNmzYsHxDv7Zt21a5ubnat2+fW9r78ssvZbfbNW7cOFmt+b8mzo+jRIkSmj17tuLi4tSuXTstWbJEr732Wr6/N3do1KiRJk6cqHfeeUddu3ZVUlKSPvjgAwUEuG8a2Pnz56tp06a6+eabC7x27j2ZP3++oqOjHf6NnP++mRE/AAAAAAAAAABFQTHRgyIjIyUpb2jDS9m3b5+sVqtq1aqVb3mFChUUFRVVoJD07wJOyZIlJUlVq1Z1uPzkyZP5llutVtWoUSPfsjp16kiSU0OT7t69W9LZImbZsmXz/fftt9/q6NGjhd7XxbRv3159+vTRxIkTFR0drV69eun9998vMJ/khfz7/SpVqpSk/70v597/6tWr51vv35+HJ1wqVldLSEiQ1WpVgwYNLrlumzZtdP/992vt2rXq2rVrgTkr3WX06NFq2rSp1q5dq/Hjxxcq1suRkJCgRo0aXXKdunXrFqoo6On4AQAAAAAAAAAoCrrBeFBkZKQqVaqUN3daYZ3fo+libDabU8sNw3AqjsI6Nxfjhx9+qAoVKhR4/VKFlgv9vrm5uQXWmzdvnn777TctWrRIy5cv1913361XX31Vv/32m8LDwy/ajqffl8tRnGPNzMzM65GZkJCgM2fOqESJEm5v988//8wrXP/xxx9ub8/VvD1+AAAAAAAAAIB/oGeih/Xo0UMJCQn69ddfL7luTEyM7HZ7XsHhnCNHjig5OVkxMTEujc1utxcY+nTXrl2SpGrVqhV6PzVr1pQklStXTp07dy7wX4cOHS66/bled8nJyfmWX2hIz9atW+u5557T+vXr9dFHH2n79u369NNPCx3vhZx7//fu3Ztv+Z49ey5738VdzZo1ZbfbtWPHjkuuO378eMXFxemVV17R3r179cQTT7g9PrvdriFDhigyMlJPPfWUPvnkEy1YsMCtbdasWfOSDwLUrFlT8fHxlxx214z4AQAAAAAAAAAoCoqJHvb4448rLCxMQ4cO1ZEjRwq8npCQoNdff12SdO
ONN0qSpk6dmm+dKVOmSJK6d+/u8vimT5+e92/DMDR9+nQFBgbquuuuK/Q+unbtqsjISD3//PMOiyrHjh276PbnipE//fRT3rK0tDR98MEH+dY7efJkgZ55zZo1k6RCD3V6MV27dpUkzZgxI9/yadOmOVx/586d2r9//2W3Wxz07t1bVqtVkyZNyutpes757/nvv/+uV155RY888ogee+wxjR49WtOnT9ePP/7o1vimTJmiNWvW6K233tIzzzyja665Rvfff7+SkpLc1mafPn20ZcsWLVy4sMBr596TPn36KCkpKd9x9O91zIofAAAAAAAAAICiYJhTD6tZs6Y+/vhj9e/fX/Xr19egQYPUqFEjZWVlac2aNfriiy80ZMgQSVLTpk01ePBgvfXWW0pOTlb79u21du1affDBB+rdu7c6duzo0thCQkK0bNkyDR48WFdddZWWLl2qJUuW6KmnnlLZsmULvZ/IyEjNnDlTAwcOVPPmzTVgwACVLVtW+/fv15IlS9SmTRuHxZZzunTpoiuuuEL33HOPRo8eLZvNpvfeey9vH+d88MEHmjFjhm6++WbVrFlTKSkpevvttxUZGZlXiL0cLVq0UJ8+fTR16lQdP35crVu31o8//pjXW/Pfw7HWr19f7du3zxvyszjo0KGDfvzxR6eHQ61Vq5aefvppPfPMM2rbtq1uueUWBQcHa926dapUqZImT56sjIwMDR48WLVr19Zzzz0nSZo4caIWLVqku+66S3/88YfCwsIu2Ma53q7OzMcpSXFxcRo7dqyGDBminj17SpJmz56tZs2aacSIEfr8888vuG1iYqKqV6+uwYMHa/bs2U61O3r0aM2bN099+/bV3XffrRYtWujEiRP6+uuvNWvWLDVt2lSDBg3SnDlz9Oijj2rt2rVq27at0tLStHLlSo0YMUK9evW6rPgBAAAAAAAAAPA0iokmuOmmm7R161a9/PLL+uqrrzRz5kwFBwerSZMmevXVV3XvvffmrfvOO++oRo0amj17thYuXKgKFSroySef1Pjx410el81m07Jly3T//fdr9OjRioiI0Pjx4zVu3Din93X77berUqVKeuGFF/Tyyy8rMzNTlStXVtu2bXXXXXdddNvAwEAtXLhQI0aM0NixY1WhQgU98sgjKlWqVL5tzxVXP/30Ux05ckQlS5ZUq1at9NFHH6l69epOx+zInDlzVKFCBX3yySdauHChOnfurM8++0x169ZVSEiIS9pwp9TUVIfzVhbGpEmTVL16dU2bNk1PP/20SpQooSZNmmjgwIGSpKeeekp79uzRmjVr8t6LoKAgffDBB2rdurVGjx5doFfn+dLS0lSrVi2nYsrNzdXgwYMVHR2dr8du7dq1NXnyZI0cOVKff/65+vXr53D71NRUSVLFihWdaleSwsPD9fPPP2v8+PFauHChPvjgA5UrV07XXXedqlSpIunsMfTNN9/oueee08cff6z58+erTJkyuvbaa9W4cePLjh8AAAAAAAAAAE+zGM52WYJPGjJkiObNm5dXbMGFbd68WbGxsZo7d67uuOMOp7cfMmSIvv/+e23cuFEBAQGKiopyfZCSUlJSVLp0aU2dOlUPPPCAW9ooqh07dqhhw4ZavHixW4brvZAZM2bo8ccfV0JCgsqXL++xdl0lLS1N6enpeuihh7Ro0SKOVwAAAAAAAACA2zFnInAR6enpBZZNnTpVVqtV7dq1K/J+//rrL5UtW1bXXnvt5YR3UT/99JMqV66cr6drcbFq1SpdffXVHi0knmv34Ycf9spCoiQ9/fTTKlu2rD799FOzQwEAAAAAAAAA+Al6JkISPRMvZOLEidqwYYM6duyogIAALV26VEuXLtWwYcP05ptvFmmfO3bs0KFDhySdHTqzdevWrgwZPmzXrl1584YGBASoQ4cO5gYEAAAAAAAAAPB5FBMhiWLihaxYsUITJ07Ujh07lJqaqiuuuEIDBw7U008/rYAAphwFAAAAAA
AAAAC+jWIiAAAAAAAAAAAAAIeYMxEAAAAAAAAAAACAQxQTAQAAAAAAAAAAADhEMREAAAAAAAAAAACAQxQTAQAAAAAAAAAAADhEMREAAAAAAAAAAACAQxQTAQAAAAAAAAAAADhEMREAAAAAAAAAAACAQxQTAQAAAAAAAAAAADhEMREAAAAAAAAAAACAQxQTAQAAAAAAAAAAADhEMREAAAAAAAAAAACAQxQTAQAAAAAAAAAAADhEMREAAAAAAAAAAACAQxQTAQAAAAAAAAAAADhEMREAAAAAAAAAAACAQxQTAQAAAAAAAAAAADhEMREAAAAAAAAAAACAQxQTAQAAAAAAAAAAADhEMREAAAAAAAAAAACAQxQTAQAAAAAAAAAAADhEMREAAAAAAAAAAACAQxQTAQAAAAAAAAAAADhEMREAAAAAAAAAAACAQxQTAQAAAAAAAAAAADhEMREAAAAAAAAAAACAQxQTAQAAAAAAAAAAADhEMREAAAAAAAAAAACAQxQTAQAAAAAAAAAAADhEMREAAAAAAAAAAACAQxQTAQAAAAAAAAAAADhEMREAAAAAAAAAAACAQxQTAQAAAAAAAAAAADhEMREAAAAAAAAAAACAQxQTAQAAAAAAAAAAADhEMREAAAAAAAAAAACAQxQTAQAAAAAAAAAAADhEMREAAAAAAAAAAACAQxQTAQAAAAAAAAAAADhEMREAAAAAAAAAAACAQxQTAQAAAAAAAAAAADhEMREAAAAAAAAAAACAQxQTAQAAAAAAAAAAADhEMREAAAAAAAAAAACAQxQTAQAAAAAAAAAAADhEMREAAAAAAAAAAACAQxQTAQAAAAAAAAAAADhEMREAAAAAAAAAAACAQxQTAQAAAAAAAAAAADhEMREAAAAAAAAAAACAQxQTAQAAAAAAAAAAADgUYHYAMN/hY2c0f2WijhxPV4DNolpXROqW66opNIQ/DwCA7zuVkqV5K/Zq/+E0GTJUpXyYbr2+ukqXDDY7NAAA3C4zK1dfrdqnnXtPKSs7V2VLheiWztVUtUK42aEBAAAAKCYshmEYZgcBc2zbfUKT3tykBSv3yW4YstkskiHl5BqKDAvUvX3qasywZoqK5GYqAMD3HDqapklvbtYHX+9WRmauAmwWSVJurqHAQKtuv7GmJtwfq5hKESZHCgCA66WdydZzb2/RrC/idPJ0lgJsFlksZ68HJaln+ys0fnismjeINjlSAAAAAGZjmNN/pKWlaeTIkSpXrpwiIiI0ZMgQzZ49W4GBgcrIyDA7PJdbtfaQrrpjkRZ8t0+5dkOGIeXkGHkXjqfTsjV17na1vnORDh87Y3K0AABP8KdcGL83WS1v+0rvLIhXRmaupLM3T3NyDRmSsrLtmrt4j1oM+Epb4o+bGywAwCP8KQ8eT87QtYMX68X3t+rk6SxJZ/Ngds7Za0PDkJb8/JeuHrhIi3/cb3K0AAAAAMxGz0RJOTk5uu6663To0CFNmDBB0dHRev7557V7926VLl1a27ZtMztEl9q+56Ra3f61MrJyZLdffF2bzaKGNaL020c3MewpAPgwf8qFSSczFNvvSx1OOqPc3IufBtmsFpUuGazNX/RWpXJhHooQAOBp/pQHc3LsajtksdZtS1Ku/eJ50GKRAgOsWv1BD13ZqKyHIgQAAABQ3FAdkvT6669r8+bNio+PV4UKFSRJ9erVU7Vq1dSpUyeTo3O9Z97apMzs3EsWEqWzQ71t3X1Sny3/U0N61XF/cAAAU/hTLpz5eZwOHUsrXB60GzpxKlNT527XS4+2cn9wAABT+FMe/PqH/fpt67FCrWsYZ68Jx72xUUtndnVzZAAAAACKK78f5tQwDE2ZMkX33ntv3kWjJMXExCggIEBNmzaVJMXFxenKK69UnTp11KlTJx0+fNiskC/L30lnNH9F4iV7YpzPapWmfbzDjVEBAMzkT7kwJ8euNz6NK1Qh8Zxcu6G35u1UekaO+wIDAJjGn/KgJE37ZI
dsVkuh18+1G1q+5oD+PHDajVEBAAAAKM78vpgYFxenQ4cOqXfv3vmWHz58WDk5OWrWrJkkafjw4RozZox27dqlXr166YknnvB8sC6w8J85Ep1ht0sb444r4S8uHgHAF/lTLvxl8xEdOZ7u9HanUrO18rdDbogIAGA2f8qDx06k64d1h52+JrRYLPp8+V43RQUAAACguPP7ORNXrFihLl26KD4+XnXq/G8Yzzlz5mjw4ME6evSo7Ha7YmNjdejQ2ZuIqampqlSpkk6fLnpxLSYmRqdOnbrs+J2VEdlFmSU7Sxab09uG/f1/Csja54aoAMD9SpYsqX37+A5zxIxcaFYezA5tojNlBxdp29Djnyso7XcXRwQAnkEevDB/yoO5AeWVWulx5zc0chSU8otCk792fVAA4CHkQgAAis7veyaWKVNGkpSQkJC3LC0tTc8++6wqVqyosmXL6sCBA6patWre6+Hh4QoJCdHx48c9Hu/lyzVpWwBAceVfudCJ8U0LIA8CgC/yrzzI9SAAAAAA5wWYHYDZGjVqpJiYGD322GPKyclRTk6OXnzxRaWkpCg2NtZt7Zr1JNTny/9U/9GrnN7OZrUoMX69okuFuCEqAICZzMiFZuXBbbtPqHGfhUXaduWSj3RNs/IujggAYDZ/yoOpZ7IV3W6uMrOce7jGYgnQG6+O09A+H7kpMgAAgPwMw1Dcn8k6npypsNAANa5dWoGBft83CjCN3x99QUFBmjdvnkJDQ9W/f39NmjRJY8aMUVRUVN7cGFWqVNFff/2Vt01qaqoyMjLynmD1Jr06xqhUZJBT2wTYLLqlcwyFRADwUf6UCxvVLq1WjcrKarUUehuLRapbraSublrOjZEBAMziT3kwvESgBvaopQBb4fOgJIWGBKh/t+puigoAAOB/srJz9canO1S7+xdqePMCtb97iVoM+EoVr/tYE2du1IlTmWaHCPglvy8mSlLLli21YcMGnTlzRps2bVKnTp20a9cuNW3aVJJUvnx51apVS1999ZUk6d1331Xv3r1NjLjogoNsGt63vlM3UXNyDY3o38CNUQEAzOZPufCh2xvIbndiymhDevj2BrJYnLvxCgDwHv6UB+/vV185uYXPgzarRUN61VZEmHMPpQIAip958+apTp06CgsLU7t27TRmzBh17NjR7LCAPGfSc9T53qV69OXflXAgRZJk/HPacjw5U5Pf2aLm/RZq/+FUE6ME/BPFRAe2bt0qu92e9xSqJM2cOVPPPPOMateurS+//FIvvPCCeQFepieHNlHDmlGyFeJpVIuk+/vXU/uWFdwfGACg2PDlXHjbDTV0U4crVJjnaqxWizpdVVH39qnn/sAAAMWGL+fB5g2i9cQ9TQq1boDNomqVw/XMgy3cHBUAwN3mzp2rkSNH6v3331dqaqruuecevfDCC2revLnZoQF5Bj71g37/45iysh0PyZ6ZbdehY2fU+d6lyspmPmfAkyyGYTjxaL5/mDVrlh577DGlpKTIavXNeuvR4+m68YHl2rDjuKxWS4EeGgE2yz89Euvr/55oLZvNN98HAIBjvp4L0zNyNPCpHzV/ZaJsNoty/9VD49yyrtdU1rwp1ym8RKBJkQIAzODredBuN/TU/63Xi+9tzbv2O5/VKtntUsOaUVo2s5uqVAgzKVIAgCtkZGSoatWqeu+999SzZ09JZ+ejCwkJ0bvvvqs777xTc+fO1RtvvCFJevbZZ3XdddeZGTL80M69yarfa36h1g0MsOjD5zuof7cabo4KwDkUE/1YZlau5q9I1LRPtuu3rcfylgfYLOrTuZpG9K+vti0qMKwbAMAn2e2Glv9yQNM/3aGlqw/o/DOizq0r6cHbGqhHu6o8UAMA8Fm/bz2qGZ/F6ZOlfyo75389AJrXL6OHbm+g/l1rKDQkwMQIAQCusGLFCvXp00enT5/OW3bq1ClFRUVp27Ztqly5stq1a6e1a9cqNTVVHTt21ObNm2Wz2UyMGv5m5A
u/atYXOy/YK/F8FovUqlFZ/fbRTR6IDIAkcVXgx4KDbLq9e03d3r2m9h9OVYOb58siad/yASpdMtjs8AAAcCur1aIb2lbVDW2r6sjxdNXq/rkkadeivqpYtoTJ0QEA4H5XNSmnq5qU0+v/ba2qXT6VIemPebeoepUIs0MDALjQ0aNHVbZs2XzL5s6dq9DQUNWrV08rV65U27ZtFRISopCQEFWtWlUJCQmqU6dOodsYMGCA4uPjXR06/Miu3L7KUvlCrWsY0to/Dis2NtbNUQG+pW7duvr000+LtC2P2kOSdEXFcAXYrLLZrBQSAQB+p3yZUNn+yYMUEgEA/iYqMlg2m1UBNiuFRADwQQ0aNFBiYqK+++47ZWdna/78+Ro7dqwaN24sm82m48ePq1SpUnnrlypVSsePHzcxYvgjw+lSBaPpAZ5Ez0QAAAAAAAAA8FGxsbEaP368+vfvL0nq27ev2rZtq0qVKkmSypQpo5MnT+atn5ycrDJlyjjVRlF7ugDn9PvPd1qwcp9y7YWblS2mUkltWrbJzVEBOIeeiQAAAAAAAADgw8aNG6ekpCQlJSVp5syZSkhIyBsi8qqrrtLq1auVmZmpEydOaP/+/apZs6bJEcPfDLu1ngpXRpSCg6x6YEADt8YDID96JgIAAAAAAACAn8jKylJ8fHxeMTEqKkqPPfaYOnToIEmaMmWKbDabiRHCH3VqVUk1q0Zo74EU5eReuKxosUhWq0V39a7twegAUEwEAAAAAAAAAD+xc+dOSVLjxo3zlg0aNEiDBg0yKyRAVqtFS6Z3Ues7Ful0WpbDgqLFItlsFn05tbPKRIWYECXgvxjmFAAAAAAAAAD8RJMmTZSdna2QEIoxKF5qx5TUhs96qXPrSrJYpJBgmyw6W0SUpOb1y+jH97qryzVVTI0T8Ef0TAQAAAAAAAAAAKarVjlCS2d2075DKVrw3T69+N4WWa0WffNGVzWrV8bs8AC/Rc9EAAAAAAAAAABQbMRUitCogY1UsWwJlS8TSiERMBnFRAAAAAAAAAAAAAAOUUwEAAAAAAAAAAAA4BDFRAAAAAAAAAAAAAAOUUwEAAAAAAAAAAAA4BDFRAAAAAAAAAAAAAAOUUwEAAAAAAAAAAAA4BDFRAAAAAAAAAAAAAAOUUwEAAAAAAAAAAAA4BDFRAAAAAAAAAAAAAAOUUwEAAAAAAAAAAAA4BDFRAAAAAAAAAAAAAAOUUwEAAAAAAAAAAAA4BDFRAAAAAAAAAAAAAAOUUwEAAAAAAAAAAAA4BDFRAAAAAAAAAAAAAAOUUwEAAAAAAAAAAAA4BDFRAAAAAAAAAAAAAAOUUwEAAAAAAAAAAAA4BDFRAAAAAAAAAAAAAAOUUwEAAAAAAAAAAAA4BDFRAAAAAAAAAAAAAAOUUwEAAAAAAAAAAAA4BDFRAAAAAAAAAAAAAAOUUwEAAAAAAAAAAAA4BDFRAAAAAAAAAAAAAAOUUwEAAAAAAAAAAAA4BDFRAAAAAAAAAAAAAAOUUwEAAAAAAAAAAAA4BDFRAAAAAAAAAAAAAAOBZgdAAD3MI4clXE6xewwCs0SGSFL+XJu23/iwRQlJWe4bf/uEB0VomqVI9y2f/5GAN/hbcezxDHtaeTBgrztuOGYAS7M245niWPa08iDBXHcAAAAZ1BMBHyQceSockY8KmVnmx1K4QUGKmDGFLdcGCQeTFH9XvOVkZXr8n27U0iQTXFf9XHLBSR/I4Dv8MrjWeKY9iDyYEFeedxwzAAOeeXxLHFMexB5sCCOGwAA4CyKiYAPMk6neN9FQXa2jNMpbrkoSErO8LoLR0nKyMpVUnKGey4e+RsBfIZXHs8Sx7QHkQcL8srjhmMGcMgrj2eJY9qDyIMFcdwAgGsZqWlSRjHsAR8SIkt4mEt3mXw6U6lncly6T1cILxGgqMhgl+6z2H6ukls+20uhmAgAAAAAAAAAAOAkIzVNOfc8IKWnmx1KQaGhCnj3DZcVnZJPZyqm62
c6nVb8HkiJDAvUvuX9XVZQLNafq+Tyz7YwrB5rCQAAAAAAAAAAwFdkZBTfglN6ukt71qWeySmWhURJOp2W7doek8X5c5Vc/tkWBsVEAAAAAAAAAAAAAA5RTIRyc+068Heacu2Gcu2Gjh5Pl2EYZodlqlMpWdp7IEW7Ek9p36EUZWQWv3GgAQCuYRiGDh87k5cHDx1N8/s8CADwL0knM/Ly4F9/pyonx252SAAAAACKEeZM9EOGYeinDX9r4Xf7tGFHkjbtPK609P8Vy8p3/Fjly4SqRYMyatWorO7sUUs1q0aaGLH7nTydqY+WJOjnjX9rw44kJfyVku/1AJtFDWuVUosG0bq+dSXd0rmaggJtJkULALhcf+w6oU+W/ql1249pY9xxnTiVmfda5c6fqmR4kJrXL6OWDaPVv1sNtWgQbWK0AAC41v7Dqfpw0R79/scxbYhL0qGjZ/Jeu6LLZwoNtqlZvTJq0SBaN3W4QtddVUlWq8XEiAEAAACYiWKiH8nMytU7C+I147M47UhIvui6R46n65ufD+ibnw9owsxN6tamih6+vYG6XVtFFovvXET+seuEpn60XR8vSVBGVu4F18vJNbQl/oS2xJ/Qewt3qVzpEN3bp64eur2hypcJ9WDEAICistsNffHtXk3/ZIdWbzpy0XVPpWZp1brDWrXusF6e/YdaNSqrBwbU1x3da8pmY2AHAIB3+v73Q3r9o+1a/NNfstsv3As/PTNXv245ql+3HNX0T3aoTkxJ3d+vnobdWk8lQrmNAACAJ51KydKyXw5ow44kbdiRpMNJ6crJsatEaIDqVS+pFvWj1Sa2vK5uWs7r79vm5tr14/q/9euWo9oQl6Q9+08rPvGUrBap2/BlatEgWlc2ilbXa6ooNIRzEsCTOOL8xLptxzRk7E+XLCJeyLJfDmjZLwd0y3XVNGPMNV5fQMvKztUzb27W5He2KPciF9EXcvREhp57e4tmfBanaU9erdtvrOn1yRoAfNneAym6Z/zPWrXucJG2X7vtmNaOOaaZn8fp/WfaqV71KNcGCACAG504lamRL/6quYsTirT9rn2nNOrl3zX90x16f1I7tW1RwcURAgCAf9uRcFL/9/EOfbhot85knO0EYbFI58/KsTX+hD5btleSVL96ST1wWwPd1auO1z38k3w6U2/O26mZn8Vp3+E0SQV/129/Pajlaw5KkkpFBOnum+voodsbKKZShBkhA36HR+t9nGEYGv/GRrW+c1GRC4nnW/Bdohr2nq/FP+6//OBMEr83WS0HfKVn39pcpELi+U6eztKdT/6oW0Z9p9OpWS6KEADgSrO/2qXGfRYUuZB4vt+2HlOzvgv1xqc7XBAZAADu9/3vh9Sw9/wiFxLPl/BXitrfvUT/eeV35eYyryIAAO6QlZ2r8W9sVJNbF+rNL3bmFRKl/MU1STr/x52Jp/Tg87+q6a0LtGbzxUfjKU4W/7hf9XrN1xNT12v/32l5ywv8ruf9fDIlS6/O2ab6veZr2sfbLzriAgDXoJjow+x2Q0MnrNakNze59Av1+KlM9R65UnMX73HZPj1l887junbwYv2x+6RL9/vl9/t03b1L8825BQAw30vvbdVdY3/ONzfw5crMsuvB53/V2OkbZPz76gYAgGJk4XeJumHEcv19PN1l+zQM6dU523T7Ez8oO5uCIgAArnTwSJpa3fa1Jr25Sbm5zl1vnrs8TTiQomsHL9bEmRuL9TVrTo5dw5/5RT0fWqGjJ86eqzgbbkZWrh5+4Td1uW+pTqXQ0QNwJ4qJPsowDD3w3Bq9t3CXW/afazc0eMxPmr9ir1v27w7xe5N1/bClSkp2T8Fv/fYk3XD/cqWkkbgAoDiY/skO/XfqOrft/9m3zg6XDQBAcbT8lwPqP3qVstxU8Pt8+V7dNe4negIAAOAif/2dqjaDF2vLrhOXtR/DOPvfhJmb9J9X1xbLgmJurl13PPmD3vxipyTni4jnnNvuu98Pq+M93y
j5NB09AHehmOij5i7eo1n/fBkXVuKy/kpc1r/Q69v/KSjuPZDibHgel5Wdq77/+d6pQqKz74d0dk6tx15Z62x4AAAXW/vHMY188TentinK9/7T0zbo+98PObUNAADudvjYGd3231XKzil8IbEoefCjJQma+Xmcs+EBAIB/OZOeo673LdO+Q6ku3e+UOds0de52l+7TFf7z6lp9vty1nVQ27Tyum0et5EEnwE0oJvqgw8fO6OEXnLuBKkklwwNVMjzQqW3S0nN09/ifi/2X9LNvbXZ6aNOivB+S9Pb8eH275oDT2wEAXCMjM6dIPSWK+r1/z/iflXom2+ntAABwB8MwdN+kX3TytHMjphQ1Dz4+Za3+PHDa6e0AAMD/jJm+XnF7TxVq3U9e7KhPXuxYqHUtkp6Yuk5xfyYXPTgXW7X2kFMFTmd+3x/W/a3/+6j4FU8BX0Ax0Qc9NPlXJXtwjOgf1h3WuwviPdaes7bvOann3/bsMHRDJ6xWeobr5ucCABTe5He2akdCssfaSzyUqjHTNnisPZjHMIxiOUQQAJzvi2/3atGP+z3W3pmMXN036RePtQfzGIZR7B8kBgBv9NuWo04V1+pWK6m61UoWal1DUna2XUPG/FQsrmXSM3J017ifZbEUfhtnfl+LRXry9fU86AS4AcVEH5Pw12nNX5no8XZf+WBbsUhIjkydu025Hr7g+evvNH3xrffMJwkAviI9I0fTPvH8U4hvz9/J3Aw+6ujxdE1+Z4uqd/tMgbHvK6DZe6rQ8WM9+fo6JR4s/kO9A/A/L8/+w+NtrvztkDbvPO7xduF+qWey9eYXO9WkzwIFt3hfAbHvKarNh7pv0mptieczBwBXeOG9LWerfm5i6OzUTKvWHnZfI4X06bI/te9QapHnSLwUw5AysnL1ejEc2hXwdhQTfcysz52bJ9FVdu07pe9/Nz8h/Vvy6Ux9tCTBlLZnfMbcIQDgaZ8t/9PpYd1c4UxGruYs2uPxduFe//fRdlW5/hM9PW29Eg+lKtduyG5IR46n66X3/1CNGz/X6FfX0ksDQLGxbtsxrd+eZErbzJ3oexb/uF8VO32s4c/8ou17Tio7x5BhSKdSsvTuwl1q1vdL9Rn1nc6kMyoPABTV/sOpWvTDfnfWEvMUh3uVb3wa51SvxKJ6/8vdSmM6EsClKCaeJy0tTSNHjlS5cuUUERGhIUOGaPbs2QoMDFRGRobZ4V1Sbq5d7325y7T2315gTiHzYj7+JkHpmbmmtP37H8f0x64TprTtap3XrNJbiflvku9JS1HQos9NishcoSE27VnSV4Nvqp23LDjIpriv+mjYrXVNjMw8/I34Bm/Pg9LZeWv9sW1X4ng+66X3tmrki7/l3Tj9N7v97PJXPvhDw5/5pdiO0OAO5MGCOG58A3nw8sxdnOATRSWO57O+WrVPNz28Qmn/fKb/fm4mN/fsgi+/T9QNI5YrK9uc624zkAcL4rjxHvPmzVOdOnUUFhamdu3aacyYMerYsXBz0cE9Pl36Z4HvWHf58vt9Sknz/MO358TvTdaGHUlu65V4vpQz2Vr801/ub8hF/v19mXgmTbVXLjYpGvcqVzpEB1YMUI0qEXnLls/qpiG9al9kK+/lS58txcR/5OTk6MYbb9Q333yj1157TfPmzdPevXv11FNPqW7dugoJCTE7xEvaufeUTpwyb4i1XzYdNa3tC/lls7kx/bL5iKntwz3SM3J178TVevU/rVQhOlSSNHFEcx08ekZvzfONYgL8jy/kwcysXK3bZk5vDEnatuckQ536iN+2HNV/p64r9Ppvz4/XJ9/86caIihfyIHyRL+RBydzrjzMZOdqyi2EvfcGR4+nqP/p7SbrkDV+7If288W9NmrXJA5EVD+RBeKu5c+dq5MiRev/995Wamqp77rlHL7zwgpo3b252aH5t3fZjHumpJ0m5dkObd5rX8WGdh0dPWLftmEfbQ+EcPZGhcTM2atbYNpKkIb1qy2qVZn
+12+TIcCkUE//x+uuva/Pmzfr55591xx13qGvXrpozZ44OHz6sZs2amR1eoWzYYd4NVEk6cCRNR46nmxrDv5k1xE9xaR/us2rtYc1fmahZY9uoZcNoDe9bT/dOWG12WECR+UIe/GP3CWXn2E2NYWMcN1F9wbRPdshmK/wVvdVq0dS529wYUfFDHoSv8YU8mHomW3F/Jpsaw4Yd5EFf8O6CeGVn2wvdc8Qwzg5bl5nlP70TyYPwNhkZGRo1apRmzZqlNm3ayGKxaNCgQbLZbIqNjZUktW3bVtHR0ZowYYK5wfqZdds801PvnPUm3j/25H1Si8Xc3xUX997CXbLZLHr8riZ65sEWGjbxF7NDQiEEmB1AcWAYhqZMmaJ7771XFSpUyFseExOjgIAANW3aVJJ03333afHixTp06FCxHMpqUzGY8H7jjiTd0Laq2WFIOnsxvWvfKVNj4Kayb/vPq2u1feEtWjqjq8bN2Ki9B1PMDgkoEp/Jg8XgO3dj3HF1uqqS2WHgMhw7ka7Pl/+ZN3xbYdjthtZtT9LGHUlq3iDajdEVL+RB+ApfyYNbd53w6M1IRzbGcdPO2+Xm2jX90x1OD7mXnJKleSv26o7utdwTWDFEHoQ3+fnnn5WZmamePXvmLTt9+rSysrLyiokff/yxvvvuOyUmJhapjQEDBig+nt65zjAMaZ99hBz19/nkxY6qW62kw+3qVj+7fONnvS+47/jEU7rtv6sKLJ/8ypuaM8Wchx/25t4gqYak/A9uXux3lS79+zr6XQ1DWrN+l2Jjx1xOyIVSzhagxZVqXPZ+Wv74bd6/s+yue1C6W7duOprrmqHos4wwSXe5ZF/3P7tG8V/fqiemrnNZDu3arauCLGku2ZerPlepeH22devW1aefflqk9igmSoqLi9OhQ4fUu3fvfMsPHz6snJycvCdR77jjDk2aNCnfBWZRxcTE6NQp1xa6zpS5TQpr6fC1xGX9VTI88KLbl4wIkiSdXH3nRdc7lZqtat0+c/han/6DFXRmYyGidT+7LUqqPNbha556PzZv262oqKhLxupqTcMjtbL51S7d5+jtW/R03B95P9vdMDV0x44dtSX1tMv3mxNURaowyuX7TUnL1tZdJ9Xl6sr6ZGmCy/cvSR06dlRA1gGX79cf/0ZKliypffv2uTgi3+AreTAjspMU1d3ha5763h8z/nk9+8jSQkTrOt56PEvu+96/HDnBtZRT/v4ibXvt9XcoKO13F0d0+ciDBXnrcUMedA9fyYPZoQ2ksvc4fM1TefDDjxdqwf/dVIhoXcdbj2epeOZBuzVCKVUmOL+hkaO7H5ikB+742uUxXS7yYEH+etz4cy48evSoypYtm2/Z3LlzFRoaqnr16kmSqlYtHp0D/ItFnh440DBxoEJDNkmG/l1MdG973mN9+y55/048k6br1xQsBvuS7m2r6uCRNDWuXcrsUNzOVz5biomSDh48KEkqV65cvuUrVqyQpLyLx3bt2nk0LucVgy9ISzGIIY/Jo/gahorFZ+IiLzdsqmHV/veU6Z60FDX43rM3zIubfl2rq3HtUlr4faKmPt5adzzxg9khmYq/Ee/lO3nQ/NHbDYtvnFr58/FsWIOKuKG96Nt6KfJgfv583Hg738mDxeDao1hdDxadXx/PlosXnS+yIXmQPOi/x40XaNCggRITE/Xdd9+pXbt2+vrrrzV27Fg1btxYNptrvruL2tPF3wU0e0+5DrqDO+pVeM65HnrN+3/pdHsD77xNU0a/7vR2rtBn1Hda+H1igZEULva7SkX/fatWLq9NS90/p6+RdFw5d49weztFtWzZMlmiy7hkXwf+TlPVLpd/rMdUCteogQ3V6o6v9c0bXdTlmsr6ds3By97v8mXLVaVC2GXvRyr+n6vk2s+2MHzjjtdlKlPm7BuekJCgOnXqSJLS0tL07LPPqmLFigWe3HEFdzwJNWTMT/rga8cTlV7oydHznXsCtdS1c4scw7tvz9JtN9
Ys8vaudPBImqpc7/jLzSPvh8WiajFV9OeO5KJtfxnsuxOU+9hTHm/3cq1atUrW2q7/+1m//ZiuvM21T8iWiQrWtCev1uAxP+n3rUcV99Wt6t6uqpb89JdL2/lh1Sq1bOj67yD+RnA+X8mDL723Vf+dus7ha57Kg6MfG6lnH/qwyNsXhbcez1LxPKZ/3vC32t21xPkNLVbNnP6qhvSq4/qgLhN5sCBvPW6K4zHjC3wlDy75ab96PLjC4WueyoM9e3TTgtdeKfL2ReGtx7NUPI/p48kZim73kdPbBQQE6JEHh+mlR99xQ1SXhzxYEMeN/4mNjdX48ePVv39/SVLfvn3Vtm1bVarEFA1mq1ElQrv3e66Xes2qkR5rq2DbER4bkt1ikWpdYd7viot7c2wbTZi5SYeOntG9E1fro8kd1LTvQqVn+M/8y97I/Ef4i4FGjRopJiZGjz32mBYtWqSFCxfquuuuU0pKSt5TqN6gqouq7t4ewzllS4coKNDcP/Hi9H7AtaY/eY2W/XJAy1Yf0MnTWXpo8q+aOeYaRYQV9UlewDzkQVfGEG52CLhMVzaKVlSE8z0rrFaLulxd2Q0RFU/kQfgS38mD5uegKuXNz8W4PGWiQtSyQbSsVueGoMvJNdS9nf8MkUgehDcaN26ckpKSlJSUpJkzZyohISFvvkSYp2VDz8653qK+eXO8t/Dg/PKG4dn2UHgDe9ZScJBN7y3cJUlaty1J36w+oIkjmpscGS6FYqKkoKAgzZs3T6Ghoerfv78mTZqkMWPGKCoqyqsuHpvX91yXVkcsFqlZPXNjOF9QoE1N6pQ2NYYWDYrP+wHXuanDFepwZQU98uJvecu++Hav1m9P0kujrjQxMqBofCUPFocLBb73vV9IcICG3VpXNiduotpsFvXueIUqlfOPm+jkQfgaX8mDDWpEKSTY3GFGzbxBCdd58LYGsjsYcu9CLBapTkyk2rW4/PlEvQF5EL4gKytL8fHx+YqJgwcP1ssvv6w5c+aoc+fOJkbnX1o3KXfplVwkKNCqpnXNu1d6VWPX97QuTu1djqye/fL9XK1EmHZ37mFSNO714aI96njPN/mWPfLib3p8iuORprydL322DHP6j5YtW2rDhg15P585c0a7du1S06ZNTYzKOWbfRK1XPUrhJYrXU3gtGkRr/fYkU9v3BSuv6VhgWa2wiAJfhv7i6x/26+sf9hdYfsuo70yIpnjgb8T7+UIerHVFpCLCApWSlm1K+4EBVjWube5DLK7A8SyN6F9fMz6L05mM3MLdTDWkx+9q4v7AignyYEEcN97PF/JgQIBVTeuU1u9/HDMtBl94qIbj+excgONnbtSBI2nKzb10HjQMaex9sbJYnOvN6K3IgwVx3HifnTt3SpIaN26ct+yDDz4wKxy/NqBbDf3n1bXKzrF7pK3QEPPKAdUqR6hDywr6ccPfbh3u1CKpdFSwbrjWf3rMA55Az8QL2Lp1q+x2e74nUYcMGaIqVapIkqpUqaKBAweaFJ1jVSuEqW61kqa1f33r4jfOupnDjdmsFnW8sqJp7QPA5fDGPGi1WnR9a/O+99u1qKDgIHN7hMA1YipF6Ov/u15BAdaL9lC0WM7+N/vZdrrKg08UA3A/b8yDktTlGvPyYOVyJVS/RpRp7cN1QkMCtOLNbipdMlg226ULhGOGNdOdPWp5IDIArtKkSRNlZ2crJCTE7FD8XrkyoerXtbpH2hrRv75H2rloDAMauH3eREPSsD71uD4HXIxi4gVs3rxZJUqUUO3atfOWzZ49WwcOHJBhGDpw4IA+/PBDEyMsyGKx6P5+9Uxr//5ikJD+rWf7K1SpbAlT2u7dKcZvhjsD4Hu8MQ9K0v39zcuDxeHCDK7TsVUl/TS7e94wQAHn3Uw99+8aVSK0eHoXbqACPshb8+C9feo6Pdedq9zXt55sNm4x+IraMSW1/pNeuq7V2YeGz3+45ty/y5YK0ayxbfTMgy1MiREAfMWT9zTNd7
3hDl2vqaxWxWDYz94dY9S4dim5qzO7xSJFhgfqodsbuKcBwI9xpn8Bw4cPV1pamqxW73qLBt9UW6EmzJPRqVVF1ase5fF2LyUw0Kpht9Y1pW1uKgPwZt6aBzu1qqQ6MZ7vpV+pbAnd1OEKj7cL97qyUVlt+Ky31n1yk+7tU1cBNotsNovu7FFL3719g3Yv7qsb2zJ0DuCLvDUPVq0Qbko+CrBZNPQWc6674D5XVAzX8je7affivnp0UKO8PHhrl2r64pVOOrjyNt3X17wHuQDAVzSsVUrj74+99Ir/iE88pfjEU4Va12KRwkMD9PaEa4vFcNSBgVbNfqadrE7E4szvaxjS9CevVkWTOpcAvsy7roxwSVGRwRo1sJHH2x03vPAJz9MeGNBA0VHBHm2zXYsK6tiKIU4BwNOsVovGm5CTxt7XTAEBnFb5qpYNy2rGmDYKKxGo8BKBev+Zdup0VaVicTEOAP82Zliziw7R7A7D+9Xnpp0Pq3VFpF56tFVeHvz0pU66tUt1BQZy7gMArvLfu5rq6qaFmzrhtv+u0m3/XVWodQ1DmjHmGlWtEH454blU8wbRmvRA80Kv78zve+v11Rg5BnATzvx80LjhsWpYM8rp7U6lZutUarbT2z14WwO1b1l8C2fRpUI0Y0wbp7cr6vtRIsSm9ya15QYjAJjkthtrqFdH53tlFPV7v1Orihp2K0/lAwCKhxYNovXfu5s4vV1R82D1yuGaPLKl09sBAID/CQy0askbXdTsn2kWLte525Kv/qeVBvasffGVTfDk0KZ65M6GLt3n9a0r6cPn23NPFnATiok+KDjIptnPtnN6rO1q3T5TtW6fObVNzaoResELLhz7dqnu9GTGRXk/JOmFR65UzaqRTm8HAHANi8WiWWPbqExJ53qlF+V7P7xEgN6d2Na0+akAAHBk3PBYNa5dyqltipIHLRbpvUntFF4i0KntAABAQaUig7Xq3RvV9ZrKklTkeQUtkoICrXp7/LV6dFBj1wXoQhaLRVNGX6VnHmwum9VS5N/13KX4wB61tGh6F4UEB7guSAD5UEz0US0bltWc59q7bTJb6exk69+80VVhXnLh+O7EtmrdxL0TDY/oX18P3sYEvwBgtgrRJbRo+vUqEeK+eYSDAq36cur1qlY5wm1tAABQFMFBNi2adr2qlHfv0KOzxrZRhyuL7yg1AAB4m6jIYC2d2VVvjWujsJCzhbHC3t89t17rpuW0dd4tGtqneM9nbLFYNGZYrH77qKfqV4/6Z1lhtz37/9JRIVrw2nWa83x7BQe57/ofAMVEn3bbjTU157n2bpkvo2J0qL5/5wbVqVbS5ft2l/ASgVo6o6vaxJZ3y/5H9K+vaU9eTVd6ACgmrm5aXstmdlOEGx56CQ226ev/u17Xta7k8n0DAOAKMZUitOrd7oqp6Po5kiwWacbT1zDMNwAAbmCxWHTvrfW055t+eubB5qp0gXmJ/30L8vqrK+ur1zvr59ndveqebcuGZbXpi96aO7m9WjfJP2+k1fK//85Xq2qkpj5+lXYvulU3X1fNc8ECfox+vz7uzh61VKlsCd019ift/zvNJfvs1Kqi3pvUVjGVvK8nRlRksL6d1U2Pv7ZWb3wa55J9lgix6cVRrfTAgPoUEgGgmGnbooJ+ndtTg57+URvjjrtknw1rRumD59qrRYNol+wPAAB3qXVFpH6d21NDJ/ysb34+4JJ9VowO1dsTrlX3ds7PTwwAAAqvfJlQjRkWqyfubqrf/zim9duPacOO4/pyVaIMQ2rXooLqV49SiwbRurppOa8eNSco0KY7utfSHd1rKX5vsn7bekwb4pK0Z/9pZWTmKijQqirlw9SiQbSubBSt5vWjmW4E8DCKiX6g01WVtG3hLRo9ZZ3e/GJnkfcTFhqglx9tpfv61vPqL+sSoQGa/tQ16tO5mu4Z/7P2Hkwt8r7at6ygdye2ZY5EACjGGtYqpd/m3qQX39+iSbM2KzvHXqT92K
wW/ffuJho3PJbhUwAAXqNi2RJaPL2L5ny9RyNf/FWnUrOLvK9BPWtp6n9bq1Skc/MSAwCAogsIsKpNbPm80dZi+52QJC15o6uZYblN3epRqls9SoN71TY7FADnoZjoJyLCgjRrbBuNvKOhZn0ep9lf7dbptMJdRNasGqH7+9XXkF61VSYqxM2Rek7HVpUU99WtWrAyUTM+i9PqTUcKtV2AzaKbr6umEf3rq33LCvRGBAAvEBho1ZhhsbqrVx29syBeb82L16FjZwq1bbnSIRp6S10Nu7WuV/bKBwDAYrFocK/a6t0pRnMW7daMz+K0c++pQm0bXiJAA3vU0v396qtxndJujhQAAABAcUQx0c/UrxGl15+4Ws8/3FKr1h3Whh1J2rDjuHbtO6X0zBzZrFZFhAWqce1SatEgWq0aldXVTct5dU/EiwkOsum2G2vqthtrKu7PZP2y6Yg27EjSxrjjWr/9mAydHRqoSrmz3eib1y+jDldWVMULjFUOACjeKpcP0/j7m+upoc3004a/z+bBuCRt33NSaek5MiSVCAlQgxpnh4pp0aCM2resSE9EAIBPKBkRpIdub6gHb2ugtX8c0+9/HNOGHUnasuuETqdmK9duV0hQgGpdEfFPHoxWxysrKjI8yOzQAQAAAJiIYqKfCisRqB7tr1CP9sxzcU79GlGqXyNKQ/vUlSRFtflQkrRrUV8zwwIAuEFgoFXXta6k61pXMjsUAAA8zmKx6Kom5XRVk3JmhwIAAADAC1jNDgAAAAAAAAAAAABA8UQxEQAAAAAAAAAAwFkhIVJoqNlROBYaejY+FwkvEaDIsECX7c+VIsMCFV7ChQNxFufPVXL5Z1sYDHMKAAAAAAAAAADgJEt4mALefUPKyDA7lIJCQmQJD3PZ7qIig7VveX+lnslx2T5dJbxEgKIig122v2L9uUou/2wLg2IiAAAAAAAAAABAEVjCwyQPF3bMEhUZ7NKiXXHmT59rYTDMKQAAAAAAAAAAAACHKCYCPsgSGSEFFs/xqy8oMPBs3G4QHRWikCCbW/btTiFBNkVHuWfsa/5GAN/hlcezxDHtQeTBgrzyuOGYARzyyuNZ4pj2IPJgQRw3AADAWRbDMAyzgwCKo6g2H0qSkn8ZaHIkRWMcOSrjdIrZYRSaJTJClvLl3Lb/xIMpSkoupmNcX0B0VIiqVXbfhRJ/I4Dv8LbjWfLOY9qbzw3IgwV523HjjccM4CnedjxL3nlMkwc9izxYkDceN4A7xPZbKEna9PnNJkcCwJ8wZyLgoyzly3GSfZ5qlSPceiHmjfgbAXwHxzMuhTxYEMcN4Ds4nnEp5MGCOG4AAIAzGOYUAAAAAAAAAAAAgEMUEwEAAAAAAAAAAAA4RDERAAAAAAAAAAAAgEMUEwEAAAAAAAAAAAA4RDERAAAAAAAAAAAAgEMUEwEAAAAAAAAAAAA4RDERAAAAAAAAAAAAgEMUEwEAAAAAAAAAAAA4RDERAAAAAAAAAAAAgEMUEwEAAAAAAAAAAAA4RDERAAAAAAAAAAAAgEMUEwEAAAAAAAAAAAA4FGB2AAAAAAAAAAAA/2OkpkkZGWaHUVBIiCzhYWZH4bWST2cq9UyO2WE4FF4iQFGRwWaHAXgdiokAAAAAAAAAAI8yUtOUc88DUnq62aEUFBqqgHffoKBYBMmnMxXT9TOdTss2OxSHIsMCtW95fwqKgJMY5hQAAAAAAAAA4FkZGcWzkCidjas49pj0AqlncoptIVGSTqdlF9tek0BxRjERAAAAAAAAAAAAgEMUEwEAAAAAAAAAAAA4RDERAAAAAAAAAAAAgEMUEwEAAAAAAAAAAAA4RDERAAAAAAAAAAAAgEMBZgeAojGOHJVxOsXsMArNEhkhS/lyZofhVxIPpigpOcPsMAotOipE1SpHmB0GAAA+ydvOHSX3nz9yrgQA/oM8WJC35UGJXAgAgJkoJnoh48hR5Yx4VMrONjuUwgsMVMCMKRQUPSTxYIrq95
qvjKxcs0MptJAgm+K+6sOFAQAALuaV546SW88fOVcCAP9BHizIG/OgRC4EAMBMDHPqhYzTKd53Epyd7XVPAXqzpOQMr7soyMjK9bqnIgEA8AZeee4oufX8kXMlAPAf5MGCvDEPSuRCAADMRDERAAAAAAAAAAAAgEMUEwEAAAAAAAAAAAA4RDERAAAAAAAAAAAAgEMBZgcAAAAAFEeZWbn6Y/cJbdhxXNv2nNSZ9BxJ0ohnf1G96lFq0SBazeqWVliJQJMjBQDA9XJz7dq595Q27EjS5vjjeXnwvkmrVaNKhFo2KKvmDcqoVGSwyZECgH/Iys7VgpWJ+uvvNNnthp58fZ3u7F5LDWuVMjs0AH6AYiIAAADwD8Mw9OuWo5rxWZzmrdirzCx7gXVmfr4z799Wq0Xd21bViP711eWayrJaLZ4MFwAAl4v7M1kzP4/TnK9361RqdoHX35oXn+/nNrHlNaJfffW5vpqCg2yeChMA/Mo78+M1espapaZnKyfHkCRNmbNNL7y7VVc3LaePJndQ9SoRJkcJwJcxzCkAAAAg6fetR9VywFdqM2ixPlqS4LCQ+G92u6FFP+7XDSOWq07PL7Tkp/0eiBQAANdL+Ou0ug1fpga952vaxzscFhId+WXTEd3x5A+qev2nmvV5nAzDcHOkAIpq3rx5qlOnjsLCwtSuXTuNGTNGHTt2NDusyxK06PN8PyeeSVPtlYtNisY9Xnxvi+575hclp2TlFRIlKSv77PXKum3H1HLAV/rzwGmzQnS5cqVDdGDFANU4r0C6fFY3DelV28SoAP9GMREAAAB+LSMzR/99ba2uGbRYG+OOF3k/CX+lqMeDKzRkzE86eTrThRECAOA+druhaR9vV5M+C7R8zcEi7+fYyQzd/+waXT9smfYdSnFhhABcYe7cuRo5cqTef/99paam6p577tELL7yg5s2bmx0aLmLDjiQ9+fp62e0XflAjJ9fQ6bQs9R+9yoORudfRExkaN2OjZo1tI0ka0qu2rFZp9le7TY4M8F8UEwEAAOC3kk5mqN1dS/TS+39c9ALdGR98vVst+n+phL9858lgAIBvyszKVb//fK+HX/hNZzJyXbLP734/pKa3LtTqjX+7ZH8ALl9GRoZGjRqlWbNmqU2bNrJYLBo0aJBsNptiY2O1fft2XXvttWrbtq2uvfZarVu3zuyQ8Y//+2i7AmyXvoWfk2to/fYkbYpL8kBUnvHewl2y2Sx6/K4meubBFho28RezQwL8GnMmAgAAwC+dOJWpjvd8o217Trp833sPpqrt4MVaPaeHalSJdPn+AQC4XFnZubpl1Ep98/MBl+/7VGq2uty3TMtndVPbFhVcvn8Azvn555+VmZmpnj175i07ffq0srKyFBsbq7Jly2rx4sWKiorSjh07NHToUK1Zs8bEiJ3T8sdv8/6dZb/0VAXeIis7V58s/VPZOYX7nQIDrJqzaI9i60e7OTLPuf/ZNYr/+lY9MXWd9h6k1ztgJoqJAAAA8Ds5OXb1fOhbtxQSzzmclK7rhy3Txs96q2REkNvaAQCgKIY/84tbConnpGfmqseD32rdJ71Up1pJt7UD4NKOHj2qsmXL5ls2d+5chYaGql69erLZbHnLg4OD8/1cWAMGDFB8fLxT25SzBWhxpRpOt/Vv69t3yft34pk0Xb/GNcN9duvWTUdzc1yyr6LIMUKUbR9a6PWzc+x6f+7X+uGT+90Y1aVlGWGS7nLJvrq3raqDR9LUuHYpl+zvnK7duirIkubSfQLeoG7duvr000+LtC3DnAIAAMDvvPT+Vq3ZfNSpbRKX9Vfisv5ObfPngRQ99srvTm0DAIC7fbVqn97/0rl5p4qSB0+nZeuucT8pN9d3egoB3qhBgwZKTEzUd999p+zsbM2fP19jx45V48aN8xUOc3Jy9MADD2jMmDEmRotzLHJ++Gmrst0QiTliKoVr1MCGanXH12pUq5S6XFPZ7JAAv0bPRAAAAPiVbbtPaM
LMTU5vVzI8sEjtvbtwl269vrq6XVulSNsDAOBKJ05l6r5Jq53erqh5cM3mo3r9o+16dFDjIm0P4PLFxsZq/Pjx6t//7AMBffv2Vdu2bVWpUqW8dex2uwYOHKhevXqpa9euTrdRlJ4uRtJx5dw9wuntPGXZsmWyRJcxrX3DMFSnxxfa81fhhvcMCrRq8hP36L6+L7s5sos78HeaqnYpWs+n8705to0mzNykQ0fP6N6Jq/XR5A5q2neh0l0wx+/yZctVpULYZe8H8Cf0TAT+xTAMrd74tzKzcpWRmavpn+zQ7n2nzA4LAAC4yH+nriv0vCOuMurl32QYhkfbBADAkRfe3aIjxzM82ua4NzbqVEqWR9sEkN+4ceOUlJSkpKQkzZw5UwkJCYqNjZV09l7Y0KFD1bRpU91/v7lDZOJ/LBaLRg1spKDAwt3Ct9ksuqN7TTdH5RkDe9ZScJBN7y3cJUlaty1J36w+oIkjmpscGeC/KCYC/zAMQ+/Mj1fDmxeo7ZAlysjMVWZWrh6e/Kvq9Jyn64ct1fe/HzI7TAAAcBn+PHBaS1e7b36oC9m595R+WHfY4+0CAHC+9IwcvfvPjVlPSkvP0YeL93i8XQCOZWVlKT4+Pq+YuGTJEn388cdatmyZOnTooFtuucXkCAsvq2e/fD9XKxGm3Z17mBSN6w3sWUuVy5VQgM1y0fUCbBaNuy9W4SWK1ou8uPlw0R51vOebfMseefE3PT5lnUkRAaCYeJ60tDSNHDlS5cqVU0REhIYMGaLZs2crMDBQGRmefWoPnpWba9fgp3/SvRNXa+fe5HyvnetDsGrtYXUetlQzP4vzeHwA4AnkQfiDN7/YKbM6CM7gHAIo1siD8AdffLtXJ05lmtL2jM/i6KUPFBM7d+6UJDVufHb44R49eigjI0M//PCDfvjhBy1YsMDM8HCeiLAgrXr3RlUpH6ZgBz0UbTaLrFaLRt7RUP+9u4kJEQLwFxQT/5GTk6Mbb7xR33zzjV577TXNmzdPe/fu1VNPPaW6desqJCTE7BDdovOaVXorMf/TgXvSUhS06HOTIjLHf15dq7n/PCV5oWubXLshw5BGPLdGX3y714PRmSs0xKY9S/pq8E2185YFB9kU91UfDbu1romRAXAlf82D8D8LvttnWttf/7BfWdmXP7+H2Th/zI9zJd9AHoS/WPBdomltx/2ZrJ17vX8KEfJgQeRC79OkSRNlZ2eT37xETKUIbf7iZr3wyJW6ouL/5vmzWqQbr62qFW920yv/uUoWy8V7LwLA5QgwO4Di4vXXX9fmzZsVHx+vChUqSJLq1aunatWqqVOnTiZHB3dKPJii1z/arsI+H2mxSI+98rtuuS5GNpvv1+PTM3J178TV+uKVTlq+5oD+TkrXxBHNdfDoGb01L97s8AC4CHkQ/iD5dKb27D9tWvtZ2XZt33NSsfWjTYsBrse5km8gD8JfrN+eZGr7G3YkqX6NKFNjgOuRCwH3KxkRpEcGNtLDdzTUkePpysjMVXSpYEWEBZkdGgA/4fuVkEIwDENTpkzRvffem3fhKEkxMTEKCAhQ06ZNdfz4cd1www2qW7euGjdurLvvvluZmeYMDQLXenPeTlmdeHLHMKS//k7T8jUH3RhV8bJq7WHNX5moWWPbqGXDaA3vW0/3TlhtdlgAXIQ8CH+xMe642SFoww7zY4Drca7k3ciD8BdHjqfr4NEzpsawYYe5xUy4D7kQ8Ayr1aKKZUuoepUICokAPIpioqS4uDgdOnRIvXv3zrf88OHDysnJUbNmzWSxWPTkk08qPj5eW7ZsUXp6uqZPn25OwHCpD77arVy7c/M22KyWvGFR/cV/Xl2r5vXLaOmMrho3Y6P2HkwxOyQALkIehL/Ytc/8odXiE82PAe7BuZL3Ig/CX8TvTTY7BPKgjyMXAgDguxjmVNLBg2d7mJUrVy7f8hUrVkiSmjVrptKlS6tdu3aSJKvVqpYtW2r//v1FbjMmJkanThXtJL
ppeKRWNr+6yG3/2+jtW/R03B95P9sLPeCnczp27KgtqeYNLXYhp6q+JFlsTm2Tazf0+cJv9c2bN7spqsuTE1RFqjDKpftMScvW1l0n1eXqyvpkaYJL931Oh44dFZB1wC37BkqWLKl9+8ybK60487Y8CBRVZkQ7qVQvh68lLuuvkuGBF92+ZMTZJ39Prr7zouudSs1WtW6fOXzt/6bP0tvPdClEtK7j6nNHyfvPH/3xXIk8eGHkQfiL7JC6UrlhDl/zVB78duUPiorqX4hoXYc8WJA78qBELgQAwJfRM1FSmTJlJEkJCf870UlLS9Ozzz6rihUrqmzZsvnWz8jI0OzZs3XDDTd4NE53eblhUx274ea8/35vd73ZIXlYEU/6Dbtrwyjm+nWtrsa1S2nh94ma+nhrs8MB4EL+ngfhT9xzo885xSGGy8f5Y0GcK3kv8iD8R3HIQcUhhstHHnSMXAgAgO+iZ6KkRo0aKSYmRo899phycnKUk5OjF198USkpKYqNjc23rt1u1+DBg9WxY0d169atyG1ezpNQ9t0Jyn3sqSJvb5ZVq1bJWrum2WEUUO+medq175QMJ65pAmwW3TPoJs0a+6L7ArsM67cf05W3fe2y/ZWJCta0J6/W4DE/6fetRxX31a3q3q6qlvz0l8vakKQfVq1Sy4ZlL70iAJfytjwIFNW7C+I19AJz91yoB8X5zvXEKHXt3CLH8NgjD+j5ke8Xefui8NZzR8l954+cK+F85EH4i1+3HNE1Axc7fM1TebB7t+v01f+9UOTti4I8WJCr86BELgQAwNfRM1FSUFCQ5s2bp9DQUPXv31+TJk3SmDFjFBUVpWbNmuVb94EHHpDVatXUqVNNiRWuN7xvPae3yck1dG+fum6Ipnia/uQ1WvbLAS1bfUAnT2fpocm/auaYaxQRdvFhcAB4B/Ig/EXj2qXNDkGNa5cyOwS4AedK3o08CH/RsKb5OYg86LvIhQAA+DaKif9o2bKlNmzYoDNnzmjTpk3q1KmTdu3apaZNm+at8/jjj+uvv/7SnDlzZLXy1vmKwb1qKyiw8J+nzWpR8/pl1KJBtBujKj5u6nCFOlxZQY+8+Fvesi++3av125P00qgrTYwMgCuRB+EPmtQppQCbxdQY/OX8wZ9wruQbyIPwB5HhQapbraSpMZAHfRO5EAAA38cwpxewdetW2e32vCdRt2/frpdffln16tXTlVeePRG6/vrr9fLLL5sY5eVbeU3HAstqhUUoq2c/E6IxR6nIYM0c00Z3j/v5kutarRYFBVn1zoRrPRBZ8fD1D/v19Q/7Cyy/ZdR3JkQDwFP8JQ/Cv4QEB6hx7dLatPO4Ke1HRQSp1hWRprTtSpw/5se5km8iD8JXXdkoWvGJp8xr3weGpyQPFkQuBADA91FMvIDNmzerRIkSql27tiSpYcOGMpyZVA9e5a7edZSZlasHnlsjq9WinNyCn7XValFEiQAtnt5FsfV5mhKAbyMPwlcN7FnLtGLinT1qyWo1t2ckgMIhD8JXDexRS3MXJ5jSdscrK6pKhTBT2gYAAMDlYWyWCxg+fLjS0tIYvsaPDO9XX5u/uFlDb6mrkGBbvtfKlQ7R+OGxivvqVl3bvIJJEQKA55AH4auG9Kqt0H/leU+5v5/z8zQDMAd5EL6qc+vKpvWSH9G/vintAgAA4PLRMxE4T+M6pTVzbBu9OOpK7dx7SqlnshUVEaTGtUsr0Il5FQEAQPFUKjJYd/aopbfnx3u03euuqqQGNUt5tE0AAP7NarXowQH19chLv3u03SrlS6hXxxiPtgkAAADXoToCOBAZHqRWjcuq01WV1LxBNIVEAAB8yHMPtVDZUiEeay840KppT17tsfYAALiYEf0bqFm90h5tc9bYNlxXAwAAeDHO5AAAAOBXypYO1Yynr3F6u1Op2TqVmu30dpMeaKH6NaKc3g4AAHcIDLRq9jPtFGBzbh7foubBQT1rqXu7K5zeDoAfCAmRQkPNjs
Kx0NCz8cFp4SUCFBkWaHYYFxQZFqjwEgzYCDiLowYAAAB+59Yu1TX0ljp6Z8GuQm9TrdtnTrdz/dWV9NjgRk5vBwCAOzWtW0YvP9pKo14u/HCnRcmDdauV1NT/tnZ6OwD+wRIepoB335AyMswOpaCQEFnCw8yOwitFRQZr3/L+Sj2TY3YoDoWXCFBUZLDZYQBeh2IiAAAA/NLMMW10Oi1bny/f65b9t4ktrwWvdZbNxmAgAIDi55GBjXTidKaeeXOzW/ZfvXK4VrzVTaW4YQvgIizhYRJFO58TFRlMwQ7wMdzZAAAAgF8KCLDqo8kddF/fei7fd/d2VbV8ZleFlyi+w/sAADDpgRZ6adSVslqdG/L0UprWLa3VH/RQ1QrhLt0vAAAAzEExEQAAAH4rIMCqWWPbaOHU61S+zOXPiRJeIkCzxrbRomnXK4xCIgDAC4y+q4nWzOnhkvl9bVaLxgxrpt8/ukmVytHTCAAAwFdQTAQAAIDf692pmrYv7KMHBtRXeAnnZwIICrTqju41tW3BLbqvbz1ZLK7t4QEAgDtd1aScNn7WSxNHNFe50s4/XGOxSDdcW0W/f3yTnnmwhYKDbG6IEgAAAGZhzkQAAABAUpmoEE1/6ho9/3BLzV28Rx99k6CNcceVkZnrcP3AAKsa1Sqlfl2r6+7edVSuTKiHIwYAwHVCggM0bnisnriniRasTNT7X+3W71uP6VRqlsP1LRapXvUodW9bVcP71VPNqpEejhgAAACeQjERAAAAOE9keJBGDGigEQMaKCfHrrg/k7Vtz0mlnsmW3ZBKhNhUv0aUGtcuTc8LAIDPCQq0acANNTXghpqy2w0l/HVaW+JPKDklSzm5doUGB6hGlQjF1i/D3MAAAAB+gmIiAAAAcAEBAVY1rlNajeuUNjsUAAA8zmq1qHZMSdWOKWl2KAAAADARcyYCAAAAAAAAAAAAcIhiIgAAAAAAAAAAAACHKCYCAAAAAAAAAAAAcIhiIgAAAAAAAAAAAACHKCZ6IUtkhBQYaHYYzgkMPBs3PCI6KkQhQTazw3BKSJBN0VEhZocBAIDP8cpzR8mt54+cKwGA/yAPFuSNeVAiFwIAYCaLYRiG2UHAecaRozJOp5gdRqFZIiNkKV/O7DD8SuLBFCUlZ5gdRqFFR4WoWmUKzgAAuIO3nTtK7j9/5FwJAPwHebAgb8uDErkQAAAzUUwEAAAAAAAAAAAA4BDDnAIAAAAAAAAAAABwiGIiAAAAAAAAAAAAAIcoJgIAAAAAAAAAAABwiGIiAAAAAAAAAAAAAIcoJgIAAAAAAAAAAABwiGIiAAAAAAAAAAAAAIcoJgIAAAAAAAAAAABwiGIiAAAAAAAAAAAAAIcoJgIAAAAAAAAAAABwiGIiAAAAAAAAAAAAAIcoJgIAAAAAAAAAAABwiGIiAAAAAAAAAAAAAIcoJgIAAAAAAAAAAABwiGIiAAAAAAAAAAAAAIcoJgIAAAAAAAAAAABwiGIiAAAAAAAAAAAAAIcoJgIAAAAAAAAAAABwiGIiAAAAAAAAAAAAAIcoJgIAAAAAAAAAAABwiGIiAAAAAAAAAAAAAIcoJgIAAAAAAAAAAABwiGIiAAAAAAAAAAAAAIcoJgIAAAAAAAAAAABwiGIiAAAAAAAAAAAAAIcoJgIAAAAAAAAAAABwiGIiAAAAAAAAAAAAAIf+H+97Qghxp4p7AAAAAElFTkSuQmCC", - "text/plain": [ - "
" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAABxMAAADcCAYAAAC71Y8uAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjkuMiwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8hTgPZAAAACXBIWXMAAA9hAAAPYQGoP6dpAABc4UlEQVR4nO3dd3gU1RrH8d+2JIQAofcAAQEJIAjSkS4oKCKIiIqAoiJW7lXsiF0EwWvDBiIWbFhAiiBFkK6AEHoNHRIJpCe7O/ePSDSw6duy+X6eh0d3Mjvz5pzJnDnnnTljMgzDEAAAAAAAAAAAAABcwOzrAAAAAAAAAAAAAAD4J5KJAAAAAAAAAAAAAFwimQgAAAAAAAAAAADAJZKJAAAAAAAAAAAAAFwimQgAAAAAAAAAAADAJZKJAAAAAAAAAAAAAFwimQgAAAAAAAAAAADAJZKJAAAAAAAAAAAAAFwimQgAAAAAAAAAAADAJZKJ8DmTyaRnn3026/PHH38sk8mkgwcP+iym3FwYr68NHz5cJpNJJpNJTZs2zXXd82W7ceNGL0WHqVOnZtWPyWRSbGysr0MCAAAAAAAAACDfSCYGiH379unuu+9WZGSkQkJCVLZsWXXs2FFvvPGGUlJSfB0ePKxSpUqaNWuWXnnllWzL69atW+jEZ9euXTV8+PBCfffZZ59V3bp1C/Vdk8mkjz/+uFDfLUrM7nJh/H369NGsWbM0YMAA3wUFAAAAAAAAAEAhWX0dAIrup59+0o033qjg4GANGzZMTZs2VXp6ulatWqVHHnlE0dHRev/9930dZo5SUlJktRafQ9Ef4y1durRuvfVWX4cBFxo3bqzGjRtr7969+u6773wdDgAAAAAAAAAABeJfGREU2IEDBzRkyBDVqVNHS5cuVfXq1bN+NmbMGO3du1c//fSTDyPMW0hIiK9DKJDiFi8AAAAAAAAAAEBhMc1pMTdx4kQlJibqo48+ypZIPK9BgwZ68MEHsz7b7XY9//zzql+/voKDg1W3bl098cQTSktLy/a9unXrql+/flq+fLlat26tUqVKqVmzZlq+fLkkac6cOWrWrJlCQkLUqlUrbdq0Kdv3hw8frrCwMO3fv1+9e/dW6dKlVaNGDT333HMyDCPbuvl9B+GCBQvUuXNnlS5dWmXKlFHfvn0VHR2d5/eeffZZmUymi5a7ejfjxo0b1bt3b1WqVEmlSpVSvXr1NHLkyFzjPb/9vXv3avjw4QoPD1e5cuU0YsQIJScnZ/tuSkqKHnjgAVWqVEllypTRddddp6NHj7osg507dyomJibP368w0tLSNHbsWFWuXFmlS5fWgAEDdPr0aY/sS5JmzJghk8mk6dOnZ1v+0ksvyWQyaf78+R7bd2pqqp599lk1bNhQISEhql69um644Qbt27cvax2n06k33ngj65iuXLmy+vTpk/VuSV/GDwAAAAAAAACAL5FMLObmzp2ryMhIdejQIV/r33nnnXrmmWd0+eWXa8qUKerSpYtefvllDRky5KJ19+7dq6FDh+raa6/Vyy+/rDNnzujaa6/VZ599pocffli33nqrJkyYoH379mnw4MFyOp3Zvu9wONSnTx9VrVpVEydOVKtWrTR+/HiNHz++wL/nrFmz1LdvX4WFhenVV1/V008/re3bt6tTp07ZkoFFcerUKV111VU6ePCgHnvsMb355pu65ZZbtHbt2nx9f/DgwUpISNDLL7+swYMH6+OPP9aECROyrTN8+HC9+eabuuaaa/Tqq6+qVKlS6tu3r8vtXXrppRo2bFiRfy9X7r//fm3ZskXjx4/X6NGjNXfuXN13330e2ZckjRgxQv369dPYsWN1+PBhSdLWrVs1YcIE3XHHHbrmmms8sl+Hw6F+/fppwoQJatWqlSZPnq
wHH3xQZ8+e1bZt27LWu+OOO/TQQw+pdu3aevXVV/XYY48pJCQkq+59FT8AAAAAAAAAAD5noNg6e/asIcno379/vtbfvHmzIcm48847sy3/73//a0gyli5dmrWsTp06hiRj9erVWcsWLVpkSDJKlSplHDp0KGv5e++9Z0gyli1blrXs9ttvNyQZ999/f9Yyp9Np9O3b1wgKCjJOnz6dtVySMX78+KzPM2bMMCQZBw4cMAzDMBISEozw8HBj1KhR2eI+ceKEUa5cuYuWX2j8+PGGq0P9wv189913hiRjw4YNuW7vwnjPb3/kyJHZ1hswYIBRsWLFrM+///67Icl46KGHsq03fPjwi7Z5fj9dunTJNRbDyCzrOnXq5LmeYfzzO/fs2dNwOp1Zyx9++GHDYrEY8fHx+dpOYRw/ftyoUKGC0atXLyMtLc1o2bKlERERYZw9e9Zj+5w+fbohyXj99dcv+tn533/p0qWGJOOBBx7IcR13xH/+OPn3sQ8AAAAAAAAAgL/jycRi7Ny5c5KkMmXK5Gv981Mxjh07Ntvy//znP5J00bsVmzRpovbt22d9btu2rSSpe/fuioiIuGj5/v37L9rnv592M5lMuu+++5Senq4lS5bkK2ZJWrx4seLj43XzzTcrNjY265/FYlHbtm21bNmyfG8rN+Hh4ZKkefPmKSMjo8Dfv+eee7J97ty5s+Li4rLqaeHChZKke++9N9t6999/v8vtGYaRNa2su911113Zpn7t3LmzHA6HDh065JH9SVK1atX09ttva/HixercubM2b96s6dOnq2zZsh7b57fffqtKlSq5LOPzv/+3334rk8nk8onZf5eRL+IHAAAAAAAAAMDXSCYWY+eTGAkJCfla/9ChQzKbzWrQoEG25dWqVVN4ePhFiaR/JwwlqVy5cpKk2rVru1x+5syZbMvNZrMiIyOzLWvYsKEkFWhq0j179kjKTGJWrlw527+ff/5Zp06dyve2ctOlSxcNHDhQEyZMUKVKldS/f3/NmDHjovdJ5uTC8ipfvrykf8rlfPnXq1cv23oX1oc35BWrpwwZMkR9+/bV+vXrNWrUKPXo0cOj+9u3b58aNWokq9Wa6zo1atRQhQoV8tyet+MHAAAAAAAAAMDXch5hh98rW7asatSoke3db/nx76etcmOxWAq03DCMAsWRX+ffxThr1ixVq1btop/nliiScv59HQ7HRet98803Wrt2rebOnatFixZp5MiRmjx5stauXauwsLBc9+PtcikKX8UaFxenjRs3SpK2b98up9Mps7n43NNQ3OMHAAAAAAAAAKCgGAUv5vr166d9+/ZpzZo1ea5bp04dOZ3OrCf9zjt58qTi4+NVp04dt8bmdDovmvp09+7dkqS6devmezv169eXJFWpUkU9e/a86F/Xrl1z/f75p+7i4+OzLc9pSs927drpxRdf1MaNG/XZZ58pOjpas2fPzne8OTlf/gcOHMi2fO/evUXednExZswYJSQk6OWXX9aqVas0depUj+6vfv362rVrV67T1tavX1/Hjh3TX3/9lef2vB0/AAAAAAAAAAC+RjKxmHv00UdVunRp3XnnnTp58uRFP9+3b5/eeOMNSdI111wjSRclQF5//XVJUt++fd0e31tvvZX1/4Zh6K233pLNZivQ9JC9e/dW2bJl9dJLL7lMCp0+fTrX759PRv76669Zy5KSkjRz5sxs6505c+aiJ/NatGghSfme6jQ3vXv3liS988472Za/+eabLtffuXOnYmJiirxff/HNN9/oyy+/1CuvvKLHHntMQ4YM0VNPPZWVYPaEgQMHKjY2NttxeN75uh44cKAMw9CECRNyXMdX8QMAAAAAAAAA4GtMc1rM1a9fX59//rluuukmXXrppRo2bJiaNm2q9PR0rV69Wl9//bWGDx8uSbrssst0++236/3331d8fLy6dOmi9evXa+bMmbr++uvVrVs3t8YWEhKihQsX6vbbb1fbtm21YMEC/f
TTT3riiSdUuXLlfG+nbNmyevfdd3Xbbbfp8ssv15AhQ1S5cmXFxMTop59+UseOHV0mi8676qqrFBERoTvuuEOPPPKILBaLpk+fnrWN82bOnKl33nlHAwYMUP369ZWQkKAPPvhAZcuWzUrEFkWrVq00cOBATZ06VXFxcWrXrp1WrFiRlYy6cDrWSy+9VF26dNHy5cuLvG936dq1q1asWFHg6VBPnTql0aNHq1u3brrvvvskZSaaly1bpuHDh2vVqlW5ThdqMpkKVRbDhg3TJ598orFjx2r9+vXq3LmzkpKStGTJEt17773q37+/unXrpttuu03/+9//tGfPHvXp00dOp1MrV67Mireo8QMAAAAAAAAAUFyRTAwA1113nf7880+99tpr+uGHH/Tuu+8qODhYzZs31+TJkzVq1KisdT/88ENFRkbq448/1nfffadq1arp8ccf1/jx490el8Vi0cKFCzV69Gg98sgjKlOmjMaPH69nnnmmwNsaOnSoatSooVdeeUWvvfaa0tLSVLNmTXXu3FkjRozI9bs2m03fffed7r33Xj399NOqVq2aHnroIZUvXz7bd88nV2fPnq2TJ0+qXLlyatOmjT777DPVq1evwDG78sknn6hatWr64osv9N1336lnz5768ssv1ahRI4WEhLhlH56UmJjo8r2VeRk9erTS0tI0Y8aMrKRpxYoV9f7776t///6aNGmSHn300Rz3KUnVq1cv8H4tFovmz5+vF198UZ9//rm+/fZbVaxYUZ06dVKzZs2y1psxY4aaN2+ujz76SI888ojKlSun1q1bq0OHDkWOHwAAAAAAAACA4sxkFPQRIyAfhg8frm+++SYrEYScbd68WS1bttSnn36qW265pcDfHz58uJYuXao//vhDVqtV4eHh7g9SUkJCgipUqKCpU6dqzJgxHtmHK/Pnz1e/fv20ZcuWbAnA4iI1NVWJiYmaOHGiXnvtNZ0+fVqVKlXydVgAAAAAAAAAAOQL8/IBXpSSknLRsqlTp8psNuvKK68s9HYPHz6sypUrq1OnTkUJL1e//vqratasme1JV29YtmyZhgwZUiwTiZI0bdo0Va5cWa+99pqvQwEAAAAAAAAAoMB4MhEewZOJrk2YMEG///67unXrJqvVqgULFmjBggW666679N577xVqm9u3b9exY8ckSWFhYWrXrp07Q0YRHT58WLt27cr63KVLF9lsNh9GBAAAAAAAAABA/vHORMCLOnTooMWLF+v5559XYmKiIiIi9Oyzz+rJJ58s9DabNGmiJk2auDFKuFPt2rVVu3ZtX4cBAAAAAAAAAECh8GQiAAAAAAAAAAAAAJd4ZyIAAAAAAAAAAAAAl0gmAgAAAAAAAAAAAHCJZCIAAAAAAAAAAAAAl0gmAgAAAAAAAAAAAHCJZCIAAAAAAAAAAAAAl0gmAgAAAAAAAAAAAHCJZCIAAAAAAAAAAAAAl0gmAgAAAAAAAAAAAHCJZCIAAAAAAAAAAAAAl0gmAgAAAAAAAAAAAHCJZCIAAAAAAAAAAAAAl0gmAgAAAAAAAAAAAHCJZCIAAAAAAAAAAAAAl0gmAgAAAAAAAAAAAHCJZCIAAAAAAAAAAAAAl0gmAgAAAAAAAAAAAHCJZCIAAAAAAAAAAAAAl0gmAgAAAAAAAAAAAHCJZCIAAAAAAAAAAAAAl0gmAgAAAAAAAAAAAHCJZCIAAAAAAAAAAAAAl0gmAgAAAAAAAAAAAHCJZCIAAAAAAAAAAAAAl0gmAgAAAAAAAAAAAHCJZCIAAAAAAAAAAAAAl0gmAgAAAAAAAAAAAHCJZCIAAAAAAAAAAAAAl6y+DiBQZWQ4ZXc4fR2GrBazbDZyxq5QRwAAAABQMtEf9G/UDwB4FudZ/0cdwd+QTPSAjAynavT4XLHxab4ORZXCg3Xsl6H8wV+AOgIAAACAkon+oH+jfgDAszjP+j/qCP6II8AD7A6nX/yhS1JsfJpf3MHgb6gjAAAAACiZ6A/6N+
oHADyL86z/o47gj0gmAgAAAAAAAAAAAHCJZCIAAAAAAAAAAAAAl0gmAgAAAAAAAAAAAHCJZCIAAAAAAAAAAAAAl0gmAgAAAAAAAAAAAHDJ6usAAAAAkD8RvWbreFyy1/dbvWKoYhYP8fp+/QFlDgAAAAAASjqSiQAAAMXE8bhk2e2GT/ZbUlHmAAAAAACgpGOaUwAAAAAAAAAAAAAukUwEAAAAAAAAAAAA4BLTnMKjDMPQ6s2ntPC3I0pOtSuyVhndfHV9VSgX7OvQ8LeEpHTNXrBfuw6eVXCQRd3aVFePtjVkMpl8HRoAAACAYu7IiSR9sWCfjscmK7xMsG7oUUdNL6ng67DwN6fT0JK1R7V8wwll2J1qXK+cbuoTqbBQm69DA4CAcOZcmr6Yv0/7jyQoJNii3h1qqdPlVRl38yMbtp3WvBWHlZiSoYhqpXXz1fVVpWIpX4cF+J2ASyba7XY9/fTTeu+991S6dGk9/PDDmjZtmnbv3u3r0Eqcrbv/0sD//KKY40myO5xyOAyFhlj18MR1uv/mSzVxbBtZLDwc6yuGYeiF9zfrpQ+2yGIxKSnFLrNZmjJrmyqVD9FXr3VXu8uq+DpMAAAAoEDoE/qH5BS7Rjz9q+b8clBWq1mpaQ4FWc164f3NatWkor59vYdqVCnt6zBLtFV/nNCQR5fpr7NpSstwyOmUSpey6r6X1+jZe1rq0ZHNGewGiiHaQf/gdBp6bOoGvfFZtKwWs5JT7bKYTZo8c5tqVg3Vt6/30GWNKvo6zBJtb8w5DXhoifYcOiuH05DdYahUsEWPvL5ew/s31NtPdJDNxtg1cF7A/TWMGzdOW7Zs0f79+7VmzRq9/vrratq0qa/DyrfIWmX0x5fXKzjIIkm6a1AjvfVEex9HVXA7D8Srw21ztTfmnNLSHXI4DElScqpdGXan3v1qp0Y+s1KGYfg40oILlDp69PX1eunDLUpNdygpxS5JcjqllDSHDp9IUrc752v91tM+jhIAAAAomOLeJwwE6RkO9bp7gX5cESO7w1BqmiNzud2pDLtTv2+P0xVDf9Tpv1J8HGnBBUp/cPXmk+p110IdPZWslLTMRKIkJaXYlZrm0LPTNumZt//wbZCFFCh1BBQW7aB/uGvCKr31xXalZziVnJo57uZwGkpNd2j/kQR1HDZP0XvP+DjKgguUc+zBowlqM/QHbd9/RmkZTtn/HrtOSXMow27o03l7Neg/v8jpZOwaOC+gkonHjh3Thx9+qJkzZyo8PFy1atVSx44dFRUVJUl69NFH1blzZ40YMUIOh8PH0bq2/0iCPpu/V8/c00I1qoTqvpub6LGpG30dVoHd+8JqJafalVOuMCXNoS8XHdDaP095NzA3CIQ62nUgXm98tj2rU+9KappDd4xf6cWoAAAAgKIJhD5hIPhi/n5t3hmXY38jw+5U7JlUPffeZu8G5gaB0B80DEPDn/5Vqem59wdf+WiLDh5N8GJk7hEIdQQUFu2gf9iw7bRmzdurlBzaQcPIfOBi9Au/eTmyoguUc+x/Jq3TuaSMrJtpLpSS5tDiNUf18+qj3g3MDQKljuB/AiqZ+Msvv6h169aqXLly1rK4uDhFRUVpy5YtOn36tFauXKm6detq3rx5Pow0d1NmRatr6+r6ZnIPjZuyQYnJGb4OqUAOHEnQqk0nlNeNG3a7U69/ss07QblZca+jN7/YLuVjtprdh87q9+2xng8IAAAAcINA6RMWd699/KeSU3MfpE7PcGr6d7uU/PcsKcVJce8Prv3zlGKOJ+a5nsVi0ttf7vBCRO5X3OsIKCzaQf8w9dNoOfIYGDUMae2fp7Xn0FkvReU+xf0ceyouRXNXxGTNpJeT1HSHJs3c6qWo3Ku41xH8U0C9MzEuLi5bY3nq1CmtWbNGU6ZM0apVq9SnTx9JUp8+fTRnzhz1798/39tOTExUdHR0vtZNTc/hloZ8ynwB+jGNuP4SLXLD3Q/rN2xQSJD38saLN5yVzW
JShj33E7LDaWjZ+iNat26dlyL7R0mvo/kr9isjI+8yMJsMzf5xvewJzOGOwBIVFaWwsDBfhwEAANzMU33CgvQHSzqH01D0vvj8retw6tt5q9QwopRng7pASe8Pzv45Nl+vHElLd2ru0j0a1NG7U7wVtX4k99aRt+sHnhfI/UHaQf+wZM2hPBNVkmSzSJ9/v1Z92oV7Pqh/Kent4NptCfkauzYMac2WE4xdi7Yw0BS2HQyoZGLDhg01ceJEHT9+XCaTSSNGjFB6eroaNWqkuXPnql69epKkcuXK6cyZgs1JHR0drXbt2uVvZZNNavpuQcPP0iCirPpeWVtfLjqgscOaatLHRbsDomuXLpLhxbsPyrWRat4mWfLuEMbF/pX/cnWnkl5HlzwnhdTIc7XUlFRNmjRZkx5f6oWgAO9Zu3at2rZt6+swUEz8vPqIXvxgi9ZtPS3J0GUNK+qJUZfpuq4RMpny8Zg3Cuz0Xyl647Novf/NLsUnpKtsaZtGDmioscOaqlqlUF+HB8CPeapPWKD+YIlnlpq9n68109JSNWzYcCn1kGdDulBJ7w9Wukqqer1kDspz1V279qhdu1s9H9O/FbF+JPfWkdfrBx4XyP1B2kE/0fg1yVY+z9WSk5P07LMT9Gz8Gi8E9S8lvR0MaypF3J2vsevkpGTGrkVbGGgK2w4GVDKxT58+6tWrlxo2bKjIyEgNHjxYhw4dUlBQkMLDw3X2bOZj42fPnlX58nmf0P8tKipKa9euzde6qelOdR2zvcDxn/feMx11/8tr9MeOOK399Fp9s/iADh7NewqSnCxfscKrdw7sO5qq25/fK3seU6+bJLVuXl1vfpy/cnWnkl5Hz350WIvXn5Ujj5tcgkNC9PrrT6hVo5e8ExjgJeffFxEIYmJiNGrUKB09elSNGzfWrl27tHDhQtWsWdPXoQWEx6du0Fuztysx+Z8p2NZvO61bH1+uIX0i9f74Tn6dUGzVpJLGjWyuwf9dqiCbWStm9FXvexbqXKL/dgL2HT6n9rfO1dnEdKX//RR93Nk0vfFZtD76brd++6SfGtcL922QeSiO5Q4ECk/1CQvSH4R0/bhdOvFX3uc8qy1EC+Z/rjKhFi9E9Y+S3h9cuy1B496JUVpG7k9kWM3SNb2a64lPvXvsF7V+JPfWkbfrB54XSP3BC9EO+oex/zuoNdsSlddD4LagUH34/ktq5IMn9EtyO3g8Ll2Dntid57ioJDWODNfH0xm7pi0MLIVtBwMqmWg2mzVz5kzNnDlTkvTOO+9kFUyHDh305ptv6qabbtKiRYvUvn37Am07LCws39nalFS7pML9sY8a2Eg79sdrzZZTkqSxk9Zp2lMd1Wf0okJtT5LaXHGFSoV4r6rbSpo0+4w27YzLdb2QYIueu7+T2rat5Z3A/qWk19HzYZFaNmyeHDm9ZfhvVSuGavRt3f16oBwoyRwOh/r376/Jkyere/fuevfdd/Xrr7+SSHSThauOXJRIPC8x2a7ZC/er2xU1NLRvfR9Elz+/b49VUkqGurSupvaXVdH073b7dULLMAz1vXeRYuNTL+p4p2c49dfZNPUZvUgHFgz267apuJU7EEg81ScsSH8Q0mN3ltbjb2xUSlrOd5haLCbd1CdSPbt18GJkmUp6f/CKKwy98tkXOhGbkut6ZrNZLzzUVc0aVvBSZJmKUj+S++vI2/UDFAXtoH947v7a6nf/z0rNpR2UpEb1wjXsxq7eCepfSno7KElXfpug5RuP55rwDQ2x6Nkx7dW2baT3AvsbdQR/FNBHwK5du7IazBYtWqh8+fLq3LmzIiMj9cQTT/g4Otc++HZXts/L1h/XsvXHfRRN4b35eHv1uHO+0nJ4L19IkEVtmlXWVR2K34B3INRR66jK6tclQj+tPJzjhU2Qzax3nurg14O1QEm3YMECRUZGqnv37pIy7yxq0aKFkpOTde+99yo0NFQNGzbUQw895NtAi6kX3t/kMpF4XmKyXS+8v8
mvk4mS9PgbG/XDG72UluFQlxE/+TqcXK3ZckrHYlNy7dCdOZumpeuOq0e7vKfr9qXiVO5AICuOfcJAMHJAQ735xXbFHE90+T4ik0kKDbFqwr2X+yC6ogmE/qDZbNI7T3bQkEeXZc0CcKFSwRbd0LOu1xOJ7hAIdQS4C+2gb3RvW12dWlbVqj9OKjXd9bhbcJBZbz1RsIdd/EGgnGMn/7eNOg6bl+ONT8E2sy6NDNeAHnW8HFnRBUodwf8EfDLxjjvuyPo8adIkH0ZTsnRsWVU/vtlLg8YulcMwlJySORhrtZhkNpvUvW11ffVad5nNJKp85fNXu2rk0yv11c8HZDIpqxMZEmyRSdLMF7uo75URvg0SQK42b96sVq1aZX3etGmTWrRooTlz5ujaa6/VwIEDNXjwYI0ZM0Y2my3P7WVkZCgmJsaTIRcr67edznOdfUfOKXr7boUEe2l6tjymyXHlRGyKnIahBauO5DnNTm773bdvXyG/nH9fLzioxOTcn+A7l5ShL+dvU93KuT9N4TaFLLMil7uXyhwlV0RERL7ahuKOPqFvlCkdpN8+uVZXj16k7fvjlWF36PykKCFBFlUoF6wF7/ZW/dplfRtoCTagR119/PyVGvnMSpnNJiWnZvbZbVaTDEk39YnU+8908m2QAIqMdtA3TCaTfvxfL908bpnmrzwiScqw/zPuZrWY9NVr3dWldXVfhlmitby0kha/f7Wuvf9npaU7s9pBi1myWMxqd1kV/fBGLwXZvDsVO+DPAjqZuHDhQl+HUKJd1aGWTi4fqq9/PqAP5+zSyj9O6vrudTT+npZqeknxu7sx0ATZLPr0la4aP7qlpn29Uyv/OKEN22L14NAmeurulgoLDfzBJaC4q1ixolavXi1J2r9/vyZOnKhJkybp4MGD6tmzpySpcuXKio2NVfXqeXdSYmJi1KBBA4/GXKw0nSaZcr9USk9LU9NmzSVnmt/EdKHru9fRxuhYXdc1Qm99sV0JSQWfbtNuz/DOsVHl2sx/ptzfxfDBBx/qg+e/9nw8UqHKXCp6uXutzFFi7d27V/Xr+/eT1e5An9B3qlYspd+/7K/Vm0/pwzm7tG3PGW3cHqvnxlyuscOaymLhvTu+dvM19dWvS219Om+fPv1pr1ZvPqXBV9XThDGtSPQCAYJ20HdKhVj1/Ru9tH3fGb371U6t/fOUNkbH6r+3N9MTd17GlJF+oGPLqjqxbKjmLDmo977eqeUbT+jarnX09F0tdHmTSr4OD/A7nLXgUaVCrBp23SVqVLec2t06V/+9vRmJRD9zSZ1ymvzftlr35ym1u3WuBvSoSyIRKCaGDh2qzz//XE2aNFH79u1VpUoVtWjRQg6HQ4cPH9YVV1yh2NhYVaqUv4vgiIgI7d2718NRFx/9Ht6gnQeTcl2nRtUyWvH9Nq9NCd140ArZHfl/zC3IZta4kc3V+56Fuqp9TT05qoUem7qhwPu1Wm3a6YVjY+Xmv3T/xGglpuT8bpGwUha9Nvk/6tX2ZY/HIxW8zCX3lLu3yhwlV0QEM1DA80wmkzq2rKqOLatm9TeubFWNRKIfKVM6SKNvulSXX1pR7W6dq/uHRpFIBAA3alK/vN58vH1WO9jvytokEv1IkM2iIVfXV72aZdTu1rl6bGRzEolADjhzAQBQTJUrV04rV66UJDmdTlWrVk2NGjVSnTp1NGbMGC1fvlwdOnTI9zR2NputRDylkl8Txph0xzMrlZji+r2JpUtZ9eSoy7379JhpRYFWf+jWppr54x6dS8zQN4sP6u4bG6tezTI6cDShgPuVV46NevUi9dS7e5WYkpzjOqVCbBp10xXeG4guYJlLbip3L5U5AAAAAABAXkgmAgAQAHbv3q3IyEiZzWaFhoZqxowZvg6p2Lvxqnr6fukh/bg8RkkXJBRDQ6zqfHlV3X1jYx9Flz8TZ/yZ7XOvu/x7miOzOfPdIt3umK/ElIys92tJkskkhZWy6cf/9fL7J1qKW7
kDAAAAAADkxr9HYgAAQL40btxYa9eu9XUYAcVkMumzV7pq6rh2iqxVRsFBmS9er1axlF5+sLXmvXWV3ye1iqPLm1TSxtn9dVPvSJUKsSg0JLPce3eopXWfX6d2l1XxcYQAAAAAAAAlC08mAgAA5MBkMunOGxrpjgENtWX3X2p54/daObOvGkSU83VoAe2SOuX0+avdlJ7h0JZdf6nN0B/11hPteYcTAAAAAACAD3A7PQAAQB5MJpPKhNqy/h/eEWSzqEK5YF+HAQAAAAAAUKKRTAQAAAAAAAAAAADgEslEAAAAAAAAAAAAAC6RTAQAAAAAAAAAAADgEslEAAAAAAAAAAAAAC6RTAQAAAAAAAAAAADgEslED7BazKoUHuzrMCRJlcKDZbVQzReijgAAAACgZKI/6N+oHwDwLM6z/o86gj+y+jqAQGSzmXXsl6GyO5y+DkVWi1k2G3/sF6KOAAAAAKBkoj/o36gfAPAszrP+jzqCPyKZ6CE2G39k/o46AgAAAICSif6gf6N+AMCzOM/6P+oI/oZkIgAAQDFRvWKojscl+2S/JRVlDgAAAAAASjqSiQAAAMVEzOIhvg6hxKHMAQAAAABAScdzsgAAAAAAAAAAAABcIpkIAAAAAAAAAAAAwCWSiQAAAAAAAAAAAABcIpkIAAAAAAAAAAAAwCWSiQAAAAAAAAAAAABcIpkIAAAAAAAAAAAAwCWSiQAAAAAAAAAAAABcIpkIAAAAAAAAAAAAwCWSiQAAAAAAAAAAAABcIpkIAAAAAAAAAAAAwCWSiQAAAAAAAAAAAABcIpkIAAAAAAAAAAAAwCWrrwMAAFcMu11yOH0dhmQxy2Qt+qkyI8Mpux/8PlaLWTabe+4jCbQ6AgAAQMkRiNfngSQQ+xqBdswFYh3BfQLteAd8IdDOs4F2Xgi0+skPWlsAfsew22W//R4pIcHXoUhlysg6c1qRTsoZGU7V6PG5YuPT3BhY4VQKD9axX4YWudEMtDoCAABAyRGI1+eBJBD7GoF2zAViHcF9Au14B3wh0M6zgXZeCLT6yS/OpAD8j8PpHydjKTOOIt5lYnc4/aKxlKTY+DT33AUUYHUEAACAkiMgr88DSQD2NQLumAvAOoL7BNzxDvhCgJ1nA+68EGD1k1/ctgMAAADkIKLXbB2PS/b6fqtXDFXM4iFF2kbGyHulM/HuCaggyofLNv2dIm2iuJa7z8pccku5AwAAAADgCslEAAAAIAfH45Jltxs+2W+RnYmXHI6ib6cw+y2iYlvuvirz8/sGAAAAAMADmOYUHmUYhmKOJ2rzrjhJ0s4D8UpOsfs4KvxbWrpDm3bE6vftsZKko6eSZBjeH7wDAAAAEHjOJqRr3Z+nsvobcWf9Y4orZDIMQ/sOn9PmnZl99t2Hziot3Uc3RQBAAEpJteuP7f+Mux2PTWbczY8YhqGjJ5Oyxq6374tXYnKGj6MC/BNPJsLtnE5Di347ove/2anlG08oPiE962fDn16pkeNXqVHdcrqpdz2NGthINaqU9mG0JdPpv1I0/fvd+nz+fm3fd0Z2xz8XMQPHLlXZ0jZ1aFFVdw1qpGu7RMhq5b4DACWTw+HU7kPn9Pv2WG3be0aSNGfJQfXpWEuXRoZzfvSQM+fS9Pv2WG3fF6/DJ5IkSSv/OKHwMkGqGB7i4+gAAHnZdSBe077eqR+WHdKBo4nZftZ3zM+qVrGUrupQU/cMbqx2zavIZDL5KNKSKSPDqR+WHdIHc3ZpzeZTSvjXoOmwJ3/VyGdWqmmD8rqlbwONuP4S2l4AKKDjp5P14Zxdmr1gv3YePCun859xtwEP/aLwMkHq0rqa7hrUWL071JTFQr/SmwzD0LL1xzXt651auv6Y4v71Lr+R41fqzmdXqkFEWQ3sWU9339hIdWqU8WG0gP8gmQi3WrrumEY9u0r7jybIYjHJ4bj4Thun09CO/fGaMG2Tnntvs+4a2EivPnyFyoYF+SDikiUl1a5n3v5Db3y2TX
aHoZxuhDqXlKHFa45q4W9HVKNyqN59qoOu61bHu8ECgA+dikvRtK936u3Z23Xqr1RZLSaZzZkDnU/8b6MenbJB5csGafTgS3XvTZeqZlVujCkqwzC0cNURTf00WovXHJUhKTjInNVWjXh6pSSpR5vqevDWpurXpTaDzwDgZ46dStK9L67WD8tiZDGb5HC67nCciEvRZ/P36ZO5e9U6qpKmT+isZg0reDnakmnOkoMa8+JqnYhLybGO7A5Dm3f9pS271+uJ/23Uf29vpvGjWyo4yOKDiAGg+EhMztBjUzdo2lc75TRyHneLT0jXvF8P64dlMapbI0wfPNtJPdvV9G6wJdSaLSd1xzMrtePA2ZzHrg1p96FzenXGFr0yfYtu69dAUx5tpwrlgn0QMeA/uO0BbpGR4dT9L69Rj1ELdPB45p2nrk7G/2YYmYnF97/ZqSbXf6vVm096I9QSa8uuODUf+J0mf7JVGfacL2jOO9+pPBGXrP4PLtGtjy9XSipT1AIIbIZh6LOf9qpB36/1wvubdeqvVEmZg2rpGc6s/5ekM+fSNWnmVl3S72u99/VOpqopgmOnknTNvYvU976f9cu6YzpfkmnpzqxyP2/5xhPq/+Bi9Ry1QDHHEy/eGADAJ75dfECN+3+reSsOS1KOicTzzvcXN+2I0+U3fa+J0/+kLfWgpOQMDXlkqQaO/UWn/kqRlHcdGYaUYXfq5Y+26LJB3yn671kaAAAXW7vllJr0/1bvfLlDDmc+xt3+bgdjTiSp110LNfr535SewTTTnuJ0Gnr8jQ3qOGyedh86JynvsWunM7Mt/PSnfWp83TdasvaoN0IF/BbJRBRZRoZTA8f+ore/2C5J2R7dzw+nIR2PTVH3O+frl7XHPBFiibdh22l1un2eDhxNyPNi5kLOv8dwP5+/T33uWcQ7LwEELKfT0JgXV+u2J1YoITlDGXZnnt9Jz3AqJc2he19craGPLZc9H99Bdpt2xCpqwBz9su64DCMfg89/d8xX/nFCTW+Yo/VbT3spUgBATmZ8v1s3/nepEpMz8jyPX8jhNGR3GBo3dYP+M2kdCUUPSEzOUM+7Fujrnw9IyuyDF9TemHPqMGyuNu2IdXN0AFD8Ld9wXN3umK9jp5MLMe6W+YX3vtmp6x9cQkLRAxwOp4Y9uUKvfPRnvvqcF3I6DcXFp+rq0Yv047JDHooS8H8kE1Fk9728WvNWxKgoXT6n01BGhlPX3v+ztu/jbkd3OnwiUb3uWqCUVEeBG8t/Mwxp1aYTuuXx5XTwAQSk/05ep/e/2VXgzp+U2Y598/MBjRy/knNkAew+eFbd7pivhKT8JW//LcNuKCnFru53zudJCQDwoUW/HdEd41fKMFSoNvTfpsyK1uufbHNPYJCUOevC4P8u1fqtpwuVRDzP4TSUlGxXz7sW6vjpZPcFCADF3K4D8eo7ZpHS7UUfd1v42xHd/dxvbowOUuarSj77aV+RtuH8Owk56D+/aMM2bmhFyUQyEUWyYOXhzIHXPNYrXcqqNs0qq3SpnF/T6TSkdHvmnSI82eEehmHojvErlZhsz/OCJr919P3SQ0VugAHA3yz67YimzIrO9VxptZoUWauMrFbX7+mzOwx9Om+vvly431NhBhSHw6mbxy1TUkrubVRu5e50GkpNd+imR5YqI4NrBwDwtvhzaRr+1K/Kzyts89PfkKTH39jADaZuNP273Vqw6kieicT81I/DaehsQrruem4VN08BgP5+4u2pX5WW4cya2Ssn+TnPGob08Q97NHd5jJsjLbl+23RSr328Nc/18ls/Tqd02xMrlJbOE6QoeUgmotDSMxy689lVMufjKIpqUF7rPrtOUQ3K57qew2Ho9+1xmvb1TjdFWbJ9teiAFq85lq87o/JbRyaTNOal1TqXmO6uMAHAp9LSHfkaCI2oFqZ98wcrolpYjusYhnT3c7/pbALnyLy8+9VObdn9V9Y7KHOSV7k7HIZ2HTyrKbN4kgUAvO
2Zd/7QqTMpeQ6gSvnvbzid0l08leEWZ86l6cFX1+Yr2ZvvPrvT0LwVh/X9UqZ5A4AP5+zW+q2n83z3npT/86zZLI2asJJklRs4nYZGPvOrzPloCAvSDu4+eFaT8pGgBAJNwCUT7Xa7Hn/8cVWoUEG1a9fW66+/roYNG/o6rID0/dJDOnY6OV8dx4IwmaQpn2wr8LsXcbHXP9kmszkfPccCMAzpXGKGPp23163bBQBfmbPkoE6fSS3y1GznJafa9elPnCNz43QaenX6n/nqdOeH3WFo0sytfjOzQasmlfTVpO6SpCCbWWs+vVZlw2w+jip/VsSe0hM7/sz6/NyubVp06rgPI8ofyhz/Rp/QO84lpuvDb3e5vT/ocBr6bdNJbd4Z594Nl0Af/7BHSSl2t13jnGcxmzR1VrR7NwrAbWgHvcMwDE3+ZGu+btgoCKdTOhmXqm8XH3TvhkugJWuPavehc0WaftYVQ9L/Po9mdhyUOAGXTBw3bpy2bNmi/fv3a82aNXr99dfVtGlTX4cVkN6evUMWNyeqpMxk1f6jCVq+gUGUovhz919av+20R5KyJpP01uwdbt+uuxxMTlLP1cuyLbtkyTwfRVM0kbXK6I8vr1dwkEWSdNegRnrrifY+jqpoAql+/EFMTIx69+6tpk2batCgQWrWrJmOHj3q67CKlXe/2iGnG0fZ7A5Db3+x3W3bC0TLNxzX0VNJbt1mbHyqFq0+4tZtFtbv22OVlJKhLq2raeywppr+3W6dS8zwdVgBjTLHv9En9I7PftqnlDTPPDVhMZv0np/OVlOcrs/f+mK72we5pcyE769/nNDOA/Hu37gbBFp/ozgdc/kRaPXjj4p7O1hcjvmVv5/QnkPn3H7DhiSZzSa9/SV9yqJ698sdslg80BBKOvVXquau8M/paAPtPFtczgkFUVzrKKCSiceOHdOHH36omTNnKjw8XLVq1VLHjh0VFRWltLQ0tWvXTmFhYdq7l6cFiio9w6E1W066/c6O8yxmk5ZvJJlYFJ5MxhqGtGN/vOLiUz22D2TafyRBn83fq2fuaaEaVUJ1381N9NjUjb4OC37C4XCof//+GjdunLZt26YePXro5MmTqlmzpq9DKzYcDqfWbzvt9g7gzoNnlZDEVKc5Wb3lpIJs7r0MDbKatXrzKbdusygef2OjJj7cRtd0rq0P5+zydTglAmUOiT6hNy3feNwjN5dKmcmqJWv98+ao4nJ9fvx0svYfSfDIIPd5Kzae8NzGkaW4HHPwD4HQDhaXY375xhP5evVTYTidhtb9eVqpaXbP7KAEMAxDS9cfd9tsOBeyWhi79pbick4oCQIqmfjLL7+odevWqly5ctayuLg4RUVFyWaz6YcfftCgQYN8GGHgiN57Rhl2z/VKDMPQxuhYj22/JPh9e6ysHrr75rw/djD1kDdMmRWtrq2r65vJPTRuygYlJvOkBTItWLBAkZGR6t49c2q/qKgotWjRQqdOndLIkSPVuXNnH0fo/3YfOqe0dPdPTWIY0p+7z7h9u4FiY3Ss28s9LSMzMewvTsSmyGkYWrDqiEcHcj3hyyMx6rl6mXquXqZZhw/6Opx8o8wh0Sf0pnV/nvbYzaWStO9Igt/emFMcrs9/3+7Z/rTVYvL4PvCP4nDMwT8ESjtYHI75jdtjJXlu3M3hNLR1D33Kwjp4NFHnkjx33NgdhtZv9Z/+Z6ArDueEksDq6wDcKS4uLltjeerUKa1Zs0ZTpkyR2WxW1apVC73txMRERUfzToDzlv1x9qJlpUtZc3xJbVT98Gz/dSV67xklpWTeceM0pD93ndK6deuKHGtJtWn7UdkvuPvGnXUkSUtX/amyZvdPKWfKyFDLIm5jy9n4bI+Ln0gr/FOUGzasl2Er/HuXUos4YO50Glqy9phGXH+JFq0u+h3a6zdsUEhQ0e4lKWodubN+pILXUVRUlMLCwoq0T3+wefNmtWrVKuvzpk2b1KJFC1WpUkXTp0/X9ddfX6
DtZWRkKCbGP6fp8JRtOy9uz6xWkyKqXXx8RFQrne2/F4o5kSj73zfamExS9K6DqlY20Y3RBo6YY/HZPudU5lLByv3IibPat2+f+wKVMl+IUQjXd6+jjdGxuq5rhN76YrsSCtqRNVTk36W2YRRqeOOmWhF66dLmkjLf31dQhmEUvR4KUe5FLvO/91uU2H1V5lLByz0iIkK2Ilzf+CtP9QnpD2ZnGIZiTlzcxhWlv3FhX8MwpLmL1qh+zZAix3uhQLw+v9DS3y6+8dOd/UG7w9Cm6CMe6bMHWn9QCrxjjv6g//KHdrCox7vk3mPeE+dYSdq688RFrxZy97jbz8s3yZlUrsixlkQbdrjui+dUR4Wpn10H/vLY2HVxP89eiHbwYr68VilsOxhQycSGDRtq4sSJOn78uEwmk0aMGKH09HQ1atSoyNuOjo5Wu3bt3BBlgCh3hRRxd7ZFUQ3Ka91n1+X6tenPXZnjz9re8mO2OzqOHj2udu2GFS3Okqz+U1Jo3WyL3F1Hr7z6ml55dFmO6xdWiNmic30HFmkbl5UL15IO3bI+F2Xe6S5duirVWYT3wZhsUtN3C/31BhFl1ffK2vpy0QGNHdZUkz7eWvhYJHXt0kUyinYHT1HryJ31IxW8jtauXau2bdsWaZ/+oGLFilq9erUkaf/+/Zo4caImTZpU6O3FxMSoQYMG7gqveChVT2rwZLZFEdXCtG/+4By/smx6X5fL61/zlfYfSZAkGU5Dd981Skoo2t9rwKr3qBTWMOtjXmUu5a/cd26PVoMGN7svTklqOk0yFeySOchm1riRzdX7noW6qn1NPTmqhR6buqFA27DbM4r895jUd5Bsnpp7KRd2u73o55IClrs7ylwqern7qsylgpf73r17Vb9+fQ9G5Bue6hPSH7yQSWr2wUVLi9LfuLCvIUm33DJMSvXAjU4BeH1+kUpXSdVu1L9fmuju/uDG3zepXTs3t7sKwP6gFHDHHP1B/+UX7WARj3fJvce8R86xktTwJSm4SrZF7j7PPvX0eD0Vv7ZocZZUYVFSvYcvWpxXHRWkfuLPJnjs+rC4n2cvQjt4EV9eqxS2HQyoZGKfPn3Uq1cvNWzYUJGRkRo8eLAOHTqkoKCgIm87KipKa9dy8j5v5ZZzeuSt7J266L1n1PaWH12uH1U/XNOfu1Ijn/lV0fviXa4TvTf7o/t161TX7JmUeWHd+9p+/bE7Odsyd9fRU088qn4dX3ZLvP9mysiQJr7p9u0W1ooVy4t8903XMYV/cfZ7z3TU/S+v0R874rT202v1zeIDOni08E87LV+xwi1PJhbnOoqKivJgNN4zdOhQff7552rSpInat2+vKlWqqEWLFoXeXkREhF+/O8MTYuPT1W7E6mzLYk4kqv41X120bkS10lo2va+6jfxJMSeSLvp5tic0TCYtmvup6tdy/TRdSffE2zv17dITcvx9c2JOZS7lv9zNJqlv7zaaMta9x3DjQSsuetI/Lw/d2lQzf9yjc4kZ+mbxQd19Y2PVq1lGB44m5HsbVqtNO4v492j9z1OS0/3T+Oa5X6u1yOeSgpa7O8pcKnq5+6rMpYKXe0REhAej8R1P9QnpD16s0z3RF/2dFqW/cWFfQ5K+nD1LdaoFuyXefwvE6/MLzVn+lyZ+dizbMnf3B9u3vVxTPnf/30Vx72u4EmjHXHGvo0DpD7riD+1gUY93yb3HvCfOsZJ064Q92nskLdsyd59nX3npOXW9vKxb4i1pNu1O0ujXDly0PKc6Kkz9VKpQVvM8dH1Y3M+zF6Id9CxvtYMBlUw0m82aOXOmZs6cKUl655133HaBEBYWFrB3LRVG5ZoXJxOTUux5zhUdvS8+X/NJWy0mtW9RizIvgo6tnfpz345sHXx31pEkXdurldo0q5z3igVkpKXLn14xfcUVbWQKLvyFd0qqXVLhGsxRAxtpx/54rdlySpI0dtI6TXuqo/qMXlToeNpccYVKhRTt9B9odVRclStXTi
tXrpQkOZ1OVatWTY0aNVJaWpoefPBBbdmyRWPGjNHbb7+dr+3ZbLaAfEolN/UlVanwh0799c90Ena7kfWkmysxJ5Jy/bkklQq2qGfnZjKbPfvu2uKqW7t0fbf8lBx/J13yKnMp73K3Ws3q1q6e+49h04oCf2XijD+zfe5118JC7FdF/l0yTAU//rpUqqIulf65w/qZRk0LvA2TyVT0eihgubulzKUil7uvylxyU7kHAE/1CekPXiyq/jFt2f1XtmXu7G8E2cwacE0n2WzuHwAOxOvzC6VbT1yUTHRn/VgsJnVsVU9t27YpUpyuBGJfI9COuUCso0DhD+1gUY53yf3HvCfOsZLUvkWqDh7f59FxtwFXt1HDukxzWhgNGqW6TCbmVUf5rR+zSWrdtKrHrg8D7TxLO+hZ3moHAyqZeKFdu3ZlazCvv/56rV+/Xnv27NFDDz2kG2+80YfRFW/1apZR2dI2j73I1uE01KpJJY9su6RodWmlAj9JURAWs0nNG7qeB97X6oaWzvaYuCTt6dnPR9EUzQff7sr2edn641q2/riPonGPQKoff7J7925FRkbKbDYrODhY06ZN83VIxcb13epoxg97lGF3z9NEVotJfTrWIpGYiz4da8nupvI+Lz3Dqas71XLrNgEUf/QJPadNs8qK3nfGY32O5pdU8EgisaiKy/V5y0srymTKfPekJzgchlo1qeiZjRdRoPU3issxl1+BVj/+rji2g8XlmG/VpKJmzfPczD6hIVY1iOCpxMKqGB6iWlVDdeRkct4rF4LJZFLrKPc/YOEOgXaeLS7nhIIornXkf1fmbnRhg/n999/r2LFj+u233/yysSxOTCaTru9eRxaLZwZKDUPqd2Vtj2y7pOjTqZasHqofi8Wknu1rKCQ4oO9HAIqVxo0bM/1aId075FK3JRIlye4w9MAtgTt1kjvUrVlGvTvWdNt1hNksXdmqmhrXC3fL9gAEDvqEnnNd1wiPJRJNJun67nU8su2SIizUpq6tq8vioZubbFazerWv6ZFtA3Af2kHP6dfFc1PGWywm9e8WwQ2qRXRDj7oeawcdTkPXevAYAPxRQCcTFy5cSMPoQffedKkcHug8WswmdW9TXY0YECySqhVLaWCvuh5J+Dochu6/uYnbtwsAvnBZo4oa0L2ObNaiXxbZrCZ1vryaurSu5obIAtsL97eW4XTPdYThlF56oLVbtgUgsNAn9JyrO9VSraqhHtm2xWzSnQMbeWTbJcl9NzeRw01t7b9ZLCYNvSZSFcND3L5tAO5FO+g59WuX1VUdanokWeVwGBozhHG3ohp906UeaQfNZqll44oeefUT4M8COpkIz2rTrLK6tq7m9mSVw2noiTtbuHWbJdW4Ec3dPq2NxWJSs0vKq09HppIDEDimPd1RpUtZVZR+oMkkWS1mffLilTIV4r1pJU2rJpU07o7mRX6K3mo16YFbotSxZVU3RQYAyA+LxawnR7Vw+3bNJumOGxqpasVSbt92SXNd1wg1qlvO/QPdhvTf25u5d5sAUAw9cedlbk9WWcwmdWhRRR1aVMl7ZeSqcb3wzJn13NwOOp3S03e3cOs2geKAZCIKzWQyafpzVyrIDU9ynGc2SXcObKge7Wq4bZslWctLK2nciOZya5NpSLNe6iKLhdMHgMBRpWIpzXvrKtls5hynkok5kaj613ylmBOJF/3MZMrs9M2Z0lN1a5bxdLgBY8LoVurdMfdpuXMrd6vVpK6tq2vi2Cs8GSYAIAd3DWqsK1u57wZTs9mk6pVDNfFhzuvuYLWaNeulLnL3MxnP3NNSTS+p4OatAkDx06V1dY0e3FjuvJfUajVp5gtduEHVTd55soNKl7K6rY7MZpNuvKquBvSo654NAsUI2QAUSb1aZfTeMx3zXC967xm1veVHRe89k+M6FrNJjeqW0+T/tHVniCXe+NEt1aZZ5TznWc9PHUnS5P+20WWNKrozRADwCx1bVtXyj/qqcniwyylP7X
ZD+48kyG7PPiRns5pVLixIC97trT6deGq7IGw2s+ZM6aFb+zWQJJcdvJzKXZIG96qneW9dpSCbxdOhAgBcMJtN+uTFK1WhbHCed/3n1d84f2PO5690VdmwIE+EWyJd0bSyXnqgVZ7r5ac/aDab1Pnyqnr8jsvcGSIAFGsTx7ZRVP3yRW4Hz3v3qY5qEFHWnSGWaNUrh+qTl7pIct3fPC9fY9cWkyKqldY7T+Y9Fg4EIpKJKLLbrr1E7zzZQVLmnNGuJKXYtX7raSWl2F3+3GyWLqlTVr98eA0dRzcLDrJo4bu91apJxVyn78utjs43ti/c10oP3trUQ5ECgO+1u6yKds29Ubdd20Bmk3J9+j7IZpZJ0oDudbR77iD1bFfTe4EGkCCbRTOev1Lz3uqlGpVDswaTXTGbM9ukahVLac6UHvrs1W4KDiKRCAC+VKdGGS376BpVKJd7QjG3/obFbJLNatb3b/TUla2rezLcEunREc2zpmPLaSA1rz67ySS1bVY5ayYHAECmsFCblnxwtRrXK5fjuKiU+3n2fPP5v8faacT1DT0UacnVv1sdffJi5tOeOV2r5NUOWswm1a5aWsunX6NK5XlnMEomrgDhFqNvulTz375KlcuH5NpwXuj803K3XNNAq2ddq+qVQz0UYckWXjZYyz68RqNvulRSzoO0rljMJpULC9KXr3XTk3e18FCEAOA/ypUJ0kcTOuvw4iF68q4Wan9ZFZUK+SdhFWQz64qoSnpkeDPtXzBYX07qrsoVeK9TUfW9MkKHFt2kH//XS4N61VXtaqWz/bxqxVK6oUddzZnSQ4cXD2FaGQDwI1ENymvj7P7q0rqapNzv/L+QSZk3lq6eda2u6VzbMwGWcCaTSc+NaaVPX+6iMqG2AvcHTSbpwVuitOT9q7n5FwBcqFqxlH775FoNu/YSScpzdrB/M5tNqlQ+RHPf7KX7h0Z5KsQS79Z+DbT0w6tVs0porg9bXOj8uv27RWj959epTg1ea4KSy+rrABA4ru5cWzt/GKQJ0zbpg293KSnFLqvFJLsj+7RkJpNkNpnkcBpq2biCJtx7ufpeGeGjqEuO0qE2vfVEBw3qVU9Pv/W7Vm06KYvZJMMwdOG7os/XW0iQRcOua6Dnx7RSlYoMlAMoWWpUKa1n7mmpZ+5pKafT0NY9f6nFjd9r67c3qGHdcr4OLyBZLGb16xKhfl0yrwtS0+zasT9el9/0g377pJ/q12a6HwDwVxHVw7Tkg6s14/s9eunDzdp3OMFlf1D6p79RKTxYD94apUeGN+dJcy+4pW8DdW9TQ0+/9btmzdurjAynzBaTHBfUkdkkyWSS02moU8uqeuH+Vup0eTXfBA0AxUS5MkGa8fyVGnxVPY1/9w9t2BYri9kkp2HIyGHcLTTEqjtuaKgJ916u8mWDfRN4CdKldXVFfz9QL36wWe9+uUNnEzPyHLtuUr+8xt/TUoOuquejqAH/QTIRbhVeNlhTHm2n5+9rpa8WHdDKP05o3dbTOnQsQcmpDpUrE6Q2UZXUOqqybuhZR62jKvs65BKn6xXVtXJmP23b85e++vmANkbH6o8dsYpPSFdaulO1q5ZWlyuqqcNlVXXz1ZEK52IGAGQ2mxQWapOU+Z4EeEdIsJUnIACgGDGZTBo5oKFGXH+Jfll3TPNXHtaGbbGK3ndGySl2pWU41ahuOXW7orq6t62h/t0ieO+tl1WvHKoPJ3TWxLFt9MWCfVqz5ZTW/XlaR08lKSXNoQrlgtWueWVdEVVZN/WJ1KWR4b4OGQCKlas719bVnWvrj+2x+nbJQW2MjtWmnXE6l5Q57lanRpi6tq6uTi2r6qY+9VSmNP0dbwoLtenlB6/QM3e31DeLD+jX3zPHrvcdPpc5dh1m0+VNKumKqErq362O2l9WRaaCTLkABDCSifCIsFCbRg5oqJEDMuf5XvfnKbW7da4WvdtbbZtX8XF0kKSml1RQ00sqZH0+X0dfT+5OHQEAAAAoNJ
PJpJ7tamZ7n/D5/sbMF66kv+EHKpQL1pghTTRmSBNJ/9TP/Levon4AwA0ub1JJlzeplPX5/Hn2y4ndOM/6gVIhVt127SW67e+pabPGrqf1oX6AHPDORAAAAAAAAAAAAAAu8WQiAAAAkIPqFUN1PC7ZJ/stsvLh0pn4om+nMPstomJb7r4q8/P7BgAAAADAA0gmAgAAADmIWTzE1yEUmm36O74OodCKa7kX5zIHAAAAACAnTHMKwP9YzFKZMr6OIlOZMpnxFIHVYlal8GA3BVQ0lcKDZS3i7yMp4OoIAAAAJUdAXp8HkgDsawTcMReAdQT3CbjjHfCFADvPBtx5IcDqJ794MhGA3zFZrbLOnCY5nL4ORbKYZbIW7VRps5l17JehsvvB72O1mGWzFb2BCbQ6AgAAQMkRiNfngSQQ+xqBdswFYh3BfQLteAd8IdDOs4F2Xgi0+skvWlsAfslktQbUGcpmC7wL2ECrIwAAAJQcgXh9HkgCsa8RaMdcINYR3CfQjnfAFwLtPBto54VAq5/8CJzaAwAAAAAAAAAAAOBWJBMBAAAAAAAAAAAAuEQyEQAAAAAAAAAAAIBLJBMBAAAAAAAAAAAAuEQyEQAAAAAAAAAAAIBLJBMBAAAAAAAAAAAAuEQyEQAAAAAAAAAAAIBLJBMBAAAAAAAAAAAAuEQyEQAAAAAAAAAAAIBLJBMBAAAAAAAAAAAAuEQyEQAAAAAAAAAAAIBLVl8HAAAAip+MkfdKZ+K9v+Py4bJNf8f7+/UTEb1m63hcstf3W71iqGIWD/H6fgEAAAAAAOB7JBMBAEDBnYmXHA7f7LcEOx6XLLvd8Ml+AQAAAAAAUDIxzSkAAAAAAAAAAAAAl0gmAgAAAAAAAAAAAHCJaU49xLDbJYfT12FIFrNMVqrZlYwMp+x+UEdWi1k2G3l9AAAAIFDQH/R/9AcBwHNoB/0f7SCAguJs6gGG3S777fdICQm+DkUqU0bWmdNoOC+QkeFUjR6fKzY+zdehqFJ4sI79MpSGEwAAAAgA9Af9H/1BAPAc2kH/RzsIoDD4K/UEh9M/GkwpMw4/uMvE39gdTr9oMCUpNj7NL+4EAgAAAOAG9Af9Hv1BAPAg2kG/RzsIoDBIJgIAAAAAAAAAAABwiWQiAAAAAAAAAAAAAJdIJgIAAAAAAAAAAABwibfPAgAA5MIwDK3felrfLjkoSVq39bQia5WRyWTybWABLv5cmr5dclDb9p7J/JyQ7uOIAAAAAAAASiaeTAQAAF6xIvaUntjxZ9bn53Zt06JTx30YUd72HzmnqAFz1OvuhZo8c6sk6e7nVqn+NV9p256/fBxd3lo1qaSvJnWXJAXZzFrz6bUqG2bzcVS5MwxDz7z9u2r2nK0HXlmrqZ9GS5I63z5Pj0xeL6fT8HGEAAAAAAAAJQvJRAAAirGYmBj17t1bTZs21aBBg9SsWTMdPXrU12EFhFNxKWp3y1ztPBCvhKQMnc9hJSbbdeBoojoP/0kHjyb4Nsg8/L49VkkpGerSuprGDmuq6d/t1rnEDF+Hlatn3v5dr3+yTcmpdiWn2rOWp6Q59O5XO/Tfyet8GB0AAAAAAEDJQzIRAIBiyuFwqH///ho3bpy2bdumHj166OTJk6pZs6avQwsIr8/aprOJ6TJyeBAuISlDz7+3ybtBFcLjb2zUxIfb6JrOtfXhnF2+DidXZ86l6fVPopWUYnf586QUu977epdOxaV4OTIAAAAAAICSi2QiAADF1IIFCxQZGanu3TOnsYyKilKLFi30008/6c4779Qtt9yimTNn+jjK7L48EqOeq5ep5+plmnX4oK/DydUH3+5SeoYzx587nIZmL9wvhyPndfzBidgUOQ1DC1YdyTEx6i++WnRAUu5BOp1OffbTPu8EBAAAAAAAAFl9HQAA/2D4+wgzgIts3rxZrVq1yvq8adMmtWjRQn379lXfvn0lSf3799ftt9+er+1lZGQoJiYmX+vWNgyZCh6ybqoVoZ
cubS4p852JBWUYhvbt804i6Vxiep7rOJ2Gtm7fozKhXrqkKsSp+vrudbQxOlbXdY3QW19sV0JSIaY5NeSVcv9zx2ElpzpyXSc13aktO45o375SHo8HCGQRERGy2fz7HaoILPQ3AAAlGe0ggOKOZCI8avmG45r08Vb9vCbz/V397vtZY4c11aiBjVWpfIiPo8PZhHTN+GG3ps6K1uETiZKkMS+t1nNjWunqTrVkMhUmVQDAWypWrKjVq1dLkvbv36+JEydq0qRJWT9/+eWXNWrUqHxvLyYmRg0aNMjXukl9B8lm9v4EB3a7Pd8xFlmT/0mW0FxXSU1NVcvmTSR56enEptMkU/4v34JsZo0b2Vy971moq9rX1JOjWuixqRsKvFu7PcM75V6xm1TtRskclPM6zgzN/HCqZr6yyPPxAAFs7969ql+/vq/DQIA7dCxBb32xXR99t1vxCZk36UyYtkkvPdBaLRpX9HF0MAxDP/16WJNmbtVvm05KkgY8tESP33mZhve/RGVK59IeAwDyFBefqg+/3aU3v9iuY6eTJUkPT1yrF+5vre5ta/g4OkjSmi0nNXnmNs1dkXlj9dX3LtIDQ6N0z+DGqlYp9/EAoKQJyGSi3W7X008/rffee0+lS5fWww8/rGnTpmn37t2+Di1PB5OTdOfm9VrSoVvWskuWzNOenv18GFXBGYahsRPX6d2vdyrD7pDz7zHW2Pg0Pf/eZk2auU2/zuirqAblfRtoIUTWKqNvJvdQ+9vmKi3dobsGNVLzhhV030trfB1agRw8mqBOt8/TX2fTlJL2z1Mgf2yP06Cxv+iGnnX1yYtdZDaTUAT81dChQ/X555+rSZMmat++vapUqaIWLVpIkp5//nldcskl6tcv/+1HRESE9u7dm691rf95Slkndy+yWq35jrGoJnywW18sOiZ7Dg/KmU3S1VfW1Bs/eO/6ovGgFbI78n9H60O3NtXMH/foXGKGvll8UHff2Fj1apbRgaMJBdqv1WrTTi+Ue1x8urres1YpaTkfW6VKBevneZNVvdLbHo8HCGQRERG+DsFjinN/UAqcPuEva4/p2vt/ltMwlJb+z3n95zVH9cu6Y5rySFvdO6SJDyMsnEDpDzocTg0dt1xzV8Rk6w8ej03RuKkbNGnmVq2a2U+1q4X5MEoAhUE76B92HohX59vnKSnFnu08u3brafUd87PuuKGh3ny8fbG7kT9Q2kFJevadP/TKR1uU4XBmDW+cOZeuV2f8qddnbdPSD69W66jKvg0S8CMBmUwcN26cduzYof379ysxMVHt2rVTmzZtfB1WifLO7B2a9s1OpaVfPAKbkuZQarpDXUfO1/4FNxa7ux33H0nQZ/P36pl7Wujt2Tt0381N1OG2eb4Oq0AyMpzqesd8nYhLkeOCQWlDmXU0Z8lBRdYso+fua+V6IwB8rly5clq5cqWkzPfIVatWTY0aNdIHH3yg2bNnq3Pnztq5c6eeffbZfG3PZrPl+ymVjEJ0eLpUqqIulapkfX6mUdMCb8NkMnntSZoXH6qquSu/y3qS4kKlQ22a9MiVql8v3CvxSJJMKwq0+sQZf2b73OuuhYXcr7xS7vUljRxwRh//sEdJKfaLfh4aYtXg3vXUqW2Ux2MBUHzRH/S9/UfO6dr7f842eHqew2HI4TA09rV1ql+7rHp3rOWDCAsvEPqDkvT4GxsvSiSel5Lq0LFTyepx5wLt+GGgLBbvz0YBoPBoB30vOcWuriN/0l/n0i66B9cwpNR0h6Z/t1sNapfVQ7cVvF/sS4HSDn720169Ov1PpWVcfCNrappDqWkO9bhzgfb+dKMqV+AVG4AkBdwV4bFjx/Thhx9q5syZCg8PV61atdSxY0dFRUVpxYoVateunTp16qSHH37Y16EGLIfDqefe26RUF52S8wxDSk6z67OfvPPeK3ebMitaXVtX1zeTe2jclA1KTC7E+6d86Mflh3T6r9SLEon/lpLm0ORZ25TsYjAXgP
/ZvXu3IiMjZTabNWrUKEVHR2vatGn5TiTiYrWrhWnFjL6qUTlUZcP+ea9Y2dI2VQoP1qJpvdXYm4nEEuKNce10a78GCg2xKsiWeakaZDMrNMSqG6+qqw/Gd/JxhAD8Gf1B/zD102g5nLk/SZ+W4dSz7/7hpYjcq7j3BxOS0vXm59tdJhLPszsMHT2VpAWrjngxMgBFRTvoH2Yv3K+EJHuuk/mkpDn0/PubZbd7f8afoiru7aBhGHr6rd+V6uIhmH/LsDv10XfF44lewBsC7snEX375Ra1bt1blyv88ghwXF6eoqCg1aNBAK1asUHBwsG655RZt3bpVzZo182G0rm05G6+eq5dlfT6RlurDaAru199P5KsRSU6x6+3ZO3TP4Eu9EJV7OZ2Glqw9phHXX6JFq4/6OpwCe+fLHUpOzTtJaJI079cYDe4d6fmgABRJ48aNtXbtWl+HEXCaN6ygmJ9v0qLVR7Vo9RE5HYa6tamh67pGyGoNuHuy/ILFYta0pzvq6btaaNbcvTpwLEG1q4Zp2HUNFFGdqdYA5C4Q+oNS8e8Tzvh+t9Jd3Ol/oY3RsTp8IrHYTaVZ3PuD3y89JIsl71kmklMdeverHerXJXCnRQYCDe2gf3h79vZ8jbulpjn0y7pjxe4p/eLeDm7aEadjp1PyXC8lzaF3v9yhx+64zAtRAf4v4JKJcXFx2RrMU6dOac2aNZoyZYpq1qyZtdxqtcpiseR7u4mJiYqOjs7XuqaMDLXMf8gXuaxc+EXzghfFhg3rZdhsea/oJivWxmc+epgPh0+c07p16zwbkAup6UW766dBRFn1vbK2vlx0QGOHNdWkj7cWaXvrN2xQSJD3BqX3HIrL13p2u0NrNu5QnfDTHo4I8K6oqCiFhRWvQSv4jsVi1jWda+uazrV9HUqJUrNqaT12J502AAUTCP1Byb19Qm/3BzPsTiUm5292E6tFWrx8g6LqhXo4quxKen9w7e+nlZ6R+9MY5+3af9onfXbAkwK5P0g7eDFvt4OSdOjo2XytZxhOrVq3TeFW7ybkSno7uGrLOVnM+Ru7PhmXTDuIgFPYdjDgkokNGzbUxIkTdfz4cZlMJo0YMULp6elq1KhR1jqbNm1SbGysmjTJ/8veo6Oj1a5du3ytG2K26FzfgQWO3VO6dOmqVGf+OgpuUaaFVPtOyRKS56pn/zqpdu1GeD6mC5lsUtN3C/31957pqPtfXqM/dsRp7afX6pvFB3TwaGKht9e1SxfJ8OKUAPWfkkLr5rlaWmqKpk5+RVOfWuX5mAAvWrt2rdq2bevrMAAAgJvRH7yY1/uDktT0PcmU9yB1amq67hh+i5R23AtB/UtJ7w9W6CpVu1GyBOe56r7d29Su3W2ejwnwokDuD9IOXswn7WDDF6XgqnmulpKcrBcmPKUXzv3uhaD+paS3g6UbSnUflMx5t4NpKQn5PvaB4qKw7WDAJRP79OmjXr16qWHDhoqMjNTgwYN16NAhBQUFScq8I+eBBx7QN998U6DtRkVF5Xv6OFNGhjTxzQLH7ikrViz36h04SakOXf3wTqXbc7/DI8hq0s1XN9boG7w/LV9qulNdx2wv1HdHDWykHfvjtWbLKUnS2EnrNO2pjuozelGh41m+YoVX78CZvSRW0747qdT03OvIEhSiH2e/rIrlvHsHF+BpUVFRvg4BAAB4AP3Bi3m7PyhJj7x1SKv+TMhzwpoqFUvphw/nyGTKe8pNdyrp/cETceka+MRuOfJ4MCUkyKQHb+2iAV0GeCcwwEsCuT9IO3gxX7SDH/x4Up8ujFVaRu4NoS24lBZ8/z+Fheb/KVF3KOntYHqGU1eP3amk1NwbQqtFurZLbY2bwStlEFgK2w6aDCOf81EWU++8846WLVumr7/+Wmlpaerbt69effVVtWrVymP7NNLSZb/Rf+7cs349S6bgIK/uc/QLv+nj7/fk+iLbIJtZe3+60Sfvx0hJtSu0zUyv7zcnyetvV6kQ7+X248
+lqUaPL5SSlnP9BNsyp/WbM7Wn1+ICUHxkDBgqObx8d6ckWSyyffe59/frJ2yXT5c9j5t1PMFqNSnjj5Fe3y8AFBX9Qd/0B1f+fkK97l6gtFymUSsVbNGrD1+h+4d6f1C/pPcHJanvmEVasvZYru+2LF3KqhPLhioslJtLgeKKdtA37eDx08mq1+dLpeVyjg0OMuvWvg304YTOXowsE+2g9OT/Nur1WduUmsvYaJDNrM1fD9ClkeHeCwzwY95L+fvIrl27sjKtn3zyibZu3ar//Oc/6tq1q9asWePj6ALX5P+01aWR4QoJcn1nTZDNrJkvXOmTRCKk8LLBmjOlp4KDzHJ1E3BwkFm1q4fpo+e8f0EDAAAAuAv9Qd/o3KqanhzVQsE59AdLBVvUu2MtjRmS/yn24F4zX+iiGpVDFWS7eFjIZJJCgiz68X+9SCQCxRztoG9UrxyqT1/u6vIcK0khQWY1qltOU8cxfaavjB/dUq2bVFKpYNfXKsE2s/73WHsSicC/lKhk4qhRo3Ty5EktX75cy5cvV/v27X0cXeAKLWXVqpn99NBtUSobZlOpYIvCQq2yWk3qcFkVLZrWR0Ouru/rMEu0Pp1q6dcZ/dS9TQ1ZLSaFhVoVGmJV6VJW3T2osTZ8fp3Kl8177nAAAADAX9Ef9J2n726pWS91UeN65RRkNSss1KqQIIuqVSqlF+9vrW8md5fZ7N3pTfGPSuVD9PuX1+vOGxqpVLBFoSHWzD67xaRe7Wpq1Sf91L1tDV+HCaCIaAd9Z9BV9bT4vT7q1LKqrNbMcbdSIRaVLW3TA7dEafUn13LDhg8F2Sz65cOr9dgdzVWhbFDW2LXNalarJhX13dSeuvvGxr4OE/ArAT/NqS/wOH926RkObdoRp9R0h+pUD1PdmmV8Fst5PM6f3ZETSdp/5JxsNrMua1hRoaUC7nWqANyMaU59g2lOAcD/0R+82I798ToZl6KypW1q0biiz5OI9AezS0rO0J97/lJGhlMNIsqqRpXSPosFQPFHO3ixQ8cSdPBYokKCLGrRuGKOT+57C+1gdna7U5t2xik51a5aVUurfu2yPosF8GdkDOBxQTaL2jav4uswkIta1UqrVjU6jAAAAADc79LIcKYJ82OlQ21qf1lVX4cBAAGrTo0yqlPD9w9XwDWr1awrmlb2dRiA3wv4aU4BAAAAAAAAAAAAFA7JRAAAAAAAAAAAAAAuMc0pAAAouPLh0pl43+y3BKteMVTH45J9sl8AAAAAAACUTCQTAQBAgdmmv+PrEEqkmMVDfB0CAAAAAAAAShimOQUAAAAAAAAAAADgEslEAAAAAAAAAAAAAC6RTAQAAAAAAAAAAADgEslEAAAAAAAAAAAAAC6RTAQAAAAAAAAAAADgEslEAAAAAAAAAAAAAC6RTPQEi1kqU8bXUWQqUyYzHmRjtZhVKTzY12FIkiqFB8tKHQEAAACBgf6g36M/CAAeRDvo92gHARSGyTAMw9dBBCLDbpccTl+HIVnMMlmtvo7CL2VkOGX3gzqyWsyy2Wg0AQAAgEBBf9D/0R8EAM+hHfR/tIMACopkIgAAAAAAAAAAAACXSPsDAAAAAAAAAAAAcIlkIgAAAAAAAAAAAACXSCYCAAAAAAAAAAAAcIlkIgAAAAAAAAAAAACXSCYCAAAAAAAAAAAAcIlkIgAAAAAAAAAAAACXSCYCAAAAAAAAAAAAcIlkIgAAAAAAAAAAAACXSCYCAAAAAAAAAAAAcIlkIgAAAAAAAAAAAACXSCYCAAAAAAAAAAAAcIlkIgAAAAAAAAAAAACXSCYCAAAAAAAAAAAAcIlkIgAAAAAAAAAAAACXSCYCAAAAAAAAAAAAcIlkIgAAAAAAAAAAAACXSCYCAAAAAAAAAAAAcIlkIgAAAAAAAAAAAACXSCYCAAAAAAAAAAAAcIlkIgAAAAAAAAAAAACXSCYCAAAAAAAAAAAAcOn/h7O81PrtFzgAAAAASUVORK5CYII=",
 - "text/plain": [ - "
" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAABxMAAAETCAYAAAD9HCj7AAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjkuMiwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8hTgPZAAAACXBIWXMAAA9hAAAPYQGoP6dpAABxl0lEQVR4nO3dd3QUVRvH8d+WVFqAhN47hN6R3gSxoSgoFkAFsXdURLF3AV8Lgg1EbKAIgoKIgCBNkBpCJyC9BkL67s77RyQSWEjb3dlsvp9zOJrJ7J1nM+XemWfuvRbDMAwBAAAAAAAAAAAAwHmsZgcAAAAAAAAAAAAAwD+RTAQAAAAAAAAAAADgFslEAAAAAAAAAAAAAG6RTAQAAAAAAAAAAADgFslEAAAAAAAAAAAAAG6RTAQAAAAAAAAAAADgFslEAAAAAAAAAAAAAG6RTAQAAAAAAAAAAADgFslEAAAAAAAAAAAAAG6RTITHWCwWPf/885k/T5o0SRaLRXFxcabFdCnnx2u2wYMHy2KxyGKxqGHDhpdc9+zfdvXq1T6KDmZo2rRp5jFx1VVXmR0OAAAAAAAAAKAQIpnoZ3bu3Km7775bNWrUUGhoqIoXL6727dvr3XffVXJystnhwcsiIyM1ZcoUvf7661mWV6tWLc+Jzy5dumjw4MF5+uzzzz+vatWq5eozcXFxslgsWrRoUZ62mdfvejbBmhf5jdkT3MX/6quvasqUKYqMjDQpKgAAAAAAAABAYWc3OwD8Z86cObrxxhsVEhKi22+/XQ0bNlRaWpqWLl2qJ554QjExMZo4caLZYV5UcnKy7PaCc0j5Y7xFihTRrbfeanYY8BN9+vSRJI0aNcrkSAAAAAAAAAAAhZV/ZVIKsd27d+umm25S1apV9fvvv6t8+fKZv7vvvvu0Y8cOzZkzx8QIsxcaGmp2CLlS0OIFAAAAAAAAAADwNYY59RNvvvmmzpw5o08//TRLIvGsWrVq6aGHHsr82eFw6KWXXlLNmjUVEhKiatWqaeTIkUpNTc3yuWrVqumqq67SokWL1LJlS4WFhalRo0aZwzn+8MMPatSokUJDQ9WiRQutXbs2y+cHDx6sokWLateuXerVq5eKFCmiChUq6MUXX5RhGFnWzekchL/88os6duyoIkWKqFixYrryyisVExOT7eeef/55t8NYupubcfXq1erVq5ciIyMVFham6tWr64477rhkvGfL37FjhwYPHqyIiAiVKFFCQ4YMUVJSUpbPJicn68EHH1RkZKSKFSuma665Rvv373f7N9iyZYv27t2b7ffLi9TUVD366KOKiopSkSJFdN111+no0aNe2ZaUdV7H8/95Y/5JwzDUtWtXRUVF6ciRI5nL09LS1KhRI9WsWVOJiYke3+5ZW7ZsUf/+/RUVFaWwsDDVrVtXzzzzTJZ19u/frzvvvFMVKlRQSEiIqlevrnvuuUdpaWmmxw8AAAAAAAAAQH7RM9FP/PTTT6pRo4Yuu+yyHK1/1113afLkybrhhhv02GOPaeXKlXrttdcUGxurGTNmZFl3x44dGjhwoO6++27deuutevvtt3X11Vfro48+0siRI3XvvfdKkl577TX1799fW7duldX6X57Z6XSqd+/eatu2rd58803NnTtXo0ePlsPh0Isvvpir7zllyhQNGjRIvXr10htvvKGkpCSNHz9eHTp00Nq1a3M9P587R44c0eWXX66oqCg99dRTioiIUFxcnH744Yccfb5///6qXr26XnvtNf3999/65JNPVKZMGb3xxhuZ6wwePFjfffedbrvtNrVt21aLFy/WlVde6ba8+vXrq3Pnzl6Zj++BBx5QyZIlNXr0aMXFxWncuHG6//779e2333p8W5J09913q0ePHlmWzZ07V1OnTlWZMm
U8vj2LxaLPPvtMjRs31vDhwzP34ejRoxUTE6NFixapSJEiHt+uJG3YsEEdO3ZUUFCQhg0bpmrVqmnnzp366aef9Morr0iSDhw4oNatWys+Pl7Dhg1TvXr1tH//fk2fPl1JSUmKiIgwLX4AAAAAAAAAADzCgOlOnTplSDKuvfbaHK2/bt06Q5Jx1113ZVn++OOPG5KM33//PXNZ1apVDUnGsmXLMpfNmzfPkGSEhYUZe/bsyVw+YcIEQ5KxcOHCzGWDBg0yJBkPPPBA5jKXy2VceeWVRnBwsHH06NHM5ZKM0aNHZ/78+eefG5KM3bt3G4ZhGAkJCUZERIQxdOjQLHEfOnTIKFGixAXLzzd69GjD3SF7/nZmzJhhSDL++uuvS5Z3frxny7/jjjuyrHfdddcZpUuXzvx5zZo1hiTj4YcfzrLe4MGDLyjz7HY6d+58yVgMI+NvXbVq1WzXM4z/vnOPHj0Ml8uVufyRRx4xbDabER8fn6Ny8mv79u1GiRIljJ49exoOh8Nr2zl7bH755ZfGihUrDJvNdsHf39M6depkFCtWLMs5YhhGlr/37bffblitVrfH2rnr5Tf+qlWrGldeeWUevgUAAAAAAAAAAPnDMKd+4PTp05KkYsWK5Wj9n3/+WZL06KOPZln+2GOPSdIFcys2aNBA7dq1y/y5TZs2kqRu3bqpSpUqFyzftWvXBdu8//77M//fYrHo/vvvV1pamn777bccxSxJ8+fPV3x8vG6++WYdO3Ys85/NZlObNm20cOHCHJd1KREREZKk2bNnKz09PdefHz58eJafO3bsqOPHj2fup7lz50pSZo/Osx544AG35RmG4ZVeiZI0bNiwLEO/duzYUU6nU3v27PHK9s6VmJio6667TiVLltTXX38tm83mtW0NGzZMvXr10gMPPKDbbrtNNWvW1Kuvvuq17R09elR//PGH7rjjjizniKTMv7fL5dKPP/6oq6++Wi1btrygjHP3i6/jBwAAAAAAAADAUxjm1A8UL15ckpSQkJCj9ffs2SOr1apatWplWV6uXDlFRERckEg6PxlSokQJSVLlypXdLj958mSW5VarVTVq1MiyrE6dOpKUZZ7C7Gzfvl1SRhLTnbN/h/zq3Lmz+vXrpxdeeEFjx45Vly5d1LdvXw0cOFAhISHZfv78v1fJkiUlZfxdihcvnvn3r169epb1zt8fvnCpWL1t6NCh2rlzp5YtW6bSpUt7fXuffvqpatasqe3bt2vZsmUKCwvz2rbOJtQbNmx40XWOHj2q06dPX3Kdc/kyfgAAAAAAAAAAPIVkoh8oXry4KlSooE2bNuXqc+f2fLqUi/UYu9hywzByFUdOuVwuSRnzJpYrV+6C39vtlz4cL/Z9nU7nBetNnz5dK1as0E8//aR58+bpjjvu0DvvvKMVK1aoaNGil9yOr/8u+WFWrO+++66+/vprffnll2ratKlXt3XWokWLlJqaKknauHFjlt62BUFBjx8AAAAAAAAAUDgxzKmfuOqqq7Rz504tX74823WrVq0ql8uV2dPvrMOHDys+Pl5Vq1b1aGwul+uCoU+3bdsmSapWrVqOy6lZs6YkqUyZMurRo8cF/7p06XLJz5/tdRcfH59l+cWG9Gzbtq1eeeUVrV69WlOnTlVMTIy++eabHMd7MWf//rt3786yfMeOHfkuuyBYsmSJHn/8cT388MO65ZZbfLLNgwcP6oEHHtDll1+uq666So8//rhXh3I92xP3Ugn+qKgoFS9ePEcvAfg6fgAAAAAAAAAAPIVkop8YMWKEihQporvuukuHDx++4Pc7d+7Uu+++K0nq06ePJGncuHFZ1hkzZowk6corr/R4fO+//37m/xuGoffff19BQUHq3r17jsvo1auXihcvrldffdXtXIZHjx695OfPJiP/+OOPzGWJiYmaPHlylvVOnjx5Qc+8s73nzvYMy49evXpJkj788MMsy9977z2362/ZskV79+7N93b9wcGDB9W/f3
916NBBb731ls+2O3ToULlcLn366aeaOHGi7Ha77rzzTq/1wIyKilKnTp302WefXbDvzm7TarWqb9+++umnn7R69eoLyjg3Nl/HDwAAAAAAAACApzDMqZ+oWbOmvvrqKw0YMED169fX7bffroYNGyotLU3Lli3TtGnTNHjwYElSkyZNNGjQIE2cOFHx8fHq3LmzVq1apcmTJ6tv377q2rWrR2MLDQ3V3LlzNWjQILVp00a//PKL5syZo5EjRyoqKirH5RQvXlzjx4/XbbfdpubNm+umm25SVFSU9u7dqzlz5qh9+/ZZkpbnu/zyy1WlShXdeeedeuKJJ2Sz2fTZZ59llnHW5MmT9eGHH+q6665TzZo1lZCQoI8//ljFixfPTMTmR4sWLdSvXz+NGzdOx48fV9u2bbV48eLM3prnD8dav359de7cWYsWLcr3tj2lS5cuWrx4ca6TWQ8++KCOHj2qESNGXNDLs3HjxmrcuLHbz8XFxal69eoaNGiQJk2alKttfv7555ozZ44mTZqkSpUqScpI3N56660aP3687r333ot+dtKkSRoyZIg+//zzzPMnp/73v/+pQ4cOat68uYYNG6bq1asrLi5Oc+bM0bp16yRJr776qn799Vd17txZw4YNU/369XXw4EFNmzZNS5cuVURERL7iBwAAAAAAAADAbCQT/cg111yjDRs26K233tLMmTM1fvx4hYSEqHHjxnrnnXc0dOjQzHU/+eQT1ahRQ5MmTdKMGTNUrlw5Pf300xo9erTH47LZbJo7d67uuecePfHEEypWrJhGjx6t5557LtdlDRw4UBUqVNDrr7+ut956S6mpqapYsaI6duyoIUOGXPKzQUFBmjFjhu699149++yzKleunB5++GGVLFkyy2fPJle/+eYbHT58WCVKlFDr1q01depUVa9ePdcxu/PFF1+oXLly+vrrrzVjxgz16NFD3377rerWravQ0FCPbMObzpw543beyuwcPXpUTqdTjz766AW/Gz169EWTiWfOnJEklS9fPlfb27dvnx555BFdffXVGjRoUObyW265Rd9//71GjBihK6644qL7Na/blTKS9itWrNCzzz6r8ePHKyUlRVWrVlX//v0z16lYsaJWrlypZ599VlOnTtXp06dVsWJFXXHFFQoPD893/AAAAAAAAAAAmM1iMM4eLmHw4MGaPn16ZlIGF7du3To1a9ZMX375ZZ7mEhw8eLB+//13/f3337Lb7YqIiPB8kJISEhJUqlQpjRs3Tvfdd59XtnG+Dz/8UCNGjNDOnTtVtmxZn2xTkvr376+4uDitWrXKZ9v0pPj4eDkcDjVv3lyNGzfW7NmzzQ4JAAAAAAAAAFDIMGcikAfJyckXLBs3bpysVqs6deqU53L/+ecfRUVFqUOHDvkJ75L++OMPVaxYMUtPV29buHChHnzwQZ8mEg3D0KJFi/Tyyy/7bJue1qVLF0VFRemff/4xOxQAAAAAAAAAQCFFz0RcEj0T3XvhhRe0Zs0ade3aVXa7Xb/88ot++eUXDRs2TBMmTMhTmZs3b9aBAwckSUWLFlXbtm09GTIKoJUrVyohIUGSFBUVpSZNmpgcEQAAAAAAAACgsGHORCAPLrvsMs2fP18vvfSSzpw5oypVquj555/XM888k+cyGzRooAYNGngwShR0bdq0MTsEAAAAAAAAAEAhR89EAAAAAAAAAAAAAG4xZyIAAAAAAAAAAAAAt0gmAgAAAAAAAAAAAHCLZCIAAAAAAAAAAAAAt0gmAgAAAAAAAAAAAHCLZCIAAAAAAAAAAAAAt0gmAgAAAAAAAAAAAHCLZCIAAAAAAAAAAAAAt0gmAgAAAAAAAAAAAHCLZCIAAAAAAAAAAAAAt0gmAgAAAAAAAAAAAHCLZCIAAAAAAAAAAAAAt0gmAgAAAAAAAAAAAHCLZCIAAAAAAAAAAAAAt0gmAgAAAAAAAAAAAHCLZCIAAAAAAAAAAAAAt0gmAgAAAAAAAAAAAH
CLZCIAAAAAAAAAAAAAt0gmAgAAAAAAAAAAAHCLZCIAAAAAAAAAAAAAt0gmAgAAAAAAAAAAAHCLZCIAAAAAAAAAAAAAt0gmAgAAAAAAAAAAAHCLZCIAAAAAAAAAAAAAt0gmAgAAAAAAAAAAAHCLZCIAAAAAAAAAAAAAt0gmAgAAAAAAAAAAAHCLZCIAAAAAAAAAAAAAt0gmAgAAAAAAAAAAAHCLZCIAAAAAAAAAAAAAt0gmAgAAAAAAAAAAAHCLZCIAAAAAAAAAAAAAt0gmAgAAAAAAAAAAAHCLZCIAAAAAAAAAAAAAt0gmAgAAAAAAAAAAAHCLZCIAAAAAAAAAAAAAt+xmB4C8idufoGPxKWaHkWOREaGqVrGY2WEUKhwjAAAAAAAAAAAgv0gmFkBx+xNU/9rvlZLmNDuUHAsNtil2Zj+SRT7CMQIAAAAAAAAAADyBYU4LoGPxKQUqSSRJKWnOAtVLrqDjGAEAAAAAAAAAAJ5AMhEAAAAAAAAAAACAWyQTAQAAAAAAAAAAALhFMhEAAAAAAAAAAACAWyQTAQAAAAAAAAAAALhFMhEAAAAAAAAAAACAWyQTAQAAAAAAAAAAALhFMhEAAAAAAAAAAACAWyQTAQAAAAAAAAAAALhFMhEAAAAAAAAAAACAWyQTAQAAAAAAAAAAALhFMhEAAAAAAAAAAACAWyQTAQAAAAAAAAAAALhlNzsAwN+4XIZ+W7Fff206pqQUh0oUDdYVHSqpUZ1SZocGP2EYhhauOqgVG44oMdmh4kWD1LNtRTVvEGl2aAAAAAAAAAAAeBTJROBfLpeh97/erLFTNinuwBnZrBZZrZLTZejJcX+pXZMyemZoE13ZqYrZocIkhmFowrQteueLTdqx9/Q5x4j01LjVahUdqafvaqLrulczO1QAAAAAAAAAADyCYU7/lZiYqIceekhlypRRsWLFNHjwYE2aNElBQUFKSUkxOzx4WXq6SwOe+F0PvbFCew6ekZSRREx3GHK5MtZZufGorrp/vt6ZvNHESGEWp9OlQc/8oXteXqad/5zOWJZ5jBiSpDWxx3T9Iwv08sS1ZoYKAAAAAAAAAIDHkEyU5HA41KdPH/38888aO3aspk+frt27d2vkyJGqW7euQkNDzQ7Ra8JCbdox50YNuqZ25rKQYJtiZ/bTsBvqmhiZb93/2jJ9/1ucJMkw3K9zNmH0+Dur9OXsHT6KzHwcIxmeGLNKU/7d7xc/RjL+++z7f+vj6Vt8FBkAAAAAAAAAAN7DMKeS3n33Xa1bt05bt25VuXLlJEn16tVTtWrV1K1bN5Oj867kFKeGvrBU097upnnL9unQsWS9cG9z7T+SpInTt5odnk/s2Hs61991xNhVuql3DdntgZ+P5xiR9h1K1LtfxuTqM0+9u1q3X1NbIcE2L0UF+Lf0dJccTpfZYchusyooKPCv1XnBPgIA7+EaC+QP5xAAeBfXWf/HPoK/KfTJRMMwNGbMGA0dOjQzkShJVatWld1uV5MmTSRJsbGxuv3223Xq1ClVqlRJU6dOVfny5c0K26MWrjqo73+L00fPttfLE9dp+I311Kz/j2aH5TMffRcrm9Uip+si3c3cOHg0WXOW/KNru1b1YmT+o7AfIxO/3yJZLBfvkujGiVOp+uG3ON3cp6YXIwP8U3q6SxW6f6Vj8almh6LIiBAdWDCQhu952EcA4D1cY4H84RwCAO/iOuv/2EfwR4X+CIiNjdWBAwfUt2/fLMsPHjwoh8Ohpk2bSpKGDx+uUaNGadu2bbr22mv11FNP+T5YL3r8nVVqXr+0fvmwl5778G/t3p9gdkg+8+WcnblKJEqSzWrR17/s9FJE/qlQHyOzd2QOc5tTVqtFX/1cuI4R4CyH0+UXDV5JOhaf6hdv8vkb9hEAeA/XWCB/OIcAwLu4zvo/9hH8UaHvmbh//35JUpkyZbIsnz9/viSpadOmOnz4sLZv365rr71WknTnnXfq2Wef1eTJk/O83apVq+
rUqVN5+qwjuJJU7pE8b9udhMR0bdh2Upe3q+i1JFmXrl1lT9vnlbLz41TlNyVL7oaidLoMTf9xviImXu+lqPKHY8SzTlV6VbKG5OozLpehn39broiI/l6KCrlVokQJ7dmzx+wwAAAAAAAAAKBAKfQ9E0uXLi1J2rnzv+RIYmKiXn75ZZUvX15RUVHat2+fKleunPn7okWLKjQ0VMePH/d5vN7Sv1d1NapdUjN+j9O4EW3NDsfH8vhmheH0bBh+jmMk9yyGw8NxAAAAAAAAAADgW4W+Z2LDhg1VtWpVPfbYY3I4HHI4HHrjjTeUkJCgZs2aeW27+ekdszrmqFrdPMtjsZSOCNF7T7fToFF/aOWGI4qdeYOu7FRZc/74x2PbkKRFCxeqZXSUR8v0hOi+3yt2d3xupsOTzWbR8CF99f7IN70XWD5wjHhW65tnak3sMblykVO02yy6bUBPffbiS94LDAAAAAAAAAAALyv0PRODg4M1ffp0hYWFacCAAXrxxRc1atQoRUREZM6XWKlSJf3zz39JkzNnziglJSWzV2NB9/7Tl2nun/s0d+k+nTydpgdeW67xoy5TsSJBZofmE/feVF/K3XR4cjoNDe1X1zsB+aHCfozcM6B+rhKJkuRwGhrWr553AgIAAAAAAAAAwEcKfTJRklq2bKk1a9YoKSlJa9euVbdu3bRt2zY1adJEklS2bFnVqlVLM2fOlCR9+umn6tu3r4kRe841XaqoS6tyeviNFZnLpv26W6tjjunNR1qZGJnv3HZVLYWG2GSx5Gx9m9WiNo2i1KRuYCSTs8MxIg3oVUPFiwTl6hhpWKuk2jT2v16WAAAAAAAAAADkRqEf5tSdDRs2yOVyZfZMlKTx48dr0KBBevzxx1WpUiVNnTrVvAA9aNaivZq1aO8Fy69/ZIEJ0ZijeNFgffpCRw18alG269qsFoWF2vTpCx29H5if4BiRwsPsmvxKJ13/yAJZLLrkkLhWq0XBQVZ98UonWXKafQQAAAAAAAAAwE+RTHRj3bp1Cg8PV+3atTOXRUdHa/Xq1SZGBW+6uU9NpaW7dOfoJZIkp+vCbJHFIkUUC9Yv43spulZJX4cIk/XtVk3fvNlVtz69SC7XxY+R4kWCNPv9y9WsfqQJUQIAAAAAAAAA4FkMc+rG8OHDlZiYKKuVP09hMuja2oqd2U8P3RqtYuFZ5wKsUr6I3nyktbb+dINaNWToysKqf68a2jrrRj02qKEiigVn+V3FMuF69cGW2jrrBrVvVtakCAEAAAAAAAAA8Cx6JgLnqF21hN55vI1evr+FynSZKkPS6q+vVZ2qJWS1MmQlpOqViumNR1rrhXubK6pzxjHy11fXqk7V4rLZeAEBAAAAAAAAABBYSCYCboSF2jMTQ/WqR5gbDPxSaMh/x0j9GhHmBgPA7x08mqRTZ9IUVTJUpSNCzQ4H50lNc+qfQ4mSMkYjCA6ymRwRznfsZIqOxacooliwykWGmx1OviUmpWv/kSQF2a2qWqEoL60BMJXT6dLeg4lKd7hUsUy4ipw3Ug/MdyohTYeOJSks1K7K5YrIYqHeAAqShMQ0HTiSpNAQm6qUL8o57GcMw9C+w4lKTHaoXOkwRRQPMTskwC+RTAQAAPACwzA0fX6cXv14nTZuPym73aJ0h6FOLcrqububqWvrCmaHWOgdOZ6sNz7foInTtygt3SXDkEJDbBrev56eHNKYxK8f+G3Ffr00YZ2Wrj2soH/PoWb1SumZoU11XfdqZoeXa9v3nNIrH6/T17/skkWS0zAUGRGqR25tqAdvaaDQEG7PAPhOcopD706N0dgpm3TidKpsFotkkQb2qalnhjZVzcrFzQ6x0Fsbe0wvTVinnxbvlc1mkcNpqEq5IhoxpLGG9qvL6DiAn9u0/YRemrhOMxbskdUiOV2GykeF64nBjTT8xvoKCuIcNpPLZeizGdv0xucbtHtfgux2i5xOQ1d0rKzn7m6qltFMdQWciysWAAABxO
Fw6Omnn1apUqVUuXJljRkzRnXq1DE7rByrUamY/v62r0KCM3qGDbuhrt4f2c7kqHLPMAw99MYKDXpmsdZtPSGny1Bqmksul6HFqw/pinvm6cNvNpsdZp4Eyj7ae/CMmtw4Q+9/vVlnkhxKS3cp3eFSQmK6/vdljJrcMEP7DyeaHWahNuaLjbrq/l/1x5pDcp1zDq3ZfFwDn1qkEWNWmR1irqzaeFTN+v+oqXN2Ki3dpdR0lxwOQ4eOJev58X+r46A5SkxKNzvMgFCQ68JAucbC/51JSlf722frhY/W6siJFDkchlLTXUpNc2nKTzvU9MYZWrP5mNlh5kmgnEdz/tirdrfN1sxFe+RwZtSDTqeh3fvP6NG3VuraB3+Tw+EyO0z4oYJcDwaS31ceUKubZ+n7+XFKd/zb9nMa+udQop4c+5d63TNXaelOs8PMtUC5xjqdLt34+AI9+Ppy7dh7OvOe3eE0NHvxXrW/fbZ++C3O7DDzJFD2EfwPyUQAAALIk08+qfXr12vXrl1avny5xowZo4YNG5odVo7t2pegqT/v0HPDm6pCmXDdf3MDPTVutdlh5drXP+/SJ99vVXLqhTeHhiGlprv06FsrtWrjUROiy59A2EeGYajPvfN07GSK0tIvfAiXmu7S4ePJuubB+SZEB0lasuaQRv5vtVLT3D8kTUl16r2vN+v7+bt9HFneJCU7dPndc5WY7JDDaVzw++RUpzbtOKl7X1lmQnSBpyDXhYFwjUXBMPT5pdq8K14pbtoqDqehxH+vWympDhOiy59AOI/2H07UDY/+rtQ0p1xuqsLkVKd+X3VAL09c5/PY4P8Kcj0YKI7Hp+jqB+YrJc0pp8t922/F+iMF7tokBcY1VpLenrxRvyzZd9F79rR0lwY+tVC79yWYEF3+BMo+gv8hmQgAQIA4cOCAPvnkE02ePFkRERGqVKmS2rdvr+joaEnSiBEj1LFjRw0ZMkROp/++ATl2Soy6tCyv6e9015Nj/9KZAthT55WP17m9KTlXutOltyZt8FFEnlXQ99Hy9Ue0458Et0mdsxxOQ5t2nNTqmIKX8A0Eb3y+QenZ9LZISXXq1U/W+yii/Pn6l51yOLP5PmlOff3LLh2PT/FRVIEpEOrCgn6Nhf87fDxZ3/+2W6lpFz8HDCPjOjvt14Lx0sb5Cvp59NG0LVI2U6olpzo17stNBbJnE7wnEOrBQPDZjG0ydPF7DSnjHP7ou9gCOTJFQb/GOhwuvT1pY7b37DKkDwroiEIFfR/BPzEpBwAAAWLBggVq2bKloqL+G9f/+PHjio6O1vr163X06FEtWbJEL7zwgmbPnq1rr702x2WfOXNGMTExOVo35SI9iXLK5TL024oDGtK3tuYt25+vsiRp1V9/KTTYd+9PHTiapq1x8dmu53JJMxbEafnyFbJas3la5GGFfR+NmXpA6Tl48OZwujT2sz/1YP/yPogKZ6Wlu/Tzkn9kXPr5iyRp3dbjmj1vqaIigrwfWD68+8VOJSZn37vHbjM07rNFuqp9Sa/GEx0draJFi3p1G2bxVl1IPYhAMmPxCdmsUnaPFZNSHBo3ebXqlDnhk7jOyu85JHn2PDLjHJo4bYvbXqPnS01zaMKXi9S6QWBe072FetC79SCkD77epuSUnCRrXfrgi0Xq3My3c9QW9rbKuu2JSkhKy3a91HSXPpsRqxs7+iCo8xT2fQTvyms9SDIRAIAAcfz48Sw3jUeOHNHy5cs1duxYLV26VL1795Yk9e7dWz/88EOukokxMTFq27Ztzla2BEkNx+cq9nPVqlJcV3aqrG/n7dajtzfU25M25rksSerSubNk+PAtvNDKUs2nJGtItqs6XdJlHbpIrlTvx3Wuwr6PKt8tRbTKdjWXS/rqu5/01ZjPfBAUMtmKSA3ezdGqLkearr52gJR6wMtB5VPtl6TQ7JPSycmpevm1MXr5mHeH2F2xYoXatGnj1W2YxVt1IfUgAkpkb6nstZI1+xcx/t
6wTW3bDvRBUOfI5zkkefY8MuUcavA/yRae7WopyUl68OEnpdNrfBBU4KAe9HI9CKneO1JQiWxXS05K1pMjX5BO/umDoM5R2NsqxRpLlYdKtrBsVz15KtmcY7+w7yN4VV7rQZKJAAAEiDp16ujNN9/UwYMHZbFYNGTIEKWlpalu3br66aefVL16dUlSiRIldPLkyVyVHR0drRUrVuRo3ZQ0l7rcl/ehQCY8114PvLZcf8ce14ovr9b0+bsVt/9MnstbtHixT9+gO3wiTdc/vU3ZjGgoSQqyWbR46SJTeiYW5n005psD+n7hiWz3kd0m3XzzNbqv3zDfBAZJksNhqPN9MTk6h6y2YP3003cqXdy/b2uGv7lL67YnZbteWFiIRjzzqK5o95JX4zk71Fkg8lZdSD2IQDJryQmN/fagklOz7wLeqlldvTclZ8e+p+T3HJI8ex6ZcQ5d99RWHTye/UPb0LBwvfPeG2pRNzB72XkL9aB360FIA0dv164D2b8wGhoeppdfH60OTXzfM7Ewt1U27UzSfe/sVmp69vVgZMlwzTbh2C/s+wjeldd60L/vugEAQI717t1bPXv2VJ06dVSjRg31799fe/bsUXBwsCIiInTq1ClJ0qlTp1SyZO6G0CtatGiO31pKTnFIylujd2i/uordFa/l649Ikh59e6U+GtVeve+Zl6fyJKl1q1YKC/Vtk6fJpOP6O/b4Jdex2yy6uU9NtWvn+7ccC/s+erLYMf34x09yui6drbJarRoxtKMa1i7lo8hw1nXdE/XDgjhls4vUrkkZ9enZ3jdB5cPjd5TWsBf+VFLKpYc6dRkWPXJnN5UoFuyjyAKPt+pC6kEEktr1UvXWV19J2cznFR5q1xN3tlGbNtV9E9i/8nMOSZ4/j8w4hx64JUQvTVyX7TCJRcODNWxgV9ntPOBFBn+oByE9MqiYnhy7SknZnMNBNrvuH9xNIcE2H0WWobC3VVq1MvTsJwd18FjyJdcLDbFp+ICGatOmhY8i+09h30fwT7Q2AAAIEFarVZMnT1ZCQoLWr1+vkiVLZr5tdNlll+nXX3+VJM2bN0/t2rUzM9SL+vj7rbr/1eWZPy9cdTBfDV6zPDe8WbY3hDabRY8NauSjiDwnEPZR8waRalqvtIKCLt4UDg6yqk3jKBKJJnnyjsay2y59qxISbNOoYc18FFH+3NCzuoqG22W5RCfksBCb7ryuDonEfCrodWEgXGPh/0qVCNHga2orNOTibRWr1aKIYsHq27WqDyPzjEA4j+68rq6sl6o0lFFvPHlHYxKJyKKg14OB4raramV7boaF2PTwbdE+TyTmVyBcY61Wi54Z1lRhl6gHJckiafiN9XwTlAcFwj6Cf6LFAQBAgNq6dWvmjWPTpk1VsmRJdezYUTt37tRVV11lcnSB7dquVfXM0CYKCbZdkDywWS0KDrLqsxc7qnEdElVmmfW/nqpctojbB6mhITZVq1BUP4zpYUJkkKSW0VGa8Fx7BQdZZT3vjsVikUKCrXrhnmbq3aGSOQHmUkiwTQs+vkIligYr2E0SOyzUpsualtXYEcxF5GnUhYB7/3u6rdo0jFJY6IX1YEiwVRHFgvXbx1dc8sUbeE+Z0mH6+YPLFR5qU5CbhERYiE39elbTo7cXvBfT4FvUg+YoUSxY8ydcoSJhdgVf5By+okMljR5eMF6MC0T3DqivW66s6TahGGS3KjTYplnv9VTFskVMiA7wT7QKAQAIUOfeOErS22+/rSVLlmjy5Mmy2xmewtuevbuZ5rx/ubq3qZCZULRZLerXo5qWT7laA/vUMjfAQq5s6TCt/a6vnh/eTGVLh2Yuj4wI0Uv3Ndfqb65VZMnQS5QAbxt8bR0tnXyVrutWTTbbf1n5No2iNHd8bz15ZxMTo8u9hrVLKWbG9XrwlmgVKxKUubxK+SJ676l2mju+l4KDCtab6QUBdSHgXmiIXfMnXqF3R7RV7ar/zdVVJMyuh29tqE0/XK/6NS
LMCxDq1LK81k+/Xnf0ra3Qc3ou1a9eQpNf6awvXuns83m3UfBQD5qndaMobfrheg27sZ7CzxkesnaV4vr4+Q6a9k532bIZiQPeY7FYNHF0B019vYvaNIrKXB5st+r2q2tp3bS+6tG2ookRAv7HYhhG9jONwq+sjjmqVjfPMjuMXPvr62vUMjoq+xX9RET7KZKk+D9vMzmS3OMY8Y2CfIwA3pSc4lB468lmh5EpadUg08f2X7z6oLrc8bP++LyPOrYob2osEvvofC6XoQUr9+vyu+dp+ZSr1LZJWdNigXupaU4tXHVAV9z7q1Z8ebXaNC5jdkj54nS6tGDFAfW6h2MuEHGNRUFjGIYWrjyg7sPm6s8vrtJlTc29JnEOXSg93aXfV+1X73sCox4EChuHw6UFK/3nHOY6e6FFqw6o612/aOmkK9W+eTlTY5HYR/BPvP4AAADgZWffJqfXkX+yWi0qXiRjnjpLNvMTwRwhwTaVLB5idhgeY7NZM+dG5JgDYDaLxaIi4Rk9pm30dPNLQUFWRRQLnHoQKGzsds5hf3c2UcY8tMDFcXYUQJERoVmGuCgIQoNtioxgqDBf4RgBAAAAAAAAAACeQN/UAqhaxWKKndlPx+JTzA4lxyIjQlWtYjGzwyg0OEYAAAAAAAAAAIAnkEwsoKpVLEbiBZfEMQIAAAAAAAAAAPKLYU4BAAAAAAAAAAAAuEUyEQAAAAAAAAAAAIBbJBMBAAAAAAAAAAAAuEUyEQAAAAAAAAAAAIBbJBMBAAAAAAAAAAAAuEUyEQAAAAAAAAAAAIBbJBMBAIBH2W1WRUaEmB2GJCkyIkR2G82d87GPAMB7uMYC+cM5BADexXXW/7GP4I/sZgcAAAACS1CQVQcWDJTD6TI7FNltVgUF0eg9H/sIALyHayyQP5xDAOBdXGf9H/sI/ohkIgAA8LigIBqb/o59BADewzUWyB/OIQDwLq6z/o99BH/D0QgAAAAAAAAAAADALZKJAAAAAAAAAAAAANwimQgAAAAAAAAAAADALZKJAAAAAAAAAAAAANwimQgAAAAAAAAAAADALZKJAAAAAAAAAAAAANwimQgAAAAAAAAAAADALZKJAAAAAAAAAAAAANwimQgAAAAAAAAAAADALZKJAAAAAAAAAAAAANwimQgAAAAAAAAAAADALbvZAQAAzHEgSYpPMzuKnIsIliqEmx0FAAAAAAAAABQuJBMBoBA6kCTd8LuU5jI7kpwLtkrTu5FQBAAAAAAAAABfYphTACiE4tMKViJRyoi3IPWkBAAAAAAAAIBAQDIRAAAAAAAAAAAAgFsMcwoAAAD4GYdLchhmRyHZLZKd1w8BAMB5aKsABVt6uksOp/lDVtltVgUFcRK7E2jXWb6Pd/iyHiSZCAAAAPgRh0vq9at0yg+Gdi4RLM27nId0AADgP7RVgIItPd2lCt2/0rH4VLNDUWREiA4sGEhC8TyBdp3l+3iPL+tBzlLAjVMJaXK5DDldhvYcSFBKqsPskOBnEhL/O0bi9icoOYVjBADgGQ7DP25KpIw4/OFtSwAA4D9oqwAFm8Pp8otEoiQdi0/1ix6S/ibQrrN8H+/xZT1Iz0RA0snTqZo6Z6eW/H1IazYf085/EjJ/V633d7LbLIquVVItGkSqZ9sKur5HNQUH2UyMGL52KiFNX/28U3+sOaQ1sce0fc/pzN9Vv+I72WwWNagRoRYNItW9TQXd0LOaQkO4xAIAAAAAAAAACjaedKNQ27jthMZNjdFXc3YqJc150fUcTkPrt57Q+q0n9NmMbSpTKlRD+9XVAwOjVbZ0mA8jhq/F7orXuC836cvZO5SUcvFjxOk0tHH7SW3cflKTZm7Xw2+s0J3X19GDA6NVsWwRH0YMAAAAAAAAAIDnkExEoZSW7tRLE9bptU/Wy+nKfT/gIydS9MrH6/Xht7F67+l2GtinpiwWixcihVnS01164/P1evGjdUp35H64heOnUvXm5xs1YdoWjR3RVo
Ovrc0xAhQihmFo2bojWrDygP6KOap1W05IkgY8sVCtG0WpRYPS6tOhshrVKWVypIXXqYQ0/fj7Hq3adFSrY47pn8NnJEl3jl6irq0rqHXDSPXtVlXFigSbHGnhtW7Lcc39c5/WbD6mjdtPKj4hYxyZEWNWqUe7iurRtoLaNi5TYOrXQ8eSNGPBHq3efExrNh/T4ePJkqR7Xv5T3dtU1GVNy+jKTpUZ/QKAT6SmOTV78V4tX39Ef8Uc085/MkZeuW3kYnVqUU4toyN1XfdqvDxrot37EjRrUUa9sTb2uI6fyhiS8MHXl6tH24rq1KKcerStIJuNGYwAf/TPoTP68fc9WrP5uP6OPaajJ1MkSfe/tlzd21RQx+Zl1euySrIz4acpXC5DC1Ye0OLVB7U65phidpyUJN385EK1a1JWLaMjdXXnKqpVpbjJkQL+w2IYBiOLo1DZujteNz7+uzZuP+mxMvt2q6rJL3dS8aI8cAwEu/ad1o2P/a6/Y497rMwrOlTS1Ne7qGTxEI+VmR+b46Xb/zA7itz7opPUIMLsKICLc7kMTZq5XW9P2qDY3adktUqSRa5zXlyx2TJ+NgypXZMyevrOJrq6SxXTYi5s9h9O1EsT1+mLWduVnOqU3WaRw5m1OXx2WXioXUP61tYzQ5uqfFS4z2JMcUod5vhsc9laeqUU6sP81o+/x+m1TzZo1aajslgkq8WS5eUv67+5Q5chNaxVUk8MbqTbrq7lt0nFTdtP6KWJ6/T9/Dg5XcYlj7nIiBDdM6C+RgxprKLhQSZFDCCQnT6Tpjc+26CPvtuiE6dTZbdlXGPPfTJ09ppkt1l0Q8/qevbupmpQs6R5QRcyy9Yd1ssT1mnusn0yDF1Qb5xbN1YqW0QP3RKtB29p4NOXUQp7WwW4lNUxR/XShHWavXivXNmcw+Uiw/TAzQ306O0NfTpVTnKKQ+GtJ/tse9lJWjVIYaG++f7p6S598M1mjftyk/YcTJTNapFhGHKdVw86nYYMST3bVdCooU3VqWV5n8R3VqBdZ/k+3uWrepBkIgqVdVuOq+ewX7wyyXDL6EjN+6i3SpXwj2QR8mbzzpPqMfQXHTyW7PGyG9UuqfkTr/CLt3tJJgKet3tfggY/+4f+WHNIFouUkxaW1ZqRWBzQu7ref/oyRZYM9X6ghZRhZCR6H3x9uZJTnDkemcBmtahImF0fPHOZbrnSNyMRFNYbkyPHk3XvK8v0/W9xmedGdqyWjKRit9bl9dmLHVW1QjHvB5pD6ekuvfn5Bj0//m8ZyhgSPSesFqlimSKa/EondW1dwbtBAihU5i/fryHP/qGDR5OU0wF6bDaLrBbpxfta6PFBjehB40VJyQ49895qvTs1RlarJcf1hiQ1qBGhKa92VvMGkV6M8D+Fta0CXEpKqkMvfLRWb3y24YKX4S7FYpFqVS6uL17prLZNyng5ygyFNZm4cdsJ3TZysdZvO5Hje3abNWNf3ndTfb3+cCufvfAXaNdZvo93+aoepBWIQmPr7nivJRIlaXXMMV1xzzwlJKZ5pXx43659p72WSJSkjdtPqtfwuYo/7Z1jEIB5Vm08qqb9Z+jPdYcl5eymRFJmsmTar3FqedOP2r0vwVshFmoul6EHX1+hO55bosRkR66GOHe6DCUkpeu2kYv1+DurxHt43rF9zyk1H/Cjfvx9jyTlKJEoKfNh+OLVh9Ss/49as/mYt0LMleQUh659aL5Gvb9GDqeRqwfCLkPafzRJ3Yf+ognTtngxSgCFyQffbNbld8/VwWPJOU4kShkvQqQ7DD397mpd98hvSkl1eC/IQuzEqVR1GjJb706NkWHk/AWUs7bGnVLbW2dp5sI9XooQwKWcPpOmnsPm6o1PN2Scw7m40BqGtGtfgjoMmq1vftnpxSgLt1+W/KOWN8/Upp0ZI9Xl9Lbu7L788NtYtb/9Jx094Z1nhkBBQDIRhUJaulM3Pv57rhKJcXMHKG7ugFxtZ9Wmo3rs7VW5DQ
9+wOFw6aYRC3OVSMzLMbJ+6wnd9+ry3IYHwI9t3HZC3Yf+rMQkR64f/JzlchnadzhJnYfM0aFjSR6OEE+MWaX3v94sKec3jec6+5kxX2zSM/9b48HIIEn7DiWqyx0/69Cx5DzNZS1l3OSfPpOu7nf9rNhd8Z4NMLexOF264bHfNe/PfXku4+xQyMNf+lOTZ273YHQACqNPvt+q+/+9B8npyxruzPnjH900YmG+ysCFkpIduvzuX7Ruy4k8tVOkjHrQ4TR0w6ML9OuyvNc/AHIvNc2pK+/7VcvWH1Zer45OlyGXYWjgU4v04+9xngwPkhb9dVDXPjRfDocrz/fshiHF7IhX96G/0JEEhRbJRBQKL09cl+s5EksUDVKJornvuv7x91tpvBdA73yxUX9tyl1vhrweI1/9vJPGIRAgUtOc6v/E79kOm1kkzK7WjaJUJOziQ7c4XYYOHE3SHc8tofebB835Y6/GfLEp2/Vyso8k6bVP12v+8v2eCq/QMwxDg0Yt1uHj2ScSs9tHTpehM0kO3TTid6Wnu7wRbo6MnRKjn5f8k23Pn5wec3e/tFTb95zyYIQACpPYXfG695Vl2a6Xk2uSYUgzF+7NfEEHnvH0u3/p79jj+a4HDSMjWXzzk4voOQP40Avj1+rPtYflyqb5mZNzWJJuG7lY+w8nejjKwuvk6VQNeOJ3OZ3GJdvnOb1nj9lxUo+/Q0cSFE4kExHwYnac1Ksfr/fpNu96fqmSUxj+paDYsfe0nvvgb59uc/hLf/ImExAAXpqwVlt3n8r24U90rZJaOfUaRdcqecn1nC5Dvyzdpyk/7fBkmIVW/OlU3fncEllzMM1hTveR1WrR4FF/cA33kE++36rfVx3MUY/EnOwjp8vQhm0n9fpnvm37nbUt7pRG/m91jtbN6THncBoa9MxiXjIAkGsul6Hbn1ksVw6uHzm9JknSiLGrtPOf054IsdBbsuaQ/vfV5hz1SMzJPnIZUnxCmh54jdFwAF9Ys/mYXv9sfY56JObkHDYMKTnFqWEvLvVckIXco2+u1LH4lGxf9MtpPegypInTt2rBigMejBIoGEgmIuCN+3JTnofMyqt/DiVq2q+7fbpN5N17X8Uozcc9GA4fT9GXsxkLHyjITiWkacwXm/I8lM3FWCzS6PF/M4SYB3w6Y5sOn8j+xjE3XP/2IP2ChG++OZ0uPf/RWq+U/cZnG5SYlO6Vsi/lzc83ePzcdToNLd9wVAtXHfRouQAC36/L9mt1zLE8D+l2MQ6HoXcmb/RomYXVixPWypaTt55yweUy9O283dq6O96j5QK40CsT18nq4XPY6TL085J9+ttP5gIvyOL2J2jyT9uz7TWaW1arRc+P922nBMAfkExEQIs/naqpc8xJ2Hz4bawp20XuJCala5JJcxF9+G0svQzgFQ6HQ08//bRKlSqlypUra8yYMapTp47ZYQWcKbN3KDnV6fFyDUOK239Gv61gKM38cLkMvff1Zlk8e28vKSPh+97UzVzD8+nnJft04Ih35ghNTHboq5992wY8eTpVX87Z4ZWX2Gw2iz74hmEFc4p6EMjw/jebZbN5viJ0ugxNmrVdp8/QSz8/tu85pd9WHPBOvWG16KNpWzxeLgoG6kHf2HcoUTMX7fX4CxtSxjk8/jueK+bXhOlbvHI/6HIZWrr2sDZtP+H5wgE/dukJOgqZxMREjRw5Ul9//bWSk5PVr18/denSRUOHDlVCQoJCQ0PNDhG59NXPO73yoDcnVm48qo3bTqhRnVKmbB85M23+bp1O9H3PBUnatOOkVm44qrZNypiyfU85vmiq9o6/+4LlrtQkle4+RNUe+NSEqAq3J598UrGxsdq1a5fOnDmjtm3bqnXr1maHFXCm/bpbFotyNCxVbtltFn3/W5wuv6yS5wsvJDZsO6E9B854pWzDkLbuOaWtcadUr3qEV7aRH8fmf6rji6Zk/px2dK9CK9VX7efmmBjVhb7/LU42m8UrD2CsFm
n6/DgNvaGex8u+mLlL9yk1zTsjHTidhn5avFdp6U4FB9m8so1AQj0ISMkpDv2ydJ/XRjpITnFq/vL96tezulfKLwxmLNjjtbak02Xo27m7NHZEW88Xnk8FpZ1SkFEP+sasRXu8do11ugx9N2+3Jo7uIIs3smH5UKNSMU1/p7va3faTUtOcGnZDXTWuU0r3v+p/wyt/O3eXx3slnmW1WvTDgj1qWNv/nvsG2nWW7+M/SCb+y+FwqE+fPjpw4IDGjh2ryMhIvfrqq/r1119Vt25dEokF1J/rjpi8/cMkE/3cn2sPm7v9dYcLfDKxdJdbVLrLLVmWxa+cpd1jb1XZax8zKarC68CBA/rkk0+0Y8cORUREKCIiQu3bt1edOnWUmpqqzp07a9OmTVq3bp1q1apldrgFlstlaM3mY155+CNlzJG2auNR7xReSKzxwbBAazYf88tkYmTPOxXZ805JkiPhhLaO7KRKg98yOaoLrdx4xCuJRCljLpNVm47KMAyfPYBZs/mY15KjkpTuMLRp+0k1bxDplfIDBfUgkGH91hNeHTLdZrVozebjJBPzYU3sMVktFjm91KA8eCxZR44nq0zpMK+Un1cFpZ1SUFEP+s6a2OOy2yxyeKntdzoxXXH7z6h6pWJeKT+vdu1L0NSfd+i54U31wTexuv/mBrrsttlmh3WBUwlp2r3fOy+XnrXaT4eiDbTrLN/HfzDM6b/effddrVu3TkuWLNEtt9yiXr166YsvvtDBgwfVtGlTs8NDHq2OMfeibvb2kT2zK/5APEZSDmxX3Lu3q+p9HyusSgOzwyl0FixYoJYtWyoqKipz2fHjxxUdHa2goCDNnDlTN9xwg4kRBoa9B88oMdnh1W3E7IxnGM182Lj9pOxeGNrtLLvNog3b/HtYG8MwFDfudpXr95TfXY/T013aFnfKq9uIT0jT4ePJXt3GuTZs8+6De0nawFBK2aIeBDJ4+3rhMgxt5JqUL2tjj3tliNNzbdx+0qvl54c/t1MKMupB31m35bjXEoln+et1duyUGHVpWV7T3+muJ8f+pTMmzFWenU07vHv9c7kMrY097tVt5FegXWf5PuajZ6IydtyYMWM0dOhQlStXLnN51apVZbfb1aRJE0nS3XffrdmzZ+vAgQM83CsAziSla9se7z6kys7ffl6pFHZp6U5tMvnmKtCOEWdKona+fr1Kd79DpToOMDucQun48eNZbhyPHDmi5cuXa+zYsbJarSpbtmyeyj1z5oxiYmI8FWaBt3N/itvlRcLsiq5V8oLl0TUjsvzXnZgdJ7MkKNMdLi39c4WCg3j3Ky92xe2/oL12sf0j5X4fGYahHbv3a+XKlR6J91xphkVS/oeiOvz9GwoqVUGlu9yar3L++muVgi2ebfsmpjjl7vlpfvbR+eeQJC35c7WqlAvJT6g5tv/QyQt6K3vymJOkDTE7tLJc/tsu0dHRKlq0aL7L8UfUg0CGmNgLX1r0bD0o/XPguFfqwcLiePyF8wZ7ut5YvTZGRS3/5CtOdzzRVvFUO0XKfVuFejD3qAcvdPhYwgXLPH0O/71hi8qGH8pXnO6k5HNofpfL0G8rDmhI39qat2x/vuNZ9ddfCg327H3vX5su3D+SZ+/Z408ne60eLOjX2fPxfS5UEOtBi0FWTJs3b1Z0dLSWLFmiDh06ZC4/ePCgKlSooF9//VU9e/bUH3/8obp166pcuXL5TiZWrVpVp06Zm+gKdC5bhBIqPuv2d3FzB6hE0aBLfr5EsWBJGd3iL+XUmXRV6/2t299ZHCdU/MArOYgWZnBZw5VQ6aWL/j674yTfx4hhyOJKVPH9o3MWsAeFVG+qyi8s8ni5u94ZqPTj+1XnpQWy2Dz/vso/o7sodfe6PH22RIkS2rNnj2cD8kM///yz7rrrLq1Zs0YWi0VDhgzRb7/9psTERAUHZxyzgwcP1qhRo3I1rM3KlSvVtq3/zblimpDyUp0Lrx+tG0Vp5dRr8lRkm1tmXT
i06cahkgp9Uy1vKtwmlWovWf67FuVn/0jn7SPDIR1fLB38Or+RXsASHKrm0/LXoy5h02Lt+/xx1X1tiazB+Ruu/+8bw2SkuU+g55klWGr44QWLPX4ObXlKSvfRKAA1npSK1M6yyKPHnCTt/1I6sSjP5Z21YsUKtWnTJt/l+CPqQeBfpbtLFW7Ossjj16QzsdLud/JcXqFX900pOOu0KB7fR3H/kxI25Lm8i8lvW8WT7RQp920V6kHqQY+o/ZIUWj7LIo+fw3snSKf+ynN5F2UJkhqOz/PHa1Uprm/e7KqFfx3U4ePJenvSxvzFs+keyfBw78Yi9aUaF06949H7DUeCFPtIXiO8pIJ+nT0f3ycrs79PXutBeiZK2r8/4w2KMmWyzls2f/58Scoc5rRTp04+jQv5ZXJPDsOQZDM3BmTD5P1jsciwBM4xcvind3Vm4yLVH/u3VxKJyJnevXurZ8+eqlOnjmrUqKH+/ftrz549mTeOeRUdHa0VK1Z4KMqCLzHFqR4PxF6Q5ovZcVJtbpl1wfrRNSP02YuddMdzfyhmZ7zbMmPOG4alZDGbflnhf5PYFxSfzT6ij2cdydJT7GL7R8r9PrJY7Lpv+EDd2ushT4YtKeMtx0fyMe1zevxh7fnwbtV+do5HbkwWL17s8Z6JhmGo50OxOpOc9a3o/Oyj888hq0X6fcEsj7/lfDHPffyPFqw+Jec5X8mTx5wkvf3aSHVo8nq+Y42Ojs53Gf6KehDIsOjv03pq/N4syzx5TbJZpd6Xt9OzQzgv8mrY67u0YWfW3omerjemfPaualf2/JyJ+WmreLqdIuW+rUI9mHvUgxd6+N04rYw547X7DUma+MEbalwr3FMhZ0pJc6nLfZvz/PkJz7XXA68t19+xx7Xiy6s1ff5uxeVjfsJFixd7vM2+91Cq+j+7/YLlnrxnr1sjSpM/9855UdCvs+fj+/zHH75PXutBnvZKKl26tCRp586dqlOnjiQpMTFRL7/8ssqXL59leABPKQy9Y8y2/3CiKvX8xu3vLtaT8Fwnl2Z0MS7Z4cu8BWCxqFrVStq1OT5vn4fXnTydqlKX2L/ZHSf5PkYklYksqcOb4vP8+bzaHC/d/ofnyjuzeakOTBmp2i/8qqCS5bL/QB4tXLhIDSK8VnxAsFqtmjx5siZPnixJ+vDDDz1ys1y0aNGAfXs3r2pU2qed+7IOnZKY7LiwZ9Q5YnbGX/L3Z1mtFrVvXoG/eT6cSPtHE2f+mmVZdvtHyvk+MiRd37u52rSukJ8w3UpxSpqT988f/OZFOZNOKe79OzOXBZeqqOqPTc1Tea1atVaoF959adfkpH5beSDLAxhP7qN6NSLUuWO7/IaZY722hmv+qlVZlnny+0jSLde3V7lIzz9QCiTUg0CG8lXOXJBM9OQ1yemSeneurzZtCsYcP/6oWztDm+M2Z5lzzZP7KDjIqpv6dpLd7vmXavLTVvF0O0XyXlulIKIe9J0el9n11+b1chreOYetFumW6zsqPMzzj++TUxyS8pZMHNqvrmJ3xWv5+oxMyqNvr9RHo9qr9z3z8hxP61atFBbq2e/ZymUo/JXdSkrJOg2Cp+7Z7TaLurap5rXzItCus3yf//jj98kpkomSGjZsqKpVq+qxxx6Tw+GQw+HQG2+8oYSEBDVr1szs8JBHUaVCFRxkVVp6/sYBz4/K5YqYtm1kr0TRYBUrEqSERPMmiq5cruDP05B+4qB2vnmjKt7+uorWb292ODjP1q1bs9w89u3bV6tWrdL27dv18MMP68YbbzQxuoKta+vyijt4Rk6n54chNQxDHZt5LzFfGLRuFCW7zZLlAZ0nBQdZ1aJBpFfKzq8qwz9QleEfmB1Gtjq1LKcFKw94ZSBfm9WiLi3LZ7+iB3VsXs6rgxJXrVCURGIeUA+isKpcrogqlAnXgSMXzsvnKR2a5W3uNWTo2Lycxn3pnTnorFaL2jYq45VEYn4VlHZKoKAe9J6OzcvqZXeTgH
uA1So1qVPKK4nE/Pr4+61Zfl646qAWrjpoUjQXZ7Va1LF5Wf224oCcXthPDqehDs39sx4MtOss38d/+F+rwgTBwcGaPn26wsLCNGDAAL344osaNWqUIiIiMoc4RcETHGRT4zqlsl/Ri1o0KG3q9nFpVqtFzeqZu48C4Rg5+uvHcpw8pP1TntbaAUWz/Nv+whVmh1fonX/z+OOPP+rAgQP6888/uXHMp6H96nklkShlJEIGXVs7+xVxUaUjQtWvZzXZbBaPl22zWTSwT00VL5q/4aIKu8HX1JEsnt8/kuR0GRrar65Xyr6YltGRalirpKxeuMOyWKR7+9f3fMGFAPUgCiuLxaJ7+tfzymXWapWa1y+tpibfSxV0V3WurMiIEK+U7XIZGt6/nlfKRsFCPeg93dtUUKWy3ulE4HJJ99D2y7e7b6znlUSiJJUoGqTru1fzStmAv/K/1xtM0rJlS61Zsybz56SkJG3btk1NmjQxMSrkV4sGkVodc8zU7cO/tWhQWn+sOWTi9gv+MVLhpudU4abnzA4DFzF37lyzQwhYrRpGqkWD0lq35YRHb1BsVov696qhsqU9P79NYfPgwGh9O3e3x8t1Og3ddxM39/lVqVwRXd+9qmYs2OPZc8hmUeuGUT5/yG2xWPTIbQ115+glHi87OMiqO66r4/FyCwPqQRRmd11fVy9+tE7pDs+O1uNySQ/dErhzzvlKcJBN993UQC9OWJtlyO/8slqkyJKhur5HNc8VigKLetB7bDarHrolWiPGrPLo6BQWi1SsSJAG9qnpwVILp6s7V1GlsuE6cDRJLg9WhVaLNLx/fY8PzQr4O3omXsSGDRvkcrmy9EwcPHiwKlWqJEmqVKmSbrvtNpOiQ05d3q6iadu2WS3q2sq3w2sh9y5vV8nU7Xdv4/m5tgD4hsVi0YTnOnj8xjE8zK63Hm3lwVILr8ualtXga2p7tKeYxSINv7GeWkZ7fk7twuidx9soNMSzkztYJE141pxhtwddU0vtGkd5vEfsm4+0VmTJUI+WCSDwlYsM16sPtvRomTZbxrBxt15Vy6PlFlYjhjRWtQpFZbN6rt5wGdJHz7ZXSDCTCALe9sDABqpXvYRHz2HDkN5/up2KhAd5rMzCym63auJzHTybSLRaVKFMuJ4ZSgckFD4kEy9i3bp1Cg8PV+3a/w0xNmnSJO3bt0+GYWjfvn2aMmWKiREiJ67uXEUVosyZW6Zvt6qqUIY5E/3d5ZdVVPWK5sxb2Lt9JdWsXNyUbQPwjBYNIjVqaFOPlWcY0vhRl1F/eNDYEW1UrnS4R5I7NptFlcsW0Zskez2mSvmi+t9T7Txa5gv3Nlcjk4a6t9msmvRyZwXbrfLEMyXbv3O93H9zg/wXBqBQeuS2aLVtHOWRB91Wq0UhQVZ9/lInWT344LwwCw+z64tXOsuQZ0b+tlikgX1q6jqG3gN8IiTYpimvdZHF4plz2GrJeJbJCxuec0XHyrrjujoeG/bbMAxNfrmzihVhygsUPiQTL2L48OFKTEyU1RuTnsBngoKsGnaDb+fLOeveAQx/VhBYrRbTxqHnGAECw+h7mun2qy99sxez46Ta3DJLMTtOXnK9l+5vrluu5MbRkyKKh+i3j69QiaLBl3yQmt0+stksKvVvWdw4etYd19XRqGFNs10vJ+fRXf3q6Om7zH1LuE61Epr1Xk/Z7dZLPmzP9pizWlSvegn9+G5PHtoDyDObzaqf3rtctasWz1c9aLVaFGS3aPb7l/NCpId1aF5OX7zSSdKlkxHZ7SOLRercopw+eb6DN8IEcBEtGkTq27e6yWqx5Osctlqklg2j9NUbXWTx0rzihdWHz1ymnu0q5mv/SBnX2Y9Hd1A3RhlDIUWmDAHvvpsaeG1S84vp1KKcurZmiNOCYmi/uj7vwdqqYaT6dDR3iFUAnmG1WvT5S530xOBGsljk9kFdYrJDqzYeVWKy44Lf2WwZD+fee7qdRg1r5ouQC536NSK07I
urVKtK8YveQF5qH1kk1a1aQsu/vFq1q5bwbrCF1Ev3t9A7j7eW3Wa5aC/Si+0jmzXjwc3Iu5powrMd/OLhS4+2FTV/Qm9FRoRcdJjdSx1zktSlVTktmXyVSpXwbTsWQOCJLBmqpZOvUsfmZS+6zqWuSVarVKZkqBZ83EddW/MA1RtuubKWpr/TXUXC7BdN+l5sH52t9gb0qqGfP+zFHF6ACa7vUU2z3ut5yRcYszuHr+5SRQs+vkJFGd7U40KCbZr1v566/eqMEQjdtc8vec9utSg81K6v3+iqO683p9MK4A9IJiLgRZYM1Yejcj9vzqkz6Tp1Jj3XnwsPtemzFzv6xYMs5ExE8RBNHO27YyQ4yKpJL3WSzcYlGAgUVqtFbz7aWos+7aPK5TOGKM1uOLGzCZOmdUtp3bTrGMbQy+pWj9C6aX315B2NM5NP2bFYJLvNolF3N9XaaX3pieFlj97eSGu+7atGtUpKUrZD0549x6pXLKalk6/SKw+29KsefJ1alteWWTdoYJ+akpRtbGePySJhdk14rr3mT7xCJYuTSATgGaUjQrXgkz76YGQ7hYdmzKWXXV149rp1+9W1FTuzn9o3u3gyEvl3fY9q2jLzBl1+WUVJ2bclz+6fUsVDNP2dbvr6za4kEgET9elYWVtm3aBrulSRlINz+N9fFy8SrC9f66wZ43qQSPSikGCbJr3cSTPf7aGof+ciz/ae/d/fd21dXrEz+2lA7xpejxPwZxbDMAyzgwB8YcATv+u7ebu9vp3/PdVWDwyM9vp24HlDnv1Dk2Zu9/p2Xn+4pZ68w9wh2DbHS7f/YWoIefJFJ6lBhNlRAJfmcLg0Z8k/+uCbzVq8+pDS0i+c7T081K4+HSvp/psbqFOLcryA4mMHjybpkx+26rMZ2xR34IzbdWpUKqY7r6ujO6+vq7Klw3waX4pT6jDHp5u8pKVXSv8+d/YJwzC0cNVBffDNZs39c5+SUpwXrBMSbFXXVhV03031dUWHSn7/gs62uFP6aFqsps7ZqSMnUi74vdUiNapdSnffWE+3XlWToXQBeNWphDRNmb1DE6dvUcyOk3K5eSpUtnSYbr2ypob3r69aVXiZxtfWxh7T+O+2aNqvuxWfkHbB721Wi1o1jNS9A+rrxsurKzTEt0nEwt5WAbKzafsJjf9ui76Zu0snTqVe8Hur1aLm9UrrngH1dVPvGgoP8+05nJziUHjryT7d5qUkrRrk05chUtOc+n5+nD78LlYrNxyRw3lhRViiaJCu71FN9w6or5bRUT6L7axAu87yfbzLV/UgyUQUGmeS0tVz2C9aseGo17Zx74D6en9kOx4KF1DJKQ5dce88LV59yGvbGHxtbX36QkfTe06QTAR8w+FwKXZXvOYs+UdPv7tabz7SSn27VVXNysVNvw4gw8nTqVobe1zL1x/RqPfX6IOR7TSwT01FmNgjrLDemLjjchnavueUYnfHa9P2k3r2g7819bXO6t+rhux2/04gXszBo0lav/WEVm8+qmff/1sTn2uvgX1qqghvogMwQWJSutZtPaE/1hzUyP+t0bgRbTSgdw2Vi/TtNBBwzzAM/XMoURu3n9Da2ON69oO/9fmLHXVzn5oKCTYve0ZbBcgZwzC0/3CSNmw/ob9jj+nZ9//Wpy900MA+NX3+EsC5Cnsy8VypaU5t2nFSvy7bp5H/W6O3H2utfj2qqWqFoqY+3w206yzfx7t8VQ8WzDtwIA+Khgfplw97eW1olnsH1Nd7T5NILMjCQu2a/f7l6ual+S6H9K2tj0d3IIEAFCJ2u1WN6pRS11YZ15VOLcqpdtUSXAf8SMniIerWpoJ6tM2YA6pFg0hTE4nIymq1qG71CPXtVk0922UM+1azcvECm0iUpPJR4erdoZJ6ts34Po3rlCKRCMA0RcKD1L5ZWXX7dy7Eto3LkEj0IxaLRVXKF9WVnapk1oP1a0SYmkgEkHMWi0WVyhVRn46VM9t+0TVLmppIRFYhwT
a1aBCZWQ92aFZW1SoW4/ku4EbBvQsH8iCieIh+/ai37rupvsfKDA+16b2n2+n9ke14OBwAioYHac4Hl+uR26JzNJ9WToQG2/T2Y631yfMdC/TDTwAAAAAAAABA4cNTbRQ64WF2vT/yMv3+yRWqXrFovsrq3LKcNnx/ve6/uQFvrASQ0BC7xjzRVksmXaXa+ZwfpF2TMlo3ra8eG9SIZDMAAAAAAAAAoMChTzUKra6tKyh25g364bc4ffhtrJauPZyjz9ltFl3XPWMC3s4ty5FEDGDtm5VVzIx++nHhHn34bawW/XUwR5+zWi26pksV3Tugvrq3qUASEQAAAAAAAABQYJFMRKEWEmzTzX1q6uY+NRW7K15/rj2sNZuP6e/Y41odc1SGpFpViqtSmSJq0SBSzeuXVpdW5VU+ijkkCougIKtuvLy6bry8urbFndLSzGPkmFZtzDhGalYuroplwjOPkc4tyqtSuSJmhw4AKKDsFqlEsHQqzexIMuKw804MAAA4B20VoGCz26yKjAjRsfhUs0NRZESI7DYGTzxfoF1n+T7e48t6kGQi8K/6NSJUv0aE7upXV5IU0X6KJGnbTzeaGRb8SJ1qJVSnWgndcV0dSf8dI9tnc4wAADzHbpXmXS45DLMjybgpYbpfAABwLtoqQMEWFGTVgQUD5XC6zA5FdptVQUGcxOcLtOss38d7fFkPkkwEAAAA/IzdSkMdAAD4L9oqQMEWFEQSz98F2nWW71PwccUAAAAAAAAAAAAA4BbJRAAAAAAAAAAAAABukUwEAAAAAAAAAAAA4BbJRAAAAAAAAAAAAABukUwEgEIoIlgKLmA1QLA1I24AAAAAAAAAgO/YzQ4AAOB7FcKl6d2k+DSzI8m5iOCMuAEAAAAAAAAAvkMyEQAKqQrhJOcAAAAAAAAAAJdWwAa5AwAAAAAAAAAAAOArJBMBAAAAAAAAAAAAuEUyEQAAAAAAAAAAAIBbJBMBAAAAAAAAAAAAuEUyEQAAAAAAAAAAAIBbJBMBAAAAAAAAAAAAuEUyEQAAAAAAAAAAAIBbdrMDAAAAgcdwOCSny+wwJJtVFjvNHXfS011y+ME+stusCgri/TYAgSUQ60GHS3IYHikqX+wWye6BaiPQvk+goZ0CFGyBWA8GGq6zAHKLqykAAPAow+GQY9BwKSHB7FCkYsVkn/wRN5DnSU93qUL3r3QsPtXsUBQZEaIDCwZyAwkgYARiPehwSb1+lU6leSiufCgRLM27PH8JuED7PoGGdgpQsAViPRhouM4CyAvOUgAA4FlOl3/cOEoZcfjB25b+xuF0+cWNoyQdi0/1izdiAcBjArAedBj+kXiTMuLIb4/CQPs+gYZ2ClDABWA9GGi4zgLIC5KJAAAAAAAAAAAAANwimQgAAAAAAAAAAADALZKJAAAAAAAAAAAAANwimQgAAAAAAAAAAADALZKJAAAAAAAAAAAAANwimQgAAAAAAAAAAADALZKJAAAAAAAAAAAAANwimQgAAAAAAAAAAADALZKJAAAAAAAAAAAAANyymx0AAABAIPtr01H976sYSdKEaVsUUSxYdatHmBsUMiUmpeububv00+K9kqTZi/eqcZ1SCgulmewvYnfFa+qcHVq/9YQkacvueLVpXMbkqPLu2MkUTZm9QwtXHZAkLV59UC0aRMpu5z1PAL7ncLg0a9FeTZ2zQ5L07dxdql21hEqVCDE5Mpy171Civpi9XcvXHZEkrdp0VK0bRclisZgcGYCcOHQsSZNnbdefaw9LkpatP6xWDaNktXIO+wPDMLTor4OaMG2LJOnzH7epfFS4qpQvanJkgP+xGIZhmB0Ecs84fETG6QSzw8gxS/FispQtWA99ItpPkSTF/3mbyZHkTdz+BB2LTzE7jByLjAhVtYrFzA4jVwr6MYLA5XA49Oyzz2rChAkqUqSIHnnkEX300Ufatm2bT7ZvpKbJcaP/nBf2aVNkCQn2+Xb3Hjyjax6cr9hd8XI4XX
K5JLvNIotF6tqqgr57u5tKFPN9XJKUnOJQeOvJpmzbnaRVg0xJ3k2YtkWPvLlCFqtFSckOSVJoiE02q0XvPd1OQ/rW8XlM+M+JU6m68bEFWvL3YUmG0h0Zty1Bdqsa1ymlme/2UMWyRcwNMhdcLkNPjftL476Mkd1uUXKKU5IUEmxTsXC7vnmzm7q3rWBylIGBejArT9SDKU6pwxwPBeQBS6+UQm15/3ygfZ+8+nXZPg18cpGSUx1KyrwmWeVySU8MbqSXH2hhSsKKdkqGtHSnhr2wVFN/3im7zaqU1H/3UZBV5SLD9OO7PdW0XmmfxwX/Rz2YlVn3gw6HSw+8tlyfztgq27nncLBVpUuE6oex3U17QY7rbIZN20/o2gd/04FjSUpNc8owMu41DMPQDT2r6/OXOio0hJdMgbM4Gwog4/AROe59VEpPNzuUnAsKkv3DMQUuoVhQxe1PUP1rv1dKmtPsUHIsNNim2Jn9ClxCEfBHTz75pGJjY7Vr1y6dOXNGbdu2VevWrc0OK8fikhJ117pV+u2yrpnLav82W9t7XGViVLlz+Hiy2gycpWPxKXI4/3tv6+z/L159UJ2HzNHyL68ucD3galQqpunvdFe7235SappTw26oq8Z1Sun+V5ebHVqufPRdrB5+c4VS01xZlp+9yb/npT8liYSiSRKT0tVx0Gzt+Oe00h1Z91G6w6X1W4+rzS2ztG7adYosGWpSlLnz4OvL9dmMbUp3uJTu+G95appTqWlOXXnfPP06obc6tSxvXpABgnrQvxyb/6mOL5qS+XPa0b0KrVRftZ/zo2xeLgTK91m46oCufmC+0tKzXmPP1otjp2xSUopDY0e0NSO8fAmEtorLZajfowv024oDcjgMORz/3dunpru091CiOgyarVVfXaMGNUuaGCn8EfWg+QzD0G0jF2vmwj1KdxhKP/ccTnPpwNEkdb3zZ/35xVVqVj/SxEhzLxCusZK0fc8pXXb7bCUmp8t1TlV49t7jx4V7dPyBFP0yvpdsNkYQASTmTCyQjNMJBSuRKEnp6QWqJ2VBdyw+pUAlEiUpJc1ZoHpSAv7qwIED+uSTTzR58mRFRESoUqVKat++vaKjo7V48WK1bdtWHTp00COPPGJ2qAHtpQlrdeJ0apZE4rlS013atveUJs/a7uPI8m/XvgRN/XmHnhveVBXKhOv+mxvoqXGrzQ4rVxIS0/TImysvSCSeKzXdpftfXZ7ZYxG+NfH7rdq9P+GCh9xnOZyGjp1M0Rufb/BxZHkTuyteE6dvVXLqxdtnqeku3fXCUjFwTP5QD/qfyJ53qu4ri1T3lUWq+dQPsoaEq9Lgt8wOK88C4fsYhqE7Ry+56DVWkpJTnXr/m83asfe0DyPzjEBoqyxYeUALVhzIfMnpfIYhJaU49ODrK3wcGfwd9aB/WLHhiGb8vueSbb+UNKfueXmZD6PyjEC4xkrSo2+tvCCReK6UVKeWrjusn5fs821ggB8jmQgAQABZsGCBWrZsqaioqMxlx48fV3R0tGrVqqXFixdr6dKlOnLkiDZu3GhipIErKdmhz3/cdskHdJKUnOLUW5MK5j4YOyVGXVqW1/R3uuvJsX/pTFLBesnpy9k7Zc1BK9hikb6Zu8v7ASELwzA05ouNl3z4ImUk3yZ8t0WpBeAFqv9NjcnRensPntGKDUe8HE1gox70X4ZhKG7c7SrX7ymFVWlgdjj5VpC/z5I1h3TwWHK261ktFr3/dc6uX/6moLdV3pm8MdsXhA0jY7SLPQd4cRv/oR70D2O+2JSlN6I7hiH9HXtcsbvifROUBxX0a+zBo0mau2zfRROJZyWnOPX2ZM4T4KyCNa4WAAC4pOPHj2e5cTxy5IiWL1+usWPHqmLFipnL7Xa7bLacT85z5swZxcTk7GGSJT1dzXIeslvrT8Wrx7KFmT8fSs17z+W//lolIygonxHl3I59KXI4s7kr+deufQlasnS5goN8+35XyiV65OWEy2Xotx
UHNKRvbc1btj/f8az66y+FBvvubzDzt38y54a6lMRkh2bO36joCid9EBXOSkx2at/hpBytm5Lm0Ky5f6pK2RAvR5U/v/65+4LhWt0yDE2fs0bWFO/OgRUdHa2iRYt6dRtmoR68kCfqwTTDIil/Q+Qd/v4NBZWqoNJdbs1XOVLGdwq25L0Xb6B9n9ya/usxubJ7giopLd2luUt2aWWXgtVOkTzbVvF1O0WSlq8/pJx0VA8OsujbWSvUuVlx7wcVQKgHqQe97Y/V2SeqJMluk779aaWuaBvh9ZjOVdjvB5dvSlCwzSKHI/sL7epNR7Ry5UofRAX4Tl7rQZKJAAAEkDp16ujNN9/UwYMHZbFYNGTIEKWlpalu3bqZ66xdu1bHjh1TgwY5f4s+JiZGbdvmbM6cUKtNp6/sl+vYz9WkRMQFc2TkVefOXZTi8mHPpdBKUs2RkjU4R6t36tRJMnw8lKYlSGo4Ps8fr1WluK7sVFnfztutR29vqLfz2cOyS+fOkuHDt1krDZVKtsnRqrNm/aRZ4yd7OSBkYQ2Tot/L0arpaWnqf2N/Ke2wl4PKp9ovSKEVs10tNTVFY8aM0ZhnFng1nBUrVqhNm5ydAwUN9eCFPFEPWoJD1Xxa9j3ZLiZh02KdXP696r62JF9xnNW5c2cZaXl/sBxo3yfXIi+XyvbNUVtl69Ztats2/wnTXMlnO0XybFvF5+0USao/TrJn/5AvKTFRTz75pHR6rfdjCiDUg9SDXlfvLSko+/lMk5OS9MLzz+uFeB8PWVzY7weLNpSq3C3ZwrJdNSkpKcfHPlBQ5LUeJJkIAEAA6d27t3r27Kk6deqoRo0a6t+/v/bs2aPg4IyHRUeOHNGDDz6o6dOn56rc6OhorViRsxscS3q69GbOEgG+sHjxIp++iZqc6lKvh2OVloO3HMuWCtLM5Ut9EFVWKWkudblvc54/P+G59nrgteX6O/a4Vnx5tabP3624/WfyXN6ixYt9+ibqdwuO68MfDikl7dL7KDTYooceHKDrOt/jo8ggZQwdeNXjW3X8dPZJ9tDQYM1bMEMhPu7dm1uvTt6nOcvj5czmOVZwcKjeHTtSzeq84tV4oqOjvVq+magHL+SJejDNsOiRPI7Amx5/WHs+vFu1n50ja3BovuI4a/HixfnumRhI3ye3/oo9o8ff26PU9Etv026TrrmiqUbc4tuH3Pltp0iebav4up0iSfe/s1trtiQqu6PCFhSmb78cp0pl/LuHvr+hHqQe9LYRH+zRkvUJ2fYwtgeHatInr6tWJc/UJzlV2O8Hj5xI13VPb1VOBhRqWLukPvmM+WkRWPJaD5JMBAAggFitVk2ePFmTJ2f0pPrwww8zGwmpqakaOHCgxo0bp7Jly+aq3KJFi+b4rSUjNU0+7md3Sa1atZYlJGe9BD1lcF+nPv9xm9IvkVAMC7HpmWEt1KaN7x9mJKc4JOXt5nFov7qK3RWv5esznsI++vZKfTSqvXrfMy/P8bRu1Uphob5rltatn6r3v/9Kyu4RncWqUff3UNFw3z58gDTijnA9P/7vS86bGBxk1d031lOnDu18GFnevFSytuaumCmnLv3EomLZIrr71q6yWCw+iizwUA9eyBP1YIpT0py8ffbgNy/KmXRKce/fmbksuFRFVX9sap7jadWqtUJzPjrfBQLt++R+e4bemPqt9h1OvOR6FotVrzzSVfWqR/gmsH/lp50ieb6t4ut2iiS98EAF3fDogkvWgxaL1L5ZOfW7upMPI4O/ox68kBn3gy/ZqqjXPfOUks0c4I1ql9Yt/Tr7KKr/FPb7QUnqMfOMfl2+/5IJ3/BQm164/zK1aVPNZ3EB/oxkIgAAAWzr1q2ZN49ffPGFNm7cqMcee0yS9Nprr6ldO/97CF8tvEiWIW0kaXuPq0yKJm+eHdZM0+fHKT4h1e1cGcFBVlUtX1RD+tbxfXD59PH3W7P8vHDVQS1cddCkaPImoniIXn2wpUa9t0Ypae5v8E
ODbXr70dYkEk0yvH89Tfx+i/YeSlR6+oUnkc1mUUSxYD11ZxMTosu9xnVKaeCVNfXt3F0XfTAcEmTVhOc6kEj0MOpB81UZ/oGqDP/A7DA8JhC+j9Vq0cTn2qvvw78pzc01Vsp46enWq2r5PJHoCYHQVundvpIua1pWf649fMm2yrtPMvQeLo160BwdW5TT5W0r6tcV+y+aUAwNtunDZy7zcWT5FwjXWEl65/E2aj1wlpJTHW4TiqHBNjWrH6lrulTxfXCAn/Lv8YAAAEC+nHvzOHToUB0+fFiLFi3SokWL/PLGMVBUKldEK6deoxqViissxKazuQG7zaLgIKtaNojUkslXkagy0aO3N9RL97dQcJBV4ee8BRsealNIsFVvPtpK992c83lk4FnFiwbrzy+uVtO6pRQSZJXNlnESWS0ZN/a1KhfXii+vUbnIcJMjzbmPR3fQ4Gtry263KDTkvy5I4aF2FSsSpOljuqtnu+znVUTuUA8C7l3RsbK+e7ubiobbVSTsv3owNMQmu92iu/rV1fhRBe8hd6CwWi366b2e6tOxkuz2jPbjWeGhNpUpFarfP+mjpvVKmxglCgLqQXNYLBZNG9NN/XpUk91mUcg5Q3iGh9lVqkSIfhnfS22blDExysItulZJ/THpSpUrHabwc4YHCAqyKshuUfe2FTRvfC/Z7aRPgLPomQgAQACbO3eu2SEUWrWqFNe2n27Q4tWH9NXPO3U8PkUVyxbR4Gtqq3mDSLPDK/QsFoseH9xId11fR5NmbdeK9UdksVjUvllZ3XZVLZUo5tuhkHChsqXDtOqra7U65qgmz9quA0eSFFkyVLdeWUsdmpctcD347HarPhzVXs8MbapPZ2zVxu0nFRJs0xUdKumGntUVEuzDMQ4LEepB4OKu7VpVRxffomm/7ta8ZfuVmuZU4zqldOd1dVShTBGzwyv0wkLt+n5sD+3Ye1qf/LBVO/aeVtFwu67rXk1XdqzMA27kCPWgeYKDbPrytS56+f4W+uSHrdqy+5SKhNl1decqurZrVQX5+ZzfhUGLBpH6Z/5N+mXpPn3/W5wSEtNVo1Ix3XldHdUtgD3zAW8jmQgAAOAlFotFXVqVV5dW5c0OBRcRUTxED9/aULrV7EhwMS2jo9QyOsrsMDymYtkiem54c7PDAABJUmiIXbddXVu3XV3b7FBwEbWqFNfrD7cyOwwAeVStYjG9/EBLs8PARdhsVl3VuYqu6sxwpkB2eAUCAAAAAAAAAAAAgFskEwEAAAAAAAAAAAC4RTIRAAAAAAAAAAAAgFskE4HzGIahpX8fUmqaUympTr3/9WZt33PK7LDgRwzD0Ir1RzKPkfe+ilHsrnizwwIAAAAAAAAAwOPsZgcA+AvDMPTpD9s0ZsqmLImhB19bLkNSj7YV9PSdTdStTQXTYoS5DMPQlJ926O3JG7Vx+8nM5Q+9vkKGpM4ty+mpO5qod4dK5gUJAAAAAAAAAIAH0TPxHImJiXrooYdUpkwZFStWTIMHD9akSZMUFBSklJQUs8ODFzmdLg165g8NfWGptuyOz/I749//Llx1UD2G/aLx38b6PD6Yz+UydM/LyzRo1B+K2XEyy+/OHiNL/z6sK+6dpzFfbPR9gAAAAAAAAAAAeAHJxH85HA716dNHP//8s8aOHavp06dr9+7dGjlypOrWravQ0FCzQ/SKHssWamLcjizLdiQmKPin70yKyByPv7NKX87O+DsYhvt1nC5DhiHd+8oyTft1tw+jM1dYqE075tyoQdfUzlwWEmxT7Mx+GnZDXRMj861n31+jCdO2SJJclzhGJOmxt1fpi1nbfRUaAAAAAAAAAABewzCn/3r33Xe1bt06bd26VeXKlZMk1atXT9WqVVO3bt1Mjg7eFLc/Qe9OjdFF8kMXsFikx95eqeu7V5XNFvj5+OQUp4a+sFTT3u6mecv26dCxZL1wb3PtP5KkidO3mh2eTxw8mqQ3PtuQq888/s4q3XxFTQUFBf
4xAgAAAAAAAAAIXDzlVsY8aGPGjNHQoUMzE4mSVLVqVdntdjVp0kTHjx/XFVdcobp166pRo0a64447lJqaamLU8JQJ07fIarHkeH3DkP45lKh5y/Z7MSr/snDVQX3/W5w+era9WkZHaviN9TT0+aVmh+Uzn/ywVcbFuqxexNGTKZq5cI+XIgIAAAAAAAAAwDdIJkqKjY3VgQMH1Ldv3yzLDx48KIfDoaZNm8pisejpp5/W1q1btX79eiUnJ+v99983J2B41OSZ2zOHp8wpm9WSOSxqYfH4O6vUvH5p/fJhLz334d/avT/B7JB8ZvLM7Rcd2vRibFaLphSyYwQAAAAAAAAAEHgY5lTS/v0ZPczKlCmTZfn8+fMlSU2bNlWpUqXUqVMnSZLValXLli21d+/ePG+zatWqOnXqVJ4+26Rocf3WvF2et32+J2LW65nYjZk/u3I84GfudO3aVevPnPZK2flxqvKbksWWq884XYa+m/Grfp5wnZeiyh9HcCWp3CMeLTMhMV0btp3U5e0q6utfdnq07LO6dO0qe9o+r5SdH6cqvSpZQ3L1GafL0Ox5fyoi4kYvRYXcKlGihPbsobeoT9isUrFiUoIfvHRQrFhGPMjCbrMqMiJEx+LNH2UhMiJEdvYRgEASgPWg3SKVCJZOpXkgpnwqEZwRT34E2vcJNLRTgAIuAOvBQMN1FkBekEyUVLp0aUnSzp07VadOHUlSYmKiXn75ZZUvX15RUVFZ1k9JSdGkSZP01ltv+TxWb3gruomGVauV+fOOxAQ1+P0XEyPytTwmTw2XZ8Pwc/17VVej2iU14/c4jRvRVrc8tcjskHwob/vaIqeH4wAKBovdLvvkjySnH1wnbVZZ7DR3zhcUZNWBBQPl8IN9ZLdZmV8WQEAJxHrQbpXmXS45vPPeae5isWTEk68yAuz7BBraKUDBFoj1YKDhOgsgL7iaSmrYsKGqVq2qxx57TA6HQw6HQ2+88YYSEhLUrFmzLOu6XC4NGjRIXbt2Ve/evfO8zfz0jnFt3ynnYyPz/HmzLFy4UNbaNc0O4wL1rpmubXtOKTdT4tltFt15+zX66Nk3vBdYPqyOOapWN8/yWHmlI0L03tPtNGjUH1q54YhiZ96gKztV1pw//vHYNiRp0cKFahkdlf2KPtZ8wI9at+V4ro+Rm/t11xevvuS9wAA/ZrHbaWX4uaAgbtoAwFsCsR60WwPrKwXa9wk0tFOAgi0Q68FAw3UWQG5xxZAUHBys6dOnKywsTAMGDNCLL76oUaNGKSIiQk2bNs2y7n333Ser1apx48aZEis8b/iN9XL9GYfT0NB+db0QjX96/+nLNPfPfZq7dJ9Onk7TA68t1/hRl6lYkSCzQ/OJ4TfWy1UiUTp7jOT+2AIAAAAAAAAAwJ+QTPxXy5YttWbNGiUlJWnt2rXq1q2btm3bpiZNmmSuM2LECP3zzz/64osvZLXypwsUg66treBcvIljs1rUvH5ptWgQ6cWo/Mc1XaqoS6tyeviNFZnLpv26W6tjjunNR1qZGJnvDOxTU0XCcv5KndVqUb3qJdSheVkvRgUAAAAAAAAAgPeREbuIDRs2yOVyZfZMjImJ0VtvvaWdO3eqVatWatq0qZ544glzg/SA3y7rmmW+REmqVaSY0q7ub1JEvleyeIjGj2qfo3WtVouCg6365PkOXo7Kf8xatFflu32tk6fTsiy//pEFuuflZSZF5VtFw4NyvM+tVinIZtGklzrJYrF4OTIAAAAAAAAAALyL0asvYt26dQoPD1ft2rUlSdHR0TJyO84hCowhfesoNc2p+15ZJqvVIofzwn1ttVpULNyu2e9frmb1C0evRPznpitqKjXNpTtHL5EskvMix0hYiE2z/tdTbRqXMSFKAAAAAAAAAAA8i56JFzF8+HAlJiYynGkhMrx/fa2bdp3uur6uQkNsWX5XplSoRg9vptiZN6hD83ImRQizDbq2tjb+cL3u6V9f4aFZ38UoHRGiZ4
Y20ZaZN6hbmwomRQgAAAAAAAAAgGfRMxE4R6M6pTT+2fZ645FW2rL7lM4kpSuiWLAa1S6loFzMq4jAVb9GhN57up1ee6ilNu+M15mkdJUoFqxGtUsqOMiWfQEAAAAAAAAAABQgJBMBN4oXDVbrRlFmhwE/VjQ8iGMEAAAAAAAAABDw6GoFAAAAAAAAAAAAwC2SiQAAAAAAAAAAAADcIpkIAAAAAAAAAAAAwC2SiQAAAAAAAAAAAADcIpkIAAAAAAAAAAAAwC2SiQAAAAAAAAAAAADcIpkIAAAAAAAAAAAAwC2SiQAAAAAAAAAAAADcIpkIAAAAAAAAAAAAwC2SiQAAAAAAAAAAAADcIpkIAAAAAAAAAAAAwC2SiQAAAAAAAAAAAADcIpkIAAAAAAAAAAAAwC2SiQWQpXgxKSjI7DByJygoI274RGREqEKDbWaHkSuhwTZFRoSaHQYAAAAAAAAAADiHxTAMw+wgkHvG4SMyTieYHUaOWYoXk6VsGbPDKFTi9ifoWHyK2WHkWGREqKpVJOEMAAAAAAAAAIA/IZkIAAAAAAAAAAAAwC2GOQUAAAAAAAAAAADgFslEAAAAAAAAAAAAAG6RTAQAAAAAAAAAAADgFslEAAAAAAAAAAAAAG6RTAQAAAAAAAAAAADgFslEAAAAAAAAAAAAAG6RTAQAAAAAAAAAAADgFslEAAAAAAAAAAAAAG6RTAQAAAAAAAAAAADgFslEAAAAAAAAAAAAAG6RTAQAAAAAAAAAAADgFslEAAAAAAAAAAAAAG6RTAQAAAAAAAAAAADgFslEAAAAAAAAAAAAAG6RTAQAAAAAAAAAAADgFslEAAAAAAAAAAAAAG6RTAQAAAAAAAAAAADgFslEAAAAAAAAAAAAAG6RTAQAAAAAAAAAAADgFslEAAAAAAAAAAAAAG6RTAQAAAAAAAAAAADgFslEAAAAAAAAAAAAAG6RTAQAAAAAAAAAAADgFslEAAAAAAAAAAAAAG6RTAQAAAAAAAAAAADgFslEAAAAAAAAAAAAAG79H0/2h7GnCmFkAAAAAElFTkSuQmCC", - "text/plain": [ - "
" - ] - }, - "metadata": {}, - "output_type": "display_data" - } - ], - "source": [ - "for c, note in cs: compile_and_plot(U, c)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "acee9cc1-5b36-45d3-9844-2dbb885d6bfe", - "metadata": {}, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "ef5f0dc2-ad3a-467a-89dd-36d296d458ac", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "genQC Version 0.1.0\n" - ] - } - ], - "source": [ - "import genQC\n", - "print(\"genQC Version\", genQC.__version__)" - ] - } - ], - "metadata": { - "kernelspec": { - "display_name": "python3", - "language": "python", - "name": "python3" - }, - "widgets": { - "application/vnd.jupyter.widget-state+json": { - "state": {}, - "version_major": 2, - "version_minor": 0 - } - } - }, - "nbformat": 4, - "nbformat_minor": 5 -} diff --git a/src/examples/Discrete-continuous circuits with multimodal diffusion/0_compile_testset.ipynb b/src/examples/Discrete-continuous circuits with multimodal diffusion/0_compile_testset.ipynb new file mode 100644 index 0000000..39ee495 --- /dev/null +++ b/src/examples/Discrete-continuous circuits with multimodal diffusion/0_compile_testset.ipynb @@ -0,0 +1,1096 @@ +{ + "cells": [ + { + "cell_type": "raw", + "id": "c4e9a976-c6b5-4ce2-8e92-1dd7cee7c736", + "metadata": {}, + "source": [ + "---\n", + "categories:\n", + " - Unitary compilation\n", + " - Parameterized gates\n", + " - Quantum circuits\n", + " - Pretrained model\n", + "---" + ] + }, + { + "cell_type": "markdown", + "id": "6c172f48-2011-45c9-ba09-b49edc98b7ec", + "metadata": {}, + "source": [ + "# Compile unitaries with parametrized circuits\n", + "\n", + "> A short tutorial showing unitary compilation with parametrized circuits." 
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "24ecd66b-9552-4e9b-aa68-ce292444d85e", + "metadata": {}, + "outputs": [], + "source": [ + "from genQC.imports import *\n", + "import genQC.utils.misc_utils as util\n", + "\n", + "from genQC.dataset.config_dataset import ConfigDataset\n", + "from genQC.pipeline.multimodal_diffusion_pipeline import MultimodalDiffusionPipeline_ParametrizedCompilation\n", + "from genQC.scheduler.scheduler_dpm import DPMScheduler\n", + "\n", + "from genQC.platform.tokenizer.circuits_tokenizer import CircuitTokenizer\n", + "from genQC.platform.simulation import Simulator, CircuitBackendType\n", + "from genQC.inference.sampling import decode_tensors_to_backend, generate_compilation_tensors\n", + "from genQC.inference.evaluation_helper import get_unitaries\n", + "from genQC.inference.eval_metrics import UnitaryInfidelityNorm\n", + "from genQC.dataset.balancing import get_tensor_gate_length" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "5ae966b5-0f67-4ffe-9d2b-ed0f23c7c383", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "[INFO]: Cuda device has a capability of 8.6 (>= 8), allowing tf32 matmul.\n" + ] + }, + { + "data": { + "text/plain": [ + "device(type='cuda')" + ] + }, + "execution_count": null, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "util.MemoryCleaner.purge_mem() # clean existing memory alloc\n", + "device = util.infer_torch_device() # use cuda if we can\n", + "device" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "f903ecdc-f07c-4e0a-8fc9-1d1bea2bfec5", + "metadata": {}, + "outputs": [], + "source": [ + "# We set a seed to pytorch, numpy and python. 
\n", + "# Note: This will also set deterministic algorithms, possibly at the cost of reduced performance!\n", + "util.set_seed(0)" + ] + }, + { + "cell_type": "markdown", + "id": "73b1255b-b440-4bb9-89fc-c864fbcafaa1", + "metadata": {}, + "source": [ + "## Load model" + ] + }, + { + "cell_type": "markdown", + "id": "39db6391-1b4f-41ec-b0ef-4e3350c5d790", + "metadata": {}, + "source": [ + "Load the pre-trained model directly from [Hugging Face: Floki00/cirdit_multimodal_compile_3to5qubit](https://huggingface.co/Floki00/cirdit_multimodal_compile_3to5qubit)." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "6ff4eb91-6929-4d54-b6e5-38f6d7b18fdf", + "metadata": {}, + "outputs": [], + "source": [ + "pipeline = MultimodalDiffusionPipeline_ParametrizedCompilation.from_pretrained(\"Floki00/cirdit_multimodal_compile_3to5qubit\", device)" + ] + }, + { + "cell_type": "markdown", + "id": "03202ede-a4a0-431c-877e-d308604c6901", + "metadata": {}, + "source": [ + "The model is trained with the gate set:" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "e4992c91-8f21-48d8-80d2-d156f883133e", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "['h', 'cx', 'ccx', 'swap', 'rx', 'ry', 'rz', 'cp']" + ] + }, + "execution_count": null, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "pipeline.gate_pool" + ] + }, + { + "cell_type": "markdown", + "id": "5f48f11f-f215-481c-9f36-60a34b36e97f", + "metadata": {}, + "source": [ + "which we need in order to define the `vocabulary`, allowing us to decode tokenized circuits." 
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "c97ddef1-173d-412c-a0b3-3a4e90edd299", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "{'h': 1, 'cx': 2, 'ccx': 3, 'swap': 4, 'rx': 5, 'ry': 6, 'rz': 7, 'cp': 8}" + ] + }, + "execution_count": null, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "vocabulary = {g:i+1 for i, g in enumerate(pipeline.gate_pool)} \n", + "tokenizer = CircuitTokenizer(vocabulary)\n", + "tokenizer.vocabulary" + ] + }, + { + "cell_type": "markdown", + "id": "b4aab0b8-4f8a-49d4-989b-bc42cd9876e9", + "metadata": {}, + "source": [ + "### Set inference parameters" + ] + }, + { + "cell_type": "markdown", + "id": "6ab1e4f6-9ae7-4711-825a-92ba96b28d74", + "metadata": {}, + "source": [ + "Set diffusion model inference parameters." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "f7592401-ab32-4beb-8727-42c02d9584b0", + "metadata": {}, + "outputs": [], + "source": [ + "pipeline.scheduler = DPMScheduler.from_scheduler(pipeline.scheduler)\n", + "pipeline.scheduler_w = DPMScheduler.from_scheduler(pipeline.scheduler_w)\n", + "\n", + "timesteps = 40\n", + "pipeline.scheduler.set_timesteps(timesteps) \n", + "pipeline.scheduler_w.set_timesteps(timesteps) \n", + "\n", + "pipeline.lambda_h = 1.0\n", + "pipeline.lambda_w = 0.35\n", + "pipeline.g_h = 0.3\n", + "pipeline.g_w = 0.1" + ] + }, + { + "cell_type": "markdown", + "id": "953d070b-492c-44bd-b2fa-7f4b5b9630fa", + "metadata": {}, + "source": [ + "We assume in this tutorial circuits of 4 qubits." 
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "51a147e1-543c-49d2-a3b6-4f5c2b8f41d2", + "metadata": {}, + "outputs": [], + "source": [ + "num_of_samples_per_U = 32 # How many circuits we sample per unitary\n", + "num_of_qubits = 4\n", + "\n", + "prompt = \"Compile 4 qubits using: ['h', 'cx', 'ccx', 'swap', 'rx', 'ry', 'rz', 'cp']\"\n", + "\n", + "# These parameters are specific to our pre-trained model.\n", + "system_size = 5\n", + "max_gates = 32" + ] + }, + { + "cell_type": "markdown", + "id": "7b2fa84f-94c1-4032-9bdc-b10284c4a68a", + "metadata": {}, + "source": [ + "For evaluation, we also need a circuit simulator backend." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "7920f4e9-0376-448c-9159-2d3095c35aaa", + "metadata": {}, + "outputs": [], + "source": [ + "simulator = Simulator(CircuitBackendType.QISKIT)" + ] + }, + { + "cell_type": "markdown", + "id": "b0060705-6d0d-4d48-95c0-988f700bb3f8", + "metadata": {}, + "source": [ + "## Load test unitaries" + ] + }, + { + "cell_type": "markdown", + "id": "827a7eae-3585-45a0-a611-01bc8b4c1c5d", + "metadata": {}, + "source": [ + "We load a balanced testset directly from [Hugging Face: Floki00/unitary_compilation_testset_3to5qubit](https://huggingface.co/datasets/Floki00/unitary_compilation_testset_3to5qubit)." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "118357f2-6340-4667-983c-0b250a08ec4c", + "metadata": {}, + "outputs": [], + "source": [ + "testset = ConfigDataset.from_huggingface(\"Floki00/unitary_compilation_testset_3to5qubit\", device=\"cpu\")" + ] + }, + { + "cell_type": "markdown", + "id": "ae9737d4-ef9a-4757-b351-ddaa8625aa67", + "metadata": {}, + "source": [ + "We pick the 4 qubit circuits as test cases for this tutorial." 
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "d61708c5-3e8e-4782-8a02-37abfa2315ec", + "metadata": {}, + "outputs": [], + "source": [ + "target_xs = testset.xs_4qubits # tokenized circuit\n", + "target_ps = testset.ps_4qubits # circuit angle paramters\n", + "target_us = testset.us_4qubits.float() # corresponding unitaries, " + ] + }, + { + "cell_type": "markdown", + "id": "dea0df41-ca38-438f-a60a-c158427065ac", + "metadata": {}, + "source": [ + "For 4 qubits the unitary is a 16x16 matrix. Complex numbers are split into 2 channels (real, imag)." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "d9e7035c-0a07-4c9f-bbf4-aaa10784cb8c", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "torch.Size([3947, 2, 16, 16])" + ] + }, + "execution_count": null, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "target_us.shape # [batch, 2, 2^n, 2^n]" + ] + }, + { + "cell_type": "markdown", + "id": "01beefbe-5d31-4744-b5cc-0c3897781c8b", + "metadata": {}, + "source": [ + "A random circuit may look like this:" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "0da110b2-4894-4f6b-9a1c-cd50c69fb455", + "metadata": {}, + "outputs": [ + { + "data": { + "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAzQAAAEvCAYAAACT/IQGAAAAOnRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjEwLjMsIGh0dHBzOi8vbWF0cGxvdGxpYi5vcmcvZiW1igAAAAlwSFlzAAAPYQAAD2EBqD+naQAAV9VJREFUeJzt3Xd4FNX6B/DvlvRCSCMJIQ0CJHTpvUsTRJQmoF4rGkS9SLzYEO8VAbkXKYJgQwSRFpGmUkIJnRhqEkoakLKENEjZbLK78/sjP6IxCWTD7k5m9/t5Hp6HzJwz887uzJl5d2bOkQmCIICIiIiIiEiC5GIHQEREREREVF9MaIiIiIiISLKY0BARERERkWQxoSEiIiIiIsliQkNERERERJLFhIaIiIiIiCSLCQ0REREREUkWExoiIiIiIpIsJjRERERERCRZTGiIiIiIiEiymNAQEREREZFkMaEhIiIiIiLJYkJDRERERESSxYSGiIiIiIgkiwkNERERERFJFhMaIiIiIiKSLCY0REREREQkWUxoiIiIiIhIspjQEBERERGRZDGhISIiIiIiyWJCQ0REREREksWEhoiIiIiIJIsJDRERERERSRYTGiIiIiIikiwmNEREREREJFlMaIiIiIiISLKY0BARERERkWQxoSEiIiIiIsliQkNERERERJLFhIaIiIiIiCSLCQ0REREREUkWExoiIiIiIpIsJjRERERERCRZTGiIiIiIiEiymNAQEREREZFkMaEhIiIiIiLJYkJDRERERESSpRQ7AKqZIAjQqjVih1FnSgc7yGQyoy1PEIBSndEWZxb2CsCIH4HVk9oxABj3OLD27SfieUB67QDbAOOS2vcPiLcPMKFpoLRqDTY0nyp2GHU2JXk9bBztjba8Uh3Qd4/RFmcWMSMBBx5RRiO1YwAw7nFg7dtPxPOA9NoBtgHGJbXvHxBvH+AjZ0REREREJFlMaIiIiIiISLKY0BARERERkWQxoSEiIiIiIsliQkNERERERJLFhIaIiIiIiCSLCQ0REREREUkWR82wID4922B41Lwq08qL1bibkoXkrUeQ+M0eCDq9SNGZXuHFQ7j6/sAq0+T2TrDzawmPAdPg/djrkCm4y1syaz8GAH4GRNZ+LmAbQNa4D1juEW3FUqJikB4dB8hkcPByQ4vx/dFt3nNoFNoUJ2avFjs8k2vcbzIadR4JCALK81XIPbQO6d/+E6XpiQiMWCN2eGQG1n4MAPwMiKz9XMA2gKxpH2BCY4FyL6YiZVtM5d9X1v6OJ2KWouXTgxG3YCM0uXdFjM70HEMegceAP0fW9Rr5GuJfa42cfV/Db+onsGnkJWJ0ZA7WfgwA/AyIrP1cwDaArGkf4Ds0VkCr1uB23DXI5HK4BjYROxyzU9g7walVD0AQoFElix0OicDajwGAnwGRtZ8L2AaQJe8DTGishEtQxY6rKSgSORJx3Dt5KZ3dRY6ExGLtxwDAz4DI2s8FbAPIUvcBPnJmgZQOtrBzd6l8ZrLVM4/Co10Ibsddw92ULLHDMzm9pgTauzkQBAHafBVu//Yl1Cln4RjaDfZNW4odHpmBtR8DAD8DIms/F7ANIGvaB6wiocnJycGiRYsQFRWF9PR0eHl5Ydy4cZg/fz5mzpyJb7/9FsuXL8eMGTPEDtUoOkVOQqfISVWmpe0+iVNzvhYpIvPK2jgXWRvnVpnm1nMcAl75QqSIGgZBEFCi1qK0TIdGzrZQKi33Bq21HwMAPwOqmU6nx52ictjayOHkoIRMJhM7JJOx9nMB24Ca6fUC7hSVQSGXwcXJxqKPAWvaByw+oTl37hxGjBgBlUoFJycnhIeHIzMzE8uWLUNycjLy8vIAAB07dhQ3UCO68sNepO08AbmNEo1bB6BtxFg4+XpApymrLCO3VWL03s+Q+nMMLiyNqpze5/MI2Hu5Yf+UT8QI3Sg8h72Mxr3GQ9CVQ339IlRRC1GWkw6ZjX1lmcL4GCR9PKJaXUF
bBkGvQ+efdeYM2aRy8kvx7c9X8eWWy0jNKAQAKBQyjBkQgNcmhmFwdz+La9Drcgz0X/UWIJfh8Cv/q5xm6+aMsYeWIPbjdUiJiqlp0ZJh7e0A/UkQBBz5Q4WVmxIRdSANWq0AAGjm44RXnmqNF59shSYeDiJHaXzWfi5gG1DVucu5WLkpERt2J6OkVAsA8HCzw/NjW2L6hNYI8XcVOULjs6Z9wHJ/okXFnZnRo0dDpVJh1qxZyMrKQlxcHFQqFRYuXIjdu3fjzJkzkMlkaN++vdjhGs3dFBWyYi4iI/osLq38BQeeXQDPjs3Rc+ErlWX0ZVocnbkc7WaOQ+PwQABAwPCu8B/aBcf+uVKs0I3CzjcUrh2HoFHnEfAZF4kW7+1ESdIZ3Fg1vbKMS5u+6LSpqMq/NiuvQuniCb+n/y1i9MYVtT8NAcN+wjufn6lMZgBApxPw84HrGPrybxjy0q8ouKsRMUrjq8sxcGLOV/Du2grBY3tXTusx/0Vkn74s+WQGYDtAFQqLyzAqYi8GPL8Hm39PrUxmAOCmqhjvr/gDAY/+hB93W95L8tZ+LmAbUEGr1eOVj4+i04Tt+GrblcpkBgByCzT4bO1FtBi1BZ9+fR6CINxnSdJjTfuARSc0M2fORHp6OmbMmIHFixfDxcWlcl5kZCQ6dOgArVaLoKAguLpaXmZ+z+3YK0jeegTBY3vDq0uryum5F1IQv2oH+i57HY6+7uj52XScevdrqG/lixit8TmH9YL7gGnIP7oJRYnHayyjL9cgZcE4OIf3ge/4d80coWlE7U/DU7MOQF16/18Yo09n4dHpv6G4pNxMkZlfTcdAWUERjs9ahe6fvAiHJo0ROKoHfHq1wYl3LKtv/nusvR2wRqUaLUZF7MWvR9PvW66sXI8pcw5h/a4kM0UmDms9F9xjjW2AIAh47oMjWLP1ygPKAe8ui8W/V58zT2AiseR9wGITmsTERGzatAmenp749NNPayzTuXNnAECHDh2qTE9NTcWYMWPg4uKCxo0b45lnnkFubq7JYzal80u2Qq/VodPsiVWnf74Nep0OY/Z9BtWxS0j95ZhIEZqW78QPALkCmT9+WOP8GyunQ19eiqA31po3MBPJu6PBtHcPo64/Np25lIO5K+NMG5TIajoGMg6eQ9rO4+i3YiZ6LHgJx2etgibfsnp++StrbweszadfX0BM3K06l39hbgyybpeYMCLxWdu54O+srQ3YsDsZGwy4+zh3ZRxOXcg2YUTis9R9wGITmo0bN0Kv12PKlClwdnausYyDQ8Uzw39NaAoLCzFw4ECkp6dj48aNWLNmDWJiYvDYY49Br9ebJXZTKExTIfWXY/Dr1x7e3cMqpwtaHW6fuQJ7j0ZI2nRQxAhNy963Bdz7TkLhhQMojK/6OFH2zmW4E7sLzedsh9zOUaQIjWvtL1er3Favi2+3X0WJ2rA6UlLbMRA7bx1cgn2QEX0W6QcsO6mz9nbAmpSV67Bm22UD6+jxddT9f8mWOms7F/ydtbUBKzYmGFxn5aZEE0TScFjqPmCxCU10dDQAYODAgbWWSU+vuA3/14RmzZo1yMjIwPbt2/HYY49h/Pjx+PHHH3Hy5Ens2LHDtEGb2IWlFdn3X7Ny7+5haDFxIBK/2YNuH/8DCntbESM0LZ/x7wFyeZVf5govHET6uncQErkFdk2CxAvOyFZvMfyiJP9uGbbsTTVBNA1HTceAVq1B0fVs5CfeEDEy87H2dsBa7Dx0A6octcH1Vm+5DL3est4j+DtrOhfUxFragHOXc3Hq4m2D6236PRX5FvZe6d9Z4j4gEyztDaj/16xZM6Snp+Ps2bM19mCm1Wrh6+uLnJwcJCcnIyQkBMCfCdDBg1Wz0+bNm2PAgAH45ptvDI6lS5cuUKlUBtWxEeSYq+9m8LoMoXS0x5gDi5Gwehcuf/87Rvz8MXLOJ+PM3LUGL2ue/DTKZca7gyWzdUCTz68ZbXk10dxKw+W3u8J30lx4j3r
4LrtvvRkKoczwCwhjEyBHpvvcBxesgbP6KBqp9xk5ovoxxzFwz/Bt83Bz3x+I//LhfrQw5nFgru1vyO0A1d9d+/4odBxUr7q++fMhF8S/oDPHeQAw7rnA2OcBa78WeBgltu2Q7/xUvep63fkStjrxx2mxtvOAj48PYmNj61XXYrttLi4uBgCo1TU3LJs2bUJOTg5cXFwQHBxcOT0hIQHjx4+vVr5NmzZISDD81iUAqFQqZGRkGFTHVqYAmtRrdXXW9aNnUHQjG5fX/gYAOPrGCozZvxg3fj2FWycNu+WamZWJMsF43VvK7RxNuvl6TQmSPx2LRt3GGCWZAYDMzEzoNQ3g+XOZLVDPQbCLitUoyjJsXzUVcxwDxmbM48Bc29+Q2wF6CE3UQD2fmsrKug3oCh9c0MRMfR4AjH8uMPZ5wNqvBR5K4yCg5jcOHuh2Tj5QIv65kOeBurPYhMbHxwf5+fmIi4tDz549q8zLysrC7NmzAQDt27evMgZHfn4+3Nzcqi3P3d0dV67U79liHx8fg+vYCHLAhD9yNB3UCcFjeuOXwbMqpxVev4U/PtmA3ksisGPQLGjVdf+Fzs/Xz+h3aEwp//g2qFPPozTjKvKPbqo2v82KBNh6BRi0TD8/vwZyhwbIFMoBmY3BdV0c5XBt2tT4QdWDqY8BUzDmcWCO7W/o7QDVX5GdAnfqU1HQw8+nMWQQv+dPU58HAOOfC4x9HrD2a4GHobaxRZ6hlQQBkMng7eEEm8binwut7TxQn+vleyz2kbOZM2di+fLlaNasGfbv34+WLVsCAM6cOYNp06YhJSUF5eXliIiIwIoVKyrr2draIjIyEv/5z3+qLO+5557DiRMn6p3UGKq8pBQbmk81y7qMYUryetg42j+4YB2ptUDfPUZbnFnEjAQcGshPBNPePYT1uwwfVyJh+5MIC3EzfkD1ILVjADDucWDt208PJy2jECEjN9e5p8N7nhgciKglQ0wTlIF4HpBeO9CQ2oAStRZNh2xEQWHZgwv/RbvQxji/9YkGMeC01L5/QLx9wGI7BYiMjISHhwdu3ryJNm3aoF27dggNDUW3bt0QEhKCQYMqni3+e5fNjRs3RkFBQbXl5eXlwd29ns/xEJlZxMRwg+sM7OrbYJIZIno4QU1dMKpvM4PrvTYx7MGFiCTA0UGJf4wNNbjeaxPDGkQyQ4ax2ITG398fMTExGDVqFOzt7ZGWlgZ3d3esXr0au3fvxtWrVwFUT2jCwsJqfFcmISEBYWFs6Ekaurf3wuMD6/6YhI1Sjo8jHjFhRERkbnNf7QR7O0Wdyw/p4YfB3f1MGBGReb01rS28Gtf9bkF4czdMe6yFCSMiU7HYhAaoSE527dqFwsJCFBYW4tSpU3j55ZdRXFyMtLQ0yOVytG3btkqdxx57DEePHq3s0hkATp06heTkZIwePdrcm0BULzKZDBs+HYABXX0fWNZGKceGBQPQ55H6P7tKRA1PlzZe2PzZoDolNT3ae2Hrfwfzl2myKM18nLFn5aPwcLN7YNkWAa74deUwODka/v4pic+iE5raxMfHQxAEhIaGwtGxajcwL7/8Mnx9ffH4449j165d2Lp1KyZPnoxu3brh8ccfFyliIsM5Odrgt1XD8NGrneDjWfPLtY/2aoqD34zE+EeDa5xPRNI2ekAAjnw3CqP6NUNNuYpXY3u8+2IHRH89Eo1cpDXuBFFddGnjhZPrx+Dpkc1ho6x+2evsqMSrE1rjxA+jEeBbz27RSHQN5BVm87p48SKA6o+bAYCrqyuio6PxxhtvYNKkSVAqlXjsscewZMkSyOVWmf+RhNnZKjD31Ucw58UO2HHwBp7/8AgKS7RwdbJB7E+PIzSwkdghEpGJdW3rhV0rHkVqeiGiDqTho1VxKCrRorGrLW7umwQ727o/lkYkRS0CXLFhwQD8b3Z3bNmbijlLz6CoRAs3F1vc2DsRLk5M5qWOCU0Nmjdvjl27dpkzpIf21OmV0GnKoSu
t6M3jwvKfkbbjeJUyXp1boueClwAAMhslsk8n4tT730JfpoWzvxf6LJ0B97ZBKLqRjR1DZ5t9G+pLezcXVz8cXPm3XlMCjSoFHdZlQ+lStSMHVdRnyD34PaDXw75pKwTO/A5KZzcAQO7BH3Br+2IIeh1s3JogaOZ3Bnfd3FDZ2ijw1KPBeHPRSRSWaOHiZGNxyYzcVomuc59F0wEdodOUIS/hOmJmLKtWLnTyILSb8QQgl0F17BJO/OsrCNqKPvPdWgegxycvwN6r4rOJW7ARN/acMut2PIy6tAPA/bdT6p8B1S7Y3wWznm2HJT9cQlGJFo72SotKZm6smYk7Z3agLPs6wpachWNIx/tO/ztBr0f6d2/jbtxvkCmUULh4IHDGV7D3lcY7FXaNnTFs85+DKisc7OAS2AQ/tXsBZQVFVcq2jRiLFhP6Q1+mhU5TjlPvf4ucc0lwaNIYfT6PgLO/F3RlWtxNzcKJd9ZAk3vX3JtjEk08HDBjcjgWfHMeRSVaODkoLSqZ6fbv5xEwrAucm3ljx5C3kRefdt/pf+fTsw2GbHgXd5MzK6ftHv1e5TmlIWNCY0EOT19S604KAHkJadg54l8VF28yGQZ+8zZaPzccCWt2oaxIjbiFG2Hr4ohH/jXZfEEbgdLVA+Gfn6v8W/XzYhTFH66WzNw9tw+5B75D689OQeHogqzN/0Hm+vcQMP0LlKZfRvra2QhfchY27r7IPbQe11e9itAPd5t5a6i+Or83FRAERPV+HQDg4OVWrYxzM290ipyEnY9GQn27AIPWvoNWU4fi8trfoHCwxeC17yBm5nJkn74MmVwO28bSe/zgQe3A/bbTUj4Dsk6Nez8Fn3GRuDKnT52m/92d0ztQlHgM4UvPQ6a0qThH/PAuQiI3mzJso9HkF1X5MbLN9DHw6RleLZlxbxOE1s8Nw/b+b0FbUoqQJ/ui+/wXsHvkHAg6Pc4v2Yrs05cBAF0+mIauH0zD0Te/MOu2UP1c330Cl1Zux8hf/lOn6TW5m5wpqR+177HKZ6iio6MhCAJGjRoldihmpVOXVf4SrbBVQmlvi3uDFJQVFCH79GVoS+o+eFJDlbv/G3gOeaHa9JLU83AO7wOFowsAoFHnkcg99AMAQH39EhyC2sPG3bdy3t24X6G9m2u+wKnelA52CJ08CHELNlZOU98uqFYu8LEeuLk3tnLelXV7EfxEbwBAyBN9cfuPq5UnckGvt5hfJf/qfttpLZ8BWSaXNv1g6+lf5+nVyGQQtBroy0ohCAJ0JXdh41GHeg1U6NODcG3jgWrTBUGAXKmA0rHiRXlbVyeUZFUMQVmac6fy+AeA23HX4NzM2zwB00O7dTKx8rusy3RLYpV3aCxVn2WvQyYDbp9Nwh/zN9R4IeLs74VBa9+BS1ATpO+Pw+W1v4sQqekUJR6Htigfjbo+Vm2eU/POuP3rSpTnq6B0a4LcwxugVxdCW5gHh+AOKEmOQ2nGVdg3bYm8Q+sBQUDZ7etQunqIsCVkCJcgH5QVFKH9zHHw7dceutIynFu8GVlHL1Yp59zUE0Xptyv/Lkq/DaemngAAt5b+0JWVY/C6OXDydUde4g2cmfe95C7oH9QO3G87LeUzIKqPRl1Ho/DiQVx4zgdyBxfYejRFy08Oix1WvXh1aQW7Rk64ue+PavPyE64jfs0uPHV6JTT5RdCVleO3Jz6sVk4mlyPs+RG48fsZc4RMDYRLkA9G710EQafHtZ8O4sr30rhOtMo7NJbo1yc+xI7Bs7Dj0Uho8grRd+mMGssVpd/GjiFvY1P7l6Cws0HgyO5mjtS0cvZ/A4+Bz0CmqJ6ru7QfiCZj30bSvx/D5dk9YOPqBQCQKZSw9wtF4KtfIu3zZ5D4zy7QFuZC4eQG1LAcanhkSjmcm3mj4Fo6dg1/B6fe/xb9V78Fe8+6vyckUyjg17c9TkSuxo6hs1Giyq1850wq6tIO3G87LeEzIKqvkqRYqK9fQrtvM9D+u0y4tB+
MG6umix1WvYROHoSkLYch6PTV5jk380bgyO7Y1nMGtnR+BQlrdqH/6reqleux4EVo7hQh4Ss+em0tci+mYPMjr2Dno5GIfn4RWj3zKIJG9xQ7rDphQmMhijNyAACCVoeEr3ahSff7DwKqLSlF6vZjCBnX1xzhmYVOXYT8o5vhOeT5Wst4j3wNYf+LRdjiU3BuNwA2Hv5QOLoCqHjOuvVnJxH2v1h4jXgV+jK1ZF4GtXbFGTnQ63RI2RYDAMi7lIqiG9loHFa1U4eijBw4+3tV/u3s71V57BRn5CDreDxKVBW35VO2HoHXIy3NtAXGUZd24H7baQmfAVF95R5cB5f2g6B0doNMLofHoGdRePGg2GEZTOloj+AxvZD0U3SN8wNH9UD+5RtQ38oHACT9dBBNuoVBbvPnD3jd//M8nPw8cfiVJZWPppPlKy9So7ywBABQkpWH1O1HH3g92VAwobEASgc72Lr+OZ5O8BN9kHsptVo5lyAfyJQVPdrIbZQIGNENeYnXzRanqeUf3QSH4A6w929da5nyvCwAFT2hZf74IXzGRVabJ+h0SP/+HXiNjIDczrHG5VDDoskrRNbRS/AbUNHRh3MzbzgHeOPOtYwq5a7vPolmj3ap7DCg1TOPInX7MQBA2s7j8OzYHDbOFWP2NB38CPIS0sy2DQ+rru3A/bZT6p8B0cOw8wlB4YVo6MsrenS6c2YXHALaPqBWwxP8eC/kJaThTlJmjfMLb9yCd9fWUDraAwD8h3bGnaQM6Mu1ACp6xHIJ9kX084sqp5F1cPB2w70Bq5RO9vAf0hm5l9JEjamu+DyNBbD3aoSBX8+GTCGHTAYUXs/G0deXAwB6LZ6Om3tjcXNvLHz7tEXYCyMh6PSQKRXIirmIC0u2Aqjo3Wjc0eVQ2Clh4+KI8X+sRvK2w4ib/6OYm2aQnH3fwPPRqo/HZG74EDbufvAaUfHYwNWPHgX0egjaMrgPmAavUX8+kpO2/HmUZV+HXqtBo86j0HTafLPGTw/nRORq9P7fa+jy/lQIegEnIlejRJVX5RgoupGNs4s3Y8SOip5eVMfjceWHfQAq7k5cWBaFkTs/gaAXUKLKw/HZX4q5SQapaztwv+2U+mdA1u36yldwJ3Y3yvNVuPbRMCgcXNB2dVKt0wEgbfmLcOs2Bm7dx8BrZARKbyYi8c0OkClsoGzsg8BXpbf/h04ejKsb9leZ1nH2RKhv5ePKur24secUPDs2x+jfF0KnKYe2RIMjEUsBAN5dWyH8xZEouJaOx3Z/CgAovJmNg89/ZvbtIMP1XPQy/Ad3hoO3G4ZufB/lRWpE9Xq91ulA1fND4KgeaPXsMAhaHWRKBa7vPFHrnb6GRiYIvJfYEJWXlGJD86lih1FnU5LXw+b/f+0xBrUW6LvHaIszi5iRgEMD/4nAf8hGZGSXoKm3I9L3N+zuuaV2DADGPQ6sffvJdKTSDvA8IL12QCptgFSOAal9/4B4+wAfOSMiIiIiIsliQkNERERERJLFhIaIiIiIiCSLCQ0REREREUkWExoiIiIiIpKsBt4nk/VSOthhSvJ6scOoM6WDnVGXZ6+o6C1GSuwVYkdgWaR2DADGPQ6sffuJeB6QXjvANsC4pPb9A+LtA0xoGiiZTCaJrg9NRSZr+F0gk2nxGLDu7SfieYDtgLXj9193fOSMiIiIiIgkiwkNERERERFJFhMaIiIiIiKSLCY0REREREQkWUxoiIiIiIhIspjQEBERERGRZDGhISIiIiIiyWJCQ0REREREksWEhoiIiIiIJIsJDRERERERSRYTGiIiIiIikiwmNEREREREJFlMaIiIiIiISLKY0BARERERkWQxoSEiIiIiIsliQkNERERERJLFhIaIiIiIiCSLCQ0REREREUmWUuwAqGaCAJTqxI6i7uwVgExmvOUJggCtWmO8BZqB0sEOMmN+CERERGS1eC1kwHrNvkaqk1Id0HeP2FHUXcxIwMGIe5NWrcGG5lONt0A
zmJK8HjaO9mKHQURERBaA10J1x0fOiIiIiIhIspjQEBERERGRZDGhISIiIiIiyWJCQ0REREREksWEhoiIiIiIJIsJDRERERERSRYTGiIiIiIikiyOQ2NBCi8ewtX3B1aZJrd3gp1fS3gMmAbvx16HTGHZX7lPzzYYHjWvyrTyYjXupmQheesRJH6zB4JOL1J0RERERKZljddCln11a6Ua95uMRp1HAoKA8nwVcg+tQ/q3/0RpeiICI9aIHZ5ZpETFID06DpDJ4ODlhhbj+6PbvOfQKLQpTsxeLXZ4RERERCZlTddCTGgskGPII/AY8OfIsl4jX0P8a62Rs+9r+E39BDaNvESMzjxyL6YiZVtM5d9X1v6OJ2KWouXTgxG3YCM0uXdFjI6IiIjItKzpWojv0FgBhb0TnFr1AAQBGlWy2OGIQqvW4HbcNcjkcrgGNhE7HCIiIiKzsuRrISY0VuJeIqN0dhc5EvG4BFUcvJqCIpEjISIiIjI/S70WsoqEJicnB5GRkWjRogXs7e3RrFkzvPHGGyguLsYLL7wAmUyGFStWiB2m0eg1JdDezUH5ndtQp13EjS8joE45C8fQbrBv2lLs8MxC6WALO3cX2Hm4wq11ALrPfxEe7UJwO+4a7qZkiR0eERERkUlZ07WQxb9Dc+7cOYwYMQIqlQpOTk4IDw9HZmYmli1bhuTkZOTl5QEAOnbsKG6gRpS1cS6yNs6tMs2t5zgEvPKFSBGZX6fISegUOanKtLTdJ3FqztciRUREJB69XsDe4xnYsi8VuQUaAMDdojLcyCpCgK+zyNERmZ4gCDh5IRs/7EyqPAbuFJYhITkf4c0bixydaVjTtZBFJzQ5OTkYPXo0VCoVZs2ahblz58LFxQUAsGjRIrzzzjtQKpWQyWRo3769yNEaj+ewl9G413gIunKor1+EKmohynLSIbOxryxTGB+DpI9HVKsraMsg6HXo/LPOnCEb3ZUf9iJt5wnIbZRo3DoAbSPGwsnXAzpNWWWZ/qveAuQyHH7lf5XTbN2cMfbQEsR+vA4pUTE1LZqISFI2/ZaCd5fFIiW9sMr0whItgkdsxpgBAVj5Xi/4ejmKFCGRaR2OzcKbi07i3OW8KtOL1Fq0eSIKA7r64ot3e1pcYmNN10IW/cjZzJkzkZ6ejhkzZmDx4sWVyQwAREZGokOHDtBqtQgKCoKrq6uIkRqXnW8oXDsOQaPOI+AzLhIt3tuJkqQzuLFqemUZlzZ90WlTUZV/bVZehdLFE35P/1vE6I3jbooKWTEXkRF9FpdW/oIDzy6AZ8fm6LnwlcoyJ+Z8Be+urRA8tnfltB7zX0T26cuSOYCJiO5n6fpLmBR5sFoyc49eL2B79HX0nLYT1zNrLkMkZduj0zD05d+qJTN/dehMFno/swux8bfNGJnpWdO1kMUmNImJidi0aRM8PT3x6aef1limc+fOAIAOHTpUTruXAHXr1g12dnaQyWRmideUnMN6wX3ANOQf3YSixOM1ltGXa5CyYBycw/vAd/y7Zo7Q9G7HXkHy1iMIHtsbXl1aAQDKCopwfNYqdP/kRTg0aYzAUT3g06sNTrxjWX2zE5F12hNzE28uOlWnstczizAqYi/Kyy1rsD2ybhev5mHyO4dQrn3wfl1QWIZREXtxO09thsjEYcnXQhab0GzcuBF6vR5TpkyBs3PNzwc7ODgAqJrQJCUlYdu2bfDx8UHXrl3NEqs5+E78AJArkPnjhzXOv7FyOvTlpQh6Y615AzOj80u2Qq/VodPsiZXTMg6eQ9rO4+i3YiZ6LHgJx2etgibfsnr+ICLrNP/r8waVj08uwPaD100UDZH5/XfdJZRq6v4IfXZeKb6OumrCiMRnqddCFpvQREdHAwAGDhxYa5n09HQAVROafv36ISsrCzt27MCQIUNMG6QZ2fu2gHvfSSi8cACF8VVvIWbvXIY7sbvQfM52yO0s9xnqwjQVUn85Br9+7eHdPaxyeuy8dXAJ9kFG9Fm
kH4gTMUIiIuM4fyUXx87eMrjeyk2JJoiGyPxyC0rx028pBtf7cksidDrLvVNpqddCFpvQXL9e8StTYGBgjfO1Wi2OHTsGoGpCI5db7EcCn/HvAXJ5lbs0hRcOIn3dOwiJ3AK7JkHiBWcmF5Zug15X9ZcJrVqDouvZyE+8IWJkRETG89ux9HrVO3QmC6UarZGjITK/w7EqaMoM7+DoRlYxElMKjB9QA2KJ10IW28tZcXExAECtrvlZyE2bNiEnJwcuLi4IDg42aSxdunSBSqUyqI7M1gFNPr9mUB2XdgPQ+Reh1vkOzcKq9F6muZWGlM8mwP+5z+DSboBB6/q7li1DIZQZ77lTG0GOuehmcD3ViXis9X2q1vl3rmVgnf/EWuc/jJahLVEua9i/6mS5/ROQN0KWKgv+/v5ih0NEJnLHYTDg0K9edYNbtIFCKDZyRETmVWzbEXB+ol51Bw4dBTvtTeMGVA/Wdi3k4+OD2NjYetW12ITGx8cH+fn5iIuLQ8+ePavMy8rKwuzZswEA7du3N/mL/yqVChkZGQbVkds5oomJ4gEqBt9M/nQsGnUbA+9RMx56eZmZmdBrSowQWQVbmQIm/QBMIDMrE2VCA+/u2kUHyAG9TmfwPklEEuKVCzjUr6oq8zqg1xg3HiJza+QH1HOIpZxbGUCp+OdIXgvVncUmNEOGDEFiYiIWLlyIoUOHomXLlgCAM2fOYNq0acjJyQFgngE1fXx8DK4js63nmaiO8o9vgzr1PEozriL/6KZq89usSICtV0Cdl+fn52f0OzRo2Dc7qvHz9Wv4d2gUCugByBUK+DZtKnY4RGQipcq7yAUAQQAM+NFOqcuGt68npN+/J1k7rVyNW4DBx4BMr4aPhwJyiH+OtLZrofpcL98jEwSh9meUJCw9PR0dO3ZEbm4ulEolWrdujdLSUiQlJWHEiBHQ6/X4/fffsWbNGrz00ks1LuOjjz7CvHnzIMZHpNYCffeYfbX1FjMScDBielxeUooNzacab4FmMCV5PWwc7R9cUET+QzYiI7sETb0dkb5/stjhEJGJ6PUCWo3ZiqQbdw2qt3xOT8yYHG6iqIjMa8Srvxv8PtkbU9rg83d6mCgiw/BaqO4s9g14f39/xMTEYNSoUbC3t0daWhrc3d2xevVq7N69G1evVnTL99cOAYiIiCyBXC7DG1PaGFSnsastpj3WwkQREZnfm1MNOwZslHK8OqG1iaIhU7LYR84AICwsDLt27ao2vaioCGlpaZDL5Wjbtq0IkREREZnWaxPDcPJCNjbsTn5gWTtbObZ/PgSNXGzNEBmReQzr7Y+50zth3pdnH1hWJgO+/bgvWgW7mT4wMjqLTmhqEx8fD0EQ0LJlSzg6Vh93ZevWrQCAhISEKn8HBQWhS5cu5guUiIionuRyGb7/Tz94u9tj2Y8J0Olqfnzav4kTNn02EL06SuztY6I6mPtqJzRyscW7y2JrHWSzsastvprbB08ONW2vt2Q6VpnQXLx4EUDtj5uNHz++xr+fffZZrF271qSxERERGYtCIcf/ZvfArGfa4attV7BlXyoupxRAL1TclflxwUCMGRAApdJin0AnKyeTyfDWtLZ47vFQfP/LNfywKwnnruRCrwdsbeT48oPemDQ8BA72VnlJbDGs8tt7UEIjxX4S7sTuQcaG9wFBD0Gnhc8Ts+Ex6Nlq5VRRnyH34PeAXg/7pq0QOPM7KJ3doE67iNTPp1WW0xUXQFdyFx035JlzMwziEuyDvktfh527C8oLS3D0jRUouFr15T+vzi3Rc0FFpw8yGyWyTyfi1PvfQl+mBWQydPlgGpoO7Ai5UoFbpy/j5L++gr68YlC5thFj0WJCf+jLtNBpynHq/W+Rcy7J7NtJRPSwmjZxwkevPYKPXnuksnMQTzd7jBsSJHZoRGbR2NUOb05rizenta08Brwa2+MfY1uKHZpRDf3pAzh4uQF6PcqLS3Hq/W+Rdym1Wjm31gHo8ckLsPdqBAC
IW7ARN/acQouJAxH+4sjKco5+Hrh1MhEHX/jMXJtQL0xoLIAgCEhdMhUtPzkEx6D20NxKQ3xEa7j1GAeFo0tlubvn9iH3wHdo/dkpKBxdkLX5P8hc/x4Cpn8Bh6B2CP/8XGXZG6tnGNTNoRh6LXoFV9fvQ9LmQwgc1QN9ls7ArhH/qlImLyENO0f8C4JWB8hkGPjN22j93HAkrNmF0KcHw6NdMHY+Ggl9uRa9Fk9H2IsjEb9qB9zbBKH1c8Owvf9b0JaUIuTJvug+/wXsHjlHpK0lIiIiur/DL/8XZXcrxgUMGNENfT6PwI4hb1cpo3CwxeC17yBm5nJkn74MmVwO28YVg/YkbTqIpE0HK8s+fvB/SIk6Yr4NqCervMccHR0NQRAwatQosUMxHpkMuuICAIBOfRdKFw/IbOyqFClJPQ/n8D6VSU6jziORe+iHaovSl5Ui7/AGeA55weRh15e9hys8OjRH8raKg+z67pNw8vOAS1DVPsx16rKKZAaAwlYJpb1tRZ/0ANzDA5EZc7Hyjkx69Fk0f6o/gIokUa5UQOlY8RnaujqhJKvh3q0iIiIiupfMAICti2PlNc9fhTzRF7f/uIrs05cBAIJeD01u9S7ePTuFwt6zEW78Hmu6gI3EKu/QWBqZTIaQtzch+dNxUNg7QVuUj+b/ioLcpmpvNU7NO+P2rytRnq+C0q0Jcg9vgF5dCG1hHpQu7pXlCk5Ewc4nBI4hHc28JXXn1NQT6lv5EHR/Dt5UlJEDp6aeKExTVSnr7O+FQWvfgUtQE6Tvj8Pltb8DAHIvpKDVtKG4/O2v0JaWIXh0Lzg38wIA5CdcR/yaXXjq9Epo8ougKyvHb098aL4NJCIiIqqHPsteh2+vii6r902dX22+W0t/6MrKMXjdHDj5uiMv8QbOzPu+WlIT+vQgJG89XPnDcENmlXdoLI2g0yJry3/QfE4U2n19HS3/fQCpn0+D9m5OlXIu7Qeiydi3kfTvx3B5dg/YuFZcvMsUVfPanP3fwKMB350xVFH6bewY8jY2tX8JCjsbBI7sDqDitmrGwXMYHvUxRkR9jDspmZUHrXMzbwSO7I5tPWdgS+dXkLBmF/qvfkvMzSAiIiJ6oKMzl2NLl+mIW7gRXd6vPjCnTKGAX9/2OBG5GjuGzkaJKrfyfeN7lA52CH68N65tjDZX2A+FCY0FKEk5h/K8TLi06QcAcArtClsPf5SkVO933Xvkawj7XyzCFp+Cc7sBsPHwh8LRtXK+5lYqiq+chHu/p80Wf30UZ+TAoUljyBR/7sLOTT1RnJFTax1tSSlStx9DyLi+ldPO/Xczdj46G3vGvIc7V9MrOxUIHNUD+ZdvQH0rHwCQ9NNBNOkWBrkNb2oSERFRw5e85TB8erWB3f+/H3NPcUYOso7Ho0RV8Sh9ytYj8HqkaucIQaN7ouDKTdz5W2dLDRUTGgtg69UM5XlZUN9MBACUZiVBo0qGfdNW1cqW52UBAPSaEmT++CF8xkVWmZ+z/1u49XgCSmc3k8f9MEpz7yLvYiqaP1mRxAWO6oHirLxqj5u5BPlAplQAAOQ2SgSM6Ia8xOsAAIWdDWwbOQEA7Nxd0G7GWFz8YjsAoPDGLXh3bQ2loz0AwH9oZ9xJyqh834aIiIioIbF1dYRDk8aVfwcM7wpNfhE0+UVVyqXtPA7Pjs1h4+wAAGg6+BHkJaRVKRP69GDJ3J0B+A6NRbBxa4LAiDVI+WwCZDI5BEGPgJdXwNYrAJkbPoSNux+8RkwHAFz96FFAr4egLYP7gGnwGjWjcjmCXo/cA2sR/OY6sTbFIMcjV6PP5xFoN3McyovUOPrmFwCAXoun4+beWNzcGwvfPm0R9sJICDo9ZEoFsmIu4sKSioFSbVwcMTxqHgS9AJlchsSv9yB93x8AgBt7TsGzY3OM/n0hdJpyaEs0OBKxVLRtJSIiIrofG1dHDFgzC0p7Wwh6AaW5d3HgmU8BVL02Ks7IwYVlURi58xMIegE
lqjwcn/1l5XJcm/vBvU0QUn85JtamGEwmSHHQFSug1gJ994gdRd3FjAQcjJgel5eUYkPz6s99NmRTktfD5v/v6DRU9/reb+rtiPT9k8UOh4hEwHaArJ1UjgFeC9UdHzkjIiIiIiLJYkJDRERERESSxYSGiIiIiIgkiwkNERERERFJFhMaIiIiIiKSLCY0REREREQkWRyHpoGyV1R0hSwV9grjLk/pYIcpyeuNu1ATUzrYiR0CERERWQheCxmwXlHWSg8kkxl3XBepkclkDX5MFyIiIiJT4bVQ3fGRMyIiIiIikiwmNEREREREJFlMaIiIiIiISLKY0BARERERkWQxoSEiIiIiIsliQkNERERERJLFhIaIiIiIiCSLCQ0REREREUkWExoiIiIiIpIsJjRERERERCRZTGiIiIiIiEiymNAQEREREZFkMaEhIiIiIiLJYkJDRERERESSxYSGiIiIiIgkiwkNERERERFJFhMaIiIiIiKSLKXYARAR1UQQgFKd2FEYxl4ByGTGWZYgCNCqNcZZmJkoHewgM9IHYO3fPxER1R0TGiJqkEp1QN89YkdhmJiRgIORWlWtWoMNzacaZ2FmMiV5PWwc7Y2yLGv//omIqO74yBkREREREUkWExoiIiIiIpIsJjRERERERCRZTGiIiIiIiEiymNAQWQlBECAIQuX/icj66PUC9GwHiMjCsD8WIgtVVq7DjoM3EBOnwh+JuTh3ORfFai0AIPO2GsHDN6FLG090DvfEE4MC0SrYTdyAicjokm/eRdT+NMQm5OCPhBwk3yysnJd1W41e03aic7gnenf0xthBgbC342UBEUkPWy4iC5OZXYwvfkrE11FXkJ1XWmu5tMwipGUWYeu+NMxZGovB3f0QMSkMYwcFGm0sESIyP0EQsPvITXzxUyJ+O5ZeezkAJ85n48T5bKzYmAAPNzu88ERLREwKR4Cvs/kCJiJ6SExoiCyEIAj4Juoq/rn4FAqLyw2uf+BUJg6cysTg7n74Zl4fBPq5mCBK0yq8eAhX3x9YZZrc3gl2fi3hMWAavB97HTKFZTd7Pj3bYHjUvCrTyovVuJuSheStR5D4zR4IOr1I0Zmete8DGbeK8fLHR7EnpvZEpja5BRos+u4ivvgpEQvf6opXJ4RBLuePG0TU8Fluq05kRfLuaDDlX4fu+2tsXR04lYm2437Gqvd7YepjLYwQnfk17jcZjTqPBAQB5fkq5B5ah/Rv/4nS9EQERqwROzyzSImKQXp0HCCTwcHLDS3G90e3ec+hUWhTnJi9WuzwTM4a94Gte1Px4ryjuFNY9lDLKVZrMWP+CWzbl4afFg2Et4eDkSIkIjINdgpAJHG3ctXo/4/dRklm7ikqKce0dw/j8x8uGW2Z5uQY8gg8BkyFx8Bp8Bk3G60XnYSNhz9y9n2N8ju3xQ7PLHIvpiJlWwxSth5B/Kod2D3qXRRn5KDl04Nh5+EqdngmZ237wJqtlzFhdvRDJzN/dfBMFvo+txvpqmKjLZOIyBSY0BBJ2J3CMgyb/hsuJeWbZPlvfXYKX229bJJlm5PC3glOrXoAggCNKlnscEShVWtwO+4aZHI5XAObiB2O2VnyPrBhdxJe+fgYTNFp2dXrdzD0lV+Rk1/7+3hERGJjQkMkYTMXnMD5K3kmXUfE/BO4eNW06zCHexexSmd3kSMRj0tQRSKjKSgSORJxWOI+cDXtDl786KhJ13E59c7/J0zs5pmIGia+Q0MkUTsP3cC6nUkG1TmzcQx8PB2hyilB18k76lSnXKvHcx8cwcn1Y2BjI43fQPSaEmjv5kAQBGjzVbj925dQp5yFY2g32DdtKXZ4ZqF0sIWdu0vlOzStnnkUHu1CcDvuGu6mZIkdnslZwz6g0+nxjw+PoFSjM6hefdqBqANp2Px7KiYOD6lPqEREJmUVCU1OTg4WLVqEqKgopKenw8vLC+PGjcP8+fMxc+ZMfPvtt1i+fDlmzJghdqhEdaIp02H6f44
ZXM/H0xH+TZwMrheXmIsVPyXgrWltDa4rhqyNc5G1cW6VaW49xyHglS9Eisj8OkVOQqfISVWmpe0+iVNzvhYpIvOyhn3g66irOH4u2+B69W0HIuYfx6h+zeDsaGNwXSIiU7L4hObcuXMYMWIEVCoVnJycEB4ejszMTCxbtgzJycnIy6t4lKZjx47iBkpkgK37UpGZXWLWdS7/MQEznw6HQtHw79J4DnsZjXuNh6Arh/r6RaiiFqIsJx0yG/vKMoXxMUj6eES1uoK2DIJeh84/G/ard0Nz5Ye9SNt5AnIbJRq3DkDbiLFw8vWATvPnS+P9V70FyGU4/Mr/KqfZujlj7KEliP14HVKiYsQI3SgsfR8QBAFLN8SbdZ25BRr8uCcZLz/V2qzrJSJ6kIZ/ZfIQcnJyMHr0aKhUKsyaNQtZWVmIi4uDSqXCwoULsXv3bpw5cwYymQzt27cXO1yiOvvip0SzrzM1o9CoPamZkp1vKFw7DkGjziPgMy4SLd7biZKkM7ixanplGZc2fdFpU1GVf21WXoXSxRN+T/9bxOiN426KClkxF5ERfRaXVv6CA88ugGfH5ui58JXKMifmfAXvrq0QPLZ35bQe819E9unLkk5mAMvfBw7HqpCYUmD29X7xUyLfpSGiBseiE5qZM2ciPT0dM2bMwOLFi+Hi8udAgZGRkejQoQO0Wi2CgoLg6mr53ZiSZUi+eRcnzhv+mIkxGPrOTkPhHNYL7gOmIf/oJhQlHq+xjL5cg5QF4+Ac3ge+4981c4Smdzv2CpK3HkHw2N7w6tIKAFBWUITjs1ah+ycvwqFJYwSO6gGfXm1w4h3LG6fG0vaBH3aJcyxeuJqHCxbQSQgRWRaLTWgSExOxadMmeHp64tNPP62xTOfOnQEAHTp0qJy2detWPPnkkwgMDISjoyNat26N9957D0VF1tkrEDU8py+KN4bGmUs5oq37YflO/ACQK5D544c1zr+xcjr05aUIemOteQMzo/NLtkKv1aHT7ImV0zIOnkPazuPot2Imeix4CcdnrYIm3zLbO0vaB9gOEBH9yWITmo0bN0Kv12PKlClwdnausYyDQ8Xox39NaBYvXgyFQoH58+fj119/xauvvopVq1Zh+PDh0Ov1Zomd6H5iE8S7mEjNKERugTTHo7D3bQH3vpNQeOEACuOrPk6VvXMZ7sTuQvM52yG3cxQpQtMrTFMh9Zdj8OvXHt7dwyqnx85bB5dgH2REn0X6gTgRIzQtS9kHikvKkSDC42b3/CFiG0REVBOLTWiio6MBAAMHDqy1THp6xfsAf01odu7cic2bN2PKlCno378/3njjDaxYsQLHjh3D0aOm7eufqC7iTTSIZl0lJBeIuv6H4TP+PUAur/ILfeGFg0hf9w5CIrfArkmQeMGZyYWl26DXVb1Lo1VrUHQ9G/mJN0SMzDwsYR+4knYHer1477HEJ4vbBhER/Z1MsNC3+5o1a4b09HScPXu2xh7MtFotfH19kZOTg+TkZISE1N63/tWrV9GqVSv8+OOPmDx5ssGxdOnSBSqVyuB6RDW57fI8ymwCa5x3b3yJ2vh4OkCpkEOr00OVo77vemobo8Kj8AfYl5v++X2ZrQOafH7NpOvQ3ErD5be7wnfSXHiPevhu22+9GQqh7P6fa13ZCHLM1XczyrLqYvi2ebi57w/Ef1m3cUlqMk9+GuUy49zJNsf3Dxh3HzDm938/GmUgclyfr3Heg9oAoO7tQG1tgI02E953Le89K7IeWW7/hF7eCHL9HfgW/O/BFcgsfHx8EBsbW6+6Ftttc3FxMQBAra65sd60aRNycnLg4uKC4ODg+y7r4MGDAICwsLD7lquNSqVCRkZGveoSVROiAWoZBqKu40soFfJ6jUMBALk5OUCR6fdnuZ0jmphw+XpNCZI/HYtG3cYYJZkBgMzMTOg1xulO21amgEk/ABPIzMpEmWCcro5N/f0Dxt8HjPn935ejI1BLPzaGjDFT33agvLyc5zS
SNhcdIAf0Oh33ZQthsQmNj48P8vPzERcXh549e1aZl5WVhdmzZwMA2rdvD5lMVutyMjIy8MEHH2D48OH1HqvGx8enXvWIapJjI4OmlnmqnPtfTBl6h6Ymnh6usGvUtC6hPhSZrYNJl59/fBvUqedRmnEV+Uc3VZvfZkUCbL0CDFqmn5+fUe/QQGKv7fn5+hn1Do2pGXsfMOb3fz9lClfU1iXAg9oAwLA7NDWxVQrwamr6NoDIVLIUCugByBUK+HJfbjAe5nrZYh85mzlzJpYvX45mzZph//79aNmyJQDgzJkzmDZtGlJSUlBeXo6IiAisWLGixmUUFRVhwIABUKlUOHPmDHx9fc25CUQ1envxKfx33aV61b25bxL8mzgh/VYxmg39qV7LyI2ZCvdGdvWqawi1Fui7x+SrMaqYkYCDkX4mKi8pxYbmU42zMDOZkrweNo72Dy5YB9b+/d9PiVoLl57r6v0ezcO2A69OaI2V7/d+cEGiBsp/yEZkZJegqbcj0vcb/ioBNTwW2ylAZGQkPDw8cPPmTbRp0wbt2rVDaGgounXrhpCQEAwaNAhA1Q4B/kqtVmP06NFITU3F3r17mcxQg9E53FO0dQc3dTFLMkNEtXN0UCI8xE209YvZBhER1cRiExp/f3/ExMRg1KhRsLe3R1paGtzd3bF69Wrs3r0bV69eBVBzQlNeXo6nnnoKsbGx+PXXXxEeHm7u8Ilq1b29l2jr7taOFzJEDUH3diK2A23FWzcRUU0s9h0aoOIl/l27dlWbXlRUhLS0NMjlcrRt27bKvHtj1xw4cAB79uxBt27m62WIqC5C/F3Ru1MTHDt7y+zrfnZ0qNnXSUTVPTO6Bb75+arZ19uxtTvahjY2+3qJiO7HYu/Q3E98fDwEQUBoaCgcHat2bxkREYEtW7bgrbfegqOjI06ePFn57/Zt8UZmJvqr1ybUr8e9hxHi74Jhvf3Nvl4iqq5vZx+0ae5m9vW+NiHsvh3pEBGJwSoTmosXLwKo+XGzX3/9FQCwYMEC9OzZs8q/3bt3mzVOoto8OTSo3t0u19fMp8Mhl/NChqghkMlkeHNq2wcXNCKvxvZ4emRzs66TiKguLPqRs9rcL6FJS0szczREhrOzVWDNh70xMmKvWdbXta0nIiY17HfJLr4UBJnSDnK7iu5+fZ6cA/e+E6uUKbp8Aje+fBUAIGjL4RzeB81eWga5zZ8dHQiCgGsfDEZJShw6/lhgtvgfVrd/P4+AYV3g3MwbO4a8jbz4tPtOr0YmQ9e5z6DpwI7Qa/XQ5Bfi+NtfojBNOoMC12UfAICcfd9AtW0BBEEP13aDEDB9JWTKisGd1GkXceOr16EtqHik02/qJ2jcc5z5NsIAzz/REj/sSsKRP8zzHa18rxecHGsZBIuISERMaIgkakTfZvjH2FB8t73uo6nfG1eiLmNV3GNrI8faf/eDUtnwb+iGzN4Ex5COtc53DO6AsMVnIFPaQNDrkbLgSdzesxJNHn+rskz2jiWw82mOkpQ4M0RsPNd3n8Clldsx8pf/1Gn63wUM6wLvrq3xy+C3IWh1aP/mk3hkztM4/Iq0RtF+0D6guZWKzA0fIGxJHJRuTZD8yeO4/fsaeI+KgF5TgqT5jyP4zXVwDu8DQaeDtijPfMEbSC6X4duP+6Ldk1FQl9Z9QNP6tAMThgXjqUfvPwg1EZFYrDKhiY6OFjsEIqP4PLIHzl/JQ1xibp3Kd528w+B1fPlBb4Q3t4yXgOV2f74zJ2jLoC9TA395H0B9Ix4FJ7cjaOZ3yD++RYwQ6+3WyUSDpv+dIAAKWyUUdjbQanWwcXZASVbd9ispyT+2FY26jYFN44oB3LyGT0fW1vnwHhWBvMM/wqlVDziH9wEAyBQK2DRq2D16NW/miu8+7ofJ7xxEXUeVM7QdCG/uhlUcd4aIGjCrTGiILIWrsy1+WzUMQ1/5DeevGP+X5OVzeuIfY1safbmmkvb5MxAgwCm0G5o+s6DGi1H
NrTQkz38cGlUyGnUeBa8RrwGoeATt+oqXEPj6N4BcYe7QRXdzbyx8e7fBxAtfQ1ukRrEqD789MVfssAz2oH2gLOcGbL0DK/+29Q5C2e0bAAD1zQTIlXZI+vdjKMtNh0Nge/g//98Gn9RMHB6CYrUWL34UU+ekpq7CQtywb/Vwjj9FRA1aw3+GhIjuy8vdAYe+GYnH+jUz2jJdnW2wceEAzJjcsN+b+atW848gfNkFhP8vDkpXT6QtfbbGcnZNghC+9Dzar1VBr9Wg4EQUACDzp3lw6zkODs3M34NcQ+DZoTncWgVgS6eXsanjy8iKuYiei14WOyyD1HUfqI2g0+Lu+f0IeG01wpacha1HU9xY9aqJojWu559oiaglg9HY1dZoyxza0w9HvhsFP2/zdkBCRGQoJjREFsDN1Q47lg/F9//pBzeXh7ugGd7bH/FRT2LSCGn1ZmTrFQAAkClt0GT0myiKj7lveYWDM9z7TELekQ0AgKL4w7i9ezkuvhSEK3P6QFdyFxdfCkL5Hevorr35+P7IOnYJZXdLAEFA8uZD8OnVRuywDFKXfcDWMwBl2dcr/y7LTqusZ+sVAJd2A2Hr0RQymQzuA6ai+OpJ8wRvBGMHBSFh+5N4fGDAQy3HxckGaz7sjd+/HA7PxvZGio6IyHSY0BBZCJlMhmfGhOLyjqfw0aud4Oft+OBKlXWBEX38sXP5UOxZ+Sj8faT1i6yutBjaooLKv/NiNsIxpFO1cqVZSRC05QAAfXkZCk7+DIfA9gCAVp/GoN3X19HuqzS0+vQoFI6uaPdVWoN/3MhYCm/cgm/vtpDbVDyJ7D+0Cwqu3BQ5qrqr6z7QuNeTuHN6B8rzVRAEAbd/+xLufScBANz7TEBJ0hnoSu4CAO7E7oFDkLQ6j/HxdMTPnw/Bb6uGYXT/ABgyZIy3uz3ee6kDErc/iZeeas3xZohIMvgODZGFaeLhgLmvPoJ3X+yIPUdv4mjcLfyRkIOzl3NRUFhWWa5lYCN0DvdA53BPjB0UiObNXEWM+uFoC24hecGTgF4HAQLsmoQg6M11AIC05S/CrdsYuHUfg8IL0UjetQwyuQKCTguX9oPhO/EDkaM3jp6LXob/4M5w8HbD0I3vo7xIjaher9c6HQB6LZ6Om3tjcXNvLC5/9xvcQv0x5sBi6Mt1UN8uwInI1SJvVd3VdR+w8wmB79PzcPlfFS+5u7QdAK9hrwCouEPj89S7uPxOL8hkcth4NEXga2tE26b6kslkGNbbH8N6+yMtoxA/R1/HHwk5+CMhB9du3IVOV/GijauzDTq28kDncA/07tgEowcEwNbG+t4fIyLpkwmCsV8hJKKGqungH5F5W42m3o5I3z9Z7HDuS60F+u4ROwrDxIwEHIz0M1F5SSk2NJ9qnIWZyZTk9bBxNM4jStb+/ZtSebkeCoWMA+WS1fIfshEZ2SWSOBdS3Uig6SUiY+EjJERkY8OnzYnIsrBVIyIiIiIiyWJCQ0REREREksWEhoiIiIiIJIsJDRERERERSRY7BSCiBsleUdFrlJTYG7HHW6WDHaYkrzfeAs1A6WBntGVZ+/dPRER1x4SGiBokmUwaXeCaikwmM1oXyFJk7d8/ERHVHR85IyIiIiIiyWJCQ0REREREksWEhoiIiIiIJIsJDRERERERSRYTGiIiIiIikiwmNEREREREJFlMaIiIiIiISLKY0BARERERkWQxoSEiIiIiIsliQkNERERERJLFhIaIiIiIiCSLCQ0REREREUkWExoiIiIiIpIsJjRERERERCRZTGiIiIiIiEiymNAQEREREZFkMaEhIiIiIiLJUoodANVMEARo1Rqxw6gzpYMdZDKZ2GGQBZHaMQAY9zgQBAHQSGv7Ycd2gIjIWKz9PGjQes2+RqoTrVqDDc2nih1GnU1JXg8bR3uxwyALIrVjADDycaDRQDvhWeMsy0yUm78H7NkOEBEZg9WfBw3AR86IiIiIiEiymNAQEREREZFkMaE
hIiIiIiLJYkJDRERERESSxYSGiIiIiIgki72cEREREZHFulNYhrjEHPyRkIvLqQXIu1PRFXL+3TKs2JiAzuEe6NDSA44OvCyWKn5zRERERGRR9HoB+05kYOWmROw6chN6vVCtTEmpFq9/egIAYGerwMRhwXhtYhi6tfPimFoSw4TGgvj0bIPhUfOqTCsvVuNuShaStx5B4jd7IOj0IkVHZHo8BoiIKOYPFV7++Cgup96pcx1NmQ7rdiZh3c4k9O7UBF/N7YOwEDfTBWlC1nguZEJjgVKiYpAeHQfIZHDwckOL8f3Rbd5zaBTaFCdmrxY7PCKT4zFARGR9StRazFl6Bss3JkCofkOmzo6dvYVOE7bj49cewaxn20KhkOYr59Z0LmRCY4FyL6YiZVtM5d9X1v6OJ2KWouXTgxG3YCM0uXdFjI7I9HgMEBFZl7w7Gox87XecunjbKMvTlOnwzudncOribfy4cADsbBVGWa45WdO5UJopJxlEq9bgdtw1yORyuAY2ETscIrPjMUBEZLnuFJZh6Mu/Gi2Z+auoA2mY8HY0tFrpP6JlyedCJjRWwiWoYsfVFBSJHAmROHgMEBFZHkEQMGXOIcQl5ppsHTsO3UDkktMmW745Weq5kI+cWSClgy3s3F0qn5ls9cyj8GgXgttx13A3JUvs8IhMjscAEZF1WPvLNew+ctOgOmc2joGPpyNUOSXoOnlHnep8vj4eTwwKQt/OPvUJUxTWdC60ioQmJycHixYtQlRUFNLT0+Hl5YVx48Zh/vz5mDlzJr799lssX74cM2bMEDtUo+gUOQmdIidVmZa2+yROzflapIiIzIvHABGR5cvMLsZbn50yuJ6PpyP8mzgZVEcQgOfnxuDC1ifgYC+Ny2drOhdK4xt5COfOncOIESOgUqng5OSE8PBwZGZmYtmyZUhOTkZeXh4AoGPHjuIGakRXftiLtJ0nILdRonHrALSNGAsnXw/oNGWVZeS2Soze+xlSf47BhaVRldP7fB4Bey837J/yiRihkwmUl+ux49B1fLnlMrJySgAAqlw1Iv93GtMntEaIv6vIERofj4E/Hc7JxtATh7AgvD3+2bx1jWVsd27GSG9fbO/e18zRERHV39IN8bhTWPbggkaSdOMuNv6aguefaGm2dT4MazoXWvQ7NDk5ORg9ejRUKhVmzZqFrKwsxMXFQaVSYeHChdi9ezfOnDkDmUyG9u3bix2u0dxNUSEr5iIyos/i0spfcODZBfDs2Bw9F75SWUZfpsXRmcvRbuY4NA4PBAAEDO8K/6FdcOyfK8UKnYzs9MXbaD5qM56aFY39JzOh//93GnU6AZ+tvYgWo7bg5XlHUVauEzdQI+MxQERk2Uo1Wnzz81Wzr/eLnxIgPEyf0GZkTedCi05oZs6cifT0dMyYMQOLFy+Gi4tL5bzIyEh06NABWq0WQUFBcHW1vF+p77kdewXJW48geGxveHVpVTk990IK4lftQN9lr8PR1x09P5uOU+9+DfWtfBGjJWM5cf4WBjy/GzdVxbWWEQTgq21XMH5WNHQWNsjWX/EYICKyLNv2pyG3QGP29cYl5iI2Psfs6zUGSz4XWmxCk5iYiE2bNsHT0xOffvppjWU6d+4MAOjQoUPltJiYGAwZMgS+vr6ws7ODv78/Jk6ciMTERLPEbSrnl2yFXqtDp9kTq07/fBv0Oh3G7PsMqmOXkPrLMZEiJGMqUWsx9o39UGvqdudlx6EbWPTdRRNHJS4eA0REluPgafFeaj90Rrov1FvqudBiE5qNGzdCr9djypQpcHZ2rrGMg4MDgKoJTX5+Ptq1a4dly5Zh7969WLhwIeLj49GzZ0+kp6ebJXZTKExTIfWXY/Dr1x7e3cMqpwtaHW6fuQJ7j0ZI2nRQxAjJmDb+mozsvFKD6qz4KQHl5ZZ7l8baj4ESnQ45Gk2N/4iIpOaPRPHukoi57odlqedCi01ooqOjAQADBw6stcy9BOWvCc2YMWOwZMkSjB8
/Hv3798eUKVMQFRWFO3fuYNu2baYN2sQuLK3Ivv+alXt3D0OLiQOR+M0edPv4H1DY24oYIRnLyk2G31HMzC7BzsM3TBBNw2HNx8DHV+Lht/eXGv8REUmJpkyHS0niPQ5lyjFvzMESz4UyQSpvNhmoWbNmSE9Px9mzZ2vswUyr1cLX1xc5OTlITk5GSEhIrcvKzc2Fp6cnVqxYgYiICINj6dKlC1QqlUF1bAQ55uq7GbwuQygd7THmwGIkrN6Fy9//jhE/f4yc88k4M3etwcuaJz+Ncpnl/rovJQJkyHT/qF51ndVH0Eh9wKjx1JfUjgHAuMeBg1yOhI49H3o593o5ezEgBE/6NauxzIiTh43Sy1n4uRNQ69kOEJFp6WROUDWOrHX+vXFmauPj6QClQg6tTg9VjrrWcrWNUyPTl8CvYKFhQdeDOc6DQMO5HvTx8UFsbGy96lpst83FxRUvQqvVNe+omzZtQk5ODlxcXBAcHFxtvk6ng16vx/Xr1zFnzhz4+PhgwoQJ9YpFpVIhIyPDoDq2MgXQpF6rq7OuHz2DohvZuLz2NwDA0TdWYMz+xbjx6yncOmnYL/yZWZkoEyyrpyzJktkC7vWrWlRchqIsw/ZVU5HaMQAY9zhwVCiAjkZZFACghbMzBnuZ9gPNzMxEiY7tABGZmNINaFz77LqOM6NUyA0ejwYABEFu8HVdfZjjPAhYxvWgxSY0Pj4+yM/PR1xcHHr2rPorZ1ZWFmbPng0AaN++PWQyWbX6/fv3x7FjFS9EtWjRAtHR0fDy8qp3LIayEeSACX/obDqoE4LH9MYvg2dVTiu8fgt/fLIBvZdEYMegWdCq6/5svZ+vH+/QNBACgExBB8gUBtd1cVLCtWlT4wdVD1I7BgDjHgcOcuk9Eezn58c7NERkcjqZA+733Ivq/8dcq40hd2hqIpfp4GuGc6Wpz4NAw7oerM/18j0W+8jZzJkzsXz5cjRr1gz79+9Hy5YVgyCdOXMG06ZNQ0pKCsrLyxEREYEVK1ZUq3/lyhUUFBQgNTUVn332GbKzs3Hs2DEEBASYJf7yklJsaD7VLOsyhinJ62HjaC92GPT/xry+r17vw5xcPxrd23ubICLDSe0YAIx7HAilpdBOePahl2POgTWVm7+HzJ7tABGZlk6nh1vv9SgqKa9X/Zv7JsG/iRPSbxWj2dCfDK7fqbUH4jaPrde6DWHt50FDSO8nwDqKjIyEh4cHbt68iTZt2qBdu3YIDQ1Ft27dEBISgkGDBgGo2iHAX7Vq1Qrdu3fHpEmTcODAARQWFmLRokXm3ASienttYtiDC/3NI2Ee6NaufnchiYiIzEWhkKNT63o+W20EncM9RFs31cxiExp/f3/ExMRg1KhRsLe3R1paGtzd3bF69Wrs3r0bV69WjC5bW0LzV25ubmjRogWSkpJMHTaRUTzaqym6tTUsOXn/5Y41Pn5JRETU0HQO97TKdVPNLPYdGgAICwvDrl27qk0vKipCWloa5HI52rZt+8DlZGdn48qVK+jevbspwiQyOrlchh3Lh6L/P3bjStqdB5b/7J/d8MTgINMHRkREZASj+wfg8/XxZl+vXC7DiD7+Zl8v3Z9FJzS1iY+PhyAIaNmyJRwdq3brN3XqVLRo0QIdO3aEm5sbrl27hiVLlkCpVOKtt94SKWIiwzXxcMDxH0Zj5oIT2Px7Ksq11V/Sa97MBf+O6IzJI5uLECGZQ39Pb5SNvn8PjQ+aT0TU0Azs5otWQY3q9KOdMT3WrxkC/VzMuk56MKtMaC5evAig5sfNevTogXXr1mHp0qUoLS1Fs2bNMHDgQLz77rsIDAw0d6hED8W9kR3WfzoA/327O77bfhWXkvJRqtHBw80O4wYHYWjPppDL+ZgZERFJi0wmw2sTw/DGwpNmXe+rEwx/R5VMjwnN38yYMQMzZswwd0gPbehPH8DByw3Q61FeXIpT73+LvEupVco4+3uhz9IZcG8bhKIb2dgxdHad5pH0NfF
wwL9eePD7YlJl19gZwzbPrfxb4WAHl8Am+KndCygrKKqc7tY6AD0+fREOno2g1+qQczYJJ9/9GrrSsirL6/j2BHScNQE7hryNvPg0c20GEREZ4KUnW2HlpkSz3aUZ1qsphvVuGEMb1KSu50K/AR3Q5b0/e0+z92wE9e0C7Hy0YrDSAV/NgneXVnD0ccePrZ5B2d37d4PdEDChsRCHX/5v5Q4XMKIb+nwegR1D3q5SpqxIjbiFG2Hr4ohH/jW5zvOIGjpNflGVJLzN9DHw6RlepQEHAJ2mDKfe/Qb5idchk8vRb+UbaBcxFuf+u7myjGfHFvDs2AJFN7PNFj8RERnOwV6Jtf/uh97P7oJeb9pRSFycbPDVR30adOc5dT0XZh46jx2Hzlf+PXjdHKiOXar8+8q6vTj5r68w6dK3pg/aSCy2l7P7iY6OhiAIGDVqlNihGM1fs2dbF0eghuGFygqKkH36MrQl1QdIut88IqkJfXoQrm08UG16YaoK+YnXAQCCXo+cc8lwbvZnb3AKB1t0n/8CjkeuNlusRERUfz06eOPdFw37gVqVU4L0W8UPHIDzr754tyea+TgbGp6oajsX/pVDk8bw7dMWyVsPV07LirmI0ty7pg7PqKzyDo2l6rPsdfj2agMA2Dd1vsjREInDq0sr2DVyws19f9y3nNLBDi2nDMYf8zdUTuvy/jRc+X4vSjJzTR0mEREZyccRj0CVU4Kvo67WqXzXyTsMWv7CN7ti2ujQ+oQmmrqeC1tMHIj06LOSS2D+zirv0FiqozOXY0uX6YhbuBFd3pfWyLJExhI6eRCSthyGoKveq9s9chsl+q/+JzIOnceNX08DAHz7tYezvxeSNh00V6hERGQEMpkMqz/sgzentjHqcuVyGZb9qwcin29v1OWaQ13OhQAQOmkgrv14/7s4UsCExgIlbzkMn15tYNdYWrdGiR6W0tEewWN6Iemn6FrLyJQK9F/9FtTZ+Tj9wZ/PB/v2aQv3dsF46vRKPHV6JRx9PTBk/bvwH9rZHKETEdFDkMtlWBLZA78sHQIfT4eHXl7r4EY4vu4xvP60cZMkc6jLuRAAfHq2gcLOFpl/eZ9GqvjImQWwdXWEwsEO6lv5AICA4V2hyS+CJr/oATWJLEvw472Ql5CGO0mZNc6XKeTo/+Vb0OQX4fjbX1aZFzf/R8TN/7Hy76dOr0T0PxaxlzMiIgkZMzAQfR7xwfvLY/H9jiSUlGoNqu/hZoeIieGY82J72NtJ8zL5QefCe0KfHoSkzQch6O9/F0cKpPlNURU2ro4YsGYWlPa2EPQCSnPv4sAznwIAei2ejpt7Y3FzbywUDrYYd3Q5FHZK2Lg4Yvwfq5G87TDi5v9433lEUhE6eTCubthfZVrH2ROhvpWPK+v2Ivjx3gga1QN58WkYs+8zAMCtM1dw6t2vxQiXiIhMwL2RHVa+3xvzZ3bBup1J2LA7Geeu5KKsvOYLdycHJbq29cI/Hg/FhGHBkk1k7nnQuRAAbFwcETCyO34Z+M9q9Qf/MAfu4UEAgMcPLUFhqgq/PTm3WrmGRCYINXSHRaIrLynFhubSeQ9mSvJ62Djaix0GWRCpHQOAcY8DobQU2gnPGmVZ5qLc/D1k9mwHiKjhKSvXIT4pHwkpBShRayGXy+DsaIP2Ld3RMtAVCkXDewvD2s+DhpB2CkpERERE9AC2Ngp0CvNEpzBPsUMhE2h46SgREREREVEdMaEhIiIiIiLJYkJDRERERESSxYSGiIiIiIgki72cNVCCIECr1ogdRp0pHewgk8nEDoMsiNSOAcC4x4EgCIBGWtsPO7YDRETGYu3nQUMwoSEiIiIiIsniI2dERERERCRZTGiIiIiIiEiymNAQEREREZFkMaEhIiIiIiLJYkJDRERERESSxYSGiIiIiIgkiwkNERERERFJFhMaIiIiIiKSLCY0REREREQkWUxoiIiIiIhIspjQEBERERGRZDGhISIiIiIiyWJCQ0RERER
EksWEhoiIiIiIJIsJDRERERERSRYTGiIiIiIikiwmNEREREREJFlMaIiIiIiISLKY0BARERERkWQxoSEiIiIiIsliQkNERERERJLFhIaIiIiIiCSLCQ0REREREUkWExoiIiIiIpIsJjRERERERCRZ/wc9gslKGQURvQAAAABJRU5ErkJggg==", + "text/plain": [ + "
" + ] + }, + "execution_count": null, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "rnd_index = torch.randint(target_us.shape[0], (1, ))\n", + "\n", + "qc_list, _ = decode_tensors_to_backend(simulator, tokenizer, target_xs[rnd_index], target_ps[rnd_index])\n", + "qc_list[0].draw(\"mpl\")" + ] + }, + { + "cell_type": "markdown", + "id": "6975d819-7f39-4942-b185-1d82275b5eef", + "metadata": {}, + "source": [ + "Next, we further restrict to circuits with a maximum of 16 gates. " + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "9f862cd3-590b-4528-aed8-786558e460c0", + "metadata": {}, + "outputs": [], + "source": [ + "gate_cnts = get_tensor_gate_length(target_xs)\n", + "\n", + "ind = (gate_cnts <= 16).nonzero().squeeze()\n", + "target_xs = target_xs[ind] \n", + "target_ps = target_ps[ind] \n", + "target_us = target_us[ind] " + ] + }, + { + "cell_type": "markdown", + "id": "6017c08b-11c0-4827-bce2-066bc16a13ac", + "metadata": {}, + "source": [ + "We plot the distribution of the gate counts for this testset, seeing it is uniformly balanced." 
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "4407e4ba-90c0-49ab-a1b6-cf988eee8b5d", + "metadata": {}, + "outputs": [ + { + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAj8AAAG0CAYAAADdM0axAAAAOnRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjEwLjMsIGh0dHBzOi8vbWF0cGxvdGxpYi5vcmcvZiW1igAAAAlwSFlzAAAPYQAAD2EBqD+naQAANTFJREFUeJzt3Xl0FFX+/vGnA2RhSUKAbBIgsm8igoQAgkIAEdlkRDRIUCR+ERSICzDKIuCwKA4DIqiDIMoyykBUdNAQA7iwyTKKYAQnLCMkIEgiREJI398f/minScLWnaVT79c5dQ5969a9n6pU4mN1VbfNGGMEAABgEV4lXQAAAEBxIvwAAABLIfwAAABLIfwAAABLIfwAAABLIfwAAABLIfwAAABLKV/SBZRGdrtdR48eVZUqVWSz2Uq6HAAAcBWMMfr1118VHh4uL6/Cr+8Qfgpw9OhRRURElHQZAADgOhw5ckQ1a9YsdD3hpwBVqlSR9PvB8/f3L+FqAADA1cjKylJERITjv+OFIfwU4OJbXf7+/oQfAAA8zJVuWeGGZwAAYCmEHwAAYCmEHwAAYCmEHwAAYCmEHwAAYCmEHwAAYCmEHwAAYCmEHwAAYCmEHwAAYCmEHwAAYCmEHwAAYCmEHwAAYCmEHwAAYCmEHwAAYCmEHwAAYCnlS7oA4FrVGfeR28c8OKNnic5VFPMwV9HMU1bn8vSfVVmdq6TPi+Keq7hw5QcAAFgK4QcAAFgK4QcAAFgK4QcAAFgK4QcAAFgK4QcAAFgK4QcAAFgK4QcAAFgK4QcAAFgK4QcAAFgK4QcAAFgK4QcAAFgK4QcAAFgK4QcAAFgK4QcAAFgK4QcAAFgK4QcAAFhKqQo/mzZtUq9evRQeHi6bzabExETHutzcXI0dO1bNmzdXpUqVFB4ersGDB+vo0aNOY5w6dUqxsbHy9/dXYGCghg4dqjNnzhTzngAAgNKqVIWfs2fPqkWLFpo/f36+ddnZ2dq5c6cmTJignTt3avXq1UpNTVXv3r2d+sXGxuq7775TUlKS1q5dq02bNik+Pr64dgEAAJRy5Uu6gP/Vo0cP9ejRo8B1AQEBSkpKcmp75ZVX1KZNGx0+fFi1atXSvn37tG7dOm3fvl2tW7eWJM2bN0933XWXXnrpJYWHhxf5PgAAgNKtVF35uVaZmZmy2WwKDAyUJG3evFmBgYGO4CNJMTEx8vLy0tatWwsdJycnR1lZWU4LAAAomzw2/Jw7d05jx47V/fffL39/f0lSenq6goODnfqVL19eQUFBSk9PL3Ss6dOnKyAgwLFEREQUae0AAKDkeGT4yc3N1YABA2SM0YIFC1web/z48crMzHQsR44ccUOVAACgNCpV9/xcjYvB59ChQ/rss88cV30kKTQ0VMePH3fqf+HCBZ06dUqhoaGFjunj4yMfH58iqxkAAJQeHnXl52Lw2b9/v9avX69q1ao5rY+Ojtbp06e1Y8cOR9tnn30mu92uqKio4i4XAACUQqXqys+ZM2d04MABx+u0tDTt3r1bQUFBCgsL05/+9Cft3LlTa9euVV5enuM+nqCgIHl7e6tx48a68847NWzYMC1cuFC5ubkaOXKkBg4cyJNeAABAUikLP19//bXuuOMOx+uEhARJUlxcnCZPnqwPPvhAknTzzTc7bZeSkqLbb79dkrRs2TKNHDlSXbp0kZeXl/r376+5c+cWS/0AAKD0K1Xh5/bbb5cxptD1l1t3UVBQkJYvX+7OsgAAQBniUff8AAAAuIrwAwAALIXwAwAALIXwAwAALIXwAwAALIXwAwAALIXwAwAALIXwAwAALIXw
AwAALIXwAwAALIXwAwAALIXwAwAALIXwAwAALIXwAwAALIXwAwAALIXwAwAALIXwAwAALIXwAwAALIXwAwAALIXwAwAALIXwAwAALIXwAwAALIXwAwAALIXwAwAALIXwAwAALIXwAwAALIXwAwAALIXwAwAALIXwAwAALIXwAwAALIXwAwAALKV8SReAsqHOuI/cPubBGT3dPiYAAFz5AQAAlkL4AQAAlkL4AQAAlkL4AQAAlkL4AQAAlkL4AQAAlkL4AQAAlkL4AQAAlkL4AQAAlkL4AQAAlkL4AQAAlkL4AQAAllKqws+mTZvUq1cvhYeHy2azKTEx0Wm9MUYTJ05UWFiY/Pz8FBMTo/379zv1OXXqlGJjY+Xv76/AwEANHTpUZ86cKca9AAAApVmpCj9nz55VixYtNH/+/ALXz5o1S3PnztXChQu1detWVapUSd27d9e5c+ccfWJjY/Xdd98pKSlJa9eu1aZNmxQfH19cuwAAAEq58iVdwP/q0aOHevToUeA6Y4zmzJmj5557Tn369JEkLV26VCEhIUpMTNTAgQO1b98+rVu3Ttu3b1fr1q0lSfPmzdNdd92ll156SeHh4cW2LwAAoHQqVVd+LictLU3p6emKiYlxtAUEBCgqKkqbN2+WJG3evFmBgYGO4CNJMTEx8vLy0tatWwsdOycnR1lZWU4LAAAomzwm/KSnp0uSQkJCnNpDQkIc69LT0xUcHOy0vnz58goKCnL0Kcj06dMVEBDgWCIiItxcPQAAKC08JvwUpfHjxyszM9OxHDlypKRLAgAARcRjwk9oaKgkKSMjw6k9IyPDsS40NFTHjx93Wn/hwgWdOnXK0acgPj4+8vf3d1oAAEDZ5DHhJzIyUqGhoUpOTna0ZWVlaevWrYqOjpYkRUdH6/Tp09qxY4ejz2effSa73a6oqKhirxkAAJQ+pepprzNnzujAgQOO12lpadq9e7eCgoJUq1YtjR49WtOmTVP9+vUVGRmpCRMmKDw8XH379pUkNW7cWHfeeaeGDRumhQsXKjc3VyNHjtTAgQN50gsAAEgqZeHn66+/1h133OF4nZCQIEmKi4vTkiVL9Mwzz+js2bOKj4/X6dOn1aFDB61bt06+vr6ObZYtW6aRI0eqS5cu8vLyUv/+/TV37txi3xcAAFA6larwc/vtt8sYU+h6m82mKVOmaMqUKYX2CQoK0vLly4uiPAAAUAZ4zD0/AAAA7kD4AQAAlkL4AQAAlkL4AQAAlkL4AQAAlkL4AQAAlkL4AQAAlkL4AQAAlkL4AQAAlkL4AQAAlkL4AQAAlkL4AQAAlkL4AQAAlkL4AQAAlkL4AQAAlkL4AQAAlkL4AQAAlkL4AQAAlkL4AQAAlkL4AQAAlkL4AQAAlkL4AQAAlkL4AQAAlkL4AQAAlkL4AQAAlkL4AQAAlkL4AQAAlkL4AQAAlkL4AQAAlkL4AQAAlkL4AQAAlkL4AQAAlkL4AQAAlkL4AQAAlkL4AQAAlkL4AQAAlkL4AQAAlkL4AQAAlkL4AQAAlkL4AQAAluJS+Pniiy/cVQcAAECxcCn8dOzYUU2aNNHs2bN14sQJd9UEAABQZFwKPzNnzpQkPf3006pZs6b+9Kc/ad26dTLGuKU4AAAAd3Mp/Dz99NPau3evPv/8c8XGxuqTTz5Rz549Vbt2bU2aNEkHDx50U5kAAADu4ZYbntu3b68333xTx44d02uvvaYbbrhBU6dOVb169dStWze9++67ys3NdXmevLw8TZgwQZGRkfLz81PdunU1depUpytNxhhNnDhRYWFh8vPzU0xMjPbv3+/y3AAAoGxw69NelStX1iOPPKLVq1dr0KBBstvtWr9+vQYOHKiaNWvqxRdfVF5e3nWPP3PmTC1YsECvvPKK9u3bp5kzZ2rWrFmaN2+eo8+sWbM0d+5cLVy4UFu3blWlSpXUvXt3nTt3zh27CAAAPFx5dw1kt9u1du1aLVq0SP/617904cIFdejQQfHx
8fLx8dErr7yicePG6dChQ3rllVeua46vvvpKffr0Uc+ePSVJderU0YoVK7Rt2zZJv1/1mTNnjp577jn16dNHkrR06VKFhIQoMTFRAwcOdM/OAgAAj+XylZ/9+/dr3Lhxqlmzpvr166evvvpKjz/+uPbu3atNmzZp0KBBuvfee7Vx40Y9+uijWrFixXXP1a5dOyUnJ+uHH36QJP373//WF198oR49ekiS0tLSlJ6erpiYGMc2AQEBioqK0ubNmwsdNycnR1lZWU4LAAAom1y68nPbbbfpq6++kjFGnTp10uzZs9W/f395e3sX2n/hwoXXPd+4ceOUlZWlRo0aqVy5csrLy9MLL7yg2NhYSVJ6erokKSQkxGm7kJAQx7qCTJ8+Xc8///x11wUAADyHS+EnNTVVCQkJio+PV/369a/YPyYmRikpKdc937vvvqtly5Zp+fLlatq0qXbv3q3Ro0crPDxccXFx1z3u+PHjlZCQ4HidlZWliIiI6x4PAACUXi6Fn59++kkVKlS46v41atRQp06drnu+p59+WuPGjXPcu9O8eXMdOnRI06dPV1xcnEJDQyVJGRkZCgsLc2yXkZGhm2++udBxfXx85OPjc911AQAAz+HSPT///e9/9eGHHxa6/sMPP3TrZ/1kZ2fLy8u55HLlyslut0uSIiMjFRoaquTkZMf6rKwsbd26VdHR0W6rAwAAeC6Xrvw8++yzOnLkiHr16lXg+tmzZ6tWrVpaunSpK9M49OrVSy+88IJq1aqlpk2bateuXXr55Zf18MMPS5JsNptGjx6tadOmqX79+oqMjNSECRMUHh6uvn37uqUGAADg2VwKP1988YXi4+MLXd+tWze9/vrrrkzhZN68eZowYYIee+wxHT9+XOHh4Xr00Uc1ceJER59nnnlGZ8+eVXx8vE6fPq0OHTpo3bp18vX1dVsdAADAc7kUfo4fP+64z6YgwcHBysjIcGUKJ1WqVNGcOXM0Z86cQvvYbDZNmTJFU6ZMcdu8AACg7HDpnp/AwED9+OOPha4/cOCAqlSp4soUAAAAbuVS+Lntttv0xhtvFPgZOunp6fr73/+uDh06uDIFAACAW7l8w/OHH36oli1b6sknn3Q8Tr57927Nnj1bZ86c0Z///Gd31AkAAOAWLoWfm2++WatWrdJDDz2kZ555RjabTdLv37FVvXp1vffee2rdurVbCgUAAHAHl7/Y9O6779bhw4f1ySefaP/+/ZKkBg0aqFu3bvLz83O5QAAAAHdyy7e6+/n58Tk6AADAI7j8re4AAACexOXws3LlSrVv317BwcEqV65cvqV8ebdcXAIAAHALl5LJiy++qHHjxqlatWpq27atqlWr5q66AAAAioRL4Wf+/PmKiopScnIyNzcDAACP4NLbXunp6Ro0aBDBBwAAeAyXwk+9evV0+vRpN5UCAABQ9FwKP08++aQWLVqkM2fOuKseAACAIuXSPT/lypVTcHCwGjVqpIcffliRkZEqV65cvn6DBw92ZRoAAAC3cSn8DBkyxPHvadOmFdjHZrMRfgAAQKnhUvhJSUlxVx0AAADFwqXw06lTJ3fVAQAAUCzc9vUWOTk5+umnn3T+/Hl3DQkAAOB2LoefnTt3qnPnzqpSpYpq1aqlL774QpJ0/PhxdenSRevXr3e5SAAAAHdxKfzs3r1bt912m3788cd8NzUHBwfrt99+01tvveVSgQAAAO7kUviZOHGiwsPD9d1332nGjBkyxjit79Kli7Zt2+ZSgQAAAO7kUvj5/PPPNWzYMFWuXFk2my3f+lq1auno0aOuTAEAAOBWLoWfc+fOKSAgoND1WVlZrgwPAADgdi6Fn7p162rHjh2Frv/ss8/UpEkTV6YAAABwK5fCzwMPPKC3337b6Ymui29/zZ49W+vWrdODDz7oWoUAAABu5NKHHD711FNKSkpS9+7d1ahRI9lsNo0ZM0YnTpxQenq6unbtqscee8xdtQIAALjMpSs/3t7eSkpK
0ksvvSQ/Pz/5+vrqhx9+UPXq1TVr1iytXbtWXl5u+xxFAAAAl7l05UeSypcvrzFjxmjMmDHuqAcAAKBIcVkGAABYiktXfpYuXXpV/S799GcAAICS4lL4GTJkiGw2W75Pdr70Aw8JPwAAoLRwKfykpKTka7tw4YJ+/PFHvfrqq6pYsaJeeOEFV6YAAABwK5fCT6dOnQps79Kli+Li4tSmTRvt3LlTd9xxhyvTAAAAuE2R3fDs4+OjQYMG6dVXXy2qKQAAAK5ZkT7t5ePjo59++qkopwAAALgmRRZ+jh07poULFyoyMrKopgAAALhmLt3z07lz5wLbT506pe+//17nz5/XW2+95coUAAAAbuVS+PnPf/6T77F2m82moKAg3XPPPRo5cqTatWvnUoEAAADu5FL4OXjwoJvKAAAAKB58vQUAALAUwg8AALAUl9728vLyynfPz5XYbDZduHDBlWkBAACum0vhZ/Dgwdq5c6f27Nmjhg0bqnHjxpKkvXv36ocfflDz5s11yy23uKVQAAAAd3Ap/MTGxuqf//ynEhMT1bt3b6d1iYmJevDBBzV79mzFxMS4VCQAAIC7uHTPz4QJE/Too4/mCz6S1LdvX8XHx+u5555zZYp8fvrpJw0aNEjVqlWTn5+fmjdvrq+//tqx3hijiRMnKiwsTH5+foqJidH+/fvdWgMAAPBcLoWfb775RnXr1i10fb169fTtt9+6MoWTX375Re3bt1eFChX0r3/9S3v37tXs2bNVtWpVR59Zs2Zp7ty5WrhwobZu3apKlSqpe/fuOnfunNvqAAAAnsult72qVq2qTz/9VMOHDy9w/bp16xQQEODKFE5mzpypiIgILV682NH2v1+fYYzRnDlz9Nxzz6lPnz6SpKVLlyokJESJiYkaOHCg22oBAACeyaUrPw888IDef/99DR06VPv27VNeXp7y8vK0b98+Pfzww1q7dq1iY2PdVas++OADtW7dWvfee6+Cg4PVsmVLvfHGG471aWlpSk9Pd7rHKCAgQFFRUdq8eXOh4+bk5CgrK8tpAQAAZZNLV36mTZumAwcOaPHixVqyZIm8vH7PUna7XcYY9erVS9OmTXNLodLvX6exYMECJSQk6M9//rO2b9+uJ554Qt7e3oqLi1N6erokKSQkxGm7kJAQx7qCTJ8+Xc8//7zb6gQAAKWXS+HHx8dHa9as0aeffqrExESlpaVJkm688Ub16dNH3bp1c0uRF9ntdrVu3Vp/+ctfJEktW7bUnj17tHDhQsXFxV33uOPHj1dCQoLjdVZWliIiIlyuFwAAlD4uhZ+LunXr5vagU5CwsDA1adLEqa1x48b65z//KUkKDQ2VJGVkZCgsLMzRJyMjQzfffHOh4/r4+MjHx8f9BQMAgFLHbV9vceDAAX355ZfKzMx015D5tG/fXqmpqU5tP/zwg2rXri3p95ufQ0NDlZyc7FiflZWlrVu3Kjo6usjqAgAAnsPl8LN27VrVrVtXDRs2VMeOHbVjxw5J0vHjx1WvXj2tWrXK5SIvGjNmjLZs2aK//OUvOnDggJYvX67XX39dI0aMkPT7V2eMHj1a06ZN0wcffKBvv/1WgwcPVnh4uPr27eu2OgAAgOdyKfxs2LBB/fr1U1BQkCZNmiRjjGNdcHCw6tatq5UrV7pc5EW33nqr1qxZoxUrVqhZs2aaOnWq5syZ4/RE2TPPPKPHH39c8fHxuvXWW3XmzBmtW7dOvr6+bqsDAAB4Lpfu+ZkyZYpatGihrVu36pdfftHkyZOd1kdHR2vp0qWuTJHP3XffrbvvvrvQ9TabTVOmTNGUKVPcOi8AACgbXLrys337dsXGxjoecb9UzZo1L/uIOQAAQHFzKfzY7fbLPiX1888/y9vb25UpAAAA3Mql8NO4cWN9/vnnha5fu3atWrRo4coUAAAAbuVS+Bk6dKhWrVqlRYsWyW63S/r9npvs7Gw98cQT2rx5s+Lj491SKAAAgDu4dMPz8OHD9eWX
X2rYsGF68sknZbPZdP/99+vkyZPKy8vTQw895Nbv9gIAAHCVy5/w/M4776h///5655139P3338sYo6ioKA0ePFj9+/d3R40AAABuc93h57ffftN7772nhg0bql+/furXr5876wIAACgS133Pj4+Pj4YNG6Zdu3a5sx4AAIAidd3hx8vLSxEREcrKynJnPQAAAEXKpae94uLi9PbbbysnJ8dd9QAAABQpl254bteunVavXq2bb75Zjz32mOrXr6+KFSvm69exY0dXpgEAAHAbl8JP165dHf8eNWqUbDab03pjjGw2m/Ly8lyZBgAAwG2uOfxs27ZN9erVU1BQkBYvXlwUNQEAABSZaw4/0dHRevvtt/XAAw8oLi5OZ86cUXx8vJ577jk1adKkKGoEAABwm2u+4dkY4/Q6JydH//jHP/j2dgAA4BFcetrroksDEQAAQGnllvADAADgKQg/AADAUq7rUfePP/7YcY9Pdna2bDab3nvvPe3evTtfX5vNpjFjxrhUJAAAgLtcV/hZvny5li9f7tT22muvFdiX8AMAAEqTaw4/KSkpRVEHAABAsbjm8NOpU6eiqAMAAKBYcMMzAACwFMIPAACwFMIPAACwFMIPAACwFMIPAACwFMIPAACwFMIPAACwFMIPAACwFMIPAACwFMIPAACwFMIPAACwFMIPAACwFMIPAACwFMIPAACwFMIPAACwFMIPAACwFMIPAACwFMIPAACwFMIPAACwFMIPAACwFMIPAACwFMIPAACwFI8OPzNmzJDNZtPo0aMdbefOndOIESNUrVo1Va5cWf3791dGRkbJFQkAAEoVjw0/27dv12uvvaabbrrJqX3MmDH68MMP9d5772njxo06evSo7rnnnhKqEgAAlDYeGX7OnDmj2NhYvfHGG6pataqjPTMzU4sWLdLLL7+szp07q1WrVlq8eLG++uorbdmypQQrBgAApYVHhp8RI0aoZ8+eiomJcWrfsWOHcnNzndobNWqkWrVqafPmzYWOl5OTo6ysLKcFAACUTeVLuoBrtXLlSu3cuVPbt2/Pty49PV3e3t4KDAx0ag8JCVF6enqhY06fPl3PP/+8u0sFAAClkEdd+Tly5IhGjRqlZcuWydfX123jjh8/XpmZmY7lyJEjbhsbAACULh4Vfnbs2KHjx4/rlltuUfny5VW+fHlt3LhRc+fOVfny5RUSEqLz58/r9OnTTttlZGQoNDS00HF9fHzk7+/vtAAAgLLJo9726tKli7799luntoceekiNGjXS2LFjFRERoQoVKig5OVn9+/eXJKWmpurw4cOKjo4uiZIBAEAp41Hhp0qVKmrWrJlTW6VKlVStWjVH+9ChQ5WQkKCgoCD5+/vr8ccfV3R0tNq2bVsSJQMAgFLGo8LP1fjrX/8qLy8v9e/fXzk5OerevbteffXVki4LAACUEh4ffjZs2OD02tfXV/Pnz9f8+fNLpiAAAFCqedQNzwAAAK4i/AAAAEsh/AAAAEsh/AAAAEsh/AAAAEsh/AAAAEsh/AAAAEsh/AAAAEsh/AAAAEsh/AAAAEsh/AAAAEsh/AAAAEsh/AAAAEsh/AAAAEsh/AAAAEsh/AAAAEsh/AAAAEsh/AAAAEsh/AAAAEsh/AAAAEsh/AAAAEsh/AAAAEsh/AAAAEsh/AAAAEsh/AAAAEsh/AAAAEsh/AAAAEsh/AAAAEsh/AAAAEsh/AAAAEsh/AAAAEsh/AAAAEsh/AAAAEsh/AAAAEsh/AAAAEsh/AAAAEsh/AAAAEsh/AAAAEsh/AAAAEsh/AAAAEsh/AAAAEsh/AAAAEsh/AAAAEsh/AAAAEvxqPAzffp03XrrrapSpYqCg4PVt29fpaamOvU5d+6cRowYoWrVqqly5crq37+/MjIySqhiAABQ2nhU+Nm4caNGjBihLVu2KCkpSbm5uerWrZvOnj3r6DNmzBh9+OGHeu+997Rx40YdPXpU99xzTwlWDQAASpPyJV3AtVi3bp3T
6yVLlig4OFg7duxQx44dlZmZqUWLFmn58uXq3LmzJGnx4sVq3LixtmzZorZt25ZE2QAAoBTxqCs/l8rMzJQkBQUFSZJ27Nih3NxcxcTEOPo0atRItWrV0ubNmwsdJycnR1lZWU4LAAAomzw2/Njtdo0ePVrt27dXs2bNJEnp6eny9vZWYGCgU9+QkBClp6cXOtb06dMVEBDgWCIiIoqydAAAUII8NvyMGDFCe/bs0cqVK10ea/z48crMzHQsR44ccUOFAACgNPKoe34uGjlypNauXatNmzapZs2ajvbQ0FCdP39ep0+fdrr6k5GRodDQ0ELH8/HxkY+PT1GWDAAASgmPuvJjjNHIkSO1Zs0affbZZ4qMjHRa36pVK1WoUEHJycmOttTUVB0+fFjR0dHFXS4AACiFPOrKz4gRI7R8+XK9//77qlKliuM+noCAAPn5+SkgIEBDhw5VQkKCgoKC5O/vr8cff1zR0dE86QUAACR5WPhZsGCBJOn22293al+8eLGGDBkiSfrrX/8qLy8v9e/fXzk5OerevbteffXVYq4UAACUVh4VfowxV+zj6+ur+fPna/78+cVQEQAA8DQedc8PAACAqwg/AADAUgg/AADAUgg/AADAUgg/AADAUgg/AADAUgg/AADAUgg/AADAUgg/AADAUgg/AADAUgg/AADAUgg/AADAUgg/AADAUgg/AADAUgg/AADAUgg/AADAUgg/AADAUgg/AADAUgg/AADAUgg/AADAUgg/AADAUgg/AADAUgg/AADAUgg/AADAUgg/AADAUgg/AADAUgg/AADAUgg/AADAUgg/AADAUgg/AADAUgg/AADAUgg/AADAUgg/AADAUgg/AADAUgg/AADAUgg/AADAUgg/AADAUgg/AADAUgg/AADAUgg/AADAUgg/AADAUgg/AADAUgg/AADAUgg/AADAUsps+Jk/f77q1KkjX19fRUVFadu2bSVdEgAAKAXKl3QBReEf//iHEhIStHDhQkVFRWnOnDnq3r27UlNTFRwcXNLlFZs64z4qknEPzuhZJOMCAFAcyuSVn5dfflnDhg3TQw89pCZNmmjhwoWqWLGi3nzzzZIuDQAAlLAyd+Xn/Pnz2rFjh8aPH+9o8/LyUkxMjDZv3lzgNjk5OcrJyXG8zszMlCRlZWUVbbFFzJ6TXSTjFnRcimKuwo5/WZzL039WZXWukj4vinMuT/9ZldW5Svq8KO653DWuMebyHU0Z89NPPxlJ5quvvnJqf/rpp02bNm0K3GbSpElGEgsLCwsLC0sZWI4cOXLZrFDmrvxcj/HjxyshIcHx2m6369SpU6pWrZpsNluJ1JSVlaWIiAgdOXJE/v7+JVJDacGx+APH4g8ciz9wLP7AsXBmteNhjNGvv/6q8PDwy/Yrc+GnevXqKleunDIyMpzaMzIyFBoaWuA2Pj4+8vHxcWoLDAwsqhKvib+/vyVO2KvBsfgDx+IPHIs/cCz+wLFwZqXjERAQcMU+Ze6GZ29vb7Vq1UrJycmONrvdruTkZEVHR5dgZQAAoDQoc1d+JCkhIUFxcXFq3bq12rRpozlz5ujs2bN66KGHSro0AABQwspk+Lnvvvt04sQJTZw4Uenp6br55pu1bt06hYSElHRpV83Hx0eTJk3K93acFXEs/sCx+APH4g8ciz9wLJxxPApmM+ZKz4MBAACUHWXunh8AAIDLIfwAAABLIfwAAABLIfwAAABLIfyUoPnz56tOnTry9fVVVFSUtm3bdtn+7733nho1aiRfX181b95cH3/8cTFVWnSmT5+uW2+9VVWqVFFwcLD69u2r1NTUy26zZMkS2Ww2p8XX17eYKi46kydPzrdfjRo1uuw2ZfGckKQ6derkOxY2m00jRowosH9ZOyc2bdqkXr16KTw8XDabTYmJiU7rjTGaOHGiwsLC5Ofnp5iYGO3fv/+K417r35zS4HLHIjc3V2PHjlXz5s1VqVIlhYeHa/DgwTp69Ohlx7ye
37XS4ErnxZAhQ/Lt15133nnFcT3xvHAV4aeE/OMf/1BCQoImTZqknTt3qkWLFurevbuOHz9eYP+vvvpK999/v4YOHapdu3apb9++6tu3r/bs2VPMlbvXxo0bNWLECG3ZskVJSUnKzc1Vt27ddPbs2ctu5+/vr2PHjjmWQ4cOFVPFRatp06ZO+/XFF18U2resnhOStH37dqfjkJSUJEm69957C92mLJ0TZ8+eVYsWLTR//vwC18+aNUtz587VwoULtXXrVlWqVEndu3fXuXPnCh3zWv/mlBaXOxbZ2dnauXOnJkyYoJ07d2r16tVKTU1V7969rzjutfyulRZXOi8k6c4773TarxUrVlx2TE89L1zmlm8TxTVr06aNGTFihON1Xl6eCQ8PN9OnTy+w/4ABA0zPnj2d2qKiosyjjz5apHUWt+PHjxtJZuPGjYX2Wbx4sQkICCi+oorJpEmTTIsWLa66v1XOCWOMGTVqlKlbt66x2+0Fri+r54Qxxkgya9ascby22+0mNDTUvPjii46206dPGx8fH7NixYpCx7nWvzml0aXHoiDbtm0zksyhQ4cK7XOtv2ulUUHHIi4uzvTp0+eaxikL58X14MpPCTh//rx27NihmJgYR5uXl5diYmK0efPmArfZvHmzU39J6t69e6H9PVVmZqYkKSgo6LL9zpw5o9q1aysiIkJ9+vTRd999VxzlFbn9+/crPDxcN954o2JjY3X48OFC+1rlnDh//rzeeecdPfzww5f9ouGyek5cKi0tTenp6U4/+4CAAEVFRRX6s7+evzmeKjMzUzab7Yrfz3gtv2ueZMOGDQoODlbDhg01fPhwnTx5stC+VjovLkX4KQE///yz8vLy8n3idEhIiNLT0wvcJj09/Zr6eyK73a7Ro0erffv2atasWaH9GjZsqDfffFPvv/++3nnnHdntdrVr107//e9/i7Fa94uKitKSJUu0bt06LViwQGlpabrtttv066+/FtjfCueEJCUmJur06dMaMmRIoX3K6jlRkIs/32v52V/P3xxPdO7cOY0dO1b333//Zb/E81p/1zzFnXfeqaVLlyo5OVkzZ87Uxo0b1aNHD+Xl5RXY3yrnRUHK5NdbwDONGDFCe/bsueJ779HR0U5fUtuuXTs1btxYr732mqZOnVrUZRaZHj16OP590003KSoqSrVr19a7776roUOHlmBlJWvRokXq0aOHwsPDC+1TVs8JXL3c3FwNGDBAxhgtWLDgsn3L6u/awIEDHf9u3ry5brrpJtWtW1cbNmxQly5dSrCy0ocrPyWgevXqKleunDIyMpzaMzIyFBoaWuA2oaGh19Tf04wcOVJr165VSkqKataseU3bVqhQQS1bttSBAweKqLqSERgYqAYNGhS6X2X9nJCkQ4cOaf369XrkkUeuabuyek5Icvx8r+Vnfz1/czzJxeBz6NAhJSUlXfaqT0Gu9LvmqW688UZVr1690P0q6+fF5RB+SoC3t7datWql5ORkR5vdbldycrLT/73+r+joaKf+kpSUlFRof09hjNHIkSO1Zs0affbZZ4qMjLzmMfLy8vTtt98qLCysCCosOWfOnNGPP/5Y6H6V1XPify1evFjBwcHq2bPnNW1XVs8JSYqMjFRoaKjTzz4rK0tbt24t9Gd/PX9zPMXF4LN//36tX79e1apVu+YxrvS75qn++9//6uTJk4XuV1k+L66opO+4tqqVK1caHx8fs2TJErN3714THx9vAgMDTXp6ujHGmAcffNCMGzfO0f/LL7805cuXNy+99JLZt2+fmTRpkqlQoYL59ttvS2oX3GL48OEmICDAbNiwwRw7dsyxZGdnO/pceiyef/5588knn5gff/zR7NixwwwcOND4+vqa7777riR2wW2efPJJs2HDBpOWlma+/PJLExMTY6pXr26OHz9ujLHOOXFRXl6eqVWrlhk7dmy+dWX9nPj111/Nrl27zK5du4wk8/LLL5tdu3Y5nmCaMWOGCQwM
NO+//7755ptvTJ8+fUxkZKT57bffHGN07tzZzJs3z/H6Sn9zSqvLHYvz58+b3r17m5o1a5rdu3c7/Q3JyclxjHHpsbjS71ppdblj8euvv5qnnnrKbN682aSlpZn169ebW265xdSvX9+cO3fOMUZZOS9cRfgpQfPmzTO1atUy3t7epk2bNmbLli2OdZ06dTJxcXFO/d99913ToEED4+3tbZo2bWo++uijYq7Y/SQVuCxevNjR59JjMXr0aMdxCwkJMXfddZfZuXNn8RfvZvfdd58JCwsz3t7e5oYbbjD33XefOXDggGO9Vc6Jiz755BMjyaSmpuZbV9bPiZSUlAJ/Ly7us91uNxMmTDAhISHGx8fHdOnSJd9xql27tpk0aZJT2+X+5pRWlzsWaWlphf4NSUlJcYxx6bG40u9aaXW5Y5GdnW26detmatSoYSpUqGBq165thg0bli/ElJXzwlU2Y4wphgtMAAAApQL3/AAAAEsh/AAAAEsh/AAAAEsh/AAAAEsh/AAAAEsh/AAAAEsh/AAAAEsh/AAAAEsh/AAodjabTUOGDCnpMq5Ldna2nnjiCdWqVUvlypVTnTp1SrokANeI8AOUERs2bJDNZpPNZtMbb7xRYB+bzaa77767mCsrW2bOnKl58+bpvvvu05IlSzRnzpwSqSMxMVGTJ08ukbkBT0f4AcqgyZMn67fffivpMsqkpKQkNW/eXC+++KIefPBB9e3bt0TqSExM1PPPP18icwOejvADlDGtW7fW0aNHS+yKRGmTl5en7Oxst42Xnp6uoKAgt40HoPgRfoAyZsCAAWrVqpVmzpypkydPXrF/YfffLFmyRDabTRs2bHC0TZ48WTabTXv37tXo0aMVFhamihUrqkuXLkpNTZUkrV69Wrfccov8/PxUp04dvf7664XOvX79erVt21YVK1ZUaGioRo0apTNnzuTrl5mZqbFjx6pevXry8fFRjRo1dP/99+s///lPgTWvX79eU6dOVd26deXr66t33333ssfgwoULmjlzppo0aSJfX19Vq1ZN/fr107fffptv7LS0NG3cuNHxFuOV3nrKzs5WQkKCwsLC5Ofnp7Zt2yo5OVlDhgyRzWZz6rtt2zYNGTJEDRo0UMWKFVWlShW1b99ea9ascep3++2366233pIkRx02m01Llixx9Dl27JiGDx+uWrVqydvbW+Hh4YqPj9fx48edxjp16pTGjBnjOFbVqlVTq1at9OKLL152vwBPVr6kCwDgXjabTTNmzFDXrl31wgsv6OWXX3b7HHFxcapcubL+/Oc/68SJE5o9e7a6d++uqVOn6plnntHw4cP18MMPa9GiRXr00UfVpEkTdejQwWmMnTt3atWqVRo2bJgGDx6slJQUzZ07V3v27FFSUpK8vH7/f7PMzEy1a9dOhw8f1sMPP6ymTZvq2LFjevXVVxUVFaWvv/5atWvXdhr7qaeeUm5uroYNGyZ/f381bNjwsvsTGxurd999V127dtXw4cOVnp6u+fPnKzo6Wp9//rlatmypjh076u2339aYMWNUvXp1Pfvss5Kkm2666bJj33vvvfr444/Vt29fxcTEKC0tTf369VNkZGS+vmvWrNH333+vAQMGqHbt2jp58qTeeust3XPPPVq2bJkeeOABSdKzzz4ru92uzz//XG+//bZj+3bt2kmSDh8+rOjoaJ0/f15Dhw5V3bp1deDAAS1YsEApKSn6+uuvFRAQ4Khv06ZN+r//+z/ddNNN+u2337Rv3z5t2LBBTz/99GX3DfBYBkCZkJKSYiSZF1980RhjTNeuXY2Pj485ePCgo48k07NnT6ftJJm4uLh84y1evNhIMikpKY62SZMmGUnm7rvvNna73dH+t7/9zUgyVapUMYcPH3a0Hz9+3Pj4+JiBAwfmm1OSWbNmjVP7E088YSSZFStWOLX5+vqa3bt3O/U9ePCgqVKlilPtF2tu0KCBOXv2bMEH6hKffvqpkWQGDBjgtE+7d+82
5cqVMx06dHDqX7t2bdOpU6erGvujjz4ykswjjzxSYPulf4LPnDmTb4yzZ8+aBg0amMaNGzu1x8XF5dv+ot69e5saNWqYI0eOOLVv377dlCtXzkyaNMkYY8zp06eNJDN8+PCr2h+grOBtL6CMmjlzps6fP68JEya4fewnnnjC6S2b2267TZLUu3dvRUREONpr1Kihhg0bav/+/fnGaNiwYb6bhceNGydJjrd5jDFatmyZOnbsqBtuuEE///yzY6lUqZLatm2rTz/9NN/Yw4cPV8WKFa9qXy7O9eyzzzrtU4sWLdSrVy998cUXOnHixFWNdakPP/xQkpSQkODUftddd6lx48b5+leqVMnx7+zsbJ08eVLZ2dnq3Lmz9u3bp6ysrCvOmZmZqbVr16p3797y9fV1OmZ16tRRvXr1HMfMz89PPj4+2rp1qw4ePHhd+wh4IsIPUEa1bNlS999/v5YtW6ZvvvnGrWPfeOONTq+rVq0qSQW+lVO1atUC7z0q6D/+YWFhCgwMdNzLc+LECZ08eVKffvqpatSokW9JSkpSRkZGvnEaNGhw1fuSlpYmLy+vAutp2rSpo8/1uDh2vXr18q0r6K2448ePKz4+XiEhIapUqZKqV6+uGjVqaOHChZKk06dPX3HO1NRU2e12LVq0qMBjlpqa6jhm3t7emjNnjvbs2aPIyEg1bdpUjz/+uJKTk69rfwFPwT0/QBk2bdo0rVq1SmPHjtW//vWva9r2woULha4rV67cNbUbY65p7ku3i4mJ0dixY696u6u96lNcLr2xuSDGGHXr1k379u3TqFGj1Lp1awUEBKhcuXJavHixli9fLrvdflXjSNKgQYMUFxdXYB8/Pz/Hv//v//5Pffr00UcffaSNGzdq1apVeuWVV3Tfffdp5cqVV7mHgGch/ABlWGRkpIYPH66//e1vTk9t/a+goCCdOnUqX/ulT1K52759+/K1HTt2TKdPn3ZcWapRo4YCAwOVlZWlmJiYIqnjxhtvlN1u1759+/LdvLx3715JBV/Ruhp16tSR3W7X/v37811Zuvh03EXffPON/v3vf2vixIn5Pr/n73//e76xCwtU9erVk81m0/nz56/6mIWFhemRRx7RI488ory8PD344INasWKFnnzySd16661XNQbgSXjbCyjjnnvuOfn7++uZZ54pcH2DBg20efNmp8/C+eWXX7R48eIirSs1NVWJiYlObTNnzpQkx71AXl5eio2N1bZt27Rq1aoCx7n00e1rdXGu6dOnO12h2rNnjz744AN16NBBNWrUuK6xe/XqJUn661//6tT+8ccf5wt/F6+aXXqVbM+ePfkedZekypUrS1K+4FqtWjXdddddWr16tbZs2ZJvO2OM4x6m7OzsfJ+BVK5cOUcILCgUA2UBV36AMq569ep6+umnC73xeeTIkRo0aJA6d+6sBx98UKdPn9Ybb7yh2rVrKz09vcjqat68uQYNGqRhw4apfv36SklJ0apVq9SpUyfdd999jn4vvPCCvvzySw0YMEADBgxQ27Zt5e3trUOHDunjjz9Wq1atnD7f5lp17dpVAwYM0MqVK/XLL7/o7rvvdjzq7uvrq7lz51732HfddZe6d++uN954Qz///LPjUffXX39dN910k9O9WI0bN1bTpk01a9YsZWdnq2HDhvrhhx/02muvqXnz5tqxY4fT2G3bttUrr7yixx57TD179lSFChUUFRWlyMhILViwQB06dFDHjh01ePBgtWzZUna7Xf/5z3/0/vvva/DgwZo8ebJ++OEHderUSf369VOzZs1UtWpV7du3TwsWLFBkZKTjRnagzCm5B80AuNOlj7r/r7Nnz5qwsLACH3U3xphZs2aZWrVqGW9vb9OoUSOzaNGiyz7qnpaW5rR9WlqakeR4hPp/derUydSuXdupTf//8fqkpCTTpk0b4+vra4KDg83IkSNNVlZWgfVPmTLFNGvWzPj6+prKlSubRo0amUceecRs2bLF0a+g
mq9Gbm6umTFjhmnUqJHx9vY2VatWNX369DHffPNNvr7X8qi7Mb8/vj5q1CgTHBxsfH19TZs2bUxycrLp37+/8fPzc+p78OBB86c//clUr17d+Pn5mVtvvdWsXr26wOOel5dnnnzySXPDDTcYLy8vI8ksXrzYsf7EiRPmqaeeMvXr1zc+Pj4mICDANGvWzDzxxBPmu+++M8YY8/PPP5vRo0ebFi1amICAAOPr62vq1q1rRo0aZY4ePXpNxxDwJDZjrvNORADAdWvevLlyc3P1/fffl3QpgOVwzw8AFKGCvmD2o48+0p49e9S1a9cSqAgAV34AoAiNHz9eu3bt0h133KGAgADt3r1bb775pvz9/bV7927VrFmzpEsELIfwAwBF6OOPP9aMGTO0d+9eZWZmKigoSJ07d9bUqVML/PBDAEWP8AMAACyFe34AAIClEH4AAIClEH4AAIClEH4AAIClEH4AAIClEH4AAIClEH4AAIClEH4AAICl/D+MmVs9cUU+QgAAAABJRU5ErkJggg==", + "text/plain": [ + "
" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "gate_cnts = get_tensor_gate_length(target_xs)\n", + "\n", + "d = np.bincount(gate_cnts)\n", + "plt.bar(range(d.size), d)\n", + "plt.xlabel(\"Number of gates\", fontsize=13)\n", + "plt.ylabel(\"Frequency\", fontsize=13)\n", + "plt.show()" + ] + }, + { + "cell_type": "markdown", + "id": "7835d6ad-03d1-4a7b-ab89-939b187468a5", + "metadata": {}, + "source": [ + "## Compile a single unitary" + ] + }, + { + "cell_type": "markdown", + "id": "142811b8-e9e7-433f-90d2-1f1adf8be00b", + "metadata": {}, + "source": [ + "First, we want to compile a single unitary for 4 qubits from the testset. We pick one with 8 gates." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "76c28d9d-9420-4197-bbe2-d8a99a879696", + "metadata": {}, + "outputs": [ + { + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAnMAAAEvCAYAAAAuFEcfAAAAOnRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjEwLjMsIGh0dHBzOi8vbWF0cGxvdGxpYi5vcmcvZiW1igAAAAlwSFlzAAAPYQAAD2EBqD+naQAAMn1JREFUeJzt3Xl8VPW9//H3yToJJCxJIIEEAoQlQUIQRNGKgGhlrVUWLWIXKlqh8KuU2Fqr1d4WEFxK0Ste63ZvpcGlVkHUCoLRKgYBpRCILAFCMsBAgOzJZOb3R0okkEBmMkvO5PV8PPIwM+f7PeczMsm88z3nfL+G0+l0CgAAAKYU5O8CAAAA4D7CHAAAgIkR5gAAAEyMMAcAAGBihDkAAAATI8wBAACYGGEOAADAxAhzAAAAJkaYAwAAMDHCHAAAgIkR5gAAAEyMMAcAAGBihDkAAAATI8wBAACYGGEOAADAxAhzAAAAJkaYAwAAMDHCHAAAgIkR5gAAAEyMMAcAAGBihDkAAAATI8wBAACYGGEOAADAxAhzAAAAJkaYAwAAMDHCHAAAgIkR5gAAAEyMMAcAAGBihDkAAAATI8wBAACYGGEOAADAxAhzAAAAJkaYAwAAMDHCHAAAgIkR5gAAAEyMMAcAAGBihDkAAAATC/F3AWic0ylV1vq7iuazBEuG4e8qAACBwul0SlVV/i7DNeHhMvzwYUiYa6Uqa6Vr3/V3Fc2XPV6K4N0EAPCUqirZp/3Q31W4JGT1y5LF4vPjcpoVAADAxAhzAAAAJkaYAwAAMDHCHAAAgIkR5gAAAEyMMAcAAGBihDkAAAATY2awAFKyY6PyHhzd4LkgSzuFd+unmFEz1WXiz2UE808OAEAg4ZM9AHUaebs6DB0vOZ2qKbbqxMZXVPDCfaosyFXPOc/5uzwAAOBBhLkAFNn7csWMuqP+cdz4e7Xz3gGy/fN5dbvjDwrtEOfH6gAAgCdxzVwbEGxpp3b9r5KcTlVZ9/m7HAAA4EGEuTbibIgLad/Zz5UAAABP4jRrAHJUlct+xian0yl7sVXH33tWFfu3KbLvcFm69/N3eQAAwIPaxMiczWZTZmamUlJSZLFYlJSUpPnz56usrEyzZs2
SYRhasWKFv8v0mKJVD+urmXH6+s4u2jU/XcfXPaOOI25Rym/+4e/S4EdOp1Nl5TU6capSdrvD3+UAbVKNQzpVLVXW+rsSBJKAH5nbvn27xo0bJ6vVqnbt2iktLU2FhYVavny59u3bp5MnT0qSMjIy/FuoB8V+d7Y6XT1VztoaVRzcIeubS1RtK5ARaqlvU7IzW3sfHXdBX6e9Wk5HrYb+nd80gcJWXKkX/p6nZ1/brQNHSiRJwcGGJo/qoXunp+r6K7vJMAw/VwkELrtD2mSVXs+XcmzfPt83WpqSLI1LlCID/tPYfzbZjumGzzZqcVq67uszoNE2Ye+s1vguCXrrymt9XJ1nBPTbx2azadKkSbJarVqwYIEefvhhRUVFSZIee+wx3X///QoJCZFhGEpPT/dztZ4TntBX0RljJUkdho5T+9TvaM+vv6ND/32Pei/8myQpauC1GpJV2qBf9YlC7V4wTHET5vq8ZnjHmx/m644HNqrivGGA2lqn/r7+oP6+/qDGDE/QG09cr47R4X6qEghcReXS/M3S/pILt31zRlr0tfTsbumJ4dIgLmmGmwL6NOu8efNUUFCguXPnatmyZfVBTpIyMzM1ePBg2e12JScnKzo62o+Velf71KvVedRMFX+SpdLcfzXaxlFTpf2Lb1H7tO8oYeoDPq4Q3vDmh/masmD9BUHufBu+KNKN97ynsvIaH1UGtA3HK6W7Pm08yJ2ruFr62WfSzmLf1IXAE7BhLjc3V1lZWYqNjdWiRYsabTN06FBJ0uDBgxs8f+DAAU2ePFlRUVHq1KmT7rzzTp04ccLrNXtTwvTfSkHBKnz1oUa3H3rmHjlqKpU8/yXfFgavOHm6SjMf2CSns3ntc/5t08PPbPVuUUAbs+hryVrRvLaVtdIDX0q1zfyZBc4VsGFu1apVcjgcmjFjhtq3b99om4iICEkNw1xJSYlGjx6tgoICrVq1Ss8995yys7M1ceJEORzmvWjckpCiztfeppKv16tkZ3aDbcfeWa7TW9aoz6/fUlB4pJ8qhCe99I88lVfaXerzwlt5Kq9wrQ+AxhWWS9lW1/ocKZf+ddQ79UAqr62Vraqq0S+zC9hr5jZs2CBJGj16dJNtCgoKJDUMc88995yOHDmijz/+WD169JAkJSYm6uqrr9bbb7+tm2++2XtFe1n81N/oZPYqFb76kPr/4SNJUsnXH6nglfvV96F1Cu+a7N8C4TErX9vjcp/iM9V67YMD+uH3+nqhIqBteeug5M4g2xsHpWvjPV4OJD26Z6ce3bPT32V4heF0NvdEjLkkJSWpoKBA27Zta/ROVbvdroSEBNlsNu3bt0+9e/eW9G34++ijjxq079Onj0aNGqW//OUvLtcybNgwWa2u/YlmhEWo61PfuHwsV1QdzdfuX16hhNseVpcW3vRw9P/1lbO6mecT4FVOBamw88Nu9W1f8Yk6VPzTwxUBbU/H2c/LknGTy/3stoOyPXSNFyoyn4igIO3KGNHi/Zy9m/WnPXrr1m5JjbYZ9/kmj9zNmrb9M1W4eRYvPj5eW7ZscatvwI7MlZWVSZIqKhoPGFlZWbLZbIqKilKvXr3qn9+1a5emTp16QfuBAwdq165dbtVitVp15MgRl/oEhUeqq1tHax5HVbn2LbpZHYZPbnGQk6TCwkI5qso9UBlazAiT3LwrrrSsQqVFrr1XAVwowu6Q5dLNLuAwQlz+vAhUkcHBUobn9pfSvr2uj/PmJ2vdZ2F5re+n9grYMBcfH6/i4mJt3bpVI0Y0TPZFRUVauHChJCk9Pb3BHFvFxcXq2LHjBfvr3Lmz9uxx/dTV2VpcZYRFuHWs5ir+1xuqOPCVKo/kqfiTrAu2D1yxS2FxPZq9v27dujEy10o4JRU6ayQj1OW+UZFBiu7e3fNFAW1MWK17f9waFafVnZ9BSXUjc2bTrVu3Fo3MuStgw9zYsWOVm5urJUuW6IYbblC/fnXLWOXk5GjmzJmy2epmbvTFZMH
uDJtW2KVr3/VCMf8RM3qmYkbP9Nj+8vK+UUTAvpvMZ+YDG/V/a/a53G/zB88otXdHzxcEtDGbrNKCL1zv9/Pr0/TTews8X5AJOSsrZZ/2Q3+X4ZK8vDwZFnfGZFvGfLG3mTIzMxUTE6PDhw9r4MCBGjRokPr27avhw4erd+/eGjNmjKQLpyXp1KmTTp06dcH+Tp48qc6dmdER5jBneprLfUZfkUCQAzzkO12leBdPsAQb0s09vVMPAlvAhrnExERlZ2drwoQJslgsys/PV+fOnbVy5UqtXbtWeXl5ki4Mc6mpqY1eG7dr1y6lpqb6pHagpa5Mj9P3Rjf/NHloSJAenXO5FysC2pZgQ7q38ZWjmvSD3lKs7wd1EAACNsxJdcFszZo1KikpUUlJiTZv3qzZs2errKxM+fn5CgoK0mWXXdagz8SJE/XJJ5/UT1siSZs3b9a+ffs0adIkX78EwC2GYeivi0Zp1BUJl2wbGhKkvy4epe9cznwIgCeNT5LmNXOQfHyiNNf1AXVAUgBPTXIxmzdv1lVXXaX+/ftr9+7dDbadOXNGgwYNUmxsrB555BFVVlYqMzNTcXFx+uyzzxTkowsyvX3NnKdljxfXzLVCVdW1WvyXr/Tsa7tltV14g8qNV3fXQ3cP0TVDvHuHF9CWbSySXtor/buR5bqS2km395amJkvn3IsHmfOauZDVL/vlmrk2+fG7Y8cOSReeYpWk6OhobdiwQfPnz9dtt92mkJAQTZw4UU8++aTPghzgKeFhwXr4Z5fr1z8drLc/OqSfPPSxSsrtim4Xqi1/+5769uzg7xKBgDcqoe4r95T02THpmf+MIRiS3hgjBRHi0EKEuUb06dNHa9as8WVJLXZ6y7s68tcHJadDzlq74r+/UDFjLvyL5lTOGhW8+EvJUauInoOUPP8lBUdGS5Kqjx/SoZVzVHkkT0ZQsOLG/UxdJv7c1y8FXhAWGqwpN/bS/3vsc5WU2xXVLpQgB/hYase6r2d3Sw7VhTmCHDyBMBcAnE6nDjx5h/r9YaMik9NVdTRfO+cMUMerblFwZFR9u9qKUh388yz1/+MmWRIH6NDKuSrK+r0Sf7xUTqdT+xZ9X/G3/kqdrqmbNLnmFIsEAgDQ2rXJ84YbNmyQ0+nUhAkT/F2K5xiGastOSZJqK84oJCpGRmh4gyZntq5TZO8hsiTW3WIVN+5encxeJUkq+Wq9jJDw+iAnSaEduY4KAIDWrk2OzAUawzDU+5dZ2rfoFgVb2sleWqw+v3pTQaFhDdpVHz+ksC7fTmIU3jVZNcVFctbaVXl4l0I6xGn/0ttUeWSPwrskK/Enjys8vrevXw4AAHBBmxyZCzTOWruKXvsv9fn1mxr0/EH1+/16HXhqpuxnbM3fh8Oukq83KGH6b5X21DZFD/mu9j82zYtVAwAATyDMBYDy/dtVc7JQUQNHSpLa9b1CYTGJKt+/rUG7sLgeqj52sP5x1dF8hXZKkBEcorDYHorsPUQRPQZKkjqPnqny/VvltNf47oUAAACXEeYCQFhckmpOFqnicK4kqbJor6qs+2Tp3r9Bu+ghN6l8/1ZVFtTdF3983TPqfO1tdduGjlP1iQJVnzgiSTrz5buyJKbKCHF9sXYAAOA7XDMXAEI7dlXPOc9p/9JpMowgOZ0O9Zi9QmFxPVT414cU2rmb4sbdo+DIKPWc87z2/vFmqdYuS8/L1Gv+y5KkYEs79fzZs9r7+wmS06ngyA7q/cu/+feFAQCASyLMBYjOI29X55G3X/B8txmPNnjc8crJ6njl5Eb3ET3kRqUNudEr9QEAAO/gNCsAAICJEeYAAABMjDAHAABgYoQ5AAAAEyPMAQAAmBh3s7ZSlmApe7y/q2g+S7C/KwAABJTwcIWsftnfVbgmPPzSbbyAMNdKGYYUwb8OAKCNMgxDslj8XYYpcJoVAADAxAhzAAAAJkaYAwAAMDHCHAAAgIkR5gAAAEyMMAcAAGBihDkAAAATI8wBAAC
YGGEOAADAxAhzAAAAJkaYAwAAMDHCHAAAgIkR5gAAAEyMMAcAAGBihDkAAAATI8wBAACYGGEOAADAxAhzAAAAJkaYAwAAMDHCHAAAgIkR5gAAAEyMMAcAAGBihDkAAAATI8wBAACYGGEOCHAOh1PfHDyt1e/vV1lFjSSprMKujTlFOlNa7efqAAAtZTidTqe/iwDgWXa7Q2s+PqTnXt+jT7cf1ZnSmibb9u0ZrVvHJuvuKQOU3D3Kh1UCbdPwtyWH6kZTvpjs72oQCAhzQABxOJx6+m+79NiLO1RwtMylvoYhTRzZQ0vvu0L9e3X0ToEACHPwOE6zAgFi76Ezuu7HazVv8ecuBzlJcjqldzYdUsa0t/T4yztUW+vwQpUAAE8jzAEBYO3Hh5Q+5U19su1oi/dVWVWrXz7+hcbd+75Ky5s+PQsAaB0Ic4DJvbUhXzf/vw9VUVnr0f3+87NCffee91RGoAOAVo0wh4BRXVOrRc9/pcoqu9v7OGwt1TN/2+XBqrzr86+OafrCj2S3e+fS139tP6ZpCzeIS2sBoPUK8XcBgCdU19Rq6oINenvjIX2yzao3nrhelnDX3t6HraUa9ZN3tb+gRKdKqvXAXRneKdZDKirt+uGDH6u6pvnXtuWsmqz42EhZbeW64va3m9Xn3ewCPff6Ht09dYC7pQIAvKhNjMzZbDZlZmYqJSVFFotFSUlJmj9/vsrKyjRr1iwZhqEVK1b4u0y0QO7+U/rw80JJdeHj1vvWuzRCd26Qk6QX//FNq79e7MEVXyrv4GmX+sTHRiqxazvFx0a61O+Xj3+h/CMlLvUBAPhGwIe57du3a9CgQVq6dKmsVqvS0tJUU1Oj5cuXa/r06crNzZUkZWRk+LdQtMjg/jFa98yNirTUjca5EujOD3IpPaK18S/j1T4y1Ks1t8SholI99X87fXa80vIaPfLsNp8dDwDQfAEd5mw2myZNmiSr1aoFCxaoqKhIW7duldVq1ZIlS7R27Vrl5OTIMAylp6f7u1y00MhhCS4HuqaCXPeu7XxSs7uee323HA7fXsf2t/f268SpSp8eEwBwaQEd5ubNm6eCggLNnTtXy5YtU1TUt7PbZ2ZmavDgwbLb7UpOTlZ0dLQfK4WnuBLozBrkqmtq9T9v7PH5cSuravXiW9/4/LgAgIsL2DCXm5urrKwsxcbGatGiRY22GTp0qCRp8ODB9c+dDX/Dhw9XeHi4DMPwSb3wnOYEOrMGOUnastOmYyf9M0K2NvuwX44LAGhawIa5VatWyeFwaMaMGWrfvn2jbSIiIiQ1DHN79+7VG2+8ofj4eF1xxRU+qRWed7FAZ+YgJ0lf7rL57dhbc20+P70LALi4gA1zGzZskCSNHj26yTYFBQWSGoa5kSNHqqioSG+//bbGjh3r3SLhVY0FuglzPtDIH681bZCTpC93nfDbsc+U1mjf4TN+Oz4A4EIBG+YOHjwoSerZs2ej2+12uz799FNJDcNcUFDA/i9pk84PdBu+KFL+kVJJ5gxyUt2drP49vuvrvgIAvCdgJw0uK6v7wKmoqGh0e1ZWlmw2m6KiotSrVy+v1jJs2DBZrVavHgMXFx6aqvL206Wz10A6HSr56hFdOfQX/i3MDcejZkmhPRrddnZS4KbEx0bU//fwP2+76HGamlh4+u0zZKnhRgjAXV1XHJQRFKxaR60SExsfcEDbEx8fry1btrjVN2DDXHx8vIqLi7V161aNGDGiwbaioiItXLhQkpSenu71mxysVquOHDni1WPgIkI7Sb3GfBvkJMkI0tHg66VDz0hO95f/8ote5VITU+CdnRT4UkKCg5rVrjEnjh+Vyng/A+7qenZ5PKeTzwZ4RMCGubFjxyo3N1dLlizRDTfcoH79+kmScnJyNHPmTNlsdReR+2Ky4Pj4eK8fA42zB0XLFvVj1QZ3rnvCWSsZwXXfR6crvN99iinNkiHzBLoTYbVq6l5Wq638on3jYyMUEhwke61DVlvjo9a
X2ldcTKTCOnZvTqkAGnP2D0vDUPfu/CyhTkuyQsCGuczMTL366qs6fPiwBg4cqAEDBqiyslJ79+7VuHHjlJycrPfff7/B9XLe4u6wKVrm7F2rR8+52aG0rFrWE5UyJDklVYX1U8b3Vrq1lqu/LP7LV/r1nxp/T11qvdXD/7xNiV3byWqrUNINf3P52OFhwSrYu1lhocEu9wVQZ/jbkkNScFBw/Y14QEsE7NX+iYmJys7O1oQJE2SxWJSfn6/OnTtr5cqVWrt2rfLy8iTJJ2EOvtfU9CPBwXVv+ZiO4W4t/dUaDE2L9dux0/t1IsgBQCsTsGFOklJTU7VmzRqVlJSopKREmzdv1uzZs1VWVqb8/HwFBQXpsssu83eZ8LDmzCMXHhbs9lqu/nbloDhFWPwTqEZfkeCX4wIAmhbQYa4pO3fulNPpVN++fRUZeeGdf6+//rpef/117dq1q8FjTpe2fq5MCOzOWq6tQXT7MP1gXB+fH9cwpNlTBvj8uACAi2uTYW7Hjh2Smj7FOnXqVE2dOlWvvfZag8crVqzwWY1wnTsrO5g10N07PdXnx7zpmkT1SWINYwBobQhzjXA6nY1+vfTSSz6sEq5oyRJdTQW6qupar9bcEpenxWryqMbnmvMGw5AenJ3hs+MBAJqPMIeAEBIcpLDQurezOys7nB/owsOCFeTl+Qdb6r8fvFodo8J8cqxfzLxMV2d09cmxAACuaZNhbsOGDXI6nZowYYK/S4GHJMRFasPz4zXpuh5uL9F1NtDNnJiirMfGKDS0df94dOvSTn/+9YhLNzyH1VaugqNll5yP7lz9kzvov+YOdbU8AICPGE7n2amogcCXOHaVjhwrV/cukSr48HZ/l+MRv3tmqx55dptX9t2tS6Q+eWmieiVGeWX/QFt0dp65IElfTPZ3NQgErXvoAcAlPfyzIXp0zuUe329yt/ba9MIEghwAtHKEOcDkDMPQb+8eojefvF5dOls8ss8pNyRr818nK6UHd68CQGtHmAMCxPevT9bOv9+qH4zvI3fv3UiIi1TW0tF67fHr1SUmwrMFAgC8gjAHBJDYThb9dfEo7Vs7Tb+ala7YTs0bqRt1RYJWLxujg+9N17Tv9vZylQAATzLHyuIAXNIrMUqL5l+h/5o7VHvyT+vLXTZt231CK1/brfLKWkVagvXbu4do2MBYXZ4aq84dwv1dMgDATYQ5IIAFBwcprU8npfXppJmT+mr1+wdUXlmuTtHh+tUs5lkEgEDAaVYAAAATI8wBAACYGGEOAADAxAhzAAAAJkaYAwAAMDHCHAAAgIkR5gAAaIWSk5PVv39/ZWRkKC0tTU8//XSTbVesWKHFixc3eM7pdGrMmDHq2LFjk/2OHj2qW265Renp6UpNTdVTTz1Vv23NmjWaPXt2S18GfIAwBwBAK5WVlaXt27dr3bp1euCBB/T1119f0KaiokJPPPGEfv7znzd4/sknn1SfPn0uuv/77rtPaWlp+vrrr7Vlyxa99NJLysnJkSRNnDhRX375pb755hvPvSB4BWEOAIBWrmfPnurfv7/y8vIu2Pb666/rmmuuUbt27eqf27lzp9566y396le/uuh+v/rqK40fP16S1K5dO40cOVL/+7//W7992rRpev755z30KuAthDkAAFq5HTt2aPfu3Ro8+MKVWzZu3Kgrr7yy/nFNTY3uuusurVy5UsHBwRfd79ChQ/Xqq6/K4XDo+PHjev/995Wfn1+/fcSIEVq/fr3HXge8gzAHAEArNX36dGVkZOjuu+/WCy+8oL59+17QpqCgQF27dq1//Mgjj+iWW25RamrqJff/+OOPq7S0VEOGDNEPfvADjRo1SiEh3670GR8fr4KCAs+8GHgNa7MCANBKZWVlKSMj46JtIiMjVVlZWf9406ZNOnTokFasWCG73a4zZ84oOTlZOTk5iouLa9A3NjZWL730Uv3je+65RwMHDqx/XFlZqYiICI+8FngPYQ4AABNLT0/Xnj1
76h9nZ2fXf5+fn6+MjIwGp07PdeLECUVHRys0NFTbtm3TW2+9pW3bttVvz83NbfTULloXTrMCAGBiU6ZM0fvvv9/s9hkZGSosLJQkffHFF0pLS1NqaqruvvturV69WgkJCfVt33vvPU2ZMsXjNcOzGJkDAKAVamo07XyDBg1Sly5dlJOToyuuuKLBtuTkZJ06darBc9u3b6//fty4cU1OPWKz2fTll19q5cqVrpQNP2BkDgAAk1u+fLmOHj3q0X3u27dPzz77rMLCwjy6X3geI3MAAJhcnz59LjlBsKvOne4ErRsjcwAAACZGmAMAADAxwhwAAICJEeYAAABMjDAHAABgYoQ5AAAAEyPMAQAAmBjzzLVSTqdTqqrydxnNFx4uwzD8XQUQMJxOqbLW31W4xhIs8WsA8D3CXGtVVSX7tB/6u4pmC1n9smSx+LsMIGBU1krXvuvvKlyTPV6K4FMF8DlOswIAAJgYYQ4AAMDECHMAAAAmRpgDAAAwMcIcAACAiRHmAAAATIwwBwAAYGLMCAQAAaRkx0blPTi6wXNBlnYK79ZPMaNmqsvEn8sI5lc/EEj4iQaAANRp5O3qMHS85HSqptiqExtfUcEL96myIFc95zzn7/IAeBBhDgACUGTvyxUz6o76x3Hj79XOewfI9s/n1e2OPyi0Q5wfqwPgSVwzByBgOJ1OHSoqbdE+TpdUq/iMidZFbqZgSzu163+V5HSqyrrP3+UA8CDCHICA4HQ6NX/J58qY+ndty7W5tY/TJdW66Wfv6ca73wvIQHc2xIW07+znSgB4EmEOQED486u79OdXd6n4TLWuv2udy4HubJD7/Ovj2rLTph/cv9E7hfqIo6pc9jM21Zw+ror8HTr07BxV7N+myL7DZenez9/lAfCgNhHmbDabMjMzlZKSIovFoqSkJM2fP19lZWWaNWuWDMPQihUr/F2mV2yyHVPYO6v1xL7dTbYJe2e1bt6c7cOqAM/78c19dc2QrpLkcqA7N8hJUkzHcC35xRVeq9UXilY9rK9mxunrO7to1/x0HV/3jDqOuEUpv/mHv0trsyrsUs5xyfmfx86LtgaaL+BvgNi+fbvGjRsnq9Wqdu3aKS0tTYWFhVq+fLn27dunkydPSpIyMjL8WyiAFolqF6Z1z9yocfd+oE+3Ha0PdOv/Z5yGpMY22a+xILfh+fFK72fuU5Gx352tTldPlbO2RhUHd8j65hJV2wpkhFrq25TszNbeR8dd0Ndpr5bTUauhf6/1ZckB63CplHVAWnNYKrV/+7xT0m+/lKb3li7r5LfyEAACemTOZrNp0qRJslqtWrBggYqKirR161ZZrVYtWbJEa9euVU5OjgzDUHp6ur/LBdBCZwNdc0foAjXISVJ4Ql9FZ4xVh6HjFH9LplJ+847K9+bo0H/fU98mauC1GpJV2uBr4DN5ComKVbcf/N6P1QeOz49JMzZJfzvQMMidte6I9OPsurAHuCugw9y8efNUUFCguXPnatmyZYqKiqrflpmZqcGDB8tutys5OVnR0dF+rBSApzQ30AVykGtM+9Sr1XnUTBV/kqXS3H812sZRU6X9i29R+7TvKGHqAz6uMPB8fVK67wup/BIDnE5JS3dIaw75pCwEoIANc7m5ucrKylJsbKwWLVrUaJuhQ4dKkgYPHlz/3Ouvv65bb71VPXv2VGRkpAYMGKDf/OY3Ki1t2XQH/lZeWytbVVWjX0CguVSga2tB7qyE6b+VgoJV+OpDjW4/9Mw9ctRUKnn+S74tLAA5ndJjO6RqR/P7LP133XV1gKsC9pq5VatWyeFwaMaMGWrfvn2jbSIiIiQ1DHPLli1Tjx499Mc//lGJiYnavn27HnnkEW3atEkff/yxgoLMmX8f3bNTj+7Z6e8yAJ9p6hq6t/50g+5/8os2F+QkyZKQos7X3qaTm/6qkp3Zihp4bf22Y+8s1+ktazRgWY6CwiP9WGVg2HlK2n3atT5ldum9I9L3e3qlJASwgA1
zGzZskCSNHj26yTYFBQWSGoa5d955R3Fx386Mft111ykuLk4zZszQJ598opEjR3qpYu/6aY/eurVbUqPbxn2+ycfVAL7RWKAb89N3VVtbdx9hWwpyZ8VP/Y1OZq9S4asPqf8fPpIklXz9kQpeuV99H1qn8K7J/i0wQKw57F6/dw4R5uC6gA1zBw8elCT17Nn4T4Xdbtenn34qqWGYOzfInTVs2DBJ0pEjR9yqZdiwYbJarS71iQgK0q6MEW4drzEp7dvr+riuHtvf+fr166cKhwvnE/ykqON9UlAHFVmLlJiY6O9yfK6tvn6HwhQafadqQpLqg5yctQo++KTGj/mVf4trghEWoa5PfeNyv6hBozT0H01PehGRlNrgLtWqo/nav3SaEn+0VFGDRrlTar1+/frKWV3Ron0Eio4/e1GWQTe43G/bviNKvP1KL1SE1i4+Pl5btmxxq2/AhrmysjJJUkVF479YsrKyZLPZFBUVpV69el10Xx99VPfXa2pqqlu1WK1Wl4NgZHCwlOHW4fyisLBQ5bUmmMYgqlYKkhy1tW6Hc1Nrq68/KEKy1Jz3G8/QsWPHpcrW+f8hKDxS3vvzq46jqlz7Ft2sDsMnq8uEuS3eX2FhoRxV5R6ozPwsFRWyXLrZBex2e9v62YRHBGyYi4+PV3FxsbZu3aoRIxqOcBUVFWnhwoWSpPT0dBmG0eR+jhw5ot/+9re66aab3J6LLj4+3uU+ESa7Nq9bt27mGJkLDpZDUlBwsBK6d/d3OT7XFl+/wwiXLWqmakL+c5mB0ykZhmQEyeizULElLyus1rWRc18wwiK8fozif72higNfqfJInoo/ybpg+8AVuxQW16PZ++vWrRsjc/8RWnHSrX7GmaPq3kZ+NtGQO1nhrIANc2PHjlVubq6WLFmiG264Qf361S1fk5OTo5kzZ8pmq7ur7WIBrbS0VN/73vcUFhamF154we1a3Bk2dVZWyj7th24f09fy8vJkWNz5O9S3Eseu0pFj5UqIT1DBvwv8XY7PtbXXf/au1aJzbnYwDMlWXHcXtzMoUvak+Xr/EhML+0OFXbr2Xe8eI2b0TMWMnumx/eXlfaOIgP1Ucc2uU9KdH7ve79Fbh+vm+wL/ZxOeZa7hHxdkZmYqJiZGhw8f1sCBAzVo0CD17dtXw4cPV+/evTVmzBhJDa+XO1dFRYUmTZqkAwcO6IMPPlBCQoIvywfQQk1NPxIeGixJCgut+/Xn7lquwMWkdaz7ckX7EOkmBuXghoANc4mJicrOztaECRNksViUn5+vzp07a+XKlVq7dq3y8vIkNR7mampqNGXKFG3ZskXr1q1TWlqar8sH0ALNmUcupkO422u5As1x/yAp3IVP2fvTJQsjm3BDQL9tUlNTtWbNmgueLy0tVX5+voKCgnTZZZc12HZ2brr169fr3Xff1fDhw31VrldcF9tF1ZOmXbTNpbYDZtLcCYGDggy31nIFmmtgJ+nJK6WFOXVzyDUlSNKv0qVxbecGc3hYwI7MXczOnTvldDrVt29fRUY2nBxzzpw5eu211/SLX/xCkZGR+vzzz+u/jh8/7qeKATSHqys7uLqWK+Cq4XHSqlHSzD5Sh9CG28KDpElJ0isjpVuS/VEdAkVAj8w1ZceOHZIaP8W6bt06SdLixYu1ePHiBttefPFF/ehHP/J6fQBc5+4SXU2tFMEIHTylW6Q0f6B09wAp95RUWiNFhEgp0VLHMH9Xh0BAmDtPfn6+j6sB4An/u2av20t0NRboHlzxpdY+/V1vluxReQ/fKHuxVQoKUnBElJLuWq7I3kMatHE6HDrycqZOb31Pzlq72qdeox73/LeCQsNUkb9Dh1bOUc3pYzKCQ9Su73D1uPtpBYV7f4qUtsISLA2J8XcVCERt8jTrxcIcAHOac1uq7rvzMreX6Dr3lOuVg+L06uKmlwJsjXovXK205V8r7ant6jL5PuX/6UcXtLF9+BeV79uq1Ce2auDTuTKMIB1750+
SJCPMoqS7V+iyZ3Yr7amv5Kgsk/XNJT5+FQDc0SZH5s6u2wogcBiGoWULhuu+mZepe9d2bu3jbKBzOKQOUeY6/xXSvmP997Xlp+smRj5PxYGvFDV4rIJC615b9NBxKlr1O8XfslCWbn3r2xnBwYrse4UqD/3b63UDaLk2GeYABCbDMNwOcmdFtTNXiDvXgSfvVMmOuuUH+z504YzDkX2Gyvb+SnWZMFdBYREq/mS1qo7lX9CutrJMtn8+r+4zF3m7ZAAeQJgDgADR6xevSJJObHhZBa/cf0Ggi7n+R6o+flB7HrhOQWERih48Vme2f9CgjaOmWgeWTld0xo3qNOL7PqsdgPva5DVzABDIYsb8UCU7PpL9zIkGzxuGoW63/05pT23TgMf+JUtSmiJ6DKzf7rTX6MDS6QrtlKCku/7k67IBuIkwBwAmZy89peoThfWPT33+lkKiYhQc1fAmEEd1peylxXV9zthkfXOxun4/U5LkrLVr/7LbFBzVWT3mPCejkWvuALROnGYFAJOrLT+t/Y9NlaO6QoYRpJDoOKU8uEaGYSj/zz9Vx+GT1fHKyaotP62834ySjCDJ6VCXifPVcfgkSdLJ7Cyd+uxNRSSnK/cXdVOatB9wjXrc87QfXxmA5iDMAYDJhXfpqdRlXzS6Lfnnz9d/H9qxqwY+ndtou5hRMxQzaoZX6gPgXZxmBQAAMDHCHAAAgIkR5gAAAEyMMAcAAGBihDkAAAAT427W1io8XCGrX/Z3Fc0XHu7vCoCAYgmWssf7uwrXWIL9XQHQNhHmWinDMCSLxd9lAPATw5Ai+A0NoBk4zQoAAGBihDkAAAATI8wBAACYGGEOAADAxAhzAAAAJkaYAwAAMDHCHAAAgIkR5gAAAEyMMAcAAGBihDkAAAATI8wBAACYGGEOAADAxAhzAAAAJkaYAwAAMDHCHAAAgIkR5gAAAEyMMAcAAGBihDkAAAATI8wBAACYGGEOAADAxAhzAAAAJkaYAwAAMDHCHAAAgIkR5gAAAEyMMAcAAGBihDkAAAATI8wBAACYGGEOAADAxAhzAAAAJkaYAwAAMDHCHAJG4bEyjb/3fR22lrq9j4++KNQP7v9I1TW1HqwMvpL9pVXTF25QZZXd7X0se2mH/vg/2z1XFAB4WYi/CwA8ofBYmUbNelffHDyjUT95VxtfGK+k+PYu7eOjLwo1Ye4HqqisVVmFXa89PkZhocFeqhielv2lVePufV9lFXaVltfojSeulyXctV9xy17aoYVPfFH/+IG7MjxcJQB4XpsYmbPZbMrMzFRKSoosFouSkpI0f/58lZWVadasWTIMQytWrPB3mWgBp7PuS5L2F5Ro1E/edWmE7twgd/7+YA5Op7P+3+zd7ALdet96l0bozg9yDt4AAEwi4MPc9u3bNWjQIC1dulRWq1VpaWmqqanR8uXLNX36dOXm5kqSMjIy/FsoWqR713ba+JfxSukRLcm1QHd+kJt0XQ+99vgYhYcFxqhcTY1Db/zzgG6YvU5FtnJJkvVEhTKf+EL7C874uTrPGTksQeueuVGRlrrROFcC3flB7vdzL9eDs4d4rVYA8KSADnM2m02TJk2S1WrVggULVFRUpK1bt8pqtWrJkiVau3atcnJyZBiG0tPT/V0uWsidQBfoQe6LHcfVZ8JqTVmwQR9+XiiHo+752lqnlr60QykTXtPsRz4JmGsE3Ql0BDkAZhfQYW7evHkqKCjQ3LlztWzZMkVFRdVvy8zM1ODBg2W325WcnKzo6Gg/VgpPcSXQBXqQ++yroxr1k7U6bC1rso3TKf3PG3s0dcEG1dY6fFid97gS6AhyAAJBwIa53NxcZWVlKTY2VosWLWq0zdChQyVJgwcPrn8uOztbY8eOVUJCgsLDw5WYmNjgdCxav+YEuqrq2oAOcuUVdt08/0NVVDVvxO3tjYf02Is7vFyV7zQn0JWU1xDkAASEgA1zq1atksPh0IwZM9S+feN3NUZEREhqGOa
Ki4s1aNAgLV++XB988IGWLFminTt3asSIESooKPBJ7Wi5pgKd/T+jTydOVQVskJOkVev26djJSpf6rPjbLtXUBMbonNR0oHP+58aGM6U19W0JcgDMLGDD3IYNGyRJo0ePbrLN2XB2bpibPHmynnzySU2dOlXXXXedZsyYoTfffFOnT5/WG2+84d2i4VGNBbrjxXUB5+x9ioEY5CTpmSzXR5ILj5XrnU2HvFCN/zQW6M4PuQQ5AGZnOJ2Bef99UlKSCgoKtG3btkbvVLXb7UpISJDNZtO+ffvUu3fvJvd14sQJxcbGasWKFZozZ47LtQwbNkxWq9XlfvCMWiNKx6N/rNrgmAbPW6p3q3PpahkKjIv/z3LKUGHn37nVt33Fx+pQsd6j9bQGVSE9dSLqDjmNsAbPR5WvV3Tlx36qCgC+FR8fry1btrjVN2AnDS4rq7vou6KiotHtWVlZstlsioqKUq9evS7YXltbK4fDoYMHD+rXv/614uPjNW3aNLdqsVqtOnLkiFt94SHRq6Ued0vGfwajnU5VHnhZhdXH/VuXNxhhUmf3upaWVau0KBDfq0ekbslSzJhvn3LWqmT/aypxur9aBAC0BgEb5uLj41VcXKytW7dqxIgRDbYVFRVp4cKFkqT09HQZhnFB/+uuu06ffvqpJCklJUUbNmxQXFyc27XAf6pCknUiaoacxjlXFRiGglMWKrbkRYU4AmeuNanuFHKhs1YyXD91HNUuRNHdu3u+KD8rsVytM5FjGj5pBCu8332KKc2SIQIdAP9qSVYI2NOs8+bN05///GclJSXpww8/VL9+/SRJOTk5mjlzpvbv36+amhrNmTOn0dUf9uzZo1OnTunAgQNaunSpjh07pk8//VQ9evTw9UtBC5w//cjYq7rpwJES7TtcIknqnRjl1tJfrd3kn//TrevfPv+/SboyvYsXKvKf86cfmfX9vlq17oDKK+sC3PhrE91a+gsAWouAvQEiMzNTMTExOnz4sAYOHKhBgwapb9++Gj58uHr37q0xY+r+Sj/35odz9e/fX1deeaVuu+02rV+/XiUlJXrsscd8+RLQQo3NI7dmxY3a9MIEt1aKMJN7p6e63Ofy1BgNH+Te6HNr1dg8cs8/MtLtlSIAoDUK2DCXmJio7OxsTZgwQRaLRfn5+ercubNWrlyptWvXKi8vT1LTYe5cHTt2VEpKivbu3evtsuEhF5sQuCVLf5nFjVd31/DLXAtmD87OaPSSA7O62ITALVn6CwBam4A9zXoxpaWlio6OlmEYKikpUWRk5EXbHzt2TH369NGdd96pp59+2kdVwl3NXdnhyNEyjZr1rvYeqrtmLtBOuR49UaHrfrxWe/JPX7Lt0vuG65c/GuSDqnyjuSs7fLylSOPu/YBTrgBMrU2Guc2bN+uqq65S//79tXv37gbb7rjjDqWkpCgjI0MdO3bUN998oyeffFJFRUXKyclRSkqKn6pGc7i6RFegB7qTp6s0b/FnWv3+AdXYL5wQuE9SlH4/Z6huH9/HD9V5h6tLdBHoAJhdmwxzzz//vO666y5NmzZNWVlZDbatWLFCr7zyir755htVVlYqKSlJo0eP1gMPPKCePXv6qWI0h7trrQZ6oJPqRulefCtP/95brMqqWsV0DNct1yfrhhHdFRTUNk6tXgyBDoCZtckwN3/+fC1fvlx/+MMf9MADD/i7HHjA9t0ndPWd77i9RFdjge6r17+v9pGhXqsZnvWXN/fop7/7pP6xqys7nB/optyQrNcev97jdQKApwXsDRAXs2NH3YLizbn5AeYwsE8n3XRNoiT3lug6/6aIu6cMIMiZzPhrkzSgVwdJ7i3Rde5NERGWYN0z1fU7ggHAH9rkyBwCU02NQ0/93781b8ZAt9daPXK0TGuzD2v2lAEerg6+UHS8XP/46KDumeZ+EPt4S5Fq7E5df1U3D1YGAN5DmAMAADCxNnmaFQAAIFAQ5gAAAEyMMAcAAGBihDkAAAATI8w
BAACYGGEOAADAxAhzAAAAJkaYAwAAMDHCHAAAgIkR5gAAAEyMMAcAAGBihDkAAAATI8wBAACYGGEOAADAxAhzAAAAJkaYAwAAMDHCHAAAgIkR5gAAAEyMMAcAAGBihDkAAAATI8wBAACYGGEOAADAxAhzAAAAJkaYAwAAMDHCHAAAgIkR5gAAAEyMMAcAAGBihDkAAAATI8wBAACYGGEOAADAxAhzAAAAJkaYAwAAMDHCHAAAgIkR5gAAAEyMMAcAAGBihDkAAAATI8wBAACY2P8HekKsAazjPpEAAAAASUVORK5CYII=", + "text/plain": [ + "
" + ] + }, + "execution_count": null, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "ind = (gate_cnts == 8).nonzero().squeeze()[:1]\n", + "\n", + "qc_list, _ = decode_tensors_to_backend(simulator, tokenizer, target_xs[ind], target_ps[ind])\n", + "qc_list[0].draw(\"mpl\")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "6dbf2639-9bcb-402f-89bd-bdfef47a5d29", + "metadata": {}, + "outputs": [ + { + "data": { + "application/vnd.jupyter.widget-view+json": { + "model_id": "1d212f97803d4180bc93e2ccc8277855", + "version_major": 2, + "version_minor": 0 + }, + "text/plain": [ + " 0%| | 0/40 [00:00" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "plot_k_best = 4\n", + "\n", + "idx = np.argsort(U_norms)\n", + "fig, axs = plt.subplots(1, plot_k_best, figsize=(10, 2), constrained_layout=True, dpi=150)\n", + "\n", + "for i, (idx_i, ax) in enumerate(zip(idx[:plot_k_best], axs.flatten())): \n", + " ax.clear()\n", + " generated_qc_list[idx_i].draw(\"mpl\", plot_barriers=False, ax=ax)\n", + " ax.set_title(f\"The {i+1}. best circuit: \\n infidelity {U_norms[idx_i]:0.1e}.\", fontsize=10)" + ] + }, + { + "cell_type": "markdown", + "id": "83e74d47-2ea6-4489-8db8-1c12f911cee4", + "metadata": {}, + "source": [ + "## Compile testset unitaries" + ] + }, + { + "cell_type": "markdown", + "id": "d5ba0233-747b-4010-905b-724cdf98aa16", + "metadata": {}, + "source": [ + "To get an overall performance estimation, we compile multiple unitaries, record the best infidelities and plot the distribution." + ] + }, + { + "cell_type": "markdown", + "id": "b113a7bc-e33d-47be-9e0d-08e5676ce3fd", + "metadata": {}, + "source": [ + "### Generate tensors" + ] + }, + { + "cell_type": "markdown", + "id": "0365b747-1bf9-445d-a2f9-e6926df11732", + "metadata": {}, + "source": [ + "To keep the tutorial short in computation time, we only take a few unitaries here, but this can be adjusted by the user to use the full testset." 
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "26bb8354-af39-4059-a468-b9bc744dab09", + "metadata": {}, + "outputs": [], + "source": [ + "Us = target_us[:16]" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "eb331ca2-b61a-4f3b-88e4-4b37a4a11886", + "metadata": {}, + "outputs": [ + { + "data": { + "application/vnd.jupyter.widget-view+json": { + "model_id": "41ecaa3e39064fdabb2618c82cc2c817", + "version_major": 2, + "version_minor": 0 + }, + "text/plain": [ + " 0%| | 0/16 [00:00" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "plt.figure(figsize=(7, 3), constrained_layout=True, dpi=150)\n", + "plt.title(f\"Best infidelities of {len(best_infidelities)} unitaries, with {num_of_samples_per_U} circuits sampled per unitary.\")\n", + "plt.xlabel(UnitaryInfidelityNorm.name(), fontsize=13)\n", + "plt.ylabel(\"Frequency\", fontsize=13)\n", + "plt.hist(best_infidelities, bins=60)\n", + "plt.xlim([-0.05, 1.05])\n", + "plt.show()" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "d6c5de55-7f37-4b51-9308-e1908b675326", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "8c9909e3-a035-46f7-8979-44be322a5fe0", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "genQC Version 0.2.0\n" + ] + } + ], + "source": [ + "import genQC\n", + "print(\"genQC Version\", genQC.__version__)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "745e9097-b8fb-4621-af9e-e115630c5a1d", + "metadata": {}, + "outputs": [], + "source": [] + } + ], + "metadata": { + "kernelspec": { + "display_name": "python3", + "language": "python", + "name": "python3" + }, + "widgets": { + "application/vnd.jupyter.widget-state+json": { + "state": { + "10aa5cc8502446c0ba1d3917ecd6790c": { + "model_module": "@jupyter-widgets/controls", + "model_module_version": "2.0.0", + "model_name": 
"HTMLStyleModel", + "state": { + "description_width": "", + "font_size": null, + "text_color": null + } + }, + "13d1f24a857c4b97b07ec36fe2b2f3e8": { + "model_module": "@jupyter-widgets/base", + "model_module_version": "2.0.0", + "model_name": "LayoutModel", + "state": {} + }, + "16846c1990e24e3b9b246ca483e670aa": { + "model_module": "@jupyter-widgets/controls", + "model_module_version": "2.0.0", + "model_name": "HTMLStyleModel", + "state": { + "description_width": "", + "font_size": null, + "text_color": null + } + }, + "173f24fff9024479b51f8642c2a20890": { + "model_module": "@jupyter-widgets/controls", + "model_module_version": "2.0.0", + "model_name": "HTMLModel", + "state": { + "layout": "IPY_MODEL_d3fb9ba04cbe4258b54914829554616d", + "style": "IPY_MODEL_4e66f780d7154ec4b7984ea9d58a94d8", + "value": " 40/40 [00:15<00:00,  2.67it/s]" + } + }, + "1d212f97803d4180bc93e2ccc8277855": { + "model_module": "@jupyter-widgets/controls", + "model_module_version": "2.0.0", + "model_name": "HBoxModel", + "state": { + "children": [ + "IPY_MODEL_5098ae4c3dbd4e2daadd9a7cd3358c29", + "IPY_MODEL_dc6cbcb4369a41e69129099a53150b72", + "IPY_MODEL_173f24fff9024479b51f8642c2a20890" + ], + "layout": "IPY_MODEL_840bce2699264e83a13ef0f0e5a46f2e" + } + }, + "26b2f763e7fe47fb92bac12432a45adc": { + "model_module": "@jupyter-widgets/controls", + "model_module_version": "2.0.0", + "model_name": "HTMLModel", + "state": { + "layout": "IPY_MODEL_5bc6d07104954da5835822c034ed472f", + "style": "IPY_MODEL_d08e198700c640f8bafd47ce5af3182d", + "value": " 16/16 [04:08<00:00, 15.70s/it]" + } + }, + "363950db340a4e0796907fa34e41d56d": { + "model_module": "@jupyter-widgets/base", + "model_module_version": "2.0.0", + "model_name": "LayoutModel", + "state": {} + }, + "41ecaa3e39064fdabb2618c82cc2c817": { + "model_module": "@jupyter-widgets/controls", + "model_module_version": "2.0.0", + "model_name": "HBoxModel", + "state": { + "children": [ + "IPY_MODEL_bafdd6a005834225a60dc2d1e061aa64", + 
"IPY_MODEL_d580fa8c95e949a0a71e6341a5e0ec2e", + "IPY_MODEL_26b2f763e7fe47fb92bac12432a45adc" + ], + "layout": "IPY_MODEL_13d1f24a857c4b97b07ec36fe2b2f3e8" + } + }, + "48e5d282b12349388b6fdec4929adfdb": { + "model_module": "@jupyter-widgets/base", + "model_module_version": "2.0.0", + "model_name": "LayoutModel", + "state": {} + }, + "4e66f780d7154ec4b7984ea9d58a94d8": { + "model_module": "@jupyter-widgets/controls", + "model_module_version": "2.0.0", + "model_name": "HTMLStyleModel", + "state": { + "description_width": "", + "font_size": null, + "text_color": null + } + }, + "5098ae4c3dbd4e2daadd9a7cd3358c29": { + "model_module": "@jupyter-widgets/controls", + "model_module_version": "2.0.0", + "model_name": "HTMLModel", + "state": { + "layout": "IPY_MODEL_aa556319d474425f9a0f1f28b45bbe1a", + "style": "IPY_MODEL_16846c1990e24e3b9b246ca483e670aa", + "value": "100%" + } + }, + "5bc6d07104954da5835822c034ed472f": { + "model_module": "@jupyter-widgets/base", + "model_module_version": "2.0.0", + "model_name": "LayoutModel", + "state": {} + }, + "616b4af59c894e58afe303d3dbbaf8c1": { + "model_module": "@jupyter-widgets/controls", + "model_module_version": "2.0.0", + "model_name": "HTMLStyleModel", + "state": { + "description_width": "", + "font_size": null, + "text_color": null + } + }, + "6dbaefdf5c8e48c28cd867610e6ca21a": { + "model_module": "@jupyter-widgets/controls", + "model_module_version": "2.0.0", + "model_name": "HTMLModel", + "state": { + "layout": "IPY_MODEL_721b89bfef704fe7a381047f9c8f4c8c", + "style": "IPY_MODEL_10aa5cc8502446c0ba1d3917ecd6790c", + "value": "Fetching 4 files: 100%" + } + }, + "721b89bfef704fe7a381047f9c8f4c8c": { + "model_module": "@jupyter-widgets/base", + "model_module_version": "2.0.0", + "model_name": "LayoutModel", + "state": {} + }, + "803c865f87fd4693bd655a8f82195f8f": { + "model_module": "@jupyter-widgets/controls", + "model_module_version": "2.0.0", + "model_name": "ProgressStyleModel", + "state": { + "description_width": "" + } + }, 
+ "836761d0b78f459c98dfbf1e90db4711": { + "model_module": "@jupyter-widgets/controls", + "model_module_version": "2.0.0", + "model_name": "ProgressStyleModel", + "state": { + "description_width": "" + } + }, + "840bce2699264e83a13ef0f0e5a46f2e": { + "model_module": "@jupyter-widgets/base", + "model_module_version": "2.0.0", + "model_name": "LayoutModel", + "state": {} + }, + "8a2c3bff466c4dd683f7a4162a39e5d8": { + "model_module": "@jupyter-widgets/base", + "model_module_version": "2.0.0", + "model_name": "LayoutModel", + "state": {} + }, + "8e6a61b9182c484fa1ea066833ffdb70": { + "model_module": "@jupyter-widgets/base", + "model_module_version": "2.0.0", + "model_name": "LayoutModel", + "state": {} + }, + "93e3597245eb4104885fde076800959a": { + "model_module": "@jupyter-widgets/controls", + "model_module_version": "2.0.0", + "model_name": "HBoxModel", + "state": { + "children": [ + "IPY_MODEL_6dbaefdf5c8e48c28cd867610e6ca21a", + "IPY_MODEL_d109cd9c508546b79fd97126c9172a9f", + "IPY_MODEL_b2f7f31612984c84a20353334cefd4b1" + ], + "layout": "IPY_MODEL_bcbe8bfcb5264f388d7d942c24d7ebd3" + } + }, + "9fbfde92ec4d4e1c9c6a824cbfaabbbf": { + "model_module": "@jupyter-widgets/controls", + "model_module_version": "2.0.0", + "model_name": "ProgressStyleModel", + "state": { + "description_width": "" + } + }, + "aa556319d474425f9a0f1f28b45bbe1a": { + "model_module": "@jupyter-widgets/base", + "model_module_version": "2.0.0", + "model_name": "LayoutModel", + "state": {} + }, + "b2f7f31612984c84a20353334cefd4b1": { + "model_module": "@jupyter-widgets/controls", + "model_module_version": "2.0.0", + "model_name": "HTMLModel", + "state": { + "layout": "IPY_MODEL_48e5d282b12349388b6fdec4929adfdb", + "style": "IPY_MODEL_b31576e33c174944ac4c727ffecf7f28", + "value": " 4/4 [00:00<00:00, 800.17it/s]" + } + }, + "b31576e33c174944ac4c727ffecf7f28": { + "model_module": "@jupyter-widgets/controls", + "model_module_version": "2.0.0", + "model_name": "HTMLStyleModel", + "state": { + 
"description_width": "", + "font_size": null, + "text_color": null + } + }, + "bafdd6a005834225a60dc2d1e061aa64": { + "model_module": "@jupyter-widgets/controls", + "model_module_version": "2.0.0", + "model_name": "HTMLModel", + "state": { + "layout": "IPY_MODEL_ddf711e546d3441fb242aff8a2e86981", + "style": "IPY_MODEL_616b4af59c894e58afe303d3dbbaf8c1", + "value": "100%" + } + }, + "bcbe8bfcb5264f388d7d942c24d7ebd3": { + "model_module": "@jupyter-widgets/base", + "model_module_version": "2.0.0", + "model_name": "LayoutModel", + "state": {} + }, + "d08e198700c640f8bafd47ce5af3182d": { + "model_module": "@jupyter-widgets/controls", + "model_module_version": "2.0.0", + "model_name": "HTMLStyleModel", + "state": { + "description_width": "", + "font_size": null, + "text_color": null + } + }, + "d109cd9c508546b79fd97126c9172a9f": { + "model_module": "@jupyter-widgets/controls", + "model_module_version": "2.0.0", + "model_name": "FloatProgressModel", + "state": { + "bar_style": "success", + "layout": "IPY_MODEL_8e6a61b9182c484fa1ea066833ffdb70", + "max": 4, + "style": "IPY_MODEL_9fbfde92ec4d4e1c9c6a824cbfaabbbf", + "value": 4 + } + }, + "d3fb9ba04cbe4258b54914829554616d": { + "model_module": "@jupyter-widgets/base", + "model_module_version": "2.0.0", + "model_name": "LayoutModel", + "state": {} + }, + "d580fa8c95e949a0a71e6341a5e0ec2e": { + "model_module": "@jupyter-widgets/controls", + "model_module_version": "2.0.0", + "model_name": "FloatProgressModel", + "state": { + "bar_style": "success", + "layout": "IPY_MODEL_363950db340a4e0796907fa34e41d56d", + "max": 16, + "style": "IPY_MODEL_836761d0b78f459c98dfbf1e90db4711", + "value": 16 + } + }, + "dc6cbcb4369a41e69129099a53150b72": { + "model_module": "@jupyter-widgets/controls", + "model_module_version": "2.0.0", + "model_name": "FloatProgressModel", + "state": { + "bar_style": "success", + "layout": "IPY_MODEL_8a2c3bff466c4dd683f7a4162a39e5d8", + "max": 40, + "style": "IPY_MODEL_803c865f87fd4693bd655a8f82195f8f", + 
"value": 40 + } + }, + "ddf711e546d3441fb242aff8a2e86981": { + "model_module": "@jupyter-widgets/base", + "model_module_version": "2.0.0", + "model_name": "LayoutModel", + "state": {} + } + }, + "version_major": 2, + "version_minor": 0 + } + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/src/examples/Discrete-continuous circuits with multimodal diffusion/1_qft_and_gpe.ipynb b/src/examples/Discrete-continuous circuits with multimodal diffusion/1_qft_and_gpe.ipynb new file mode 100644 index 0000000..745eb38 --- /dev/null +++ b/src/examples/Discrete-continuous circuits with multimodal diffusion/1_qft_and_gpe.ipynb @@ -0,0 +1,1195 @@ +{ + "cells": [ + { + "cell_type": "raw", + "id": "c4e9a976-c6b5-4ce2-8e92-1dd7cee7c736", + "metadata": {}, + "source": [ + "---\n", + "categories:\n", + " - Unitary compilation\n", + " - Parameterized gates\n", + " - Quantum circuits\n", + " - Pretrained model\n", + "---" + ] + }, + { + "cell_type": "markdown", + "id": "6c172f48-2011-45c9-ba09-b49edc98b7ec", + "metadata": {}, + "source": [ + "# Quantum Fourier transform and gate-pair tokenization\n", + "\n", + "> A short tutorial showing the compilation of the Quantum Fourier transform (QFT) and extracting tokens via Gate-Pair tokenization (GPE)." 
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "24ecd66b-9552-4e9b-aa68-ce292444d85e", + "metadata": {}, + "outputs": [], + "source": [ + "from genQC.imports import *\n", + "import genQC.utils.misc_utils as util\n", + "\n", + "from genQC.dataset.config_dataset import ConfigDataset\n", + "from genQC.pipeline.multimodal_diffusion_pipeline import MultimodalDiffusionPipeline_ParametrizedCompilation\n", + "from genQC.scheduler.scheduler_dpm import DPMScheduler\n", + "\n", + "from genQC.platform.tokenizer.circuits_tokenizer import CircuitTokenizer\n", + "from genQC.platform.simulation import Simulator, CircuitBackendType\n", + "from genQC.inference.sampling import decode_tensors_to_backend, generate_compilation_tensors\n", + "from genQC.inference.evaluation_helper import get_unitaries\n", + "from genQC.inference.eval_metrics import UnitaryInfidelityNorm\n", + "from genQC.benchmark.bench_compilation import SpecialUnitaries\n", + "import genQC.platform.tokenizer.tensor_tokenizer as gpe" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "5ae966b5-0f67-4ffe-9d2b-ed0f23c7c383", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "[INFO]: Cuda device has a capability of 8.6 (>= 8), allowing tf32 matmul.\n" + ] + }, + { + "data": { + "text/plain": [ + "device(type='cuda')" + ] + }, + "execution_count": null, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "util.MemoryCleaner.purge_mem() # clean existing memory alloc\n", + "device = util.infer_torch_device() # use cuda if we can\n", + "device" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "f903ecdc-f07c-4e0a-8fc9-1d1bea2bfec5", + "metadata": {}, + "outputs": [], + "source": [ + "# We set a seed to pytorch, numpy and python. 
\n", + "# Note: This will also set deterministic algorithms, possibly at the cost of reduced performance!\n", + "util.set_seed(0)" + ] + }, + { + "cell_type": "markdown", + "id": "73b1255b-b440-4bb9-89fc-c864fbcafaa1", + "metadata": {}, + "source": [ + "## Load model" + ] + }, + { + "cell_type": "markdown", + "id": "5baf1d7a-5cf5-4d06-919b-75bc0aa2c6f0", + "metadata": {}, + "source": [ + "Load the pre-trained model directly from [Hugging Face: Floki00/cirdit_multimodal_compile_3to5qubit](https://huggingface.co/Floki00/cirdit_multimodal_compile_3to5qubit)." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "6ff4eb91-6929-4d54-b6e5-38f6d7b18fdf", + "metadata": {}, + "outputs": [], + "source": [ + "pipeline = MultimodalDiffusionPipeline_ParametrizedCompilation.from_pretrained(\"Floki00/cirdit_multimodal_compile_3to5qubit\", device)" + ] + }, + { + "cell_type": "markdown", + "id": "1d836ce9-e753-4407-be75-72c5da485328", + "metadata": {}, + "source": [ + "The model is trained with the gate set:" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "13830bb8-7be4-4fed-9983-09f13d1bf8f4", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "['h', 'cx', 'ccx', 'swap', 'rx', 'ry', 'rz', 'cp']" + ] + }, + "execution_count": null, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "pipeline.gate_pool" + ] + }, + { + "cell_type": "markdown", + "id": "2047cc5b-deeb-4bf7-8706-152e1aceddfc", + "metadata": {}, + "source": [ + "which we need in order to define the `vocabulary`, allowing us to decode tokenized circuits." 
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "36c309d1-bd8b-40b4-b0e7-2d887b6963cb", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "{'h': 1, 'cx': 2, 'ccx': 3, 'swap': 4, 'rx': 5, 'ry': 6, 'rz': 7, 'cp': 8}" + ] + }, + "execution_count": null, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "vocabulary = {g:i+1 for i, g in enumerate(pipeline.gate_pool)} \n", + "tokenizer = CircuitTokenizer(vocabulary)\n", + "tokenizer.vocabulary" + ] + }, + { + "cell_type": "markdown", + "id": "67f82d86-963b-4d43-8105-6656d42269b7", + "metadata": {}, + "source": [ + "### Set inference parameters" + ] + }, + { + "cell_type": "markdown", + "id": "d58cd4d1-c1ec-4e27-a286-7d302b467ca0", + "metadata": {}, + "source": [ + "Set diffusion model inference parameters." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "eda49101-93cb-48c6-a761-ca8aae4ad1ba", + "metadata": {}, + "outputs": [], + "source": [ + "pipeline.scheduler = DPMScheduler.from_scheduler(pipeline.scheduler)\n", + "pipeline.scheduler_w = DPMScheduler.from_scheduler(pipeline.scheduler_w)\n", + "\n", + "timesteps = 40\n", + "pipeline.scheduler.set_timesteps(timesteps) \n", + "pipeline.scheduler_w.set_timesteps(timesteps) \n", + "\n", + "pipeline.lambda_h = 1.5\n", + "pipeline.lambda_w = 0.45\n", + "pipeline.g_h = 0.4\n", + "pipeline.g_w = 0.2\n", + "\n", + "# These parameters are specific to our pre-trained model.\n", + "system_size = 5\n", + "max_gates = 32" + ] + }, + { + "cell_type": "markdown", + "id": "3d32dbf9-7279-4edf-bdc7-ed3e33609a7f", + "metadata": {}, + "source": [ + "For evaluation, we also need a circuit simulator backend." 
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "e996c1f7-76e9-460d-ab39-c5b43114ade9", + "metadata": {}, + "outputs": [], + "source": [ + "simulator = Simulator(CircuitBackendType.QISKIT)" + ] + }, + { + "cell_type": "markdown", + "id": "7835d6ad-03d1-4a7b-ab89-939b187468a5", + "metadata": {}, + "source": [ + "## Compile the QFT unitary" + ] + }, + { + "cell_type": "markdown", + "id": "b09547e6-2cb4-4e3f-8ec0-2894bac018cd", + "metadata": {}, + "source": [ + "We now compile the 4-qubit QFT." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "cea55486-b01e-4ee6-a94f-9720740e8176", + "metadata": {}, + "outputs": [], + "source": [ + "samples = 512\n", + "num_of_qubits = 4\n", + "prompt = f\"Compile {num_of_qubits} qubits using: ['h', 'cx', 'ccx', 'swap', 'rx', 'ry', 'rz', 'cp']\"\n", + "\n", + "U = SpecialUnitaries.QFT(num_of_qubits).to(torch.complex64)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "6dbf2639-9bcb-402f-89bd-bdfef47a5d29", + "metadata": {}, + "outputs": [ + { + "data": { + "application/vnd.jupyter.widget-view+json": { + "model_id": "947bf1d7d4d04ffea3abf8530908ad62", + "version_major": 2, + "version_minor": 0 + }, + "text/plain": [ + " 0%| | 0/40 [00:00" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "plt.figure(figsize=(7, 3), constrained_layout=True)\n", + "plt.xlabel(UnitaryInfidelityNorm.name(), fontsize=13)\n", + "plt.ylabel(\"Frequency\", fontsize=13)\n", + "plt.hist(U_norms, bins=60)\n", + "plt.xlim([-0.05, 1.05])\n", + "plt.show()" + ] + }, + { + "cell_type": "markdown", + "id": "ddec7c92-bc95-45e6-940a-22439e2324d4", + "metadata": {}, + "source": [ + "We plot the four best ciruits, w.r.t. 
the infidelity:" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "37788247-4e1d-4d39-8877-90deb67c5cd8", + "metadata": {}, + "outputs": [ + { + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAABe0AAACyCAYAAADfwuTvAAAAOnRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjEwLjMsIGh0dHBzOi8vbWF0cGxvdGxpYi5vcmcvZiW1igAAAAlwSFlzAAAXEgAAFxIBZ5/SUgAAeb5JREFUeJzt3Xd4VNXWx/HvpNMSakgoCRAQ5FIlCEhJ6JHeVAQxIM0CooAKeBWwUQSRi+0amoKivhRRQFDphK6AIL2FGnpvafv9Y25GhiSQhCQzCb/P8+SBnLrOmcmZPevss7bFGGMQERERERERERERERGHc3F0ACIiIiIiIiIiIiIiYqWkvYiIiIiIiIiIiIiIk1DSXkRERERERERERETESShpLyIiIiIiIiIiIiLiJJS0FxERERERERERERFxEkrai4iIiIiIiIiIiIg4CSXtRURERERERERERESchJL2IiIiIiIiIiIiIiJOQkl7EREREREREREREREnoaS9iIiIiIiIiIiIiIiTUNJeRERERERERERERMRJKGkvIiIiIiIiIiIiIuIklLQXEREREREREREREXESStqLiIiIiIiIiIiIiDgJJe0dyGKxpOmnVKlSAKxYsQKLxUL37t0dFvu1a9eYMWMG/fv3p1atWnh6emKxWBgxYkSG7ufw4cNYLBZCQ0MzdLvZRWhoKBaLhcOHDzs6FDsP+usiIiIiIiIiIiKSWdwcHcCDLDw8PMm0NWvWcODAAapWrUq1atXs5hUuXDiLIru3ffv28eyzzzo6DIc4fPgwpUuXJiQkhBUrVjg6HKcyffp0evTowfDhwzP8Bo6IiIiIiIiIiMiDQEl7B5o+fXqSad27d+fAgQO0a9fOqZOe+fLlo2fPntSsWZOaNWuycOFC3n77bUeHleN8/fXXXL9+neLFizs6FDvFixdn165d5M6d29GhiIiIiIiIiIiI5ChK2ku6BAUFMXnyZNvvv/76qwOjybkCAgIcHUKy3N3dqVChgqPDEBERERERERERyXFU0z6bO3/+PC+88AL+/v54enpSqVIlpk6dmuLyR48epV+/fgQFBeHl5UXBggVp1aoVa9euzcKo0+fy5csMGDCAkiVL4uXlxcMPP8yECRNISEhIdvnr168zatQoqlevTt68ecmbNy+1a9fmq6++Snb5qKgoXnjhBR566CFy585NwYIF+de//kXfvn3Zs2cPACNGjKB06dIArFy50m7MgbSMMbBhwwY6d+5M8eLF8fT0xN/fn8aNGxMREWG3XEo17RPHOIiJieGdd96hQoUKeHp60q5dO9sy165dY8yYMQQHB+Pt7U2ePHmoUKECL730Env37rUtN2LECCwWS7JPfgCUKlUKi8ViNy25mvahoaH06NEDgJEjR9qdm5S2LSIiIiIiIiIiIvaUtM/GLl68SJ06dfjpp5+oX78+devWZffu3fTs2dOuF3yidevWUbVqVT799FPc3d1p2bIllSpVYsmSJTRo0IDvv//eAUeROrdu3aJRo0Z8/fXXPProozRt2pSoqCgGDhzIc889l2T506dPU6dOHYYNG0Z0dDQhISE0aNCA3bt30717d/r372+3/NGjR3nkkUf44osvAGjRogUhISF4enoSERHBunXrAKhWrRodO3YEoGjRooSHh9t+6tWrl6pjmThxIo899hjff/89/v7+dOjQgUqVKrFjxw5ee+21VJ+ThIQE2rVrx9ixYwkKCqJt27b4+/sDcPLkSWrVqsWQIUM4ePAgoaGhtGjRgjx58vDFF1+waNGiVO8ntcLCwqhbty4
AVatWtTs3ZcuWtS3XvXv3TBm0WERERCSz3N4ZITU/pUqVAmDFihVp7tyR0f766y/69etH7dq1KVasGJ6envj4+FCnTh0mTZpEbGxshuwnuU4dD5KUOts42oP+uoiIiGRXKo+Tjc2fP5/OnTszffp0PD09Afjxxx9p37497777Lr169bIte/nyZTp27Mjly5eZOXMmXbt2tc3bvHkzzZo1o1evXjRq1IgiRYpk+bHcy/r166lSpQr79u2zDch74MABGjRowFdffUW7du3sepn36NGDv/76iwEDBjBmzBjb+Tl16hStWrXik08+oWXLloSFhQEwefJkzp8/T79+/Zg0aZLdvo8cOWL7MtOuXTuqVavGnDlzqFChQpp7kK9atYpXX32VvHnzMm/ePBo3bmybFxcXl6YyQ0ePHsXT05M9e/YkqXnfrVs3/v77b5588kmmTJlC3rx5bfMOHz7M5cuX0xR3agwZMgQ/Pz8iIyOdfkwGERERkbQIDw9PMm3NmjUcOHCAqlWrUq1aNbt5ie1VZ7Bq1So+/fRTAgMDqVixIkWKFOHMmTNERkayfv165syZw6+//oqHh4ejQ81whw8fpnTp0oSEhLBixQpHh+NUpk+fTo8ePRg+fLja7SIiIk5IPe2zMW9vbz755BNbQhqsSeVKlSpx5MgRu14eU6dO5eTJk7zyyit2CXuA4OBg3nrrLa5evcrMmTOzKvw0GzdunN0XoKCgIN566y0APvnkE9v0rVu3smjRImrWrMlHH31kd36KFi3Kl19+CcDnn39um37mzBkAmjRpkmS/AQEBBAUFZcgxjB49GmMMb775pl3CHsDNzY0WLVqkaXujRo1KkrDfuHEjS5cuxdfXl8mTJ9sl7MFa7qZKlSrpO4AM4O/vT/ny5Z3qy6xIRsrOvTF3797NmDFjaNiwIYULF8bd3R0/Pz86dOjA6tWrM2w/D3qvP/XGFMl+pk+fnuQn8SnLdu3aJZk3btw4B0f8jxYtWnDgwAEOHz7M77//zqxZs/j99985fPgwlSpVYuXKlbb2saTf119/za5du5K0zR2tePHi7Nq1i6+//trRoYg4nezcbk/Ou+++a4s1o3I7D3r7UO12cST1tM/GatSoQaFChZJMf+ihh9ixYwcnT560fagk9uDu0KFDstuqX78+YE34OqOCBQvStGnTJNOffvppXnjhBdauXUtCQgIuLi62Y23Xrh0uLknvSyXWuL/9WGvUqAHAsGHDcHV1pUmTJnh5eWXoMcTFxdl6+PTp0+e+t2exWGjdunWS6b///jtgPTf58uW77/1ktFGjRjFq1ChHhyGSabJzb8wmTZpw/Phx2xggBQsWZOfOncybN48ff/yRjz76iFdeecXRYWYK9cZMmXpjimRvZcqUSXZ60aJFeeONN+jWrRvLli2jX79+WRxZzhIQEODoEJLl7u5OhQoVHB2GiFPKzu32O+3Zs4f3338fi8WCMcbR4WQqtdtTpnZ7zqKe9tlYiRIlkp2emKi9deuWbVriXcG6desme8e4Zs2aAJw9ezZzg06nwMDAZKf7+PiQP39+bty4wYULF4B/jvXNN99M8Q751atX7Y61e/fuPPnkk+zcuZPWrVtToEABGjRowAcffEB0dHSGHMO5c+e4ceMGBQsWpECBAve9PV9fX7unCBIdPXoUIMOeDhCRtMnOvTErVKjA119/zZkzZ/jtt9/4/vvv2b59O1988QXGGAYPHszOnTsdHWa2p96YIg+m8+fP88ILL+Dv74+npyeVKlVi6tSpKS5/9OhR+vXrR1BQEF5eXhQsWJBWrVqxdu3aDIvJ3d0dIMNL41y+fJkBAwZQsmRJvLy8ePjhh5kwYQIJCQnJLn/9+nVGjRpl61yTePP4q6++Snb5qKgoXnjhBR566CFy585NwYIF+de//kXfvn3Zs2cPACNGjKB06dIArFy50u67QFp6x27YsIHOnTtTvHhxPD098ff3p3HjxkRERNgtl1JvzMT
euTExMbzzzjtUqFABT09Pu9Ke165dY8yYMQQHB+Pt7U2ePHmoUKECL730Env37rUtN2LECCwWS4olOkuVKoXFYrGbllxvzNDQUHr06AHAyJEj7c5NWst/imRn2bndfjtjDH369CF//vy0adPG0eHkKGq3iyOpp302llwv8pQkNpA7depEnjx5UlwuJ/TCSDzWevXqpTpx7erqyvfff8+QIUOYP38+y5YtY8OGDaxevZrRo0ezePFiHnvsscwMO80y+kmAlKT05UpEcp7EJ3Xu1LdvX+bOncuvv/7K//3f/zF8+PAsjixnUW9MkQfPxYsXqVOnDlevXqV+/fqcPXuWVatW0bNnTxISEuzGogJYt24dLVu25MKFC5QvX56WLVty5swZlixZwuLFi/nmm2946qmn7iumCxcuMH78eABatmx5X9u63a1bt2jUqBEHDhygUaNGxMTEsHTpUgYOHMi2bduSJIVPnz5N06ZN+euvv/Dz8yMkJARjDGvXrqV79+5s3rzZbsypo0eP8sgjj3D+/HnKlStHixYtiI+PJyoqioiICOrUqUP58uWpVq0aHTt2ZM6cORQtWtQ2lhVgS8rdy8SJExk4cCAJCQnUqFGDBg0acPbsWf766y9ee+01evfunartJCQk0K5dO1atWkVISAhVqlSxPTF98uRJmjZtyt9//02BAgUIDQ3F09OTgwcP8sUXX1CuXDkeeuihVO0ntcLCwoiLiyMyMjJJb+KyZcva/t+9e3e++uor9dgUcXKTJ09m1apVzJw5k99++83R4eQoareLQxlxKuHh4QYww4cPT3GZ5cuXG8CEh4ffdRvLly+3TWvcuLEBzObNmzM24P8ZNWrUPeNOj0OHDhnAFCpUKNn5ly5dMoDJlSuXiYuLM8YY8+677xrAjBs37r72fenSJfPqq68awNSsWTNJTCEhIWnaXlxcnMmVK5cBzIULF1K1TkhIiAHMoUOH7KYDJjAwMNl13n//fQOYAQMGpGofictPmjQp2Zjd3d3NnZeKlM7BtGnTMuV9IJLdpfXafu7cOfP8888bPz8/4+HhYf71r3+ZKVOmpLjukSNHzEsvvWTKlCljPD09TYECBUzLli1NZGRkhh3Da6+9ZgDTp0+f+97W7deQS5cumZdfftmUKFHCeHp6mgoVKpiPPvrIxMfHJ7vutWvXzAcffGCqVatm8uTJY/LkyWNq1aplpk+fnuzyhw8fNs8//7wpV66cyZUrlylQoICpWLGi6dOnj9m9e7cxxpjhw4cbINmflD5rk7N+/Xrz1FNPmWLFihkPDw/j5+dnGjVqZL788ku75e51bb9165YZOXKkKV++vPHw8DBt27a1LXP16lUzevRoU6NGDZMvXz6TO3duU758efPiiy+aPXv22JZLPKZp06YlG2tgYGCqru2JsSb3k9K2RR4Uabm2A6Zz587m5s2btnnz5s0zgAkICLBb59KlS8bf39+4urqamTNn2s3btGmTKVCggMmbN685ffp0muLdu3evCQ8PN926dTPNmjUzefPmNYB5/vnnU7zmpkXiNQQwVapUMWfOnLHN279/vylWrJgBzLx58+zWa9Giha3tevv5iY6ONsHBwQYwv/zyi23622+/bQDTr1+/JDFERUWZ/fv3J4kpre12Y4xZuXKlsVgsJl++fOb333+3mxcbG2sWLlxoN+1u13bAlC1b1hw7dizJfhK/pz355JPmypUrdvMOHTpktm3bZvs9o67txqSu3Z6a97hITpPd2u0nT540+fPnN40bN7aLf8aMGena3p3Uble7XRxH5XEeEIn14OfNm+fgSNLn3LlzLF26NMn07777DoA6derg6uoKZNyxent7M2rUKCwWCzt27LBNT3x8OC4uLk3bc3V1tT2WmpmDfSUOpjtr1iyuXr16z+X9/f0B7B69TbR8+XJiY2NTve/0nhsR+Udib8yffvqJ+vXrU7duXXbv3k3Pnj2ZPHlykuXXrVtH1apV+fTTT3F
3d6dly5ZUqlSJJUuW0KBBA77//vsMievgwYMA+Pn5Zcj24J/emF9//TWPPvooTZs2JSoqioEDB/Lcc88lWf706dPUqVOHYcOGER0dTUhICA0aNGD37t10796d/v372y2f2Bvziy++AKyDMYaEhODp6UlERATr1q0DsPXGBGuN5/DwcNtPWnpjPvbYY3z//ff4+/vToUMHKlWqxI4dO3jttddSfU4Se2OOHTuWoKAg2rZta7tOnzx5klq1ajFkyBAOHjxIaGgoLVq0IE+ePHzxxRcsWrQo1ftJrbCwMOrWrQtA1apV7c7Nnb0xLRaLemKKpMDb25tPPvnErrRhu3btqFSpEkeOHLErqTJ16lROnjzJK6+8QteuXe22ExwczFtvvcXVq1fTPMjgqVOn+Oqrr5gxYwa//vorV69e5eWXX2bMmDFpeoI3NcaNG2dX+zkoKIi33noLgE8++cQ2fevWrSxatIiaNWvy0Ucf2Z2fokWL2trMn3/+uW36mTNngH/avLcLCAjIsBKRo0ePxhjDm2++SePGje3mubm50aJFizRtb9SoUUnKK2zcuJGlS5fi6+vL5MmTyZs3r938UqVKUaVKlfQdQAbw9/enfPnyTl3HW8SRnKHd/vLLL3Pjxg2762RmULs9KbXbJdM5+q6B2MusnvYXLlwwvr6+xt3d3fz3v/9Ncic0NjbWLF682Gzfvj1dcae2p3358uVN+fLlk+1lkpzbe+xUq1bNnD171jbv4MGDpnjx4gYwc+bMsVuvadOmBjAvvviiuXTpUpLtbt261a7Hztdff53ssSf2gHrooYds027dumXc3d2Nn5+frXd/aq1YscJYLBbj7e1tli1bZjcvrT12Uuppb4wxDRs2NIB5+umnzdWrV+3mHTp0yPz111+23/fv328AU6BAAbv9HDx40Dz88MO283/nNkimx07ie7NTp04pxjZkyBBTvnz5ZHv2i+RU2a035p32799vPD09M+yJLfXGVG9MkZwgLdf2hg0bJju/Q4cOBjBr1661TXv88ccNkGKvy02bNtk+K9IjLi7OHDx40IwfP954e3ubcuXKJbkepUfiNaRgwYLJzr948aLtCdnE7yJjxowxgHn//fdT3G7evHmNn5+f7ffJkycbwFSsWNH8/PPP5saNG/eMKa3X9tjYWNsTsufPn0/VOne7tlssFrvPrURpfUI2q6/tIg+i7NRu//nnnw1gRo4cmST+jO5pr3b7IbvpardLVlBP+wdE/vz5mT9/Pj4+PvTt25dSpUrRokULunbtSuPGjSlSpAhhYWHs378/1dts3749tWvXpnbt2nz22WeAtZZa4rT27dsnWWfPnj3s2bMnTb23AWrXro2Liwtly5alY8eOtGnThkqVKnH8+HGeeeYZOnToYLf8zJkzqV69Op999hmBgYE0bNiQrl270qpVKwICAqhWrRqLFy+2LT9nzhwqV65M2bJlad++PV26dKFOnTp06NABFxcX3nvvPduyHh4ehIWFER0dTdWqVXn22Wfp1asX06ZNu+dxhISEMHbsWK5cuUKjRo2oWbMmXbp0oVmzZhQvXpwuXbqk6bykZMaMGZQvX55Zs2YREBBA27ZtefLJJ6lRowZBQUF2Ty0EBQXx7LPPcuHCBapVq0abNm1o0qQJlStXplKlSikOApyc2rVr4+vry+zZswkNDeW5556jV69edgOmnTx5kj179jjtoMcijuYMvTFvFxcXR/fu3bl16xZPPfUUNWrUSPe2kqPemEmpN6ZIzlOiRIlkp+fLlw+w9mBMlHidr1u3rt0AoYk/NWvWBEh3W8rV1ZXSpUszcOBApk2bxr59+5L0eLwfKbUdfXx8yJ8/Pzdu3ODChQvAP8f65ptvJnusFouFq1ev2h1r9+7defLJJ9m5cyetW7emQIECNGjQgA8++IDo6OgMOYZz585x48YNChYsSIECBe57e76+vnafW4mOHj0KkGGfRyKStRzZbr969SovvvgiDz3
0EG+88UaGHM+9qN2elNrtkpk0EO0DpHbt2mzfvp0JEyawcOFCVq5cCVj/YENCQmjfvn2yF8iUbNmyhaioKLtpx48f5/jx40DKDfb08PT0ZPHixQwbNowff/yRs2fPUrp0aXr37s0rr7ySZHlfX1/Wrl1LREQE3333HVu2bGHt2rUULVqUMmXK8PLLL9O5c2fb8gMHDqREiRJERkayevVqrl27RrFixXjqqacYNGgQwcHBdtufPHkygwcP5rfffuPbb78lPj6euLg4evTocc9jGTx4MLVq1WLChAlERkaybds2ChcuTOXKlXn66afv+1yBdSTxTZs28fHHHzN79mx+++03XF1dKVGiBC+++CKtWrWyWz4iIoJixYrxzTffsGTJEkqWLMnQoUMZMmRImj4cvby8WLhwIcOGDWPjxo2sWrUKYwz16tVzuoF8RZxVjRo1bIPT3e6hhx5ix44dnDx5klKlSgHw66+/AiS5cZmofv36gLXhmF4vv/wya9asoUyZMrYbtBmlYMGCtpJmt3v66ad54YUXWLt2LQkJCbi4uNiOtV27dsmWcahevTp58+a1O9bEGwzDhg3D1dWVJk2aZPgg3nFxcaxYsQKAPn363Pf2LBYLrVu3TjI9cZDgp59+2pbkcyajRo1i1KhRjg5DxGmlpfxMQkICAJ06dSJPnjwpLpcRA9C1b9+evHnzsnjxYmJiYmylDrNK4rHWq1cv1W1OV1dXvv/+e4YMGcL8+fNZtmwZGzZsYPXq1YwePZrFixc7Xbszoz97UpJ4PkUkaziy3T5s2DCOHj3K0qVLk70pmNHUbk9K7XbJbEraO5np06czffr0uy4TGhqKMSZd2/Dz82PMmDGMGTPmPqK0uv2ucWrdLe7klCpVym6dTz/9lE8//TRV63p5edG/f/9U9Rxq0KABDRo0SHVcvr6+fP3116le/k7169e3fSjfTeIHyp1Scx7z5cvHW2+9ZbvzfTceHh4pXriTe53vfF1uFxwcbPuQTk5q3uMiD7L09sa8m/T2xnz//ff5/PPPKVq0KEuWLKFgwYLp2k5K7tUb8+LFi1y4cIFChQrZ9cZ88803U9zmzZs3bf/v3r07v/76Kz/88AOtW7fGy8uLmjVrEhYWxnPPPZch9fnVG1NEMlqJEiXYs2cPQ4YMyfCnm+5ksVgoWLAgR44c4cKFCxQtWvS+t3nkyJFkp1++fJmLFy+SK1cu8ufPD/zzmdeuXTsGDRqUpv1Ur16d6tWrM2LECC5fvsyIESOYMGECr7zyyn3drAYoXLgwuXLl4vz581y8eNEWb0YrWbIkAAcOHEjV8ok3VZIbtyo+Pj7DnjQQkdRxVLt948aNfPrpp3Tr1o1GjRqlMtr7o3Z7Umq3S2ZT0l5ERMSJOEtvzC+++IJ///vf+Pj4sHjxYrvBixxBvTEzlnpjijivpk2bsnTpUubNm5fpSfuDBw9y9OhRvL29M+wx+XPnzrF06dIkZQe+++47AOrUqYOrqytgPda33nqLefPmpTlpfztvb29GjRrFxx9/zI4dO2zTE5PccXFxadqeq6sroaGh/PLLL3z55Ze8/vrr6Y7tbpo0acKbb77JrFmzeO+995KUULhT4gCHe/fuTTJv+fLlaSpBmt5zIyL/cFS7fdGiRSQkJLB9+3ZCQ0Pt5u3evRuwdr6ZPHkyYWFhDBkyJNVxZgS12zOW2u0PLiXtRUREsqnM6o353Xff8dJLL5E7d24WLlxItWrVMmzbt1NvzNRTb0yRB0ffvn356KOPGDt2LAEBAfTq1csuMRQXF8fSpUspXrw4lSpVuuf2Jk2axBNPPJGkl+KePXsIDw/HGMOzzz5rS6QnSkwcJe4rLQYPHszvv/9uKxtx6NAh3nnnHQBeeukl23K1atWiadOm/Pbbb7z00kuMGjUKb29vu21t27aNkydPEhYWBljHbqpevXqSY//ll18wxtiul2C9Rru7u3PgwAHi4+OTHOPdvPHGGyxevJj333+
fmjVr0rBhQ9u8uLg4fv311zTXPr7To48+SsOGDVm+fDl9+vQhIiLCLpl3+PBhrly5QuXKlQFsTwbPnDmTgQMH2spuHDp0iJdffjlN+y5WrBhgfR+kZOjQocybN49+/frRr1+/NG1fROxlRrt969atKc7bvXs3u3fvtl0n7pfa7amndrtkFA1EKyIikk0l1pWcN29ehm1z0aJFPPvss7i5uTFv3rx7PsJ7PxJ7Y94ppd6YcP/Hmtgb02KxZGhvTMA2qFZmSBxzZtasWck26O+k3pgi2Vf+/PmZP38+Pj4+9O3bl1KlStGiRQu6du1K48aNKVKkCGFhYezfvz9V2xs/fjzFixfnkUce4cknn+SJJ57g0UcfpWLFimzYsIEGDRokWyJxz5497NmzJ03XC7COo+Xi4kLZsmXp2LEjbdq0oVKlShw/fpxnnnkmST3nmTNnUr16dT777DMCAwNp2LAhXbt2pVWrVgQEBFCtWjUWL15sW37OnDlUrlyZsmXL0r59e7p06UKdOnXo0KEDLi4uvPfee7ZlPTw8CAsLIzo6mqpVq/Lss8/Sq1cvpk2bds/jCAkJYezYsVy5coVGjRpRs2ZNunTpQrNmzShevDhdunRJ03lJyYwZMyhfvjyzZs0iICCAtm3b8uSTT1KjRg2CgoLsPieDgoJ49tlnuXDhAtWqVaNNmzY0adKEypUrU6lSpTSNKVa7dm18fX2ZPXs2oaGhPPfcc/Tq1Yu1a9faljl58iR79uxJd5k9EflHRrbbR4wYgTEm2Z/w8HDAem0xxmRYaVq121NP7XbJKErai4iIZFN9+/bF19eXsWPH8uWXXyZ5dDIuLo4lS5bYNXLvJjIykk6dOmGM4fvvv6dZs2apWq9ChQpUqFDBNhB5WgwePJhz587Zfr9Xb8zIyEheeuklLl++nGRb27Zts0vszJgxI9ljT01vzLR44403sFgsvP/++yxfvtxuXlxcHIsWLUrT9pKT2Bvz9OnT9OnTh2vXrtnNP3z4MNu3b7f9fntvzNvHJsnM3pgVKlTgk08+SdO2RSR5tWvXZvv27bz++ut4e3uzcuVKfvzxR6KioggJCWH69Om2pMC9vP/++3Tu3Jlr166xZMkS5s+fz5EjR2jatCnTp09n+fLl9yzLkhaenp4sW7aMLl26sH79epYsWULJkiUZN25csskjX19f1q5dy3/+8x8qVqzIli1bmD17Nn/99RdlypThww8/ZPDgwbblBw4cyEsvvUS+fPlYvXo18+bN4/Tp0zz11FNs2LCBJ554wm77kydPplu3bpw7d45vv/2WKVOmsHLlylQdy+DBg1m5ciXt27fnyJEjzJ49mx07dlC5cmXGjx9/X+cpUfHixdm0aRPvvPMOJUqU4LfffuOXX37h+vXrvPjii7Rq1cpu+YiICIYMGYK3tzdLlizh8OHDDB06lFmzZqVpv15eXixcuJCmTZuydetWpk+fzpQpU5JNGonI/cvodnt6qd2udrva7dmIERERkUwVHh5uADN8+PAUl1m+fLkBTHh4+F23sXz5crvp69atM4ULFzaAKVmypHn88cdNly5dTKNGjUz+/PkNYObNm5eqOBOXL126tAkPD0/2JyIiIsl6gAHMoUOHUrWfQ4cOGcDUrl3bPPLIIyZ//vymQ4cOpnXr1iZ37twGMM8880yS9U6dOmWqV69uAJM/f34TGhpqunTpYlq2bGlKlixpADNgwADb8m3btjWACQoKMu3atTNPP/20qV27trFYLMbFxcX88MMPdttv3bq1Acy//vUv061bN9OzZ08zderUVB3Thx9+aCwWiwFMcHCwefrpp03Tpk2Nr6+v8fHxsVs2JCQk2fMFmMDAwBT3cezYMVO+fHkDmIIFC5o2bdqYJ554wjzyyCPGxcXFTJgwwW75Z5991gDGx8fHtG7d2jRu3NjkyZPHPPHEEyYwMNDc2QxMfF1CQkLspt+4ccP4+vra5vXo0cP07NnTREZG2pZJzXt
cRERExNlll3b7veKfMWNGsvPVble7Xe327EM17UVERLKxxN6YEyZMYOHChbbeg/7+/oSEhNC+fftU98a8ePEiYO3VcejQoRSX69Wr133HDdbemIsXL2bYsGH8+OOPnD17ltKlS9O7d29eeeWVJMsn9saMiIjgu+++Y8uWLaxdu5aiRYtSpkwZXn75ZTp37mxbfuDAgZQoUYLIyEhWr17NtWvXKFasGE899RSDBg0iODjYbvuTJ09m8ODB/Pbbb3z77bfEx8cTFxdHjx497nksgwcPplatWkyYMIHIyEi2bdtG4cKFqVy5Mk8//fR9nyv4pzfmxx9/zOzZs/ntt99wdXWlRIkSKfbGLFasGN98842tp+vQoUMZMmRIqgcFg396Yw4bNoyNGzeyatUqjDHUq1fP6QYEExEREXFWGdluz2pqt6eN2u2SESzGGOPoICRtpk+fTo8ePRg+fDgjRoy47+2dOXOGQYMG8dtvv3H69GkSEhKYNm0a3bt3x2KxEBgYaPeIzr10796dr776iuXLlycZyTytktv/4cOHKV26NCEhIaxYseK+tp9ecXFxvPfee2zatIldu3Zx5swZYmNjKVmyJE2bNuWNN95IU03JU6dOsWDBAhYsWMCmTZs4ffo0uXPnpmrVqjz33HM8++yzWCyWTDwiez///DPjxo1jy5YtADzyyCO89tprtGzZMsmyUVFR/PTTTyxcuJCtW7dy7tw5fHx8CA4O5sUXX6RNmzZZFreIiIiIiIiIiEh2p5r2Qs+ePZkxYwa+vr48/fTThIeHU7ZsWUeHlWbTp0/HYrFkyI2Me7l58yYjR45k1apV+Pv7ExYWRvPmzYmJieHzzz+nSpUqbN68OdXbGzRoEL169WLBggWULFmSDh06ULlyZdasWUP37t158skn01yrLb0+/vhj2rRpw9q1a6lbty6NGjVi48aNtGrVKtmaZ127duXll19mxYoVVKhQgY4dO1KmTBmWLFlC27ZtGThwYJbELSIiIiIiIiIikhOoPE421L59e2rXrk3hwoXve1sxMTEsWrSIUqVKsWXLFlxc7O/j7Nq1C3d39/veT0YqXrw4u3btInfu3A6LwcvLizVr1lCrVi3c3P75M4qPj+ff//43o0eP5vnnn0914r5QoUK8//779O7dmyJFitimb9q0iSZNmjB79mymTJlCnz59MvxYbrdnzx4GDx6Mp6cny5cvp06dOoB1FPPHHnuMV199lbCwMLubOiVKlGDSpEmEh4eTL18+2/SFCxfSrl07JkyYQFhYWKoHtBQREREREREREXmQqad9NuTj40OFChUyJGkfHR1NfHw8gYGBSRL2YB1ZPC31s7KCu7s7FSpUICAgwGExuLm5UbduXbuEPYCrqyvvvvsuXl5e/PHHH1y6dClV25s4cSLDhg2zS9gD1KxZkyFDhgAwa9asjAn+HnHEx8fz/PPP2xL2AA899BBvvvkmcXFxTJw40W6d7777jn79+tkl7AFatmzJc889l2Wxi4iIiIiIiIiI5ARK2mdDKZWBSaxBv2LFClatWkWjRo3Ily8f3t7etGzZkp07d9otX6pUKVvd9ZUrV2KxWLBYLJQqVcq2zJ2/327q1KlUq1aNXLly4efnR/fu3YmOjr5r7OfPn2fo0KFUrFiRXLly4ePjQ6NGjViwYEGqj//w4cNYLBa7evmhoaG2AUdGjhxpOxaLxcL06dOZPXs2FouFLl26pLjdPn36YLFYmDZtWqpjSY7FYsHV1RWLxYKHh8d9bQugatWqAJw4cSLZ+UePHqVfv34EBQXh5eVFwYIFadWqFWvXrk3zvhYuXAhAp06dksxLnPbzzz+nenv3il1ERERERERERETsKWmfA/388880atSI69ev06JFC/z9/Vm0aBENGjSwS6p36tSJjh07AlC0aFHCw8MJDw9PNmF7pyFDhtCzZ0927txJgwYNaNCgAb/88gu1atXi/Pnzya6zd+9eqlWrxujRo7lx4wb
NmzcnODiYDRs20Lp1a8aNG5fuYw4LC6Nu3bqANVGceCyJ9fnbtm2Ln58fc+fO5dy5c0nWv3r1KrNmzcLb25unnnoq3XEYYxgzZgzXrl2jYcOG5MqVK93bSnTw4EEA/Pz8ksxbt24dVatW5dNPP8Xd3Z2WLVtSqVIllixZQoMGDfj+++9TvZ+LFy9y5MgRAKpXr55kfsmSJSlcuDBRUVFcvnz5vmMXERERyekyesylM2fO8Oyzz+Lv72/rJDJ9+nTg7p1tUnJ7p5/7ldz+k+tsk9Xi4uIYMWIELVu2pEyZMuTLlw8vLy/KlSvHiy++SFRUVLq3vW3bNrp160aJEiXw9PSkaNGihIaG3ncnoLSIjIykRYsWFCxYkLx58/Loo4/y9ddfJ7vsqVOnmDJlCu3bt6dEiRJ4eHiQP39+QkJC+OqrrzDGZFncIiIicg9Gsp1p06YZwAwfPtxuenh4uAGMi4uLmTdvnm16XFyc6dixowHMW2+9ZbfOoUOHDGBCQkKS3RdgAgMD7aatW7fOWCwW4+PjY/7880/b9CtXrphGjRoZwABm+fLldjFUrlzZAGbs2LEmPj7eNm/fvn2mdOnSxtXV1Wzfvv2e+08p5pTOS6Jhw4YZwEyYMCHJvIiICAOYF154Idl17+b111834eHhpn379iYoKMgA5uGHHzYHDx5M87buFBMTYx5++GEDmPHjx9vNu3TpkvH39zeurq5m5syZdvM2bdpkChQoYPLmzWtOnz6dqn1t27bNAKZAgQIpLlOtWjUDmL/++uue27tw4YIpUqSIAcycOXNSFYOIiIhITnKv9mlatW7d2gCmSpUqpmvXriY8PNysXr3aGJN8u/leEr8/3N5uT6+MbLdnpCtXrhjA5M2b1zz22GOmU6dOpk2bNiYgIMAAxtvb22zatCnN242IiDBubm7G1dXV1K1b13Tu3Nk0bNjQFChQwDRu3DgTjiSp2bNnG1dXV2OxWExISIjp2LGjyZ8/vwHMoEGDkizftWtXAxg3NzdTu3Zt89RTT5l69eoZFxcXA5hOnTqZuLi4LIldRERE7k4D0eZATz/9NO3atbP97urqytChQ5kzZw6rVq267+1//vnnGGMYMGCAXY/svHnzMmnSJCpVqpSkl8bPP//M9u3b6dixI6+99prdvLJlyzJ+/Hg6dOhAREREkprpGaVPnz6MHj2aiIgIXnnlFbt5kydPBqB3795p3u6cOXM4cOCA7fcqVaowc+ZMSpcufV/xArz11lvs2rWL0qVL8/zzz9vNmzp1KidPnmTQoEF07drVbl5wcDBvvfUWAwcOZObMmbz66qv33NfVq1cB7jrAb548eQC4cuXKPbf3/PPPc+bMGWrXrk379u3vubyIiIhITtO+fXtq166dIWNRxcTEsGjRIkqVKsWWLVuSjEe1a9cu3N3d73s/Gal48eLs2rXrru3LzObl5cWaNWuoVauW3XhU8fHx/Pvf/2b06NE8//zzbN68OdXbXLZsGX369CEoKIiffvqJhx9+2DYvJiaGv//+O0OPITnnz5/nueeeIz4+njlz5tChQwfA2pu+Xr16jB8/nlatWtk95VCoUCHef/99evfubTeW1qZNm2jSpAmzZ89mypQp9OnTJ9PjFxERkbtTeZwcqFmzZkmmPfTQQwCcPHnyvre/evVqADp37pxkXsWKFW11zG/366+/Atgak3eqX78+ABs3brzv+FISGBhIWFgYO3futKv3vn37djZs2EBwcHCyZWHuZf/+/RhjOHPmDIsXL8bd3Z0aNWrw1Vdf3Ve83333HWPHjsXLy4tvv/02yZcdZzinKRkzZgzff/89BQsW5JtvvsFisWR5DCLZjUoo5OwSColWrVqFi4sLFouFXr16ZUDUqffzzz8TEhKCt7c33t7ehIaG2sYyuVNUVBSTJk0iLCwMPz8/3N3dKVy4MGFhYfz0009ZGrdIdubj40OFChUyJGkfHR1NfHw8gYGBSRL2ABUqVCAoKOi
+95OR3N3dqVChAgEBAQ6Lwc3Njbp169ol7MHasendd9/Fy8uLP/74g0uXLqV6m/3798disTB37ly7hD2Ah4dHur5TpNXkyZO5fPkybdu2tfs+ULRoUcaOHQvA+PHj7daZOHEiw4YNs0vYA9SsWZMhQ4YAMGvWrEyOXCT7U7s9Z7fbVfpMnIWS9jlQiRIlkkzLly8fALdu3brv7ScOKpo4iO2dkvtAOXz4MABdu3a1GyQ28Sex4Xj27Nn7ju9uEnurR0RE2KYl/j89vexvV7hwYZo3b87SpUvx8/PjhRde4OjRo+na1rJly+jevTsuLi7MmjWL2rVrJ1km8ZzWrVs32XNas2ZN4J9zevbsWbp3757kZ82aNYD1SQmA69evpxjXtWvXgH/eT8mZOXMmQ4cOJU+ePCxcuJAyZcqk/QSIyH3r2bMnM2bMwNfXl6effto2xkd2k9Ffiu7m5s2bjBw5klWrVuHv709YWBjNmzcnJiaGzz//nCpVqqSpJ+adbt265bDeix9//DFt2rRh7dq11K1bl0aNGrFx40ZatWrFJ598kmT5rl278vLLL7NixQoqVKhAx44dKVOmDEuWLKFt27YMHDjQAUchkv2kdA27PaGyatUqGjVqRL58+fD29qZly5bs3LnTbvlSpUrZ2t4rV660tfdub3ffLbEzdepUqlWrRq5cufDz86N79+52Y10l5/z58wwdOpSKFSuSK1cufHx8aNSoEQsWLEj18SeX2AkNDaVHjx4AjBw50q79On36dGbPno3FYqFLly4pbrdPnz5YLJb7TqBYLBZbgszDwyNV60RGRrJz505CQ0OpXLlymvaXEec0UeJN1+TGI2vZsiVeXl78/vvv3Lx5M1XbS+x4lfhdT0SyjtrtaZdZ7fbJkycTHBzMrFmzKFWqFB06dOBf//oXf/31F998800mHElSc+bMISQkhMWLF1OlShXCwsLYt28f4eHhDB48OMnygwYNolevXixYsICSJUvSoUMHKleuzJo1a+jevTtPPvkk8fHxWRK7ZCyVx8mBkut542gJCQmAdcDYokWLprhcRvRCupsWLVpQsmRJfvjhByZOnIiHhwczZ84kb968PP300xmyDx8fH1q3bs1nn33Gb7/9xnPPPZem9Tdt2kTbtm2JiYlhypQpdqWObpd4Tjt16mQrW5OcChUqANbyN8n1/g8NDaVevXq2HlAXLlzg2rVryW7z2LFjQMo3bBYsWECPHj1wd3dn7ty5yd5sEJHkqYRCziyhcLv33nuPvXv30rNnT1tZtqywZ88eBg8ejKenJ8uXL6dOnTqAdYD4xx57jFdffZWwsDC7L4clSpRg0qRJhIeH292oXbhwIe3atWPChAmEhYUl+3SfiKTezz//zMSJEwkODqZFixZs3bqVRYsWsWHDBnbs2IGfnx9gbe8dPnyYOXPmULRoUcLCwoDUtZ2HDBnCmDFjcHd3p2HDhvj4+PDLL7+wfPnyZJ+QBev1oUmTJhw9epRSpUrRvHlzrly5wvr162ndujUffvhhsomD1AgLCyMuLo7IyEiqVq1KtWrVbPPKli1LrVq18PPzY+7cuZw7d45ChQrZrX/16lVmzZqFt7c3Tz31VLpiADDGMGbMGK5du0ajRo3IlStXqtZbtmwZAI899hg3btzgu+++448//sDV1ZUaNWrwxBNPJLutjD6n27ZtA+CRRx5JMs/Dw4NKlSqxefNm9u7dS5UqVe65vYMHDwLY3nMikjK123Nmu12lz8TpOLSivqTLvQaiTWkgKdIwONTd1ilVqpQBzM6dO5Ndp3r16kni6NmzpwHM7Nmz73Jk9xdzage0eueddwxgPvvsMzNz5kwDmF69eqUprnt5++23DWBGjx6dpvX+/vtvU6hQoRQHzL1d48aNDWA2b958H5HaSxyQK3FAs9sdOXLkrgOcrVixwnh5eRlXV9c0v84ikrGioqLuem1Pj8werDAlWTlY4d3ExsYaLy8vA5iLFy+mef0
dO3YYDw8P06tXL9sx9ezZMxMiTeqFF14wgBkwYECSeR999JEBTL9+/VK9vT59+hjAdO/ePQOjFMmZ7tVud3FxMfPmzbNNj4uLMx07djSAeeutt+zWSU+7fd26dcZisRgfHx/z559/2qZfuXLFNGrUyABJru1xcXGmcuXKBjBjx4418fHxtnn79u0zpUuXNq6urmb79u333H962+3Dhg1LsT0cERFhAPPCCy8ku+7dvP766yY8PNy0b9/eBAUFGcA8/PDD5uDBg6neRufOnQ1ghg0bZsqXL287h4k/AQEB5q+//rJbJ73nNCWXLl2y7e/SpUvJLtOuXTsDmJ9++ume24uJiTEPP/ywAcz48eNTFYOIZAy12zNeetvtFStWNC4uLkmu4VlpzJgxBjBt27ZNMm/u3LkGMK1atUr19j744AMDmNDQ0AyMUrKK83XJFqeXWCv9hx9+SDJv9+7dbN26Ncn0pk2bAjBv3rxMiyvxkda4uLi7LterVy/c3NyIiIjIsNI4d1q5ciVAmuqKHj58mGbNmnHu3DlGjBiRZLDcO2XGOW3ZsiUAs2fPTjIvcVrr1q2TzPvzzz9p06YNt27dYvLkyXTs2DHDYhJ5UKiEQs4roZDIGEOfPn3w8fFhzJgxqVrn6NGj9OvXj6CgILy8vChYsCCtWrWyG5Mlte5WQiFx2s8//5zq7amEgkjGefrpp+2eqnR1dWXo0KGAdQyM+/X5559jjGHAgAF2ddbz5s3LpEmTkh136Oeff2b79u107NiR1157za7XZ9myZRk/fjzx8fF25SYzWp8+fXBxcUl2H4lPKqWn/T5nzhy++uor5s2bx4EDB6hSpQrff/89pUuXTvU2Lly4AMDYsWO5du0aixYt4tKlS2zfvp2mTZty5MgRWrdubVdyMqPP6dWrV23/T6mna+JTs1euXLnn9t566y127dpF6dKlbeVERSRlarfnvHa7Sp+JM1LSXtIssSH38ccf2x7LBGu98/79+yc7yEXHjh2pWLEi33zzDe+++26S2vrGGCIjI4mMjEx3XMWKFQOsZQDuxt/fnzZt2rBlyxZWrlxJlSpVePTRR9O0r4ULFyabOLl+/TpvvvkmK1euxM/Pz/bocqKhQ4dSoUKFJPWDT58+TbNmzTh+/DiDBg1i+PDh94yhb9+++Pr6MnbsWL788ktbuZxEcXFxLFmyhB07dqT6uAYMGICrqytffPEF69evt03ft28f77//Pm5ubgwYMMBunT179hAWFsbly5eZOHEi3bt3T9W+GjduTIUKFRwyUK5IdvTzzz/TqFEjrl+/TosWLfD392fRokU0aNDArnHeqVMn242zokWLEh4eTnh4eLINvzsNGTKEnj17snPnTho0aECDBg345ZdfqFWrFufPn092nb1791KtWjVGjx7NjRs3aN68OcHBwWzYsIHWrVszbty4dB9zWFgYdevWBawNzsRjSazz2bZtW7sSCnfKjBIKDRs2THUJhUSff/45a9euZfz48RQsWPCey69bt46qVavy6aef4u7uTsuWLalUqRJLliyhQYMGfP/996ne98WLFzly5AhAsgMjlixZksKFCxMVFcXly5dTtU2VUBDJOMmVmHrooYcAOHny5H1vf/Xq1QB07tw5ybyKFSsmWx7n119/BbAb3PR2iR14MrMNFxgYSFhYGDt37rRrc2/fvp0NGzYQHBycrsFe9+/fjzGGM2fOsHjxYtzd3alRo0ayJSRTktjmjouLY86cOTz++ON4e3tTqVIlfv75Z0qUKEFUVJRd7WNnOKcp+e677xg7dixeXl58++23Di13IZJTqN2e/drtd5Y+mzZtGv369WPAgAF8/fXX3LhxI9n1Mvqcpqb02c2bN9m7d2+qtqd2ezbnwF7+kk6OLo9jjDGDBw82gHF3dzfNmzc3Tz75pClatKgJCAgwrVu3TjaOvXv3mtKlSxvA+Pr6miZNmpguXbqYZs2aGV9f32QfgU1LzDdu3LBtJyQ
kxPTo0cP07NnTREZGJon/119/tT1S+sknnyR77HczfPhwA5jixYubli1bmi5duphGjRqZggULGsD4+PiYVatWJVkv8TW687VLfHw1d+7cJjw8PNmfQYMGJdneunXrTOHChQ1gSpYsaR5//HFbLPnz5zeA3SPXqZFYKsHNzc08/vjjpm3btiZXrlwGMP/5z3+SLF+tWjUDmCJFiqQY+6hRo5KsFxgYmGGP7InkBCqhkPNKKBhjzLFjx4y3t7dp2LChbdrdyuNcunTJ+Pv7G1dXVzNz5ky7eZs2bTIFChQwefPmNadPn07V/rdt22YAU6BAgRSXSbyOp+ZR4AsXLpgiRYoYwMyZMydVMYg8yO51bV+6dGmy62VUu93T09MA5tq1a8muk9gGvf3a/vjjjycp+ZLcT9myZdMdc2pKKPz0009JSnH179/fAOa///1viuulxcWLF03JkiVNrly5zJEjR1K1Tvv27Q1gKlasmOz8119/3QCma9eutmnpOafJtakT2wEZVR5n6dKlxtPT07i6uqb5O4PIg0zt9pzXblfpM3FGGohW0uXDDz+kfPnyTJo0iRUrVuDj40OzZs0YM2YMw4YNS3adcuXKsWXLFj755BPmzp3L+vXriYuLw8/Pj+rVq9OmTRuefPLJdMfk5eXFwoULGTZsGBs3bmTVqlUYY6hXrx6PPfaY3bL169fH3d0dNzc3unbtmuZ9dejQgStXrrB69Wo2bdrE+fPnyZUrF2XLlqVv3770798ff3//VG8v8THb69evp9jTJzAwMMld2tq1a7N9+3YmTJjAwoULbWV5/P39CQkJoX379jRp0iRNx/bqq69StmxZPvzwQ1vvrODgYF5//XVatWqVYuxnzpxJMfaQkBCGDBmSpjhExF5KJRTmzJmTJSUUKlWqlORJqjsf979d4uP+HTp0ICIigokTJ953jMnp06cPo0ePJiIiIklZsfstoXDgwAHb71WqVGHmzJlpKqEA0K9fP27evMnnn3+equWnTp3KyZMnGTRoUJLPp+DgYN566y0GDhzIzJkzefXVV++5vcQSCnfrOZmWEgrPP/88Z86coXbt2rRv3/6ey4vI3d054KAzSOxJHhYWRtGiRVNcLiMGYLybFi1aULJkSX744QcmTpyIh4cHM2fOJG/evDz99NMZsg8fHx9at27NZ599xm+//cZzzz13z3USS1mkVK4icfrp06dt09JzTpNrV5cqVYp27drh7e2Nj48Ply5d4tixY1SsWDHJsseOHbOL906bNm2ibdu2xMTEMGXKFLs2hojcH7Xbk+fM7fbbS5/5+fmxaNEi6taty5EjRxg4cCC//fYbrVu3ZufOnbZ2dUafU5U+kzspaZ8Nde/ePdkSJNOnT2f69OkprnfnRRusDb/kpt9tnUS9evWiV69eaYrDx8eHN998kzfffDPF7d5r/3eLOTg42Pb46d3MmzeP2NhYunTpQv78+VMVy+2qVKnC+PHj07xeSudmxYoVad5WIj8/P8aMGZPqOsmp0bp162Rr1yfn8OHD6dpHetcTeVA5QwmFO8cscYbH/RNLKCxatIi1a9fabtJmRAkFgLNnz/LHH3/w5ptvUqNGDSIiIggPD0/VNubOncuPP/7I22+/Tfny5VO1jjOc05SMGTOG77//noIFC/LNN98kWwtbRJyLv78/hw8fJioqiocffjjJ/KioqCTTSpQoAVjb+o4cp8jV1ZXevXvz9ttv88033+Dt7c2FCxfo1asX+fLly7D9JCbKz5w5k6rlEz9TEhM8d0osS5E3b17btPSc07t9DwNr+YlVq1bx559/Jknax8bGsmPHDry8vGxthdvt3LmTxx9/nKtXrzJhwgRbHWoRyRhqtyfPmdvtd5Y+SyyhnFj6rGzZsrbSZ4k3FpzhnKZEpc9yBufr2iGSyWJjY20J7pdeesnB0YiIpE7iF/7bJSYt7hwnJD0SBydKqUdecj0KE2++de3a1W6wqcSfIkWKANYGdGZ
K7Dly+wB+GTXQeOHChWnevDlLly7Fz8+PF154gaNHj95zvcuXL9O/f3/KlSuX4hNoyUk8p3Xr1k32nNasWRP455yePXvWdjP/9p81a9YA/ySNbh8Q8U7Xrl0DuGsSbObMmQwdOpQ8efKwcOFCypQpk+pjEhHHSUwY/PDDD0nm7d69O0lSB6Bp06aAtZNLZkkcGDAuLu6uy/Xq1Qs3NzciIiIy7Lp+p8QnVYOCglK1fIsWLXBzc2P79u3J1o1O3N7tiafMOKctW7YEYPbs2UnmLViwgJs3b9KkSRO8vLzs5h0+fJhmzZpx7tw5RowYkaS3q4jcP7XbU+aM7Xb4p81csWLFJGMeenp62gbQTbzGQ/rOaXLt9h9//NEuBki57Z6advuyZcvo3r07Li4uzJo1i9q1a6fmFIgTUk97eWD89NNP/Pjjj2zcuJG///6bdu3a2ZIfIiLOTiUUUuaMJRT+/PNPTpw4QalSpWjevLndvMQByBYuXEhoaCh+fn589913wD/ntFOnTrbHX5NToUIFwPoYbXIlFEJDQ6lXrx4BAQGAtUfotWvXkt3mvUooLFiwgB49euDu7s7cuXPV8BfJRp5//nlmzJjBxx9/TLt27WwDz167do3+/fsn25u7Y8eOVKxYkW+++Yby5cvz+uuv4+npaZtvjLENDps46GBaFStWDIA9e/bcdTl/f3/atGnD3LlzAevTrncmU+5l4cKFFChQIEm5zOvXr/P++++zcuVK/Pz8CAsLs5s/dOhQ5s2bR79+/ejXr59teuHChenRowcRERG8/PLLTJ061XYT4quvvuK3337Dy8vL7snozDinvXr14v3332f+/PnMnTvX1tPz9OnTvP766wAMGjTIbp3Tp0/TrFkzjh8/zqBBgxg+fHiq9vXss8+yceNGRo0apdJoIqmgdnvKnLHdDip9Js5JSXt5YPz5559MmzaNAgUK0KVLFyZNmuTokEREnIZKKNxbWksogLUHTkrlwKKjo4mOjrZrdJcoUYI9e/YwZMgQatSocc/t36vMXf78+QkICODIkSNs2bKFevXq2c0/evQoZ8+eJTAwEG9v7yTrr1y5kieeeAJjDN9++22yj3uLiPN67LHHGDx4MOPGjaNmzZo0atQIHx8fVq5ciaenJ61bt+bnn3+2W8fNzY0ff/yR5s2b8/bbb/PJJ59QpUoVfH19OXv2LFu3buX06dNMmDAh3Un72rVr4+vry+zZswkNDaVMmTK4uLjw3HPPJUmuP//887akfZ8+fdK8r02bNjFy5EiKFy9OtWrV8PHxITo6mq1bt3L+/Hl8fHz44Ycf7Ho4grWExZ49e5Ltdfrhhx+yfv16vvnmG1avXk1wcDBHjhxh8+bNuLq68uWXX1KyZEnb8plxTgsWLMjUqVN58skn6dSpE6GhoRQqVIjff/+dixcvMnDgQEJDQ+3W6du3L/v27SN37ty2J7XuVLhw4STjaB05coQ9e/Zw6dKlVMUmIplL7fZ7U+kzlT7LCZzv9p9IJhkxYgTGGM6fP88333xDwYIFHR2SiIjTUAmFe0tLCYXQ0FCMMcn+TJs2DYCePXtijLFL6md1CYXEacmNY/Lnn3/Spk0bbt26xeTJkx36BU9E0u/DDz8kIiKChx9+mBUrVrBixQqaNm3KunXrUmwPlytXji1btvDee+9RokQJ1q9fz9y5c9m7dy/Vq1fn008/5Zlnnkl3TF5eXixcuJCmTZuydetWpk+fzpQpU9i7d2+SZevXr4+7uzu5cuVKMkB3anTo0IGBAwdSrFgxNm3axA8//MCmTZsIDAxk6NCh7Nq1y/YZmFo+Pj6sW7eOYcOG4eHhwYIFCzh48CCtWrVi5cqVdOvWLck6mXFOO3bsyKpVq2jevDlbtmxh0aJFlC1blunTpyc7/lZiMur69et89dVXyf4k91khIs5F7fZ7U+mzVzIsJnEgIyIiIg43bdo0A5jhw4fbTQ8PDzeAWb58ebLrASYwMNBu2qF
DhwxgQkJCUr1OZGSkAUz+/PnN1q1bbdOvXr1qmjRpYoAkccTGxpqKFSsawLzzzjvm5s2bdttMSEgwa9asMWvWrEl3zMuXLzeA6dSpU7LHcrsOHTrY4qxSpco9l7/TggULTGRkZJLp165dM8OGDTOA8fPzM1euXLGbP2TIEFO+fHkzadKkVO0n8bXu2bNnknkXLlwwvr6+xt3d3fz3v/818fHxdvNjY2PN4sWLzfbt21N9XLt37zaurq7G09PTrFu3zjZ97969plChQsbNzc3s27cvyTpFihQxgPnPf/6T6n01atTIlC9f3mzYsCHV64iI3M23335rABMeHu7oUEREjDFqt+fUdnvv3r0NYLp27Wpu3bplmz59+nQDGC8vL3PkyBHb9PSe07s5d+6c8fb2NoCZM2eObfqpU6dM2bJlk31/nTp1ypQrV84AZtCgQaneV7du3Uz58uXN3LlzU72OZC2VxxERERGVUCBzSiikVf78+Zk/fz6tW7emb9++vPfee1SqVIkCBQoQHR3Nn3/+ycWLF5k3bx6VKlVK1TbLly/Phx9+yMCBA6lfvz5NmzbFw8ODX3/9lRs3bvCf//yHsmXL2q3TuXNnzpw5Q5EiRfjjjz+SLaFQoUIFhgwZYjftwIEDREVF3XXgWxGR1IqNjWXMmDEAvPTSSw6ORkTEOajdrtJnt1Pps5xLSXsREREBrA3V8uXLM2nSJFasWIGPjw/NmjVjzJgxDBs2LNl1Eh/3/+STT5g7dy7r168nLi4OPz8/qlevTps2bXjyySfTHVNiCYVhw4axceNGVq1ahTGGevXqJWn8J5ZQcHNzS3cJhStXrrB69Wo2bdrE+fPnyZUrF2XLlqVv3770798ff3//dB9LatWuXZvt27czYcIEFi5caHsc19/fn5CQENq3b0+TJk3StM1XX32VsmXL8uGHH7J69WoAgoODef3112nVqlWS5RNLKJw5cybZAbMAQkJCkiTtRUQywk8//cSPP/7Ixo0b+fvvv2nXrh01a9Z0dFgiIk5D7faMb7cnlj774IMP+OGHH1iwYAF58+alVatWDBkyJNnEe2ac08TSZ++99x7r168nJiaGihUr0q9fP8LDw5Msf2fps+QEBgYmSdqL87MYc49REERERESygVmzZtGlSxfCw8OZPn26o8MREZF0GjFiBCNHjqRAgQI8/vjjTJo0SeNRiYjkIGq3i9ybkvYiIiKS7cXGxlKzZk22bdvGxo0b1SNTRERERMQJqd0ukjoqjyMiIiLZlkooiIiIiIg4P7XbRdLGxdEBiIiIiKTXn3/+ybRp0zhx4gRdunRhypQpjg5JRERERETuoHa7SNqoPI6IiIiIiIiIiIiIiJNQT3sRERERERERERERESehpL2IiIiIiIiIiIiIiJNQ0l5ERERERERERERExEkoaS8iIiIiIiIiIiIi4iSUtBcRERERERERERERcRJK2ouIiIiIiIiIiIiIOAkl7UVEREREREREREREnISS9iIiIiIiIiIiIiIiTsLN0QGIiGQnJjYWs+9Auta1lAvC4u6ewRGJiIiIiIiIiEhOoqS9iEgamH0HiB8yPF3ruo4eiaVihQyOSEREREREREREchKVxxERERERERERERERcRJK2ouIiIiIiIiIiIiIOAkl7UVEREREREREREREnISS9iIiTujw8Sv8svpokumzFh3g4uVbDohIRERERERERESyggaiFfmfuAT46G84dAWK5oI3KkMu/YVkCz8chJXR4O4Kr1aEwHyOjuj+Bfjn5aMZO0gwhpYNAgCYOm8vJ85cI7+3p4OjExERERERERFxDiYuDqKOpH3FwAAsbs6Z/HN4VJGRkaxfv55169bRpUsXLly4QM+ePR0dljyAxmyHH6PA/O/3q7Ew7lGHhiSp8Otx+Gw3XI2z/n7yOnzVALxcsz6Wr48e4tmSpW3/3g8XFwsfv16bV8auB+DUuZucOHONf/epnhGhioiIiIiIiIj
kDFFHiHt1aJpXc5swCoLKZEJA98/h5XHWrl3LoEGDqFmzJsYYChQo4OiQ5AF18Mo/CXuAo9ccFoqkwdrT/yTswZq0P3ndMbGcj4lhyemTnI+JyZDtJSbuJ8z4m+WbTihhLyIiIiIiIiLyAHB4T3sXFxe+/PJLbt68CcCFCxeSXe6dd97hyJF0POYgkkqHar4I/o/Yfo8+tIde33zowIgkNU6VDYPybcHVHYDYy2cY+do7uMXdyJT9lYmJ47UU5hX08KC5rz+nbt1Mdv7o0aM56JG2y+7eM0W4esuDv3YUoFmnBQTkv5iq9QICAnj77bfTtC8REZHs4MTpa3zxf7sZ+eIjWCwWAOb+fhgvT1da1C+Z6fu/GAMx8VDEC/63e6dxKx7O3rSWenRzePcsySg34+DcLb2uIiIiDxKLMcbce7HMFR0dzcqVK3nqqaccHYo8wC7egiGb4dRNOHUDvguFgLyOjkruJcHAyC2w/YK1h/2I6tC8RCbub+du4ocMT9e6rqNH4lKxQqqXT6xh/+8+1en59iry5Han+WPFbTXuRSTr/L3/Av8qW+Ce00Qkayxdf4KVf5xk5IuPMG9pFAeOXua1HlUyfb8z98M3B63J8fpFre0OZ0nc77sEb/5hbcsG5oGPa0FBL0dHJfdrx3kYsRXO3ITS+WBSbcjn7uioREREnIs5cDDd5XEs9yiPs+fQRYJKeuN2253zrPgu6BT36f38/JSwF4fL7wlf1IV5jcEvlxL22YWLBUY+AnMbg3/uzE3YZ6VDx64Qffa6rSSOxWItlbPqj2guXr7l4OhEHjzHTl3jw2l/2X4fN307R6NVR00eDPPnz+enn35i5MiRzJ0719HhANC4djFCavjz5OBl7D9yKUsS9sbAvChr8vRyLPx+AvZfzvTdptqUfXDwKlyLg52XIGKvoyOSjDBtPxz+3+u64wJM2ePoiERE0u78TXh3q7XDXXTmPBQvd4hLgE92wlt/wPrTjo7mHwuOWmOaud/atsouBo/fSFxcAgDzlh5mxeaTmb5Ph5fHERGRpEqXyMew3tXsprm4WBjzqvOOjjx//nxu3LhBbGwsxYoVo3Hjxo4OSSTDNK9rvSP44bS/sFgsVCpbgLB6OeQuocg9HDt2DG9vb4KDg23laO40adIkoqKisjSufafycehsHjb96c6pv2dkeo93A5ypMRjy+AEQG3OTCeMmkvvGmczdcSrtLN8Fiv5T6nHtmlUM/vonB0YkGeHvh5+FIv/clFq14neOTV/swIgkMwQGBtK/f/8s25/a7ZLVhv0Bm89Z/3/oKkyv79h4HgQf/Q0/HLL+/49zMLUe+OV2bEyRp2Dcduu4hL+fsJZ865wFY8DOOXGUxkWKsu3SRUIK+6Z5/fKl8/PCkxUYPH4j9ar7cur8TV7qXDETIrWnpL2IiGSIY8eOERMTg4eHB8WLF092mYiIiCxP6ohkpIVbC3H2ijvh9aNZo5yJOEBgYCC9e/fO0n2+9NJLzJkzh7Nnz5I3b/KPImZlsgmsNeyLHr3M/B5VeGLQUvKUqWdX4z6z/BgF0/dZewl2LOfFax3eyNT9pcWxazB0M+y9DFUKwoTHG5DXvYGjw5L7dPAyvLXF+lTHI4XgoxZNyOXWxNFhSTandrtktZ2VB0CuIgDsj77IsH9/hAsJDo4qZ/uj3DNQwFqe9/RNePvTr/G95NjH8PYWb8LV4qEAxBqYtWwzO6b+mCHb9rt+k74pzDsbc4tlZ05TyMMjybxPP/2M6Nypqye462A+ZvzoTd+Gx/n3jnsvf7/tdqeoaS/ibDostZZbkewlK163rKxpf7tew1czeaTzd0eYNm0ahQoVInfu3DRpoi+UkrOMm76dSmUL8NVPe3nk4cJZUo5DRJI6cfoas345yKDwygAMHreBx+uV5FZsfJYMRBuXAOGr4JvQTN9VmhkDXVfCNyHOU2tf7l/i6/ptqKMjkZxE7XbJSm//CYuPQQLQ0A8+dN4HyHOMbw/
AF7vhejw85A1f1oW8Dh4T5eBleHUjHL8ObhYY/yjULZox275bTfu79bRPTU17sJbEOXHmOjv2ncfTw41xgx61q3GfGdTTXkREMkyPHj0cHYJIpvjsu522kjhrtkRT5aGCfDJrJ/2ezvzHIkXEXjHfPLaEfaLGtYtl2f7dXKxj6jgjiwVcLUrY5zSJr6tIRlK7XbLSiOpQszBM3gujgh0dzYOhSxAE5YP3tsGndRyfsAco4w2f1IblJ+GXYxmXsL+XjsWsnTrSUxoHYEnkMU6cuc5LnSvy70mb6daqLMP+s5mxAzP37pOS9iIiaWApF4Tr6JHpXldEsqfn2j+El+c/zabmdUsQEhznwIhERERERLIHFwu0DrCWmcvkzslym1q+4OsFBTwdHck/SuaFZ8vBymhHR5J6IcF+tjHOwFrj/p2XHrnLGhlDSXsRkTSwuLtjSWeJGxHJvm5P2N9tmoiIiIiIiOQcjvouqPtbIqkUERHBoUOHWLduHQcPHmTlypVcunSJsWPHsn79ekeHJyIiIiIiIiIiIjmAuoiJpFLiiM/u7u6UKFGCMmWsA1W8/vrrHD9+3JGhiYiIiIiIiIiIPJgCA3CbMCpd6zkrJe1F0qhEiRJJphUvXtwBkYiIiIiIpM6MGTNo0aIFx44dIzIykmeeeYY5c+bg5eVFvXr1WL9+PeXKlaNatWqODlXS4M7XtVu3bkydOpVy5cqRL18+rl+/TtGiRfW6iohIjmZxc4OgMo4OI0MpaS8iIiIiIpLDdevWDYD4+HhefPFFAHr06GGbX7JkSU6dOuWQ2CT9kntdBwwYYLeMXlcRcWbJ3XycMmUKNWvWpEyZMqxZs0Y3leWBpKS93DcTG4vZdyDd61vKBWFxd8/AiEREREREJDm+vr4pzitatGgWRiIZSa+riGRXyd18fOWVV2zzn3jiCd18lAeSkvZy38y+A8QPGZ7u9V1Hj8RSsUIGRiQiIiIiIiIiItmFbj6K2HNxdAAiIiIiIiIiIiIiImKlnvYiIiIiIiIiIiI5kImLg6gjaVspMMA6sKeIOIz+AkVERERERERERHKiqCPEvTo0Tau4TRgFQWUyKSARSQ2Vx8mBvl14gPj4BLtpZy/c5JfVRx0UkYiIiIiIiIiIiIikhpL2OVDRQrl46YN1tsT92Qs36ftOJJXKFXBwZM4t3sAnO+GV9XD+FsTEOzqiuzMGpu+DVzbAiC1wNdbREYmIiIiISHZ3/NS1VE0TERGRzOPwpH1kZCTjx4+nU6dOrF27ltmzZzs6pGyvce1iPNG0NC99sI5TZ6/T951IPn6jFiX98mZ5LF8fPWT3rzP7aAd8vR/WnIarcfD2FkdHdHfT90PEHlhzChYchdc3OToiERHnsm3POVZtPmk37YsfdhET6+R3ZUVE0um34xB9AzadcXQk2cutePjvbpj4N1yOcXQ0SS0+Zn1dt5zL/H0ZYxj/9XY2bv/nTTR13l4WRx7L/J2LSIr2RV1i8Rr7v8Mpc/dw7bp674nkVA5P2q9du5ZBgwZRs2ZNHnvsMUeHk2M0rl2MZnWKU6fbz4wfXNMhCXuA8zExLDl9kvMxTtj6vcOeS3B7UaFDVxwWSqpsOQe3bgv46DXr0wIiIolWRUOXFfDEMvjuoKOjsbpwE/pGQvvf4d9/QEImXreqPFSQFZujbYn7cdO3U6pYPjzcXTNvp04owcBbf1jPed9IOH/T0RFZfX/Q+t58eoX1vSrOa/78+fz000+MHTuWHTt2ODocScHsQ/DeVrgQA2/+AWtPOTqi7OP1TRCxF2YcgP7rneuJ2xn74b1t1td16Gb482zm7s9isTBuUC2+W3yAjdvPMHXeXhISDD07lM/cHYtkQ1dj4aW11jbW65sgLuHe66RXuUAfduy/YEvcf/bdTgp4e5Int3vm7TQD/XocOi+H3ZfgxyhHR2N14hr0WgMdlsK7W63VDEScicMHonVxceHLL7/k5s2b7Ny5k507dya73DvvvMO
RI2kc7foBdjPWjcjDZcAYHn92EnVKHcLFkjn7KhMTx2spzCvo4UFzX39O3Uo5QzB69GgOejj8rcjBR/uBXzXb79GH99Gr1xjHBXQPUdWeg4B/bnRdiD5K394jHRiR451q+B69ev3b0WFkitWHytCr11epXj4gIIC33347EyNKav78+dy4cQOA4OBgypYtm6X7l6Qm74W9l63/n3kA2gWCl4Pz1Z/uhj/+11Pw+HUILmyNKzNYLBbe6luNd/+7le8WH6RNaCBh9Upkzs6c2MKjsOS49cb00evwyW54u5pjY7oVb02ORVsvGUzeCw38HBuTpOzYsWN4e3uTL18+R4cid7HhLFz7X7L5fAwsj4bHijo2puwgLgH2Xf7n932X4dh1KOMkb/c/zsLN/72uZ2/B7yfgkcKZu08XF2viPuyFJdT8V2Hefzk4c3coWU7t9ozxxW7rtResbaxZB6FbJp7Kwd0rM276dn5eGUXjWsXp0KTUfW1vzomjNC5SlG2XLhJS2DdjgkzB1L2w/38dI7/aD20CyLQcVWp9thu2nrf+/8R1aFAUQvwdG5PI7SzGOP5eUnR0NCtXruSpp55ydCg5QmIN+4/fqMXIz7fw9ONB/N9vh/h0WB1cXTP+4YqEnbuJHzI83eu7jh6JS8UKGRhR+py5AW9shtM3Ib8HDK8G5XwcHVXKrsbCa5usPezP3YT/1IaaRRwdlWN1WApzGzs6iszRa/hqJo+s7+gw7urTTz8lJiaG5s2b4+HhkWzjPyIigqgoJ+la8QBYW/F5Lue1Jqm9bl2k/vaJuCY49hHa7aXac9y3hvUXk0DFwz8RcGZzpu5z3T5vdp/MQ+N/XSCg0P13M1+xqwChD1/IgMiyxtEiwfxdqg1YrG2A4mf+oPKheQ6NKd7ixuoqA7jpaR1vJ9/V49Td+blDY8ouAgMD6d27d5bvd86cOcTFxeHu7k6HDh2SzJ80aZLDru8r9vgSWv50lu5zc7UBBG+dmKX7vJcDpVtytEQIWFywxMcSdPBnSpxc6+iwsoXN1V/mar4AAHJdP0ONLRNxi3eOx5L2BrXjRPF6ALjE3aLcgXn4n8rcz02A7cd9SEiwsDvamwYPncbfxznOR04VGBhI//79s2x/ardnjL8DWnLUr47t93JHfyXo5KpM3efmQ/nYcTQv9StcJMj3RqrW8bt+k777jyaZ/t/D+yni4UUhD48kSfv/li1JdG6vDIkZYE2lflzNbe2hkfvmOer/9TEWHJuO3FrmCaILV7X+YuKptv87/C7scmhMmWF9hd7U3h3h6DCScNa47iUt3wXvt93uFEl7yVivf7SR/l0qUtIvry3Rt3T9CQ6fuJIpjzXmlKR9orgEcHN44ajUi0uAJ5fn3GR1Wihp73jTpk0jICCAuLg4mjdv7uhwHni/Hocv98Dxa9C3AnQv5+iIrD2rh26GXRehflEYFZy519xx07dTqWwBVv95Ek8PN0KD/WgQfH9daP49aTPv9c8+vQ7jEmDoH7DzAlyJhR8agV8uR0dlHUx9XhScuQkjqkGzB+8hCMkgg8dtYNzgWlm6z24rYUZIlu7ynuISYMLfcOwaVMwPfcqDxcG9GLOLqKswaad1LIDxtaxPgTmLmHj46G84eR2qFoTnHsr8fSaWxOnVsTyDPlyPATqHBfFo5Qe8h1AOo3b7/btwy9qRbscFqFUEPqwJHpn4VOtn3+3Er3Bu/tx1lvz5PKlUtkCqniI1Bw4S9+rQJNPv1tPebcIoLEFlMiz2n6Ks4/IlGOhcxvrjaFFX4a0/Yd8laFQM3n3E8b3/M0PP1TDFCVMJzhrXvWTld0HH1ySRDDd24KNJpjWuXcwBkWRP2SlhD9kvXsnZevTo4egQ5DbNikPTYtYGkTMk7MGaLJ5WH55bBR8m/bjKUFt3n7N9mVmzJZq3+lZjwowd1K7q+0DVtXdzsX6JjDfQZ41zJOzB+p7sVhZ6r1bCXiQjuLnAa5UdHUX2FJgXxj1qvRn
jTAl7sCYAh1TJuv0ZY/AvnIvH65cErKXmPhz4KL+tO551QUiWULv9/hXwhMn1rO3aibUzd1/7oi7hVzg3HZqU4s9dZxncvTKTvv2b+teLpruufcdi1r/zzC6NA9AmEFpZH2hymsR4YF74uoH19Xu/hqOjEUlKSXsREZEczGJxzp6WWRFTtQqF7tinhYHPPrgZLVcnfB+4Oun7U0TkQWWxWGwJ+0QuLhaa19XdVZGUZEVbplygD+UC7ev39u/yr8zfcQZylmT9ndQWFWelProiIiIiIiIiIiIiIk5CSXsRERERERERERERESeh8jhy3yzlgnAdPfK+1hcRERERERERkQwWGIDbhFFpXkdEHEtJe7lvFnd3LBUrODoMERERERERERG5jcXNDYLKODoMEUkjJe1FcqiIiAiaNGlCdHQ0RYsW5ejRo5QuXZpvv/2W6tWr4+LiQt68ealTp46jQ81UyZ2H6tWr89NPP1GwYEFy5cqFl5dXjj8PIneaMWMGLVq04NixY+zatYuWLVsydepUfH19KVasGFWqVKFAgQKODlNEJNu4/bo6f/58Bg4cyNdff03VqlUpWbIk+fLl03U1G7r9dT106BCNGzdmypQplC1blipVquh1FRHJge78rtSiRQuWLVuGm5tbjrj23/nZ1qRJEyIiIqhZsybx8fEO+y54e1yRkZE8++yzLFiwAGMMAQEBnDhxgnLlylGtWrUsj80RlLQXyaF69+4NgLu7OyVKlKBMGeud9SFDhtiWOX78uENiy0opnYdnnnnGtsyDcB5E7tStWzcA4uPjqVq1KgADBgwA4OzZs3h7ezssNhGR7Oj26+rbb78NwIsvvgjoupqdJfd5+corrwB6XUVEcqrkrv3t2rUDcsa1P7nje/XVVwHHHt/tcSW2oTp37my3zKlTp7I8LkdR0l4khytRokSK84oXL56FkTiWzoNI8nx9fZNMK1y4sAMiERHJGXRdzZn0uoqIPHhy+rXfWY8vubgSFS1aNAsjcSwXRwcgIiIiIiIiIiIiIiJW6mkvIiKSCUxcHEQdSdtKgQHWgaIkR9F7QUQymomNwxyOSvN6llKBWNx1bZHMo/emiIja/5Ix9G4QkVQxsbGYfQfSta6lXBAWd/cMjkjEyUUdIe7VoWlaxW3CKAgqk0kBicPovSAiGcwcjiJ+0LA0r+c6/gMs5YIyISIRK703RURQ+18yhJL2IpIqZt8B4ocMT9e6rqNHYqlYIYMjEhERERERERERyXlU015ERERypLi4BIwxSaaJiIiIiGQn8fEJJCSoXSvyIFHSXkREHmgbz8DiYxCbA9u8xsDaU3DxFjyIbfrdhy7yzhdbbIn7S1di6DdqHTGx8Q6OTJzZX+dg4VG4GefoSEREsp4xhpkL9ttNuxUTz/eLDzooIpF/5OR2+70cPnGVNydttiXur12Ppf+odVy7HuvgyLK3vRfh/C24HOPoSDLH3xesx3fNidq1Z27C/Ci45UQxOSsl7XOgeUsPJ7kDe/HyLZauP+GgiETkQTB//ny+++47vvvuO5YvX+7ocFLl013w6gb495/Wf++4dGaKOSeOcjE2hpVnT2f6vsbvgEGbIOo6vLHZmsR/kFQqV5BGjxbjnS+2cONWHK9P2Mh7/Wrg4e7q6NCArH0vSOrM2A/918PwLdBvPcTo/o7cYfdFeHEtHL4KC9M4vtyDbvo+6LkGBqyH6BuOjsbetvPwQqT1df3tuKOjcSyLxUK+PO6MnrINgLj4BF4evY5q5Qs6OLK023wW+kZCn0hYGe3oaJyL2u3ZT1BJbzo2KcWbkzZzKyaOQeM38lbfauTJrbHj0mvxUXhxPRy9Ds+vzZrEfVa2/+cctrZnj16HF9bCDSdIkh+7Zr0uv7sNDlyFTWccHZFzU9I+E1yPgw+2weubrHePspqri4WBH26wJe4vXr5FrxFrCCyWN0vjiEuASTvhtY0wZc+DlyzKCv93yPo+O38Lbj7AiYUVJ63n4ex
Na49icYxjx45x8uRJAgICkpQkcVabzsCt//XU2X4BTmdBEuFszC2WncmaJO3ms//0RNp+Hq46QUMtq9Wv4cejlYrw7aKDvNevBoULeDk6JJusfC9I6qw9Ddf+93m67Tzsv+zYeJzF/Pnz+emnnxg5ciTz5893dDgO9cFfsPEs3IiHSbusvcWcyZwTR+3+dRZ/nIVp+6x/V5Gn4f2tjo7oH8bA6L9g0znr6zpxJ1zMoT0uU6ttw0AeLpOf977cwvKNJxnYrRLlS+e/7+1m5fsz3sDY7fDHOfjzHEzYYf2eLlZqt2dPwf8qQot6JZjx8wH+3bsqxXzzODqkbG159D/X+72XrXmFzJaV7f/V0XDlfw9i7LxovR462vwjcOSa9f+xBhY/4DfK78ViHHyFjoyMZP369axbt45XXnmF/fv30717d0eGdN+ej4TN//tjyOsGA/4F7QOzNoaflkexbONJzl64wc2YBEa/UpOyAd5ZGsPwP+GXY5AAuFngqdLwaqUsDSFH+/YAfL7b+uUC4DFf+E/tzNtfws7dKQ5E+/XRQzxbsrTt3zu5jh6JSyYNRLv+NLz1J1z434ftwz4wtT6457Bbkr2Gr2byyPqODuOepk2bxpkzZ2jdujUPP/xwkvkRERFERTngbmYK/ijXlTMFrHHmvnGGun9/hmtCxjxi6nf9Jn33J/1SOufEURoXKcq2SxcJKexrN++/ZUsSnTvjksqbyodzzqccAHmvn6Tujs+w4BxfzNZX6E3t3RGZvp+bsRaW7SyIi8WQ2zOB+g9dxGK5v22u2FWA0IcvpHp5Z3gvJMqq854WzhTTttIdOVmkOgBety7w2N+f4xF33cFR2QsMDKR3795Zus9PP/0Ub29vChYsSFxcHG3btk2yzKRJkxx2fV+xx5fQ8pn/BdgAG2oO4WauwtYJCfE8svU/eF/N+m+cxW/G0O/IqSTT/3t4P6Vy5+FqXBwdi5VMMv+TgKIc9/LIihDtHPOvw/5yHW2/57scRY2tk7I8juQkWFzYUHMot7wKAOASH0ONPz8mz40H+6ZqXIKFH7eUICEBnqyZ+iR7Su9NuPv7M6Pfm3GunmwIfoNYT+t3YNfY69T88yO8bl3MsH1kpMDAQPr375+l+1S7PeNkVVsmJs7C738XxNXF4OFqCH34wn23ax9ku0s257BfXbC44B57lZq7p+F9I/nrV1o5Q/v/71JtOOr7KAAeMZepvfNLcsdczLDtp8ep/BX4q0wn4t28wCRQ/ugSSkdHOjSmtErLd8H7bbc7PGn/4Ycf8tprrzFmzBgGDRrEjBkz6NGjhyNDui/X4uCJZXD6tl43oX4w7tGsj+XbRft5/t21bJ7VlodK+WT5/jsvh/1X/vm9akGYUi/Lw8ixBqy39lRK5J8LfmwCrpn0oX23pP3HB/bwcD5vdl25zCtB5ZPMz8yk/YgtsOC2z8LcbvB1fSiVL1N25zDZJWmf3VyJhY92wKpomFALqmTgk9/mwEHiXh2apnXcJozCElQmw2I4dxMm/g0xBp4rBw9l/UdBinquhimZ/Ja+dCWG1yds5P3+wRQu4MXqP6JZtvEEbz9fHct9fMP596TNvNc/ONXLO8N7IVFWnPe0cqaYbsTBhL9h6Ql4t4b1hrhYzZkzh927d9OsWTNq1qzp6HDsDB63gXGDa2XJvob/aa2nHA9UKQBfPAYeDqi4lbDvAPGDhiWZPufEUToWK2n7906u4z/ApVxQVoRo58Ita+mBA1fAAoSXhX4VszyMFA3dbP27TwAeKQSf1QG3HNYBJC1uxcTz8uh1DOxWid2HL7Hr4EWG9KyaqnVTem/C3d+fmfHeHLzRWhbHALWKwKTa4KIEZ7aVme32+5UVbZlr12MZNH4jb/etRjHfPGz++wxzfj/M+/2DcdEbO10Sq0NE34CGfhCW9GMz3Zyh/R8TDx/vtH4nbF4cGhXLsE3flx8OWjs6B+WDPuXJdjee0vpd8H64Zcle7sLFxYUvv/y
SmzdvMmbMGKpWTb4x8M4773DkiPMXjjRYuNjwHcjnb5v214bV9PryqyyN41acK2sOBZHfzUKH3pOoFXA4y/8QTtQdAoXK2n4/uGs7vaZPzNogcrDDj/SCEv90rb9w6hh9e4/ItP2ViYnjtRTmFfTwoLmvP6duJf+M+OjRoznokTmXm5MV2kG5FmCxfrOKvXKeEW+8g1vM1UzZn6OsPlSGXr1Sfx0JCAjg7bffzsSIcoZ87jC8urWh7UwN/4xSyAveqeHoKBzn/KVbtoQ9WEvl5MvjTmxcgtPUtRfnkssNhlWFA5eVsL9Tx44d773QA2B4dahZGC7FQodAxyTs7yYxEZpcwt6RCnjC53Xg56Mw77BzJewB3qsBtX3heiy0L/VgJ+yNMQwYs95WEiexLM6EGTt4tdv9PTad1e/P0cHw0xGYshcmPKqEfXaX09vt93LxSowtYQ/WUjme7q7cioknl5fDU3vZkptLzq4G4eEKr1d2dBRJPVnG+iP35vC/7EGDBhEdHc3KlSt56qmnUlwuOyWffj0OX+y2DrBQrSB89Hh98rpnXReyxBr2v71jLYmTWCrno9dqZekd2L/PWweXOHQFyvnAqMaVKdl7cpbtP6e7GAMDN8CJ65DHHQY8WoIGz2Te+b1bT/vEkjjJlcYBGDJkSKb1tI9NgNc2wZ5L4OECT1cqSOenPs6UfTmStad98r2WRCR5pUskfeSmWoVCDohERHIKFwu0CnB0FNlTQS8ILwe/n3B0JEm5WqCtXlfAOhDthwNrki/PP6Vq2jYM5Mq17Ffo380FOpSCeVHOd4NNJK2KF01av77yQw/g3QuRB4jDk/YAfn5+d03YZzfNikMjf+i0DL50QDmYkV9ssath36ahtaD+1Hl76dUxaemSzPKvgvBtKHRYCjMaZL9HXpxdfg9ruaGrcdaSMJlVFsfZubvAx7Xgaix4uT7YPaNERERERO7X7Qn7u00TERGRzOMUSfucyM3FcY/fffRarSS1ets0DHTIqPAuFuuPEvaZw2KxPiYokFfnQUREREREREREcgAl7XOglAbXu59B90REJI0CA3CbMCrN60gOpPeCiIiIiMiDQ+1/yQBK2ouIiCRjxowZtGjRgmPHjrFr1y5atGjBsmXLKFy4MPHx8VSpUoUCBQqkuL7FzQ2CNMJOWtx+ziMjI+natSsTJ07kkUceoWrVqqxfv55y5cpRrVo1R4eaJs78Xrjzfd6qVSsWLFiAMYbHHnsMb2/vu77Psyoub29vKleuTFRUVKr+/rIiphYtWjB37lzKli3rsJjkwWUpFYjr+A/StZ5IZtJ7Uxzh9s/oQ4cO0bhxY6ZMmUJgYCCFChWicuXKDm/PREZG8swzz7Bs2TKMMQQGBnLw4EHKli2b7dq2cm/O3P6X7ENJexERkWR069YNgPj4eKpWrQpAu3btADh79ize3t6OCi3Huv2cv/jii4D9QPQlS5bk1KlTDoktp0rufd65c2fAse/z5OIC63vAUXElF1P37t0BXRMk61nc3bCUC3J0GBkmuRuIX3/9NVWqVNFNsWwmu7w370zyNmrUiGXLllGqVCklULOh5D6jX3nlFcB52jOJbdvE7xMAjzzyiNq2IpIiJe1FJFUs5YJwHT0y3euKZFe+vr5JphUuXNgBkTw4kjvniYoWLZqFkTw4nPV97oxxOWNMItldcgm3xASXbopJZkipc4YSqNmbs35Gq20rIumhpL2IpIrF3R1LxQqODkNEREREcihnTbhJznXne04JVBERcRZK2jtAREQETZo0ITo6mqJFi3L06FFKly7Nt99+S8eOHfn7778pWrQoderUcXSomSq581C9enVmzZpFo0aNHpjzIPKgMnFxEHUkbSsFBljrA4qIZDO65omISHalzzARkaynK6gD9O7dGwB3d3dKlChBmTLWwSmGDBkCQLly5Th+/LjD4ssqKZ2Hvn37Ag/OeRB5YEUdIe7VoWlaxW3CKA3oIyLZk655IiK
SXekzTEQkyylp70AlSpRIcV7x4sWzMBLH0nkQERERyVlMbBzmcFSa1rGUCsTirq8nIpI9pOc6B7rWiYhI6uiTQkREREREMpQ5HEX8oGFpWsd1/AcavF5Eso30XOdA1zoREUkdF0cHICIiIuLsNu04gzHG9rsxhk07zjgwIhEREREREclsf+09z62YeLtpWfFdMNv2tDexsZh9B9K1rqVcEBZ39wyOSEREspvLMTDhbzh8Fbadh6oFHR1Rzhdv4PNd1nP+42FoV8rREaXOzVvxvPPFFt5+vjrGGN7971ZCg/0cHVaq/RgFa0/BievW18DV4uiIYO8lmLoPoq7C2ZtQ2MvREcGNuH+uCWtPwWNFHR2RiIiIgHO2242BL/dYY/ruIHRWCX+RHMk7jzuDxm1g/OBaAHyzcD8xsQnUrFQkU/ebfZP2+w4QP2R4utZ1HT0SS8UKGRyRiMiDbf78+dy4cYM9e/bw6KOP8vjjj6drO3NOHKVxkaJsu3SRkMK+GRylvRFbYNUp6//f2QrfNACvbPvJmD1M3gNf7QcDfLwTSuSF4MKOjure6tewJuhHfrGF1X9G816/YBoE+zs4qtTZfBYm/g1X4qy/R+yB5x3cDIo38PafsP+K9fe3/4TPHnNsTAAfbINfjv/v/3/BjAZQwDNz95mV17yc4sTpa3wyaxfvv1wDi8V6B+qHJQfJm9udFvVLOjg6SUm8gRPXID7B0ZHIg+TMTYjVey4JtdszxswDMHUvxAOf7gJfL2hUzLExiUjGK1U8H4PDKzNo3AaiTl4lqKQ3Pdo9lOn7VWpCREQyxLFjx4iJiaFhw4acO3cu2WUiIiKIirIO2OV3/SZ9k1nmbMwtlp05TSEPjyTzPv30M6JzZ1x33G0P94F8Adb4L97kzQ8mkSvmUoZtX5LaVqYjpnB1AK7GwUffLaZM9BoHR5U6xsDCrYU5d9WNJfM28OuPjo4odQ761eVKwD9fxheu3cqxmbMdGBHEunpypPIr4JEPgJ1RJ/n3ok8dGhPAxvLdwacsANHXE3h7/Bf4XD+RIdvOqGteYGAgvXv3zpCYsqtivnlo9lhxhv1nMx+8HMyew5coXjQPr3ar5OjQJAWxCfDqBmvv2NgE+OUYPF7C0VFJTvfJTpgXBVdiYfRfMKSKoyNyHmq3Z4wdpdoS71sTgBvx8MX8FSw7/rtDYxKRzLP3QD7+iPLmX3lX8e/N917+ftvtStqLiEiGeOmll5g2bRpubm7ky5cv2WVu/8AyBw4S9+rQJMsU9vCkURFftl26mMw+XsQSlHHPnX62C745ALcSoKa/Fx+2fw0XJygbkpOtiob3t8G5W1A6L0zsFUYRrzBHh3VPiSVx/tPeD4vFwrKNJ3j7+eq2Xr7O7OxNeHEdHLwChTxh8OPVCPGv5tCYjIHXNsHKaHBzgVaV/Bn89HsOjQlg5n6I2AvX4qBaIRfGv/EiHq4Zs21nuOal1/z587FYLOzYsYOHHnqITp06pXtbc04cpWOxkrZ/0yu0pvVJl/av/M6VazFK2Du5xcdg/W2lX384pKS9ZK6b8bDoGFyKtf7++3Ho9VDWlWLLqGtdZlG7PWNsPmt9WvD0TQjIAx89G0qJPKGODUpEMsU3C/cTFJzAlzX9GffVdsYProVnRn1RSIGS9iIiTij67HVOn79JlYfsizWu3XqKyuUKkC9P0t4szqBHjx73vY3ELzZZUSbixYfh0SLWpGaTYji84f8gaOAHJfLA3xegni8UcII65qkx6dudhAb72ZXE+c83fzPgGedPFBb2gv/WgTWn4V/5oYy3oyMCiwXG1ITlJyC3G9Rxkqowz5SFKgXg6HVo7E+GJezvJiuveel17NgxvL29CQwM5PLly8kuM2nSJFuPTIDiN2Pol8xyZ2NuseT0yWS3MXHiRI57pf7zbU90Po5eyM2Fax4MGjSYbHAP7YF1qnAVqNAVXKx/VFGHDjB4/ucOjkpysgSLK5eDX4dchQC4fvUS744
Yj3vc9QzbR0rXOUj7tS4wMJD+/ftnWGypoXb7/QsuDF88BlvPQZ2iUCSbtGtFJG0WrjpCTGyCrSTO4PDK/HvSH3w46NFM3a+S9pngVrz1LvDZm/DbcWha3NEROUaCgen7rOfhu4PwVGn0ZUoklXzyejDwww288VwVqpa3ftlYuv4EP/x6kM/edILCzzlIdqinntOUyWf9yU5efOph3NxcbL/Xr+FHnarOm2S9UwEvaB3g6CjsuVqgiRO2kaoUsv7IP1566SXmzJmDn58fp0+fTnaZO5NNCfsOED9oWJLlCnt40tzXnzknjiaZN2DAAFzKBaUqph+WHKT46eu82q0SKzadZMnaY3zwcnC2ePrlQZRg4K0/Ycs5yOsGg2oHUevZcY4OS3K4bw/A94es77+25X3o9cQ7Gbr9lK5zkHHXOrHnjO32gLzWHxHJuZo/VsLuu2Cp4vkYNSA40/fr8KR9ZGQk69evZ926dQwbNoz169fz4osv3tc2vz56iGdLlrb9m9UGb4R1/3v8c9Rf1rqNLZzvibhM98E2WHAU4gxM2mkthfDSw46OSiR7yOXlxpSR9ek5fDVvPFeFE6ev2xL2rq4u996AiGSo2xtpd5smklN17NgxY7bzv16Z91Mu4sTpa5w8c91WEiexVM4va45pIFon5WKB92tYOzd5uKgjj2SNLkHw5P/SAVn9kZ0R1zoREXEOjvou6PBvm2vXrmXQoEHUrFmTMmXKpPjIbVqcj4lhyemTnI+JyYAI0+Z6HOy/8s/vl2NhefJPxeV4Oy5YE/ZgrTu3JfnxbUQkBYmJ+1c/3MDOQxeVsBcREcE6EO2dpalCa/orYZ8NeLoqYS9Zy80l6xP2IiIiGcHhPe1dXFz48ssvuXnzJvv27SNv3uSfK3rnnXc4cuSI7fcyMXG8lsI2C3p40NzXn1O3biY7f/To0Rz0yJxDNxYXLjd8B/L62aZt3RhJry+nZcr+nNmJekOh4D+P/e3f/Te9pk9wYEQi2c+JS94cO1eYy9fy0qnbKxTKnbo6nAEBAbz99tuZHN19CgzAbcKoNK8jIpIt6ZonIiLZlT7DRESynMUYYxwdRHR0NCtXruSpp55K9ToJO3cTP2R4uvbnOnokLhUrpGvd1PjpCEzZC9dioURemFgLfJxzzMhMtfksjP4Lzt8CXy/rI7FBTjD4nUh2cXsN+5jYBFupnMQa9yIiIs7qbrWeU+I6/gPVeRaRbCM91znQtU5ERFLHKZL26eHMSXuAG3HW0jiFvawDrT2oYuKtSfvCXnosUSQtDh27wuip2+xK4ty4GUfvkWv4ZGgd8nt7OjhCERGRlJnYOMzhqDStYykViMXd4Q8Ci4ikSnquc6BrnYiIpI6S9iIiTiohweDiYrnnNBERERERERERyTnU91lExEkll5xXwl5EREREREREJGdT0l5ERERERERERERExEkoaS8iIiIiIiIiIiIi4iSybU17ExuL2XcgXetaygVhcXfP4IhERERERERERERERO5Ptk3ai4iIiIiIiIiIiIjkNCqPIyIiIiIiIiIiIiLiJJS0FxERERERERERERFxEkrai4iIiIiIiIiIiIg4CSXtRURERERERERERESchJL2IiIiIiIiIiIiIiJOQkl7EREREREREREREREnoaS9iIiIiIiIiIiIiIiTUNJeRERERERERERERMRJKGkvIiIiIiIiIiIiIuIklLQXEREREREREREREXESStqLiIiIiIiIiIiIiDgJJe1FRERERERERERERJyEkvYiIiIiIiIiIiIiIk5CSXsRERERERERERERESehpL2IiIiIiIiIiIiIiJNQ0l5ERERERERERERExEkoaS8iIiIiIiIiIiIi4iSUtBcRERERERERERERcRJK2ouIiIiIiIiIiIiIOIn/B+d+JIRDnnQcAAAAAElFTkSuQmCC", + "text/plain": [ + "
" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "plot_k_best = 4\n", + "\n", + "idx = np.argsort(U_norms)\n", + "fig, axs = plt.subplots(1, plot_k_best, figsize=(10, 2), constrained_layout=True, dpi=150)\n", + "\n", + "for i, (idx_i, ax) in enumerate(zip(idx[:plot_k_best], axs.flatten())): \n", + " ax.clear()\n", + " generated_qc_list[idx_i].draw(\"mpl\", plot_barriers=False, ax=ax)\n", + " ax.set_title(f\"The {i+1}. best circuit: \\n infidelity {U_norms[idx_i]:0.1e}.\", fontsize=10)" + ] + }, + { + "cell_type": "markdown", + "id": "02831e92-8b4b-4534-b411-e22a432ab1a8", + "metadata": {}, + "source": [ + "## Gate-Pair tokenization" + ] + }, + { + "cell_type": "markdown", + "id": "78fbf7dc-03e4-44a4-b9f2-7b0d6b3f07d0", + "metadata": {}, + "source": [ + "Now we want to extract reusable substructures (gadgets) from generated circuits. We use all generated tensors in `out_tensor`, regardless if their circuits have good or bad infidelity." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "f1bb2aed-1070-427a-a085-7332fdbfbdbd", + "metadata": {}, + "outputs": [], + "source": [ + "gate_pair_tokenizer = gpe.GatePairTokenizer(unique_class_values=pipeline.embedder.unique_class_values, \n", + " zero_token=0, \n", + " padding_token=9, \n", + " device=\"cpu\")" + ] + }, + { + "cell_type": "markdown", + "id": "a4df80e9-2bf3-4216-b143-36999e2d598e", + "metadata": {}, + "source": [ + "Next, we run our proposed Gate-Pair Encoding (GPE) scheme:" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "49fe6f42-d84d-4196-8e7d-5127b11e0685", + "metadata": {}, + "outputs": [ + { + "data": { + "application/vnd.jupyter.widget-view+json": { + "model_id": "962235847487477e88ee001eaa67b2ee", + "version_major": 2, + "version_minor": 0 + }, + "text/plain": [ + " 0%| | 0/100 [00:00" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "max_depth = min(max_depth, 
max(unpacked_vocab_configs_depths.keys()))\n", + "fig, axs = plt.subplots(max_depth, topk, figsize=(12, 6), dpi=200)\n", + "\n", + "for ax in axs.flatten():\n", + " ax.clear() \n", + " ax.set_axis_off()\n", + "\n", + "for (depth, unpacked_vocab_configs), (unpacked_vocab_configs_cnts), axs_sel in \\\n", + " zip(unpacked_vocab_configs_depths.items(), unpacked_vocab_configs_cnts_depths.values(), axs):\n", + " \n", + " if depth > max_depth:\n", + " break\n", + " \n", + " for i, (ax, unpacked_vocab_config, unpacked_vocab_config_cnt) in \\\n", + " enumerate(zip(axs_sel, unpacked_vocab_configs, unpacked_vocab_configs_cnts)):\n", + " \n", + " zero_ps = torch.zeros((1, unpacked_vocab_config.shape[-1])) - 1\n", + " instr = tokenizer.decode(unpacked_vocab_config, zero_ps)\n", + " qc = simulator.genqc_to_backend(instr, place_barriers=False)\n", + "\n", + " #------\n", + "\n", + " ax.clear() \n", + " qc.draw(\"mpl\", \n", + " plot_barriers=False, \n", + " ax=ax, \n", + " idle_wires=False)\n", + "\n", + " for text in ax.texts:\n", + " if 'q' in text.get_text():\n", + " text.set_visible(False)\n", + " text.remove()\n", + "\n", + " ax.patch.set_facecolor('none')\n", + " ax.patches[0].set_color(\"none\")\n", + " \n", + " ax.set_title(f\"Occurrences: {unpacked_vocab_config_cnt.item()}\", fontsize=6)\n", + " if i==0:\n", + " plt.figtext(-0.03, 1-(depth-0.7)/max_depth, f\"Depth {depth}:\", horizontalalignment='left', verticalalignment='top', fontsize=12)\n", + "\n", + "plt.tight_layout()\n", + "plt.show()" + ] + }, + { + "cell_type": "markdown", + "id": "146b3c78-3975-4d48-8fe4-7d257c0dcedd", + "metadata": {}, + "source": [ + "As we only extract discrete tokens, the parameters of the continuous gates are set to 0 for plotting." 
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "22d4e269-2a56-4026-81f3-8e22889adaf9", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "8c9909e3-a035-46f7-8979-44be322a5fe0", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "genQC Version 0.2.0\n" + ] + } + ], + "source": [ + "import genQC\n", + "print(\"genQC Version\", genQC.__version__)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "87d2050b-6b59-44d0-8a41-f393bc656bcf", + "metadata": {}, + "outputs": [], + "source": [] + } + ], + "metadata": { + "kernelspec": { + "display_name": "python3", + "language": "python", + "name": "python3" + }, + "widgets": { + "application/vnd.jupyter.widget-state+json": { + "state": { + "021f52d503b44f5e93f326f4e1c681e9": { + "model_module": "@jupyter-widgets/controls", + "model_module_version": "2.0.0", + "model_name": "HTMLModel", + "state": { + "layout": "IPY_MODEL_b7b3c47c7bb14d2ea1ee06e8afdba7f6", + "style": "IPY_MODEL_12d3e5aabc814faa8604361b1c80490a", + "value": "100%" + } + }, + "0ba4139e005649238a5b4040b628100f": { + "model_module": "@jupyter-widgets/controls", + "model_module_version": "2.0.0", + "model_name": "FloatProgressModel", + "state": { + "bar_style": "success", + "layout": "IPY_MODEL_2dc7af76b38b435e8115388ff771bdcb", + "max": 40, + "style": "IPY_MODEL_804a105cc9d647bbab287df9ff67502f", + "value": 40 + } + }, + "0de51d1b7df643d181aabefb36044dd0": { + "model_module": "@jupyter-widgets/controls", + "model_module_version": "2.0.0", + "model_name": "ProgressStyleModel", + "state": { + "description_width": "" + } + }, + "122ff94a05ef4363937c00a7ddcb041b": { + "model_module": "@jupyter-widgets/base", + "model_module_version": "2.0.0", + "model_name": "LayoutModel", + "state": {} + }, + "12d3e5aabc814faa8604361b1c80490a": { + "model_module": "@jupyter-widgets/controls", + "model_module_version": "2.0.0", + 
"model_name": "HTMLStyleModel", + "state": { + "description_width": "", + "font_size": null, + "text_color": null + } + }, + "189915e75b474731878f4dd2d82eea7d": { + "model_module": "@jupyter-widgets/controls", + "model_module_version": "2.0.0", + "model_name": "HTMLStyleModel", + "state": { + "description_width": "", + "font_size": null, + "text_color": null + } + }, + "18f39bb92f814ba192c66550cf35f972": { + "model_module": "@jupyter-widgets/controls", + "model_module_version": "2.0.0", + "model_name": "FloatProgressModel", + "state": { + "bar_style": "success", + "layout": "IPY_MODEL_122ff94a05ef4363937c00a7ddcb041b", + "max": 40, + "style": "IPY_MODEL_0de51d1b7df643d181aabefb36044dd0", + "value": 40 + } + }, + "2dc7af76b38b435e8115388ff771bdcb": { + "model_module": "@jupyter-widgets/base", + "model_module_version": "2.0.0", + "model_name": "LayoutModel", + "state": {} + }, + "318e739a6c804569a9a6fe91a95791f9": { + "model_module": "@jupyter-widgets/controls", + "model_module_version": "2.0.0", + "model_name": "HTMLModel", + "state": { + "layout": "IPY_MODEL_c9419d0d5f8e441e815aaccc5a7e69e1", + "style": "IPY_MODEL_4da4dd4163ae4fffa6ba18402643beee", + "value": "Fetching 4 files: 100%" + } + }, + "341b725d6ea741f4afb8cede9a4ad7a0": { + "model_module": "@jupyter-widgets/base", + "model_module_version": "2.0.0", + "model_name": "LayoutModel", + "state": {} + }, + "3ad8e473e03047969891e6c1204a1290": { + "model_module": "@jupyter-widgets/controls", + "model_module_version": "2.0.0", + "model_name": "HBoxModel", + "state": { + "children": [ + "IPY_MODEL_021f52d503b44f5e93f326f4e1c681e9", + "IPY_MODEL_51fcfd77dc8c4475ba826f165c8f1f46", + "IPY_MODEL_eb040d59b1a24ae287c1cc61832eaf82" + ], + "layout": "IPY_MODEL_824ffe4f7f30494b8139208af9f5d021" + } + }, + "4690a080654a4e1ea3b9edd4d3c073b1": { + "model_module": "@jupyter-widgets/controls", + "model_module_version": "2.0.0", + "model_name": "HTMLStyleModel", + "state": { + "description_width": "", + "font_size": null, + 
"text_color": null + } + }, + "49813f4fc64b44a799ec78f0921ce5d0": { + "model_module": "@jupyter-widgets/base", + "model_module_version": "2.0.0", + "model_name": "LayoutModel", + "state": {} + }, + "4b5ae46e8b1445eda653dfcc62686dbe": { + "model_module": "@jupyter-widgets/base", + "model_module_version": "2.0.0", + "model_name": "LayoutModel", + "state": {} + }, + "4d2312b354ca4f30807c08d024c6e9f0": { + "model_module": "@jupyter-widgets/controls", + "model_module_version": "2.0.0", + "model_name": "HTMLStyleModel", + "state": { + "description_width": "", + "font_size": null, + "text_color": null + } + }, + "4da4dd4163ae4fffa6ba18402643beee": { + "model_module": "@jupyter-widgets/controls", + "model_module_version": "2.0.0", + "model_name": "HTMLStyleModel", + "state": { + "description_width": "", + "font_size": null, + "text_color": null + } + }, + "51fcfd77dc8c4475ba826f165c8f1f46": { + "model_module": "@jupyter-widgets/controls", + "model_module_version": "2.0.0", + "model_name": "FloatProgressModel", + "state": { + "bar_style": "success", + "layout": "IPY_MODEL_b391734f3fc7449a801ebc1b0dfec054", + "style": "IPY_MODEL_7e807267d2c74ffc91806e201348b15b", + "value": 100 + } + }, + "6b156939bb904c9dafe187a6c1770ea3": { + "model_module": "@jupyter-widgets/base", + "model_module_version": "2.0.0", + "model_name": "LayoutModel", + "state": {} + }, + "6d6e280e276840b3babdf728594bc1c1": { + "model_module": "@jupyter-widgets/controls", + "model_module_version": "2.0.0", + "model_name": "HTMLModel", + "state": { + "layout": "IPY_MODEL_edff37b931ba467f8c3af55668d610dc", + "style": "IPY_MODEL_4d2312b354ca4f30807c08d024c6e9f0", + "value": "100%" + } + }, + "7e807267d2c74ffc91806e201348b15b": { + "model_module": "@jupyter-widgets/controls", + "model_module_version": "2.0.0", + "model_name": "ProgressStyleModel", + "state": { + "description_width": "" + } + }, + "7e928f8f42024d3dae49e80ddd82b3b2": { + "model_module": "@jupyter-widgets/controls", + "model_module_version": "2.0.0", 
+ "model_name": "HTMLModel", + "state": { + "layout": "IPY_MODEL_c649ae350b714328a6b377c44e47f509", + "style": "IPY_MODEL_eeaf1b6b54674163b2a61d665af4df79", + "value": " 40/40 [01:52<00:00,  2.84s/it]" + } + }, + "804a105cc9d647bbab287df9ff67502f": { + "model_module": "@jupyter-widgets/controls", + "model_module_version": "2.0.0", + "model_name": "ProgressStyleModel", + "state": { + "description_width": "" + } + }, + "82066e5a51a344d49ba6c6f44b06325b": { + "model_module": "@jupyter-widgets/controls", + "model_module_version": "2.0.0", + "model_name": "FloatProgressModel", + "state": { + "bar_style": "success", + "layout": "IPY_MODEL_8837e58cb680404e8479e38a03e27688", + "style": "IPY_MODEL_e9132d33e6fe420cbe944ee26871e305", + "value": 100 + } + }, + "824ffe4f7f30494b8139208af9f5d021": { + "model_module": "@jupyter-widgets/base", + "model_module_version": "2.0.0", + "model_name": "LayoutModel", + "state": {} + }, + "8837e58cb680404e8479e38a03e27688": { + "model_module": "@jupyter-widgets/base", + "model_module_version": "2.0.0", + "model_name": "LayoutModel", + "state": {} + }, + "8fcdeb78bf434af0b590a4b334a69901": { + "model_module": "@jupyter-widgets/base", + "model_module_version": "2.0.0", + "model_name": "LayoutModel", + "state": {} + }, + "947bf1d7d4d04ffea3abf8530908ad62": { + "model_module": "@jupyter-widgets/controls", + "model_module_version": "2.0.0", + "model_name": "HBoxModel", + "state": { + "children": [ + "IPY_MODEL_9d2698b2d5244c8ab28111bfebf13091", + "IPY_MODEL_18f39bb92f814ba192c66550cf35f972", + "IPY_MODEL_7e928f8f42024d3dae49e80ddd82b3b2" + ], + "layout": "IPY_MODEL_c157678062c94873bd42f3fcc219c967" + } + }, + "962235847487477e88ee001eaa67b2ee": { + "model_module": "@jupyter-widgets/controls", + "model_module_version": "2.0.0", + "model_name": "HBoxModel", + "state": { + "children": [ + "IPY_MODEL_6d6e280e276840b3babdf728594bc1c1", + "IPY_MODEL_82066e5a51a344d49ba6c6f44b06325b", + "IPY_MODEL_b47cb0c8ab634af4b7c9571985cccf3f" + ], + "layout": 
"IPY_MODEL_49813f4fc64b44a799ec78f0921ce5d0" + } + }, + "9a1869704d7a48e3b26e80f244377358": { + "model_module": "@jupyter-widgets/controls", + "model_module_version": "2.0.0", + "model_name": "ProgressStyleModel", + "state": { + "description_width": "" + } + }, + "9d2698b2d5244c8ab28111bfebf13091": { + "model_module": "@jupyter-widgets/controls", + "model_module_version": "2.0.0", + "model_name": "HTMLModel", + "state": { + "layout": "IPY_MODEL_4b5ae46e8b1445eda653dfcc62686dbe", + "style": "IPY_MODEL_a7327c7ff92e4b0e87ef3ec38a95779d", + "value": "100%" + } + }, + "a67d491da7384d2e9c7d96de8779849f": { + "model_module": "@jupyter-widgets/base", + "model_module_version": "2.0.0", + "model_name": "LayoutModel", + "state": {} + }, + "a7327c7ff92e4b0e87ef3ec38a95779d": { + "model_module": "@jupyter-widgets/controls", + "model_module_version": "2.0.0", + "model_name": "HTMLStyleModel", + "state": { + "description_width": "", + "font_size": null, + "text_color": null + } + }, + "b391734f3fc7449a801ebc1b0dfec054": { + "model_module": "@jupyter-widgets/base", + "model_module_version": "2.0.0", + "model_name": "LayoutModel", + "state": {} + }, + "b47cb0c8ab634af4b7c9571985cccf3f": { + "model_module": "@jupyter-widgets/controls", + "model_module_version": "2.0.0", + "model_name": "HTMLModel", + "state": { + "layout": "IPY_MODEL_341b725d6ea741f4afb8cede9a4ad7a0", + "style": "IPY_MODEL_dced64245df74f84840f0e2e922f1d5c", + "value": " 100/100 [00:40<00:00,  2.51it/s]" + } + }, + "b7b3c47c7bb14d2ea1ee06e8afdba7f6": { + "model_module": "@jupyter-widgets/base", + "model_module_version": "2.0.0", + "model_name": "LayoutModel", + "state": {} + }, + "bda858eb4a444b6f9c2e4d12918619d8": { + "model_module": "@jupyter-widgets/controls", + "model_module_version": "2.0.0", + "model_name": "HTMLModel", + "state": { + "layout": "IPY_MODEL_dbbafaa32f044dc79cde20dca640d182", + "style": "IPY_MODEL_d8cd8f60d9a44adabb401eee87dc5b95", + "value": "100%" + } + }, + "c157678062c94873bd42f3fcc219c967": { 
+ "model_module": "@jupyter-widgets/base", + "model_module_version": "2.0.0", + "model_name": "LayoutModel", + "state": {} + }, + "c649ae350b714328a6b377c44e47f509": { + "model_module": "@jupyter-widgets/base", + "model_module_version": "2.0.0", + "model_name": "LayoutModel", + "state": {} + }, + "c9419d0d5f8e441e815aaccc5a7e69e1": { + "model_module": "@jupyter-widgets/base", + "model_module_version": "2.0.0", + "model_name": "LayoutModel", + "state": {} + }, + "cae28b86bb0e49368d6dac67ccc30360": { + "model_module": "@jupyter-widgets/controls", + "model_module_version": "2.0.0", + "model_name": "HTMLStyleModel", + "state": { + "description_width": "", + "font_size": null, + "text_color": null + } + }, + "d3e70533f1024409ab6f0a3662f0f515": { + "model_module": "@jupyter-widgets/base", + "model_module_version": "2.0.0", + "model_name": "LayoutModel", + "state": {} + }, + "d8cd8f60d9a44adabb401eee87dc5b95": { + "model_module": "@jupyter-widgets/controls", + "model_module_version": "2.0.0", + "model_name": "HTMLStyleModel", + "state": { + "description_width": "", + "font_size": null, + "text_color": null + } + }, + "dbbafaa32f044dc79cde20dca640d182": { + "model_module": "@jupyter-widgets/base", + "model_module_version": "2.0.0", + "model_name": "LayoutModel", + "state": {} + }, + "dbef30da12f646c2b35e308367007bd2": { + "model_module": "@jupyter-widgets/controls", + "model_module_version": "2.0.0", + "model_name": "HTMLModel", + "state": { + "layout": "IPY_MODEL_e3497cfed0494def925f01b715b31d10", + "style": "IPY_MODEL_4690a080654a4e1ea3b9edd4d3c073b1", + "value": " 4/4 [00:00<00:00, 798.72it/s]" + } + }, + "dced64245df74f84840f0e2e922f1d5c": { + "model_module": "@jupyter-widgets/controls", + "model_module_version": "2.0.0", + "model_name": "HTMLStyleModel", + "state": { + "description_width": "", + "font_size": null, + "text_color": null + } + }, + "dfc99b2ee2b74d00b6097a20baade825": { + "model_module": "@jupyter-widgets/controls", + "model_module_version": "2.0.0", + 
"model_name": "HBoxModel", + "state": { + "children": [ + "IPY_MODEL_318e739a6c804569a9a6fe91a95791f9", + "IPY_MODEL_ff3ef79425414a96859a7dae83ee809a", + "IPY_MODEL_dbef30da12f646c2b35e308367007bd2" + ], + "layout": "IPY_MODEL_6b156939bb904c9dafe187a6c1770ea3" + } + }, + "dfe56ee8b5b24cf2aaf2d149d0911663": { + "model_module": "@jupyter-widgets/base", + "model_module_version": "2.0.0", + "model_name": "LayoutModel", + "state": {} + }, + "e3497cfed0494def925f01b715b31d10": { + "model_module": "@jupyter-widgets/base", + "model_module_version": "2.0.0", + "model_name": "LayoutModel", + "state": {} + }, + "e9132d33e6fe420cbe944ee26871e305": { + "model_module": "@jupyter-widgets/controls", + "model_module_version": "2.0.0", + "model_name": "ProgressStyleModel", + "state": { + "description_width": "" + } + }, + "eb040d59b1a24ae287c1cc61832eaf82": { + "model_module": "@jupyter-widgets/controls", + "model_module_version": "2.0.0", + "model_name": "HTMLModel", + "state": { + "layout": "IPY_MODEL_a67d491da7384d2e9c7d96de8779849f", + "style": "IPY_MODEL_cae28b86bb0e49368d6dac67ccc30360", + "value": " 100/100 [00:00<00:00, 1785.68it/s]" + } + }, + "eb4ee8e9c8454fac8dc424778e9f73a2": { + "model_module": "@jupyter-widgets/controls", + "model_module_version": "2.0.0", + "model_name": "HTMLModel", + "state": { + "layout": "IPY_MODEL_dfe56ee8b5b24cf2aaf2d149d0911663", + "style": "IPY_MODEL_189915e75b474731878f4dd2d82eea7d", + "value": " 40/40 [01:53<00:00,  2.82s/it]" + } + }, + "edff37b931ba467f8c3af55668d610dc": { + "model_module": "@jupyter-widgets/base", + "model_module_version": "2.0.0", + "model_name": "LayoutModel", + "state": {} + }, + "eeaf1b6b54674163b2a61d665af4df79": { + "model_module": "@jupyter-widgets/controls", + "model_module_version": "2.0.0", + "model_name": "HTMLStyleModel", + "state": { + "description_width": "", + "font_size": null, + "text_color": null + } + }, + "fa240a3d6d114b6da392ac2fbe059980": { + "model_module": "@jupyter-widgets/controls", + 
"model_module_version": "2.0.0", + "model_name": "HBoxModel", + "state": { + "children": [ + "IPY_MODEL_bda858eb4a444b6f9c2e4d12918619d8", + "IPY_MODEL_0ba4139e005649238a5b4040b628100f", + "IPY_MODEL_eb4ee8e9c8454fac8dc424778e9f73a2" + ], + "layout": "IPY_MODEL_8fcdeb78bf434af0b590a4b334a69901" + } + }, + "ff3ef79425414a96859a7dae83ee809a": { + "model_module": "@jupyter-widgets/controls", + "model_module_version": "2.0.0", + "model_name": "FloatProgressModel", + "state": { + "bar_style": "success", + "layout": "IPY_MODEL_d3e70533f1024409ab6f0a3662f0f515", + "max": 4, + "style": "IPY_MODEL_9a1869704d7a48e3b26e80f244377358", + "value": 4 + } + } + }, + "version_major": 2, + "version_minor": 0 + } + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/src/examples/Quantum circuit synthesis with diffusion models/0_hello_circuit.ipynb b/src/examples/Quantum circuit synthesis with diffusion models/0_hello_circuit.ipynb new file mode 100644 index 0000000..4d0d61f --- /dev/null +++ b/src/examples/Quantum circuit synthesis with diffusion models/0_hello_circuit.ipynb @@ -0,0 +1,752 @@ +{ + "cells": [ + { + "cell_type": "raw", + "id": "a1eaa873-0e5d-4dd3-9e44-440a70df611f", + "metadata": {}, + "source": [ + "---\n", + "categories:\n", + " - Entanglement generation\n", + " - Quantum circuits\n", + " - Pretrained model\n", + "---" + ] + }, + { + "cell_type": "markdown", + "id": "69a855f1-55dd-482e-94f2-9ad02804be4d", + "metadata": {}, + "source": [ + "# Generate a circuit\n", + "\n", + "> A minimal example to generate a circuit. We load a pre-trained (SRV, 3 to 8 qubit) model and condition on a given Schmidt-Rank-Vector (SRV)." 
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "3bde494e-9091-41a4-a601-bbcf9712c564", + "metadata": {}, + "outputs": [], + "source": [ + "from genQC.imports import *\n", + "import genQC.utils.misc_utils as util\n", + "\n", + "from genQC.pipeline.diffusion_pipeline import DiffusionPipeline\n", + "from genQC.platform.tokenizer.circuits_tokenizer import CircuitTokenizer\n", + "from genQC.platform.simulation import Simulator, CircuitBackendType\n", + "\n", + "from genQC.inference.sampling import generate_tensors, decode_tensors_to_backend\n", + "from genQC.inference.evaluation_helper import get_srvs" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "029be4f3-0d9a-4d0a-93d9-2338fda7a983", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "[INFO]: Cuda device has a capability of 8.6 (>= 8), allowing tf32 matmul.\n" + ] + }, + { + "data": { + "text/plain": [ + "device(type='cuda')" + ] + }, + "execution_count": null, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "util.MemoryCleaner.purge_mem() # clean existing memory alloc\n", + "device = util.infer_torch_device() # use cuda if we can\n", + "device" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "785dc2ca-1fe8-4f0d-94bc-964b8f1733ac", + "metadata": {}, + "outputs": [], + "source": [ + "# We set a seed to pytorch, numpy and python. \n", + "# Note: This will also set deterministic algorithms, possibly at the cost of reduced performance!\n", + "util.set_seed(0)" + ] + }, + { + "cell_type": "markdown", + "id": "f77a3020-247c-4ac0-aaf1-ee5c371b5f06", + "metadata": {}, + "source": [ + "## Setup and load" + ] + }, + { + "cell_type": "markdown", + "id": "742ae430-46f2-4099-ac8f-f422a4ddc1dc", + "metadata": {}, + "source": [ + "Load the pre-trained model directly from [Hugging Face: Floki00/qc_srv_3to8qubit](https://huggingface.co/Floki00/qc_srv_3to8qubit)." 
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "e5d60c23-9514-4432-bc82-622c088fced6", + "metadata": {}, + "outputs": [], + "source": [ + "pipeline = DiffusionPipeline.from_pretrained(\"Floki00/qc_srv_3to8qubit\", device)" + ] + }, + { + "cell_type": "markdown", + "id": "104f977d-a6c5-4dbf-b272-b1a8fc6f013a", + "metadata": {}, + "source": [ + "Check on what gates the model was trained" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "62b067ac-d5a4-4424-b7da-571ae95067c6", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "['h', 'cx']" + ] + }, + "execution_count": null, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "pipeline.gate_pool" + ] + }, + { + "cell_type": "markdown", + "id": "431d3e29-f121-4c61-95bc-bfd7960a4870", + "metadata": {}, + "source": [ + "Set 20 sample steps and use rescaled guidance-formula." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "96702fba-5a10-44e6-bef9-634d9e41a1af", + "metadata": {}, + "outputs": [], + "source": [ + "pipeline.guidance_sample_mode = \"rescaled\"\n", + "pipeline.scheduler.set_timesteps(20) " + ] + }, + { + "cell_type": "markdown", + "id": "65acff8f-8486-42c9-8e78-b44f31de568b", + "metadata": {}, + "source": [ + "## Inference / sampling" + ] + }, + { + "cell_type": "markdown", + "id": "a09bd191-0374-45af-b923-f131c5d36af9", + "metadata": {}, + "source": [ + "Set our desired condition SRV" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "d1d4b69e-c14a-4dac-9cdf-2b65ecaee158", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "'Generate SRV: [2, 1, 2, 1, 2]'" + ] + }, + "execution_count": null, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "srv = [2, 1, 2, 1, 2] # set your target SRV; can be 3 to 8 qubit\n", + "num_of_qubits = len(srv) \n", + "\n", + "prompt = f\"Generate SRV: {srv}\" # model was trained with this phrase\n", + 
"prompt" + ] + }, + { + "cell_type": "markdown", + "id": "0184031c-627c-4b82-9607-e35a22f699f4", + "metadata": {}, + "source": [ + "Define sample parameters" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "721ba4f1-60f0-4f23-9306-bdf07f8fe659", + "metadata": {}, + "outputs": [], + "source": [ + "g = 10 # guidance scale\n", + "max_gates = 16 # how many time steps the tensor encoding has\n", + "samples = 64 # how many circuits to generate" + ] + }, + { + "cell_type": "markdown", + "id": "2842cc0c-770a-451c-ac44-e7265dbd87c2", + "metadata": {}, + "source": [ + "Sample tokenized circuits" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "51a84305-bcf1-43b5-a3b9-752a2defaf81", + "metadata": {}, + "outputs": [ + { + "data": { + "application/vnd.jupyter.widget-view+json": { + "model_id": "3728850a9cc2452db5d4236acf614a80", + "version_major": 2, + "version_minor": 0 + }, + "text/plain": [ + " 0%| | 0/20 [00:00" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "fig, axs = plt.subplots(2, 4, figsize=(18,5), constrained_layout=True)\n", + "for qc,is_srv,ax in zip(qc_list, srv_list, axs.flatten()): \n", + " is_srv = [int(x) for x in is_srv]\n", + " qc.draw(\"mpl\", plot_barriers=False, ax=ax, style = \"clifford\")\n", + " ax.set_title(f\"{'Correct' if is_srv==srv else 'NOT correct'}, is SRV = {is_srv}\")\n", + "plt.show()" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "0244290d-5c57-4b70-b670-a839876a9ccf", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "52132fa0-9208-442d-a31d-af65bcfba714", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "genQC Version 0.2.0\n" + ] + } + ], + "source": [ + "import genQC\n", + "print(\"genQC Version\", genQC.__version__)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": 
"d6aed55c-e9ec-4c5e-a691-37695556bde4", + "metadata": {}, + "outputs": [], + "source": [] + } + ], + "metadata": { + "kernelspec": { + "display_name": "python3", + "language": "python", + "name": "python3" + }, + "widgets": { + "application/vnd.jupyter.widget-state+json": { + "state": { + "01fd2d6f0f8049c09946379c771a467c": { + "model_module": "@jupyter-widgets/base", + "model_module_version": "2.0.0", + "model_name": "LayoutModel", + "state": {} + }, + "0f3daaab2a3a4d229f890bd491a05e82": { + "model_module": "@jupyter-widgets/controls", + "model_module_version": "2.0.0", + "model_name": "FloatProgressModel", + "state": { + "bar_style": "success", + "layout": "IPY_MODEL_5ead7394a77a46c89a0dc48418f4e39e", + "max": 2, + "style": "IPY_MODEL_eff7700fa64041f3abb2862855b60290", + "value": 2 + } + }, + "2786583fbd6a457cb54fe2c9fb700ad0": { + "model_module": "@jupyter-widgets/base", + "model_module_version": "2.0.0", + "model_name": "LayoutModel", + "state": {} + }, + "3728850a9cc2452db5d4236acf614a80": { + "model_module": "@jupyter-widgets/controls", + "model_module_version": "2.0.0", + "model_name": "HBoxModel", + "state": { + "children": [ + "IPY_MODEL_ef23d7660a5e417db0d9eeba1031e9ad", + "IPY_MODEL_aedc7c86f4ee4d98a9e56c0688ecde25", + "IPY_MODEL_7a6f3a282b084773908db5ee1ba437cc" + ], + "layout": "IPY_MODEL_2786583fbd6a457cb54fe2c9fb700ad0" + } + }, + "5b1ceaae9fc44634ac4b685d7bd73a0a": { + "model_module": "@jupyter-widgets/controls", + "model_module_version": "2.0.0", + "model_name": "ProgressStyleModel", + "state": { + "description_width": "" + } + }, + "5ead7394a77a46c89a0dc48418f4e39e": { + "model_module": "@jupyter-widgets/base", + "model_module_version": "2.0.0", + "model_name": "LayoutModel", + "state": {} + }, + "5f2c514cac1c47eca2ab21e293678346": { + "model_module": "@jupyter-widgets/base", + "model_module_version": "2.0.0", + "model_name": "LayoutModel", + "state": {} + }, + "785851c8014c4432af6e1fbd0d6aa0f5": { + "model_module": "@jupyter-widgets/controls", + 
"model_module_version": "2.0.0", + "model_name": "HTMLStyleModel", + "state": { + "description_width": "", + "font_size": null, + "text_color": null + } + }, + "7a6f3a282b084773908db5ee1ba437cc": { + "model_module": "@jupyter-widgets/controls", + "model_module_version": "2.0.0", + "model_name": "HTMLModel", + "state": { + "layout": "IPY_MODEL_b9786c3808464a3f8b1c03d6bea7a50e", + "style": "IPY_MODEL_785851c8014c4432af6e1fbd0d6aa0f5", + "value": " 20/20 [00:00<00:00, 55.44it/s]" + } + }, + "86be99d6a44d4001be89fc20c52ad5c3": { + "model_module": "@jupyter-widgets/base", + "model_module_version": "2.0.0", + "model_name": "LayoutModel", + "state": {} + }, + "9c0135bb50e44bb5b81cbe7f3d4b9cdf": { + "model_module": "@jupyter-widgets/controls", + "model_module_version": "2.0.0", + "model_name": "HTMLModel", + "state": { + "layout": "IPY_MODEL_cf8d8da83cdf4941b075544e68cf8dd0", + "style": "IPY_MODEL_c2f8914870514273ada4c3f0c9f4f9b9", + "value": "Fetching 2 files: 100%" + } + }, + "a1ff221e8278428d9d3bbe4bc9ac86f8": { + "model_module": "@jupyter-widgets/controls", + "model_module_version": "2.0.0", + "model_name": "HBoxModel", + "state": { + "children": [ + "IPY_MODEL_9c0135bb50e44bb5b81cbe7f3d4b9cdf", + "IPY_MODEL_0f3daaab2a3a4d229f890bd491a05e82", + "IPY_MODEL_a4091280768a4ee29d4ba6c0ec4f0e9e" + ], + "layout": "IPY_MODEL_a478f88390034ed6b78b289f74a466c2" + } + }, + "a4091280768a4ee29d4ba6c0ec4f0e9e": { + "model_module": "@jupyter-widgets/controls", + "model_module_version": "2.0.0", + "model_name": "HTMLModel", + "state": { + "layout": "IPY_MODEL_5f2c514cac1c47eca2ab21e293678346", + "style": "IPY_MODEL_b2cb6f61f8cb449e946c418c2a71dd82", + "value": " 2/2 [00:00<00:00, 400.09it/s]" + } + }, + "a478f88390034ed6b78b289f74a466c2": { + "model_module": "@jupyter-widgets/base", + "model_module_version": "2.0.0", + "model_name": "LayoutModel", + "state": {} + }, + "aedc7c86f4ee4d98a9e56c0688ecde25": { + "model_module": "@jupyter-widgets/controls", + "model_module_version": "2.0.0", 
+ "model_name": "FloatProgressModel", + "state": { + "bar_style": "success", + "layout": "IPY_MODEL_86be99d6a44d4001be89fc20c52ad5c3", + "max": 20, + "style": "IPY_MODEL_5b1ceaae9fc44634ac4b685d7bd73a0a", + "value": 20 + } + }, + "b2cb6f61f8cb449e946c418c2a71dd82": { + "model_module": "@jupyter-widgets/controls", + "model_module_version": "2.0.0", + "model_name": "HTMLStyleModel", + "state": { + "description_width": "", + "font_size": null, + "text_color": null + } + }, + "b9786c3808464a3f8b1c03d6bea7a50e": { + "model_module": "@jupyter-widgets/base", + "model_module_version": "2.0.0", + "model_name": "LayoutModel", + "state": {} + }, + "c2f8914870514273ada4c3f0c9f4f9b9": { + "model_module": "@jupyter-widgets/controls", + "model_module_version": "2.0.0", + "model_name": "HTMLStyleModel", + "state": { + "description_width": "", + "font_size": null, + "text_color": null + } + }, + "cf8d8da83cdf4941b075544e68cf8dd0": { + "model_module": "@jupyter-widgets/base", + "model_module_version": "2.0.0", + "model_name": "LayoutModel", + "state": {} + }, + "ef23d7660a5e417db0d9eeba1031e9ad": { + "model_module": "@jupyter-widgets/controls", + "model_module_version": "2.0.0", + "model_name": "HTMLModel", + "state": { + "layout": "IPY_MODEL_01fd2d6f0f8049c09946379c771a467c", + "style": "IPY_MODEL_ff8e452e252b4f26922e88b85ba10ddf", + "value": "100%" + } + }, + "eff7700fa64041f3abb2862855b60290": { + "model_module": "@jupyter-widgets/controls", + "model_module_version": "2.0.0", + "model_name": "ProgressStyleModel", + "state": { + "description_width": "" + } + }, + "ff8e452e252b4f26922e88b85ba10ddf": { + "model_module": "@jupyter-widgets/controls", + "model_module_version": "2.0.0", + "model_name": "HTMLStyleModel", + "state": { + "description_width": "", + "font_size": null, + "text_color": null + } + } + }, + "version_major": 2, + "version_minor": 0 + } + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/src/examples/1_editing_and_masking.ipynb b/src/examples/Quantum 
circuit synthesis with diffusion models/1_editing_and_masking.ipynb similarity index 99% rename from src/examples/1_editing_and_masking.ipynb rename to src/examples/Quantum circuit synthesis with diffusion models/1_editing_and_masking.ipynb index 40bd51a..199cfd7 100644 --- a/src/examples/1_editing_and_masking.ipynb +++ b/src/examples/Quantum circuit synthesis with diffusion models/1_editing_and_masking.ipynb @@ -1,5 +1,18 @@ { "cells": [ + { + "cell_type": "raw", + "id": "0c597a85-7e30-4b05-9a05-39cd65c7b44b", + "metadata": {}, + "source": [ + "---\n", + "categories:\n", + " - Entanglement generation\n", + " - Quantum circuits\n", + " - Pretrained model\n", + "---" + ] + }, { "cell_type": "markdown", "id": "69a855f1-55dd-482e-94f2-9ad02804be4d", @@ -16,6 +29,18 @@ "In this notebook we show editing and masking of circuits." ] }, + { + "cell_type": "code", + "execution_count": null, + "id": "438ed9c8-1c47-40a6-ad6f-0f126e7784bc", + "metadata": {}, + "outputs": [], + "source": [ + "# NOTE: this notebook is designed for an old version of genQC! 
Please use ´pip install genQC==0.1.0 -q´\n", + "import genQC\n", + "assert genQC.__version__ in [\"0.1\", \"0.1.0\", \"0.1.1\"]" + ] + }, { "cell_type": "code", "execution_count": null, @@ -28,8 +53,8 @@ "from genQC.inference.infer_srv import convert_tensors_to_srvs, schmidt_rank_vector\n", "import genQC.platform.qcircuit_dataset_construction as data_const\n", "from genQC.platform.simulation.qcircuit_sim import instruction_name_to_qiskit_gate\n", - "import genQC.util as util\n", - "from qiskit.quantum_info import DensityMatrix" + "# import genQC.util as util\n", + "# from qiskit.quantum_info import DensityMatrix" ] }, { @@ -743,6 +768,14 @@ "import genQC\n", "print(\"genQC Version\", genQC.__version__)" ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "e812d846-4722-428f-826e-1cc22c500f18", + "metadata": {}, + "outputs": [], + "source": [] } ], "metadata": { diff --git a/src/examples/Quantum circuit synthesis with diffusion models/2_unitary_compilation.ipynb b/src/examples/Quantum circuit synthesis with diffusion models/2_unitary_compilation.ipynb new file mode 100644 index 0000000..c08d618 --- /dev/null +++ b/src/examples/Quantum circuit synthesis with diffusion models/2_unitary_compilation.ipynb @@ -0,0 +1,1719 @@ +{ + "cells": [ + { + "cell_type": "raw", + "id": "b2972412-2867-4931-a132-46ed417c4ea8", + "metadata": {}, + "source": [ + "---\n", + "categories:\n", + " - Unitary compilation\n", + " - Quantum circuits\n", + " - Pretrained model\n", + "---" + ] + }, + { + "cell_type": "markdown", + "id": "69a855f1-55dd-482e-94f2-9ad02804be4d", + "metadata": {}, + "source": [ + "# Compile unitaries\n", + "\n", + "> A short tutorial showing how to use the unitary compilation model." 
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "3bde494e-9091-41a4-a601-bbcf9712c564", + "metadata": {}, + "outputs": [], + "source": [ + "from genQC.imports import *\n", + "import genQC.utils.misc_utils as util\n", + "\n", + "from genQC.pipeline.diffusion_pipeline import DiffusionPipeline\n", + "from genQC.platform.tokenizer.circuits_tokenizer import CircuitTokenizer\n", + "from genQC.platform.simulation import Simulator, CircuitBackendType\n", + "from genQC.inference.sampling import decode_tensors_to_backend, generate_compilation_tensors\n", + "from genQC.inference.evaluation_helper import get_unitaries\n", + "from genQC.inference.eval_metrics import UnitaryInfidelityNorm\n", + "\n", + "from qiskit import QuantumCircuit\n", + "import qiskit.quantum_info as qi" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "029be4f3-0d9a-4d0a-93d9-2338fda7a983", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "[INFO]: Cuda device has a capability of 8.6 (>= 8), allowing tf32 matmul.\n" + ] + } + ], + "source": [ + "device = util.infer_torch_device() # use cuda if we can\n", + "util.MemoryCleaner.purge_mem() # clean existing memory alloc" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "e29645b3-226d-49b2-9a22-bb6c18e05f1b", + "metadata": {}, + "outputs": [], + "source": [ + "# We set a seed to pytorch, numpy and python. \n", + "# Note: This will also set deterministic algorithms, possibly at the cost of reduced performance!\n", + "util.set_seed(0)" + ] + }, + { + "cell_type": "markdown", + "id": "f77a3020-247c-4ac0-aaf1-ee5c371b5f06", + "metadata": {}, + "source": [ + "## Setup and load" + ] + }, + { + "cell_type": "markdown", + "id": "742ae430-46f2-4099-ac8f-f422a4ddc1dc", + "metadata": {}, + "source": [ + "Load the pre-trained model directly from [Hugging Face: Floki00/qc_unitary_3qubit](https://huggingface.co/Floki00/qc_unitary_3qubit)." 
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "e5d60c23-9514-4432-bc82-622c088fced6", + "metadata": {}, + "outputs": [], + "source": [ + "pipeline = DiffusionPipeline.from_pretrained(\"Floki00/qc_unitary_3qubit\", device, )" + ] + }, + { + "cell_type": "markdown", + "id": "431d3e29-f121-4c61-95bc-bfd7960a4870", + "metadata": {}, + "source": [ + "Set 20 sample steps and use rescaled guidance-formula." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "96702fba-5a10-44e6-bef9-634d9e41a1af", + "metadata": {}, + "outputs": [], + "source": [ + "pipeline.guidance_sample_mode = \"rescaled\"\n", + "pipeline.scheduler.set_timesteps(20) \n", + "g = 10" + ] + }, + { + "cell_type": "markdown", + "id": "2b557151-ba81-423e-8f28-8e35a781b92b", + "metadata": {}, + "source": [ + "The model was trained with a gate pool of:" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "e6a15d3a-b658-429f-99d5-2bcbdcb955cf", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "['h', 'cx', 'z', 'x', 'ccx', 'swap']" + ] + }, + "execution_count": null, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "pipeline.gate_pool" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "114a1229-9e8d-47d8-874c-189a197e4ebd", + "metadata": {}, + "outputs": [], + "source": [ + "vocabulary = {g:i+1 for i, g in enumerate(pipeline.gate_pool)} \n", + "tokenizer = CircuitTokenizer(vocabulary)\n", + "simulator = Simulator(CircuitBackendType.QISKIT)" + ] + }, + { + "cell_type": "markdown", + "id": "65acff8f-8486-42c9-8e78-b44f31de568b", + "metadata": {}, + "source": [ + "## Compile a unitary" + ] + }, + { + "cell_type": "markdown", + "id": "37444b73-0b79-4fd3-9e91-2dd521f428d3", + "metadata": {}, + "source": [ + "Compile a given unitary $U$. Note, there has to be a solution with the `pipeline.gate_pool` in order to find the exact solution." 
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "0a78b750-181c-4060-bbce-d829c190ffbb", + "metadata": {}, + "outputs": [], + "source": [ + "def compile_and_plot(U, prompt):\n", + " U_r, U_i = torch.Tensor(np.real(U)), torch.Tensor(np.imag(U))\n", + " U_tensor = torch.stack([U_r, U_i], dim=0)\n", + " \n", + " out_tensor = generate_compilation_tensors(pipeline, \n", + " prompt=prompt, \n", + " U=U_tensor, \n", + " samples=samples, \n", + " system_size=num_of_qubits, \n", + " num_of_qubits=num_of_qubits, \n", + " max_gates=max_gates, \n", + " g=g, \n", + " no_bar=False, \n", + " tensor_prod_pad=False, \n", + " enable_params=False)\n", + "\n", + " out_tensor = out_tensor.unique(dim=0)\n", + " \n", + " qc_list, error_cnt = decode_tensors_to_backend(simulator, tokenizer, out_tensor)\n", + "\n", + " approx_Us = get_unitaries(simulator, qc_list)\n", + " approx_Us = torch.from_numpy(np.stack(approx_Us)).to(torch.complex128)\n", + " target_Us = torch.complex(U_r, U_i).unsqueeze(0).to(torch.complex128)\n", + " \n", + " U_norms = UnitaryInfidelityNorm.distance(target_Us, approx_Us)\n", + "\n", + " corr = ( U_norms.abs() < 1.0e-3 )\n", + " corr_qc = [qc for qc, c in zip(qc_list, corr) if c]\n", + " corr_qc = sorted(corr_qc, key=lambda x: len(x.data)) # sort to get the shortest solutions\n", + "\n", + " fig, axs = plt.subplots(1,4, figsize=(12, 4), constrained_layout=True, dpi=150)\n", + " axs[0].set_title(f\"{prompt}\")\n", + " for qc,ax in zip(corr_qc, axs.flatten()): \n", + " qc.draw(\"mpl\", plot_barriers=False, ax=ax)\n", + " plt.show()" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "b49dc061-6ad1-4e64-ab63-3c2a6b7c092e", + "metadata": {}, + "outputs": [], + "source": [ + "samples = 512\n", + "num_of_qubits = 3\n", + "max_gates = 12" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "7f3744d1-fa19-4403-bd0c-d0dadac805a3", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "\"Compile 
using: ['h', 'cx', 'z', 'x', 'ccx', 'swap']\"" + ] + }, + "execution_count": null, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "prompt = \"Compile using: ['h', 'cx', 'z', 'x', 'ccx', 'swap']\" # model was trained with phrases like this, allow full gate set\n", + "prompt" + ] + }, + { + "cell_type": "markdown", + "id": "6f38e035-f39c-4abd-bdc9-6386305cd4ca", + "metadata": {}, + "source": [ + "#### Exercise 1" + ] + }, + { + "cell_type": "markdown", + "id": "204dbc2d-b69f-45bd-9a48-d10e4d6bde84", + "metadata": {}, + "source": [ + "Inspired from [(quantumcomputing.stackexchange.com/questions/13821/generate-a-3-qubit-swap-unitary-in-terms-of-elementary-gates/13826)](https://quantumcomputing.stackexchange.com/questions/13821/generate-a-3-qubit-swap-unitary-in-terms-of-elementary-gates/13826). Note, this unitary WAS in the training set." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "5b826437-bcc8-40fd-9c99-f69082fe2efe", + "metadata": {}, + "outputs": [], + "source": [ + "U = np.matrix([[1,0,0,0,0,0,0,0],\n", + " [0,1,0,0,0,0,0,0],\n", + " [0,0,1,0,0,0,0,0],\n", + " [0,0,0,0,1,0,0,0],\n", + " [0,0,0,1,0,0,0,0],\n", + " [0,0,0,0,0,1,0,0],\n", + " [0,0,0,0,0,0,1,0],\n", + " [0,0,0,0,0,0,0,1]], dtype=np.complex128) \n", + "\n", + "assert np.allclose(U.H@U, np.identity(2**num_of_qubits)) and np.allclose(U@U.H, np.identity(2**num_of_qubits)) #check if unitary" + ] + }, + { + "cell_type": "markdown", + "id": "16dd2cd2-2ff5-4e7f-94d2-be51713fa322", + "metadata": {}, + "source": [ + "Plot correct (exact) compiled circuits:" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "e7b35524-f303-4744-b948-1c7dac4fa2e7", + "metadata": {}, + "outputs": [ + { + "data": { + "application/vnd.jupyter.widget-view+json": { + "model_id": "6749b88ab48941ecb6c367d988f75fa4", + "version_major": 2, + "version_minor": 0 + }, + "text/plain": [ + " 0%| | 0/20 [00:00" + ] + }, + "metadata": {}, + "output_type": 
"display_data" + } + ], + "source": [ + "compile_and_plot(U, prompt)" + ] + }, + { + "cell_type": "markdown", + "id": "8914651c-a30e-4a5b-aaa4-d98debd7147a", + "metadata": {}, + "source": [ + "#### Exercise 2" + ] + }, + { + "cell_type": "markdown", + "id": "3d0c618b-2e8d-4037-a1ec-482324112fb8", + "metadata": {}, + "source": [ + "Inspired from [(quantumcomputing.stackexchange.com/questions/12439/procedures-and-intuition-for-designing-simple-quantum-circuits/12440)](https://quantumcomputing.stackexchange.com/questions/12439/procedures-and-intuition-for-designing-simple-quantum-circuits/12440). Note, this unitary WAS in the training set." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "1b170062-d7aa-4bef-b1d1-a68e85e682ba", + "metadata": {}, + "outputs": [], + "source": [ + "U = np.matrix([[1,0,0,0,0,0,0,0],\n", + " [0,0,0,0,0,0,0,1],\n", + " [0,1,0,0,0,0,0,0],\n", + " [0,0,1,0,0,0,0,0],\n", + " [0,0,0,1,0,0,0,0],\n", + " [0,0,0,0,1,0,0,0],\n", + " [0,0,0,0,0,1,0,0],\n", + " [0,0,0,0,0,0,1,0]], dtype=np.complex128) \n", + "\n", + "assert np.allclose(U.H@U, np.identity(2**num_of_qubits)) and np.allclose(U@U.H, np.identity(2**num_of_qubits)) #check if unitary" + ] + }, + { + "cell_type": "markdown", + "id": "dc81558e-a227-4490-94bc-044ba6dcd502", + "metadata": {}, + "source": [ + "Plot correct (exact) compiled circuits:" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "3886fbf5-3f6f-4a44-89b7-d0a332a69334", + "metadata": {}, + "outputs": [ + { + "data": { + "application/vnd.jupyter.widget-view+json": { + "model_id": "0c02e2bbc3454fc7ba2621a251caa75b", + "version_major": 2, + "version_minor": 0 + }, + "text/plain": [ + " 0%| | 0/20 [00:00" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "compile_and_plot(U, prompt)" + ] + }, + { + "cell_type": "markdown", + "id": "46d42b16-2fcf-422a-a206-3eee667f4d4b", + "metadata": {}, + "source": [ + "#### Exercise 3" + ] + }, + { + "cell_type": 
"markdown", + "id": "c9824ae0-f3c6-4755-8a5b-aacb22678ec9", + "metadata": {}, + "source": [ + "A randomly generated unitary (from a random circuit). This unitary WAS NOT in the training set, it is new to the model!" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "63c51c9b-638a-42c4-8029-9add147d2255", + "metadata": {}, + "outputs": [], + "source": [ + "U = np.matrix([[ 0.70710678, 0. , 0. , 0. , 0.70710678, 0. , 0. , 0. ],\n", + " [ 0. , -0.70710678, 0. , 0. , 0. , -0.70710678, 0. , 0. ],\n", + " [-0.70710678, 0. , 0. , 0. , 0.70710678, 0. , 0. , 0. ],\n", + " [ 0. , 0.70710678, 0. , 0. , 0. , -0.70710678, 0. , 0. ],\n", + " [ 0. , 0. , 0.70710678, 0. , 0. , 0. , 0. , 0.70710678],\n", + " [ 0. , 0. , 0. , 0.70710678, 0. , 0. , 0.70710678, 0. ],\n", + " [ 0. , 0. , -0.70710678, 0. , 0. , 0. , 0. , 0.70710678],\n", + " [ 0. , 0. , 0. ,-0.70710678, 0. , 0. , 0.70710678, 0. ]], dtype=np.complex128)\n", + "\n", + "assert np.allclose(U.H@U, np.identity(2**num_of_qubits)) and np.allclose(U@U.H, np.identity(2**num_of_qubits)) #check if unitary" + ] + }, + { + "cell_type": "markdown", + "id": "5b5e50fd-da8d-47fb-aabb-92044aaba2ce", + "metadata": {}, + "source": [ + "Plot correct (exact) compiled circuits:" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "d6d5023f-b3f4-4cc6-81cb-eead8ffee190", + "metadata": {}, + "outputs": [ + { + "data": { + "application/vnd.jupyter.widget-view+json": { + "model_id": "10fa6f9af7dd44d1967ad187cd73ef34", + "version_major": 2, + "version_minor": 0 + }, + "text/plain": [ + " 0%| | 0/20 [00:00" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "compile_and_plot(U, prompt)" + ] + }, + { + "cell_type": "markdown", + "id": "60d35449-67b2-4ce0-9b5f-ac60670538e5", + "metadata": {}, + "source": [ + "## Transpile and discover" + ] + }, + { + "cell_type": "markdown", + "id": "41449006-9c42-4109-bb54-95581f90679a", + "metadata": {}, + "source": [ + "Set an initial circuit 
we want to transpile, optimize or use for discovering sub-arrangements:" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "f1c82de5-3645-403b-a54e-5185860b0f7c", + "metadata": {}, + "outputs": [ + { + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAbIAAADuCAYAAABcSIIkAAAAOnRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjEwLjMsIGh0dHBzOi8vbWF0cGxvdGxpYi5vcmcvZiW1igAAAAlwSFlzAAAPYQAAD2EBqD+naQAAHfNJREFUeJzt3XtYFOe9B/DvLAvCclERFBAUFVEhgBYkGiuK0TYWNRpjtDFGj7bpaaKS1khSaxKjxyAJMWliTbTHa9tQjJcmRm1u3ohHDYhGBZRqwMpl1VVUbiLLzvnDRyp1wd1ldod3+X6eJ08ed2be97fruN99Z96ZkWRZlkFERCQojdoFEBERtQaDjIiIhMYgIyIioTHIiIhIaAwyIiISGoOMiIiExiAjIiKhMciIiEhoDDIiIhIag4yIiITGICMiIqExyIiISGgMMiIiEhqDjIiIhMYgIyIioTHIiIhIaAwyIiISGoOMiIiExiAjIiKhMciIiEhoDDIiIhIag4yIiITGICMiIqExyIiISGgMMiIiEhqDjIiIhMYgIyIioTHIiIhIaAwyIiISGoOMiIiExiAjIiKhMciIiEhoDDIiIhIag4yIiITGICMiIqExyIiISGgMMiIiEhqDjIiIhMYgIyIioTHIiIhIaAwyIiISGoOMiIiExiAjIiKhMciIiEhoDDIiIhIag4yIiITGICMiIqExyIiISGgMMiIiEhqDjIiIhMYgIyIioTHIiIhIaO0iyAwGA1JSUhAWFgZ3d3eEhIQgOTkZ1dXVmDNnDiRJwqpVq9Qu026qauqxdusZPP3yPkyY9xWefnkf1m49g6qaerVLIyJqNUmWZVntIuzpxIkTGDt2LPR6PTw9PREeHo6ysjJcunQJSUlJuHbtGg4fPoysrCz8+Mc/VrtcRRmNJrz6x2NYnVmAm1X3h5aPlyuenzoAy16IhVbbLn7TEJETcuogMxgMGDRoEEpKSrBgwQK8/vrr8Pb2BgC89dZbePnll6HVatHQ0IDr16/Dx8dH5YqVU19vwuTffoOdB/71wHXHj+iBbSsfhasrw4yIxOPU31zz589HSUkJ5s6di/T09MYQA4CUlBTExMTAaDQiNDTUqUIMABau/M6iEAOAnQf+hYUrv7NzRURE9uG0QVZQUIDMzEz4+fkhNTXV7DqxsbEAgJiYmCavFxUVYcKECfD29kbnzp3x7LPP4urVq3avWSlXrtXiwy0FVm3z0SdnYKi4ZaeKiIjsx2mDLCMjAyaTCdOnT4eXl5fZdTw8PAA0DbLKykokJiaipKQEGRkZWLt2LbKysjBu3DiYTCaH1N5a6/9eiNv11tVad7sB63cU2qkiIiL70apdgL3s3bsXAJCYmNjsOiUlJQCaBtnatWtRWlqKgwcPokePHgCA4OBgPPLII/jss88wceJE+xWtkH8cKrVpuz2HSpAyO1rhaoiI7MtpJ3uEhISgpKQEx48fx8CBA+9bbjQaERgYCIPBgPPnz6N3794A/h18+/bta7J+nz59MHLkSKxbt87qWuLi4qDX661/Eza67PMr1GuDrN7O1ViGrjfX2KEiIqKWBQQEICcnx6ZtnXZEVl1dDQCora01uzwzMxMGgwHe3t7o1atX4+v5+fmYMmXKfetHRkYiPz/fplr0ej1KS20bJdmkQ7VNf7P1dVWOrZOISAFOG2QBAQGoqKhAbm4uhg4d2mRZeXk5Fi5cCACIjo6GJEmNyyoqKtCpU6f72vP19cXZs2d
trsWRrrtcQTX6Wr2dl8aAjt2726EiIqKWteZ70mmDbPTo0SgoKEBaWhrGjBmD8PBwAEB2djZmzJgBg8EAAGYPOyrN1uGyrQqLb6DfhK1Wb5f79Ur07dnRDhUREdmP085aTElJQZcuXXDx4kVERkYiKioKffv2RXx8PHr37o1Ro0YBuH/qfefOnXH9+vX72rt27Rp8fX0dUXqrhYd2RFJCiFXbjEsIYYgRkZCcNsiCg4ORlZWFpKQkuLu7o7i4GL6+vlizZg127dqFwsI7U83/M8gGDBhg9lxYfn4+BgwY4JDalbBxWQLCLQym8J4dsWFZgp0rIiKyD6edtdiSqqoq+Pj4QJIkVFZWQqfTNS5LT0/HokWL8MMPPyA4OBgAcPToUQwZMgTbt2/HpEmT1Crbapev1mJqyj7szy5vdp2RgwOR+VYiunbxcGBlRETKaZdBdjeY+vXrhzNnzjRZdvPmTURFRcHPzw9vvPEGbt26hZSUFPj7++Pw4cPQaMQbxGafvoIPtxRgf7YeF8oqYZIBnbsL9q9PwuCH/NUuj4ioVZx2skdLTp06BeD+w4oA4OPjg7179yI5ORnTpk2DVqvFuHHj8O677woZYgAw+CH/xsAKHp2B0ss16OzTgSFGRE6BQWZGnz598PnnnzuyJCIispGYQ4xWelCQERGRONrliOzufRiJiEh87XJERkREzoNBRkREQmOQERGR0BhkREQkNAYZEREJjUFGRERCY5AREZHQGGRERCQ0BhkREQmNQUZEREJjkBERkdAYZEREJDQGGRERCY1BRkREQmOQERGR0BhkREQkNAYZEREJjUFGRERCY5AREZHQGGRERCQ0BhkREQmNQUZEREJjkBERkdAYZEREJDQGGRERCY1BRkREQmOQERGR0BhkREQkNAYZEREJjUFGRERC06pdABHdT5ZloK5O7TKs06EDJElSrDlZllFTa1SsPUfQeWgV+wy4D1iOQUbUFtXVwfjUTLWrsIp2yybA3V2x9mpqjfAaslmx9hyh6siz8NS5KtMY9wGL8dAiEREJjUFGRERCY5AREZHQGGRERCQ0TvZoR4xGE0yyrHYZqjGZZNTdboBGI8HNVaPK7CoiUh6DzEnJsoysY3p8ebgUOXkGHCu4CkPFrcbl5Vdq8Nh//wNxkX74ydDuGB4b4HRf7CX6amz9qgg5+QYcyzfgbPEN3M1xN1cNosN9ERvhh6HRXTF5TCi8lJptRkQOJclyO/6J7oRqao3Y8GkhVmcWIP/8dYu3i+jTCb9+agBmTwyHzkPs3zf7s8vxwcf5+HT/BTQ0WLZ7e3u64tnxYZj38wj069XJvgVaQL51S8ip15KCU6+ra+rb9fR77gOW4zkyJ5J1TI/oJ7dj7puHrQoxAMg/fx3zUg8j+sntyDqmt0+Bdnb1+i1Mf2U/EufsxvZvii0OMQCorK7HH/9WgKjJO7B87QkYjSY7VkpESmKQOYGGBhMWvvMdRszehfMXK1vV1vmLlRgxexdeSj+KhgZxvsy/OVKGyEnb8fHu861qp95owuJVxzDkmZ0oKmndZ0lEjsEgE5zRaML03+1H+qZTUOogsSwD72w+jem/2y/EyGTHN8UY+/wXuHS1VrE2j+Ub8ONZn+NM0XXF2iQi+2CQCUyWZcx5PQuZ/yiyS/uZ/yjCnNez0JZPo35xqARTF+5DvR0Ct+xyDUb/cg+KSzkyI2rLGGQCW7e9EJt3nrNrH5t3nsP6HYV27cNWl67WYvrv9tslxO4qvVyDZxYdEOowK1F7wyAT1L/Kq/Db9KNWb5edMQEXv5qG7IwJFm/z2/SjuKivsrove5JlGb/+n0O4et26u4Pb8v4PHb+EDz7Ot7ZEInKQdhFkBoMBKSkpCAsLg7u7O0JCQpCcnIzq6mrMmTMHkiRh1apVapdpleS0I6isrrd6uwA/HYK7eSLAT2fxNjer6pGcdsTqvuxp5/5/Ycc3F6zezpb3DwCLPshB2eVqq/sjIvtz+iA
7ceIEoqKi8Pbbb0Ov1yMiIgL19fV4//33MXXqVBQUFAAABg4cqG6hVigqqcSn+6z/Em+Nv++90KbOFf3hr3kO7a/2VgP+tO2sQ/tUwgHDZbjt3IKV5880u47bzi2YeDTLgVU51vL5sZBPzsF/Texrdvm+dT/DrZxZiAzr7ODKHKM97ANOHWQGgwHjx4+HXq/HggULUF5ejtzcXOj1eqSlpWHXrl3Izs6GJEmIjo5Wu1yLffRJgWIzFC0ly8BHnzT/D8GRCn64jr3flTu837XbzqK+nufKRLNk9XGc+uc1rHzpYXTv1nQk/uIzkRg5OBCvr85F3rkKlSqk1nLqIJs/fz5KSkowd+5cpKenw9vbu3FZSkoKYmJiYDQaERoaCh8fHxUrtZwsy/jz5627VspWm3eeaxMzGP+6y74TXJpTdrkGe78rU6Vvsl290YSZiw/C08MV65YMb3w9PLQjls+Lw5GTl/H2xlMqVkit5bRBVlBQgMzMTPj5+SE1NdXsOrGxsQCAmJiYxtfuBl98fDw6qPTY7paUXqpB+ZUaVfouv1KDssvq9H2v705fUa3v7Dz1+ibbHS+4itR13+Onw4Lxy8n9oNFI2Lw8AZIEzFx8ECaT+j/QyHZi31SvBRkZGTCZTJg+fTq8vLzMruPh4QGgaZCdO3cO27Ztw+DBg+Hm5oZDhw45pF5LHSswqNp/Tr4B3bt5qta/LMvIyVPvM1Cz79aoaWiAoc66GZ7OZtna45gwsgfSF8RjYP8ueDiqK3779lEUFt9QuzSHcOZ9wGmDbO/evQCAxMTEZtcpKSkB0DTIEhISUF5+5/zLkiVL2lyQqX0cP+9cBR5P7Kla/3pDLSpu3lat/zwr72HZViw9m4elZx07QaatMRplzFx8ENkZE/D81AHIytXjvb+cVrssh3HmfcBpg+zChTuz+nr2NP+lazQaG0Pq3iDTaJQ/2hoXFwe9Xpkb8d7weBTwSDC7LDtjwgOnlQf4eTT+/+JX05pdT2+oweCff3bf66lvvYvVb3xjRcXKMmp8gU7JzS5/0GfQ2vf/Q3EpgoODrajYNh4aDfIHDlWsvV/06I3JQSFml409ckCRPsLDw1FrUm4yjAmugO9ixdoDgBtVt1F3uwFuri7YnXVR8UlTfcPDoYH1l8WY0972gYCAAOTk5Ni0rdMGWXX1nWt+amvN338vMzMTBoMB3t7e6NWrl11r0ev1KC0tVaaxbpWAh/lFd6+RsoTWRWPxuveqqryJqksKvRdbuNUDnZpfbOlnYOv7NzU0KPd32QKdiwswULn2wry88Kh/N+UaNKOsrAw1DQ3KNSi5Ab7KNQcAG5YOh5urC/LPV2DxcwOx5Ysi/KDgzaHLy8oAWZkjBtwHLOe0QRYQEICKigrk5uZi6NCmv2rKy8uxcOFCAEB0dLTdJ3QEBAQo1lalewfcbGaZ3vDgiRgBfh7QumhgbDBBb2j+JrvNteXj5Q5vbXdLSrWLBskLLY1tH/QZtPb9u2gaENDd/u/fww5HBuwtKChI8RGZkhdZzHs6AonxQVj0fg4+3XcBuZkTsX7pcIycvVuxPgKDghQdkYmmNftAa74nnTbIRo8ejYKCAqSlpWHMmDEIDw8HAGRnZ2PGjBkwGO6ctHfEhdC2DpfN+WzfBTye/LXZZeYOhf2ni19NQ3A3T+gNtQgZ8zer+//zn1ZggornyGRZhl/CX3HthvmT1g/6DFr7/seP+RF2vFdi9XbWEvGhioWFhW32wZphPXyQmhyH705dQdr6kzCZZCz5MBepyYMx7+kIxW5B9s/Cwnb9YE2l9wFLiRf5FkpJSUGXLl1w8eJFREZGIioqCn379kV8fDx69+6NUaNGAWh6fkwEsRF+7bp/SZIQG9FFtf7V7JtsI0nAxmUJcNFImLn4QONU+7c2nEL26StITY5D72DvB7RCbZnTBllwcDCysrKQlJQEd3d3FBcXw9fXF2vWrMGuXbtQWHjnju6iBVlQVx2Culp3n0B
n6PteD0d1Va3v+If8VeubbLNgZhSGDeqG11bn4kzRv6fam0wyZr16EFoXDdYvHd5CC9TWOW2QAcCAAQPw+eefo7KyEpWVlTh69Ciee+45VFdXo7i4GBqNBg899JDaZVpFkiQ8Oz5Mlb5nju/bJi4Qfyapjyr9du+qw6j4IFX6Jtv079URy174EQ5/fxnvbLp/qn3++etY8mEuRsQFYt7TESpUSEpw2nNkLcnLy4MsywgPD4dOd/8IY+vWrQCA/Pz8Jn8ODQ1FXFyc4wptxq+e7I+09Scder9FjUbCr6b0c1yHLejXqxNGDwnC10cce7uoX03pD61WrN9+I/y64vb4p1pc50HLRXam6AY8Bm9qcZ0V605ixbqTDqrI8drDPtAug+zUqTv3VWvusOKUKVPM/nnmzJnYuHGjXWuzRGh3bzzxaCi2fV3ssD4njeqJnkFt5zzCi89EOjTIdO5a/OKJthHkRNSUWD8vFfKgIJNl2ex/bSHE7novZQg6ers5pK+O3m74w8tDHNKXpZISemDKT+x7/d+9VrwYh0B/9c8PEtH9GGSCCg7wxLsLH7Z6O72hBiWXqi265uyu91IeVvX+is3546Kh8Ots3VRfW97/iLgAvDCN50+I2qp2eWjx7n0YRTfr8b749vglrN9RaPE2llxrdq/Zk8Ixc4L5BxKqzd/XA39LS8TPXvgCty18Tpi17z8kwBN/fnMENBr1J7kQkXntckTmLCRJwppXh2G6nWbxTU/qg7WvDWsTMxWb8+iQIHySPgpursrvysHdPPH12rEICTD/9AQiahsYZILTajXYvHwEXpkTrdioQaOR8MqcaGxePgIuLm1/F5mQ2BNffPSYote4xT/kj283JSE8tKNibRKRfbT9byl6II1GQmryYHy7KQn9WvnF2y+0I77dlITU5MFCHU4bOTgQp7c/gVmPt+4waAc3F6S9OBiHNo9rU7M0iah5DDInMjSmG45vmYg1rw1DdLh1tw2P6eeLNa8Nw/EtEzE0xr53yLaXzj4dsGFZAr7dNA5TH+sFrdbyIO7k7YbfzIhE3o4nkDI7WrjrxYjas3Y52cOZebhr8dyT/fHLyf1w+PvL+OpwKY7lX8WxAgPKr9RAlu/cey7QX4fYAX6IjeiCnzzSHUOiu7bpc2HWGDaoG4YN6ga9oQbbvi5GTp4Bx/INOFN0A/XGO5NCJAkYEt0VsRF+GBLtj0mjQqHz4D8HIhHxX66TkiQJjwzshkcG/nt0JcsyjEYZWq3kNKHVkgA/3X3T5rs/+jHKrtQiyF+H//vzeJUqIyIlMcjaEUmS4Orq/AHWkvYQ4ETtDU8EEBGR0BhkREQkNAYZEREJjUFGRERCk2TZkU+1IlJX8OgMlF6uQfeuOpR8/XO1y2mWLMtAXZ3aZVinQwdFJ9PIsoyaWqNi7TmCzkOr2GfAfcBynLVI1AZJkgS4W3dnf2cjSRI8da5ql6Ea7gOW46FFIiISGoOMiIiExiAjIiKhMciIiEhoDDIiIhIag4yIiITGICMiIqExyIiISGgMMiIiEhqDjIiIhMYgIyIioTHIiIhIaAwyIiISGoOMiIiExiAjIiKhMciIiEhoDDIiIhIanxDdRgn3mHOVHnHurGRZRk2tUe0yrKLz0HIfIFUwyNqqujoYn5qpdhUW027ZxMeyK6im1givIZvVLsMqVUeehafOVe0yqB3ioUUiIhIag4yIiITGICMiIqExyIiISGgMMiIiEhqDjIiIhMYgIyIioTHIiIhIaAwyIiISGoOMiIiExiAjIiKhMciIiEhoDDIiIhIag4yIiITWLoLMYDAgJSUFYWFhcHd3R0hICJKTk1FdXY05c+ZAkiSsWrVK7TLt4oDhMtx2bsHK82eaXcdt5xZMPJrlwKoc69LVWixfewKRk7ah7EoNAODKtVtYv6NQuGd+2WL5/FjIJ+fgvyb2Nbt837qf4VbOLESGdXZwZUTKcPrnkZ04cQJjx46FXq+Hp6cnIiIiUFZ
Whvfffx/nz5/HtWvXAAADBw5Ut1BSnCzLWPrRcSz/0/eoN5qaLLttNGHO61l46Z2j2LgsARMSe6pUpf0tWX0c40f0wMqXHsaXh0tReqmmcdmLz0Ri5OBAvPJeNvLOVahYJZHtnHpEZjAYMH78eOj1eixYsADl5eXIzc2FXq9HWloadu3ahezsbEiShOjoaLXLJQXJsozfvHUUSz48fl+I3avi5m1M+s03+OTLIgdW51j1RhNmLj4ITw9XrFsyvPH18NCOWD4vDkdOXsbbG0+pWCFR6zh1kM2fPx8lJSWYO3cu0tPT4e3t3bgsJSUFMTExMBqNCA0NhY+Pj4qVktK2flWMP/w1z6J1TSYZMxbtR3FppZ2rUs/xgqtIXfc9fjosGL+c3A8ajYTNyxMgScDMxQdhMslql0hkM6cNsoKCAmRmZsLPzw+pqalm14mNjQUAxMTENL62detWTJ48GT179oROp0P//v3x+9//HlVVVQ6p215qGhpgqKsz+58zsjTE7qq7bcLarWftVE3bsGztcZw4cxXpC+Lxwe+G4uGorvj9B8dQWHxD7dKIWsVpz5FlZGTAZDJh+vTp8PLyMruOh4cHgKZBlp6ejh49euDNN99EcHAwTpw4gTfeeAMHDhzAwYMHodGImf1Lz+Zh6VnrvtxFdbLwGg4dv2T1dv+7/Sxe//UgdHBzsUNV6jMaZcxcfBDZGRPw/NQByMrV472/nFa7LKJWc9og27t3LwAgMTGx2XVKSkoANA2ynTt3wt/fv/HPI0aMgL+/P6ZPn45vv/0WCQkJVtcSFxcHvV5v1TYeGg3yBw61uq/m/KJHb0wOCjG7bOyRA61uPzw8HLWm5s9FOVK12yDAa6LV212puIWeYYOgNV1TvigrmeAK+C5WvN0bVbdRd7sBbq4u2J11EbKCRxT7hodDg3rlGqR2JSAgADk5OTZt67RBduHCBQBAz57mZ6MZjUYcOnQIQNMguzfE7oqLiwMAlJaW2lSLXq+3eludiwsw0KbuzArz8sKj/t2Ua/A/lJWVoaahwW7tW6VLP8D8IPyBLl2pAG7Z9vesKMkN8FW+2Q1Lh8PN1QX55yuw+LmB2PJFEX4oUebcYHlZGSDfVqQtIms4bZBVV1cDAGpra80uz8zMhMFggLe3N3r16tViW/v27QMADBgwwKZaAgICrN7GQ7BDmEFBQW1mRFbj1gFWTySXZUCS0M3fB1pTd3uUZRUTXFGucJvzno5AYnwQFr2fg0/3XUBu5kSsXzocI2fvVqT9wKAgjsjIZrZ8T97ltEEWEBCAiooK5ObmYujQpofoysvLsXDhQgBAdHQ0JElqtp3S0lK8+uqreOyxx2y+1syW4bJ86xaMT820qT81FBYWQnJ3V7sMAED5lRr0+MnfYGyw4riZJCGiTyec3n66xf3BUapr6uE1ZLNi7YX18EFqchy+O3UFaetPwmSSseTDXKQmD8a8pyPwwcf5re7jn4WF8NS5KlAtkXXE+tlvhdGjRwMA0tLSUFhY2Ph6dnY2EhMTYTAYALR8IXRVVRUef/xxuLm5Yf369Xatl5QT6K/DE6NDrd7u+akD2kSIKU2SgI3LEuCikTBz8YHGqfZvbTiF7NNXkJoch97B3g9ohajtctogS0lJQZcuXXDx4kVERkYiKioKffv2RXx8PHr37o1Ro0YBaHp+7F61tbUYP348ioqK8OWXXyIwMNCR5VMrLZwVBTdXy3fvHoGemDEuzI4VqWfBzCgMG9QNr63OxZmif0+1N5lkzHr1ILQuGqxfOryFFojaNqcNsuDgYGRlZSEpKQnu7u4oLi6Gr68v1qxZg127djWO0swFWX19PZ588knk5ORgz549iIiIcHT51Epxkf74S+pIaF0ePMLq6uuOPat/Ch8vNwdU5lj9e3XEshd+hMPfX8Y7m+6fap9//jqWfJiLEXGBmPc093MSkyTLSk7AFUNVVRV8fHwgSRIqKyuh0+kal5lMJkybNg2fffYZdu/
e3ThyczTRzpFpt2xqM+fI7vXNkTK8tPIoTpy5f0q9JAGPDQvGHxc9gl5t7NCa0ufIHKHqyLM8R0aqcNrJHi3Jy8uDLMsIDw9vEmIA8MILL+CTTz7BK6+8Ap1OhyNHjjQu69Onj9np+dR2PTokCLmZE3Hk5GV8vPsH6A010Lpo0CfEG7MnhaN3MG9NRiS6dhlkp07duUGqucOKe/bsAQCsWLECK1asaLJsw4YNmDVrlt3rI2VJkoShMd0wNMZ+19ERkXoYZP+huLjYwdUQEVFrOO1kj5a0FGRERCSWdjkiu3sfRiIiEl+7HJEREZHzYJAREZHQGGRERCQ0BhkREQmNQUZEREJjkBERkdAYZEREJDQGGRERCY1BRkREQmOQERGR0Nrl88hEIMsyUFendhmW69ABkvTgh1iSZWRZRk2tUe0yrKLz0HIfIFUwyIiISGg8tEhEREJjkBERkdAYZEREJDQGGRERCY1BRkREQmOQERGR0BhkREQkNAYZEREJjUFGRERCY5AREZHQGGRERCQ0BhkREQmNQUZEREJjkBERkdAYZEREJDQGGRERCY1BRkREQmOQERGR0BhkREQkNAYZEREJjUFGRERCY5AREZHQGGRERCQ0BhkREQmNQUZEREL7f12HUMz6VrqnAAAAAElFTkSuQmCC", + "text/plain": [ + "
" + ] + }, + "execution_count": null, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "qc = QuantumCircuit(3)\n", + "qc.h(2)\n", + "qc.cx(0,1)\n", + "qc.cx(2,1)\n", + "qc.h(1)\n", + "qc.x(1)\n", + "qc.h(1)\n", + "qc.x(2)\n", + "\n", + "U = qi.Operator(qc).to_matrix() # the unitary of the circuit\n", + "\n", + "#-----------------------------------------\n", + "\n", + "fig = qc.draw(\"mpl\")\n", + "fig" + ] + }, + { + "cell_type": "markdown", + "id": "db8fb061-d950-4727-a1b1-56ea46e48f1b", + "metadata": {}, + "source": [ + "We set different gate pool targets to see what the model gives us:" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "8a35405a-d98d-460b-9b0c-92d167266ad0", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "[(\"Compile using: ['h', 'cx', 'z', 'x', 'ccx', 'swap']\", 'all'),\n", + " (\"Compile using: ['h', 'cx', 'z', 'ccx']\", 'no x, no swap'),\n", + " (\"Compile using: ['h', 'cx', 'x', 'ccx']\", 'no z, no swap'),\n", + " (\"Compile using: ['h', 'x', 'ccx']\", 'no cx, no z, no swap'),\n", + " (\"Compile using: ['h', 'z', 'x', 'ccx']\", 'no cx, no swap')]" + ] + }, + "execution_count": null, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "cs_1 = f\"Compile using: {[x for x in pipeline.gate_pool]}\", \"all\"\n", + "\n", + "cs_2 = \"Compile using: ['h', 'cx', 'z', 'ccx']\" , \"no x, no swap\" \n", + "cs_3 = \"Compile using: ['h', 'cx', 'x', 'ccx']\" , \"no z, no swap\" \n", + "cs_4 = \"Compile using: ['h', 'x', 'ccx']\" , \"no cx, no z, no swap\" \n", + "cs_5 = \"Compile using: ['h', 'z', 'x', 'ccx']\" , \"no cx, no swap\" \n", + "\n", + "cs = [cs_1, cs_2, cs_3, cs_4, cs_5]\n", + "cs" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "67e3aa4e-e7e0-4f7a-9a5c-14a70dd6b77b", + "metadata": {}, + "outputs": [], + "source": [ + "samples = 512\n", + "num_of_qubits = 3\n", + "max_gates = 12" + ] + }, + { + "cell_type": "markdown", + "id": 
"adfbef8e-d91a-4ed3-b756-0e76c33674fa", + "metadata": {}, + "source": [ + "Compile with the different gate-sets and plot correct (exact) compiled circuits. Note, some of the circuits might look the same but the gate time-sequences are distinct. Qiskit reorders \"parallel\" gates to make smaller plots." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "6268abb3-1965-4a0e-b5dc-0f3c8db8264a", + "metadata": {}, + "outputs": [ + { + "data": { + "application/vnd.jupyter.widget-view+json": { + "model_id": "ae50feaf2ed44d22a7ac3e578af52ad5", + "version_major": 2, + "version_minor": 0 + }, + "text/plain": [ + " 0%| | 0/20 [00:00" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "data": { + "application/vnd.jupyter.widget-view+json": { + "model_id": "7f3c6b2a064f41f5b322224677a87ee7", + "version_major": 2, + "version_minor": 0 + }, + "text/plain": [ + " 0%| | 0/20 [00:00" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "data": { + "application/vnd.jupyter.widget-view+json": { + "model_id": "28dcd971b8b444e5bbd889d9011d0bc2", + "version_major": 2, + "version_minor": 0 + }, + "text/plain": [ + " 0%| | 0/20 [00:00" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "data": { + "application/vnd.jupyter.widget-view+json": { + "model_id": "6265ec7334ff4d5d99bba8f022ec088e", + "version_major": 2, + "version_minor": 0 + }, + "text/plain": [ + " 0%| | 0/20 [00:00" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "data": { + "application/vnd.jupyter.widget-view+json": { + "model_id": "0298541f82a240129de94458bb7b67ff", + "version_major": 2, + "version_minor": 0 + }, + "text/plain": [ + " 0%| | 0/20 [00:00" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "for c, note in cs: compile_and_plot(U, c)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "acee9cc1-5b36-45d3-9844-2dbb885d6bfe", + "metadata": {}, + "outputs": [], + 
"source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "ef5f0dc2-ad3a-467a-89dd-36d296d458ac", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "genQC Version 0.2.0\n" + ] + } + ], + "source": [ + "import genQC\n", + "print(\"genQC Version\", genQC.__version__)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "87bc40b7-b02b-415e-a679-9e02a5a09b0d", + "metadata": {}, + "outputs": [], + "source": [] + } + ], + "metadata": { + "kernelspec": { + "display_name": "python3", + "language": "python", + "name": "python3" + }, + "widgets": { + "application/vnd.jupyter.widget-state+json": { + "state": { + "0298541f82a240129de94458bb7b67ff": { + "model_module": "@jupyter-widgets/controls", + "model_module_version": "2.0.0", + "model_name": "HBoxModel", + "state": { + "children": [ + "IPY_MODEL_930ba28ac47a4bebbb4199ed948aa46e", + "IPY_MODEL_7bd54b72babc41f4b52d9ce220d9783a", + "IPY_MODEL_74e912f110f54b8aabb277c2e081bc0a" + ], + "layout": "IPY_MODEL_8aa9e67a45104804b6d81b5a4e49cbeb" + } + }, + "0833094c16cc4cd19e9d443a52969b96": { + "model_module": "@jupyter-widgets/controls", + "model_module_version": "2.0.0", + "model_name": "HTMLModel", + "state": { + "layout": "IPY_MODEL_fa95db801d064294aa915bfbb4027f0a", + "style": "IPY_MODEL_d6583f6818654dc18afdec2988394f4c", + "value": " 20/20 [00:01<00:00, 20.01it/s]" + } + }, + "098fcc9c1ce54e909728d59de93a23c5": { + "model_module": "@jupyter-widgets/base", + "model_module_version": "2.0.0", + "model_name": "LayoutModel", + "state": {} + }, + "0c02e2bbc3454fc7ba2621a251caa75b": { + "model_module": "@jupyter-widgets/controls", + "model_module_version": "2.0.0", + "model_name": "HBoxModel", + "state": { + "children": [ + "IPY_MODEL_593cf9c04ac044458f12412a98638089", + "IPY_MODEL_87d790fe47334753a509d39f80094803", + "IPY_MODEL_2ae50452f7644708aeefa0f5cfc2c3e4" + ], + "layout": "IPY_MODEL_3758caa7322c4ba3be6597629bff6a03" + } + }, + 
"0cc3571558b343f89546b8bed66939e7": { + "model_module": "@jupyter-widgets/base", + "model_module_version": "2.0.0", + "model_name": "LayoutModel", + "state": {} + }, + "0da36d09da6e4de5b79deb2c032aae92": { + "model_module": "@jupyter-widgets/base", + "model_module_version": "2.0.0", + "model_name": "LayoutModel", + "state": {} + }, + "0e8677125e6c4781a376e79fd24d4d5b": { + "model_module": "@jupyter-widgets/controls", + "model_module_version": "2.0.0", + "model_name": "HTMLStyleModel", + "state": { + "description_width": "", + "font_size": null, + "text_color": null + } + }, + "101b609da5af4273be556966dfa19584": { + "model_module": "@jupyter-widgets/base", + "model_module_version": "2.0.0", + "model_name": "LayoutModel", + "state": {} + }, + "101cb6f917ec44bd8be5578f98a8f83d": { + "model_module": "@jupyter-widgets/controls", + "model_module_version": "2.0.0", + "model_name": "FloatProgressModel", + "state": { + "bar_style": "success", + "layout": "IPY_MODEL_eeaf80f66c6343028e7cd16a934594dc", + "max": 20, + "style": "IPY_MODEL_cca75f888b44402ab03bbe0ae81d7f34", + "value": 20 + } + }, + "10fa6f9af7dd44d1967ad187cd73ef34": { + "model_module": "@jupyter-widgets/controls", + "model_module_version": "2.0.0", + "model_name": "HBoxModel", + "state": { + "children": [ + "IPY_MODEL_a43177f8d01f4df8b02b9ee9dbcd9e7c", + "IPY_MODEL_9009d0fea9994869976a3182b421716c", + "IPY_MODEL_9f364abd328642a5aa25444788aa3232" + ], + "layout": "IPY_MODEL_ede127b1c05749f497fa9e8fb7dd7c3b" + } + }, + "1c1b7331ec164ee799ce823c6ee36fe1": { + "model_module": "@jupyter-widgets/base", + "model_module_version": "2.0.0", + "model_name": "LayoutModel", + "state": {} + }, + "2029352e52364079b9787288eee4f9e0": { + "model_module": "@jupyter-widgets/base", + "model_module_version": "2.0.0", + "model_name": "LayoutModel", + "state": {} + }, + "2880b76772ff4e5d9b164084fcdc7fb1": { + "model_module": "@jupyter-widgets/controls", + "model_module_version": "2.0.0", + "model_name": "FloatProgressModel", + "state": 
{ + "bar_style": "success", + "layout": "IPY_MODEL_2029352e52364079b9787288eee4f9e0", + "max": 20, + "style": "IPY_MODEL_40fbc3cac71c4f2fa1c17214f84617bd", + "value": 20 + } + }, + "28dcd971b8b444e5bbd889d9011d0bc2": { + "model_module": "@jupyter-widgets/controls", + "model_module_version": "2.0.0", + "model_name": "HBoxModel", + "state": { + "children": [ + "IPY_MODEL_d5da81c8e7994dcaa5b0a14020a94347", + "IPY_MODEL_101cb6f917ec44bd8be5578f98a8f83d", + "IPY_MODEL_e27ae8ec281941829bd41501d40bc571" + ], + "layout": "IPY_MODEL_aada31cfad3e421cb9dd0aa2c0ad375d" + } + }, + "294fc42ccb3f449f9b9830d1a34dd30a": { + "model_module": "@jupyter-widgets/controls", + "model_module_version": "2.0.0", + "model_name": "HTMLStyleModel", + "state": { + "description_width": "", + "font_size": null, + "text_color": null + } + }, + "2ae50452f7644708aeefa0f5cfc2c3e4": { + "model_module": "@jupyter-widgets/controls", + "model_module_version": "2.0.0", + "model_name": "HTMLModel", + "state": { + "layout": "IPY_MODEL_e395dc9e66a040f0af05816f1f9ce04a", + "style": "IPY_MODEL_3fbf3156e96542e48b2df76562e4c971", + "value": " 20/20 [00:00<00:00, 20.57it/s]" + } + }, + "2df62b119beb419b8c26577216d83be0": { + "model_module": "@jupyter-widgets/controls", + "model_module_version": "2.0.0", + "model_name": "HTMLStyleModel", + "state": { + "description_width": "", + "font_size": null, + "text_color": null + } + }, + "320705878bc04784a4154c5231654d98": { + "model_module": "@jupyter-widgets/controls", + "model_module_version": "2.0.0", + "model_name": "ProgressStyleModel", + "state": { + "description_width": "" + } + }, + "346ea51f47d445ed92010a8e2f94d398": { + "model_module": "@jupyter-widgets/controls", + "model_module_version": "2.0.0", + "model_name": "ProgressStyleModel", + "state": { + "description_width": "" + } + }, + "372c9c4dbf7549e087975d892ca0328e": { + "model_module": "@jupyter-widgets/controls", + "model_module_version": "2.0.0", + "model_name": "HTMLModel", + "state": { + "layout": 
"IPY_MODEL_e0e807e874db44688403db6e5c1881d9", + "style": "IPY_MODEL_a78b0378abfe430d90972e593af02ecb", + "value": " 20/20 [00:00<00:00, 19.86it/s]" + } + }, + "37539ab59279428a991d3100f3013e10": { + "model_module": "@jupyter-widgets/controls", + "model_module_version": "2.0.0", + "model_name": "ProgressStyleModel", + "state": { + "description_width": "" + } + }, + "3758caa7322c4ba3be6597629bff6a03": { + "model_module": "@jupyter-widgets/base", + "model_module_version": "2.0.0", + "model_name": "LayoutModel", + "state": {} + }, + "37c0fa9b5c124e6da733577779235e9b": { + "model_module": "@jupyter-widgets/controls", + "model_module_version": "2.0.0", + "model_name": "ProgressStyleModel", + "state": { + "description_width": "" + } + }, + "3b1cd3f6a59d40e8a22389a13b2b4137": { + "model_module": "@jupyter-widgets/base", + "model_module_version": "2.0.0", + "model_name": "LayoutModel", + "state": {} + }, + "3c62ad0751bb42bb9abe7e86e51d9eb7": { + "model_module": "@jupyter-widgets/controls", + "model_module_version": "2.0.0", + "model_name": "HTMLModel", + "state": { + "layout": "IPY_MODEL_59a48eccc59f4179b29832b45b143b76", + "style": "IPY_MODEL_6dfbb417585945398f1f31fd640bf82d", + "value": "100%" + } + }, + "3d288fade7994efb9dfbf8f605fa4e76": { + "model_module": "@jupyter-widgets/controls", + "model_module_version": "2.0.0", + "model_name": "HTMLModel", + "state": { + "layout": "IPY_MODEL_ed3d58d5aaec4b80867ef7335ec4a8ca", + "style": "IPY_MODEL_c3627eb30a46446fbad163e4d2019490", + "value": "100%" + } + }, + "3e6160d9a9bb417992337401c1b06c24": { + "model_module": "@jupyter-widgets/controls", + "model_module_version": "2.0.0", + "model_name": "HTMLStyleModel", + "state": { + "description_width": "", + "font_size": null, + "text_color": null + } + }, + "3ea2b1fab3894d7ebd3a3802f7924889": { + "model_module": "@jupyter-widgets/base", + "model_module_version": "2.0.0", + "model_name": "LayoutModel", + "state": {} + }, + "3ec8d841fd054480bd46e9e55fef9226": { + "model_module": 
"@jupyter-widgets/controls", + "model_module_version": "2.0.0", + "model_name": "ProgressStyleModel", + "state": { + "description_width": "" + } + }, + "3ee80340baae4a9981beefe75cc9beb5": { + "model_module": "@jupyter-widgets/base", + "model_module_version": "2.0.0", + "model_name": "LayoutModel", + "state": {} + }, + "3fbf3156e96542e48b2df76562e4c971": { + "model_module": "@jupyter-widgets/controls", + "model_module_version": "2.0.0", + "model_name": "HTMLStyleModel", + "state": { + "description_width": "", + "font_size": null, + "text_color": null + } + }, + "40fbc3cac71c4f2fa1c17214f84617bd": { + "model_module": "@jupyter-widgets/controls", + "model_module_version": "2.0.0", + "model_name": "ProgressStyleModel", + "state": { + "description_width": "" + } + }, + "444d218d5d9e45b6ad5f5875c68f0ab5": { + "model_module": "@jupyter-widgets/base", + "model_module_version": "2.0.0", + "model_name": "LayoutModel", + "state": {} + }, + "4f5cae7061e442fb9b6fbb9839836d97": { + "model_module": "@jupyter-widgets/base", + "model_module_version": "2.0.0", + "model_name": "LayoutModel", + "state": {} + }, + "549cf32909e04601a27f9e4352df7730": { + "model_module": "@jupyter-widgets/controls", + "model_module_version": "2.0.0", + "model_name": "HTMLStyleModel", + "state": { + "description_width": "", + "font_size": null, + "text_color": null + } + }, + "54bf2bb5fccf49099a13ef9f24c5e62d": { + "model_module": "@jupyter-widgets/base", + "model_module_version": "2.0.0", + "model_name": "LayoutModel", + "state": {} + }, + "57141bac1651424cb0fd3a0ec434795f": { + "model_module": "@jupyter-widgets/controls", + "model_module_version": "2.0.0", + "model_name": "HTMLModel", + "state": { + "layout": "IPY_MODEL_ac25c36362ab428895d475d60bcf7b7a", + "style": "IPY_MODEL_ca20f304f28e439c8f4bd2b757d71277", + "value": "100%" + } + }, + "593cf9c04ac044458f12412a98638089": { + "model_module": "@jupyter-widgets/controls", + "model_module_version": "2.0.0", + "model_name": "HTMLModel", + "state": { + 
"layout": "IPY_MODEL_1c1b7331ec164ee799ce823c6ee36fe1", + "style": "IPY_MODEL_bcfbae6e2de64373b5a8a67c1bea86ec", + "value": "100%" + } + }, + "59a48eccc59f4179b29832b45b143b76": { + "model_module": "@jupyter-widgets/base", + "model_module_version": "2.0.0", + "model_name": "LayoutModel", + "state": {} + }, + "59e8e753c3c84c088c2b83247c4dfc4e": { + "model_module": "@jupyter-widgets/base", + "model_module_version": "2.0.0", + "model_name": "LayoutModel", + "state": {} + }, + "5e789e4091a945f0b900cf68ab373316": { + "model_module": "@jupyter-widgets/controls", + "model_module_version": "2.0.0", + "model_name": "HTMLStyleModel", + "state": { + "description_width": "", + "font_size": null, + "text_color": null + } + }, + "6265ec7334ff4d5d99bba8f022ec088e": { + "model_module": "@jupyter-widgets/controls", + "model_module_version": "2.0.0", + "model_name": "HBoxModel", + "state": { + "children": [ + "IPY_MODEL_3c62ad0751bb42bb9abe7e86e51d9eb7", + "IPY_MODEL_b0f97e6c275e4268b1308677ab3f9c7f", + "IPY_MODEL_eb969490152e41269a36ea75995056ac" + ], + "layout": "IPY_MODEL_444d218d5d9e45b6ad5f5875c68f0ab5" + } + }, + "6749b88ab48941ecb6c367d988f75fa4": { + "model_module": "@jupyter-widgets/controls", + "model_module_version": "2.0.0", + "model_name": "HBoxModel", + "state": { + "children": [ + "IPY_MODEL_57141bac1651424cb0fd3a0ec434795f", + "IPY_MODEL_9348de1ac9d24267aad67d3c9d5df705", + "IPY_MODEL_0833094c16cc4cd19e9d443a52969b96" + ], + "layout": "IPY_MODEL_e2caee050c794c9abb75f4af955eed67" + } + }, + "67e43b661028495aa9870a92902b3bed": { + "model_module": "@jupyter-widgets/base", + "model_module_version": "2.0.0", + "model_name": "LayoutModel", + "state": {} + }, + "68a90c9ed89145f89b0894b47d2ffbb8": { + "model_module": "@jupyter-widgets/controls", + "model_module_version": "2.0.0", + "model_name": "HTMLStyleModel", + "state": { + "description_width": "", + "font_size": null, + "text_color": null + } + }, + "6dfbb417585945398f1f31fd640bf82d": { + "model_module": 
"@jupyter-widgets/controls", + "model_module_version": "2.0.0", + "model_name": "HTMLStyleModel", + "state": { + "description_width": "", + "font_size": null, + "text_color": null + } + }, + "731ddd98742c429dbdf5c91a96fe15e2": { + "model_module": "@jupyter-widgets/controls", + "model_module_version": "2.0.0", + "model_name": "FloatProgressModel", + "state": { + "bar_style": "success", + "layout": "IPY_MODEL_3ee80340baae4a9981beefe75cc9beb5", + "max": 2, + "style": "IPY_MODEL_320705878bc04784a4154c5231654d98", + "value": 2 + } + }, + "74e912f110f54b8aabb277c2e081bc0a": { + "model_module": "@jupyter-widgets/controls", + "model_module_version": "2.0.0", + "model_name": "HTMLModel", + "state": { + "layout": "IPY_MODEL_3ea2b1fab3894d7ebd3a3802f7924889", + "style": "IPY_MODEL_3e6160d9a9bb417992337401c1b06c24", + "value": " 20/20 [00:00<00:00, 20.63it/s]" + } + }, + "7803241950aa47dba5b4747c4f21d233": { + "model_module": "@jupyter-widgets/controls", + "model_module_version": "2.0.0", + "model_name": "HTMLModel", + "state": { + "layout": "IPY_MODEL_67e43b661028495aa9870a92902b3bed", + "style": "IPY_MODEL_af8f4ea420d049d598f3d8776496eec0", + "value": "Fetching 2 files: 100%" + } + }, + "7bd54b72babc41f4b52d9ce220d9783a": { + "model_module": "@jupyter-widgets/controls", + "model_module_version": "2.0.0", + "model_name": "FloatProgressModel", + "state": { + "bar_style": "success", + "layout": "IPY_MODEL_e7870033150746d6963e19fb87c48f34", + "max": 20, + "style": "IPY_MODEL_dbbdc4d236274044b3d3b8b1d9441370", + "value": 20 + } + }, + "7f3c6b2a064f41f5b322224677a87ee7": { + "model_module": "@jupyter-widgets/controls", + "model_module_version": "2.0.0", + "model_name": "HBoxModel", + "state": { + "children": [ + "IPY_MODEL_3d288fade7994efb9dfbf8f605fa4e76", + "IPY_MODEL_2880b76772ff4e5d9b164084fcdc7fb1", + "IPY_MODEL_372c9c4dbf7549e087975d892ca0328e" + ], + "layout": "IPY_MODEL_82f72fad30054961ac1a5b3079b8e7b3" + } + }, + "82f72fad30054961ac1a5b3079b8e7b3": { + "model_module": 
"@jupyter-widgets/base", + "model_module_version": "2.0.0", + "model_name": "LayoutModel", + "state": {} + }, + "87d790fe47334753a509d39f80094803": { + "model_module": "@jupyter-widgets/controls", + "model_module_version": "2.0.0", + "model_name": "FloatProgressModel", + "state": { + "bar_style": "success", + "layout": "IPY_MODEL_4f5cae7061e442fb9b6fbb9839836d97", + "max": 20, + "style": "IPY_MODEL_37539ab59279428a991d3100f3013e10", + "value": 20 + } + }, + "8aa9e67a45104804b6d81b5a4e49cbeb": { + "model_module": "@jupyter-widgets/base", + "model_module_version": "2.0.0", + "model_name": "LayoutModel", + "state": {} + }, + "9009d0fea9994869976a3182b421716c": { + "model_module": "@jupyter-widgets/controls", + "model_module_version": "2.0.0", + "model_name": "FloatProgressModel", + "state": { + "bar_style": "success", + "layout": "IPY_MODEL_54bf2bb5fccf49099a13ef9f24c5e62d", + "max": 20, + "style": "IPY_MODEL_3ec8d841fd054480bd46e9e55fef9226", + "value": 20 + } + }, + "930ba28ac47a4bebbb4199ed948aa46e": { + "model_module": "@jupyter-widgets/controls", + "model_module_version": "2.0.0", + "model_name": "HTMLModel", + "state": { + "layout": "IPY_MODEL_101b609da5af4273be556966dfa19584", + "style": "IPY_MODEL_549cf32909e04601a27f9e4352df7730", + "value": "100%" + } + }, + "9348de1ac9d24267aad67d3c9d5df705": { + "model_module": "@jupyter-widgets/controls", + "model_module_version": "2.0.0", + "model_name": "FloatProgressModel", + "state": { + "bar_style": "success", + "layout": "IPY_MODEL_d66354d629094fb99fc24a31b7d87d3e", + "max": 20, + "style": "IPY_MODEL_ca038a8a22c24a37b6569fbe03be141b", + "value": 20 + } + }, + "9f364abd328642a5aa25444788aa3232": { + "model_module": "@jupyter-widgets/controls", + "model_module_version": "2.0.0", + "model_name": "HTMLModel", + "state": { + "layout": "IPY_MODEL_c92031f02c384325b845329c4500d3ed", + "style": "IPY_MODEL_294fc42ccb3f449f9b9830d1a34dd30a", + "value": " 20/20 [00:00<00:00, 20.59it/s]" + } + }, + 
"a3bdd67c32774890a32b0da136583ed4": { + "model_module": "@jupyter-widgets/controls", + "model_module_version": "2.0.0", + "model_name": "HTMLModel", + "state": { + "layout": "IPY_MODEL_d8e9ad75f24d46c483e15c22b327ead0", + "style": "IPY_MODEL_de6f11dc1be84740821d0cd2e4b45548", + "value": " 2/2 [00:00<00:00, 285.55it/s]" + } + }, + "a43177f8d01f4df8b02b9ee9dbcd9e7c": { + "model_module": "@jupyter-widgets/controls", + "model_module_version": "2.0.0", + "model_name": "HTMLModel", + "state": { + "layout": "IPY_MODEL_098fcc9c1ce54e909728d59de93a23c5", + "style": "IPY_MODEL_a6016c4347614ed7b47a85bab923e7f2", + "value": "100%" + } + }, + "a6016c4347614ed7b47a85bab923e7f2": { + "model_module": "@jupyter-widgets/controls", + "model_module_version": "2.0.0", + "model_name": "HTMLStyleModel", + "state": { + "description_width": "", + "font_size": null, + "text_color": null + } + }, + "a78b0378abfe430d90972e593af02ecb": { + "model_module": "@jupyter-widgets/controls", + "model_module_version": "2.0.0", + "model_name": "HTMLStyleModel", + "state": { + "description_width": "", + "font_size": null, + "text_color": null + } + }, + "aada31cfad3e421cb9dd0aa2c0ad375d": { + "model_module": "@jupyter-widgets/base", + "model_module_version": "2.0.0", + "model_name": "LayoutModel", + "state": {} + }, + "ac25c36362ab428895d475d60bcf7b7a": { + "model_module": "@jupyter-widgets/base", + "model_module_version": "2.0.0", + "model_name": "LayoutModel", + "state": {} + }, + "ae50feaf2ed44d22a7ac3e578af52ad5": { + "model_module": "@jupyter-widgets/controls", + "model_module_version": "2.0.0", + "model_name": "HBoxModel", + "state": { + "children": [ + "IPY_MODEL_e14ae7f0142245b58a67c217d6f5656a", + "IPY_MODEL_d004d9c8e0f147d5b067683f299c5ba5", + "IPY_MODEL_eed4126ec40d41c2ba6eb8a3a7cdcabb" + ], + "layout": "IPY_MODEL_0cc3571558b343f89546b8bed66939e7" + } + }, + "af8f4ea420d049d598f3d8776496eec0": { + "model_module": "@jupyter-widgets/controls", + "model_module_version": "2.0.0", + "model_name": 
"HTMLStyleModel", + "state": { + "description_width": "", + "font_size": null, + "text_color": null + } + }, + "b0f97e6c275e4268b1308677ab3f9c7f": { + "model_module": "@jupyter-widgets/controls", + "model_module_version": "2.0.0", + "model_name": "FloatProgressModel", + "state": { + "bar_style": "success", + "layout": "IPY_MODEL_ddba76dc375c40f7a04ea247fbb9b169", + "max": 20, + "style": "IPY_MODEL_37c0fa9b5c124e6da733577779235e9b", + "value": 20 + } + }, + "b3299358bf3a4fcabeefa50eb0c69ed9": { + "model_module": "@jupyter-widgets/base", + "model_module_version": "2.0.0", + "model_name": "LayoutModel", + "state": {} + }, + "bcfbae6e2de64373b5a8a67c1bea86ec": { + "model_module": "@jupyter-widgets/controls", + "model_module_version": "2.0.0", + "model_name": "HTMLStyleModel", + "state": { + "description_width": "", + "font_size": null, + "text_color": null + } + }, + "c3627eb30a46446fbad163e4d2019490": { + "model_module": "@jupyter-widgets/controls", + "model_module_version": "2.0.0", + "model_name": "HTMLStyleModel", + "state": { + "description_width": "", + "font_size": null, + "text_color": null + } + }, + "c599e663ed1540cf9bd7ef82f6b627fd": { + "model_module": "@jupyter-widgets/base", + "model_module_version": "2.0.0", + "model_name": "LayoutModel", + "state": {} + }, + "c92031f02c384325b845329c4500d3ed": { + "model_module": "@jupyter-widgets/base", + "model_module_version": "2.0.0", + "model_name": "LayoutModel", + "state": {} + }, + "ca038a8a22c24a37b6569fbe03be141b": { + "model_module": "@jupyter-widgets/controls", + "model_module_version": "2.0.0", + "model_name": "ProgressStyleModel", + "state": { + "description_width": "" + } + }, + "ca20f304f28e439c8f4bd2b757d71277": { + "model_module": "@jupyter-widgets/controls", + "model_module_version": "2.0.0", + "model_name": "HTMLStyleModel", + "state": { + "description_width": "", + "font_size": null, + "text_color": null + } + }, + "cca75f888b44402ab03bbe0ae81d7f34": { + "model_module": "@jupyter-widgets/controls", 
+ "model_module_version": "2.0.0", + "model_name": "ProgressStyleModel", + "state": { + "description_width": "" + } + }, + "d004d9c8e0f147d5b067683f299c5ba5": { + "model_module": "@jupyter-widgets/controls", + "model_module_version": "2.0.0", + "model_name": "FloatProgressModel", + "state": { + "bar_style": "success", + "layout": "IPY_MODEL_b3299358bf3a4fcabeefa50eb0c69ed9", + "max": 20, + "style": "IPY_MODEL_346ea51f47d445ed92010a8e2f94d398", + "value": 20 + } + }, + "d2a146d969be4226a7c2cf3ab8b960a1": { + "model_module": "@jupyter-widgets/base", + "model_module_version": "2.0.0", + "model_name": "LayoutModel", + "state": {} + }, + "d5da81c8e7994dcaa5b0a14020a94347": { + "model_module": "@jupyter-widgets/controls", + "model_module_version": "2.0.0", + "model_name": "HTMLModel", + "state": { + "layout": "IPY_MODEL_c599e663ed1540cf9bd7ef82f6b627fd", + "style": "IPY_MODEL_d76a1ce507424e278f0b4e853965b23d", + "value": "100%" + } + }, + "d6583f6818654dc18afdec2988394f4c": { + "model_module": "@jupyter-widgets/controls", + "model_module_version": "2.0.0", + "model_name": "HTMLStyleModel", + "state": { + "description_width": "", + "font_size": null, + "text_color": null + } + }, + "d66354d629094fb99fc24a31b7d87d3e": { + "model_module": "@jupyter-widgets/base", + "model_module_version": "2.0.0", + "model_name": "LayoutModel", + "state": {} + }, + "d76a1ce507424e278f0b4e853965b23d": { + "model_module": "@jupyter-widgets/controls", + "model_module_version": "2.0.0", + "model_name": "HTMLStyleModel", + "state": { + "description_width": "", + "font_size": null, + "text_color": null + } + }, + "d8a1aeb16dc145e2bc2b877572d1937e": { + "model_module": "@jupyter-widgets/base", + "model_module_version": "2.0.0", + "model_name": "LayoutModel", + "state": {} + }, + "d8e9ad75f24d46c483e15c22b327ead0": { + "model_module": "@jupyter-widgets/base", + "model_module_version": "2.0.0", + "model_name": "LayoutModel", + "state": {} + }, + "dbbdc4d236274044b3d3b8b1d9441370": { + 
"model_module": "@jupyter-widgets/controls", + "model_module_version": "2.0.0", + "model_name": "ProgressStyleModel", + "state": { + "description_width": "" + } + }, + "ddba76dc375c40f7a04ea247fbb9b169": { + "model_module": "@jupyter-widgets/base", + "model_module_version": "2.0.0", + "model_name": "LayoutModel", + "state": {} + }, + "de6f11dc1be84740821d0cd2e4b45548": { + "model_module": "@jupyter-widgets/controls", + "model_module_version": "2.0.0", + "model_name": "HTMLStyleModel", + "state": { + "description_width": "", + "font_size": null, + "text_color": null + } + }, + "e0e807e874db44688403db6e5c1881d9": { + "model_module": "@jupyter-widgets/base", + "model_module_version": "2.0.0", + "model_name": "LayoutModel", + "state": {} + }, + "e14ae7f0142245b58a67c217d6f5656a": { + "model_module": "@jupyter-widgets/controls", + "model_module_version": "2.0.0", + "model_name": "HTMLModel", + "state": { + "layout": "IPY_MODEL_0da36d09da6e4de5b79deb2c032aae92", + "style": "IPY_MODEL_0e8677125e6c4781a376e79fd24d4d5b", + "value": "100%" + } + }, + "e27ae8ec281941829bd41501d40bc571": { + "model_module": "@jupyter-widgets/controls", + "model_module_version": "2.0.0", + "model_name": "HTMLModel", + "state": { + "layout": "IPY_MODEL_d2a146d969be4226a7c2cf3ab8b960a1", + "style": "IPY_MODEL_5e789e4091a945f0b900cf68ab373316", + "value": " 20/20 [00:00<00:00, 20.57it/s]" + } + }, + "e2caee050c794c9abb75f4af955eed67": { + "model_module": "@jupyter-widgets/base", + "model_module_version": "2.0.0", + "model_name": "LayoutModel", + "state": {} + }, + "e395dc9e66a040f0af05816f1f9ce04a": { + "model_module": "@jupyter-widgets/base", + "model_module_version": "2.0.0", + "model_name": "LayoutModel", + "state": {} + }, + "e7870033150746d6963e19fb87c48f34": { + "model_module": "@jupyter-widgets/base", + "model_module_version": "2.0.0", + "model_name": "LayoutModel", + "state": {} + }, + "eb969490152e41269a36ea75995056ac": { + "model_module": "@jupyter-widgets/controls", + 
"model_module_version": "2.0.0", + "model_name": "HTMLModel", + "state": { + "layout": "IPY_MODEL_d8a1aeb16dc145e2bc2b877572d1937e", + "style": "IPY_MODEL_68a90c9ed89145f89b0894b47d2ffbb8", + "value": " 20/20 [00:00<00:00, 20.64it/s]" + } + }, + "ed3d58d5aaec4b80867ef7335ec4a8ca": { + "model_module": "@jupyter-widgets/base", + "model_module_version": "2.0.0", + "model_name": "LayoutModel", + "state": {} + }, + "ede127b1c05749f497fa9e8fb7dd7c3b": { + "model_module": "@jupyter-widgets/base", + "model_module_version": "2.0.0", + "model_name": "LayoutModel", + "state": {} + }, + "eeaf80f66c6343028e7cd16a934594dc": { + "model_module": "@jupyter-widgets/base", + "model_module_version": "2.0.0", + "model_name": "LayoutModel", + "state": {} + }, + "eed4126ec40d41c2ba6eb8a3a7cdcabb": { + "model_module": "@jupyter-widgets/controls", + "model_module_version": "2.0.0", + "model_name": "HTMLModel", + "state": { + "layout": "IPY_MODEL_59e8e753c3c84c088c2b83247c4dfc4e", + "style": "IPY_MODEL_2df62b119beb419b8c26577216d83be0", + "value": " 20/20 [00:00<00:00, 19.86it/s]" + } + }, + "f848c31412ce47d29d781c352a05bf26": { + "model_module": "@jupyter-widgets/controls", + "model_module_version": "2.0.0", + "model_name": "HBoxModel", + "state": { + "children": [ + "IPY_MODEL_7803241950aa47dba5b4747c4f21d233", + "IPY_MODEL_731ddd98742c429dbdf5c91a96fe15e2", + "IPY_MODEL_a3bdd67c32774890a32b0da136583ed4" + ], + "layout": "IPY_MODEL_3b1cd3f6a59d40e8a22389a13b2b4137" + } + }, + "fa95db801d064294aa915bfbb4027f0a": { + "model_module": "@jupyter-widgets/base", + "model_module_version": "2.0.0", + "model_name": "LayoutModel", + "state": {} + } + }, + "version_major": 2, + "version_minor": 0 + } + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/src/examples/3_dataset_and_fineTune.ipynb b/src/examples/Quantum circuit synthesis with diffusion models/3_dataset_and_fineTune.ipynb similarity index 99% rename from src/examples/3_dataset_and_fineTune.ipynb rename to src/examples/Quantum 
circuit synthesis with diffusion models/3_dataset_and_fineTune.ipynb index 459c84d..6b7e26a 100644 --- a/src/examples/3_dataset_and_fineTune.ipynb +++ b/src/examples/Quantum circuit synthesis with diffusion models/3_dataset_and_fineTune.ipynb @@ -1,5 +1,18 @@ { "cells": [ + { + "cell_type": "raw", + "id": "39698881-e18c-4ae1-b791-f7b5d9e6489a", + "metadata": {}, + "source": [ + "---\n", + "categories:\n", + " - Entanglement generation\n", + " - Quantum circuits\n", + " - Training\n", + "---" + ] + }, { "cell_type": "markdown", "id": "69a855f1-55dd-482e-94f2-9ad02804be4d", @@ -16,6 +29,18 @@ "In this notebook we create a (demo) 9-qubit dataset and fine-tune the model with it. Note, we use direct fine-tuning similar as you would train the model from scratch (with a higher learn-rate and larger dataset)." ] }, + { + "cell_type": "code", + "execution_count": null, + "id": "e37a2b43-48f2-437b-ac2b-486aa788a8c5", + "metadata": {}, + "outputs": [], + "source": [ + "# NOTE: this notebook is designed for an old version of genQC! Please use ´pip install genQC==0.1.0 -q´\n", + "import genQC\n", + "assert genQC.__version__ in [\"0.1\", \"0.1.0\", \"0.1.1\"]" + ] + }, { "cell_type": "code", "execution_count": null, @@ -1613,6 +1638,14 @@ "import genQC\n", "print(\"genQC Version\", genQC.__version__)" ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "e3a34a4a-5d62-4504-8010-2444ef49fa75", + "metadata": {}, + "outputs": [], + "source": [] } ], "metadata": { diff --git a/src/examples/tutorials.qmd b/src/examples/tutorials.qmd new file mode 100644 index 0000000..184b170 --- /dev/null +++ b/src/examples/tutorials.qmd @@ -0,0 +1,24 @@ +--- +html: + code-copy: false +listing: + - id: tutorials-links + image-placeholder: "../webpage/assets/logo.png" + sort: false + type: grid + categories: true + sort-ui: [title] + filter-ui: [title] + contents: "*{.qmd,.ipynb}" +--- + +# Tutorials Overview + +#### Welcome to the `genQC` Tutorials. 
+ +Here you can familiarize yourself with basics concepts and modules. Tutorials show how to reproduce some figures and results of the corresponding [research papers](../webpage/research.qmd). + +
+ +::: {#tutorials-links} +::: \ No newline at end of file diff --git a/src/get_started.ipynb b/src/get_started.ipynb new file mode 100644 index 0000000..82c4be5 --- /dev/null +++ b/src/get_started.ipynb @@ -0,0 +1,456 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# genQC · Generative Quantum Circuits\n", + "\n", + "> Generating quantum circuits with diffusion models" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "

\n", + " \"python-3.12\"\n", + " \"DOI\"\n", + " \"https://florianfuerrutter.github.io/genQC\"\n", + " \"huggingface.co/collections/Floki00\"\n", + " \"Online_Demo\"\n", + "

" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Code repository for generating quantum circuits with diffusion models." + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "![Generation process for 4-qubit QFT.](https://github.com/FlorianFuerrutter/genQC/blob/main/src/webpage/assets/qft_4qubit_circuit_15s_wpause.gif?raw=true)\n", + "\n", + "\n", + "" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## 📰 News" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "- 🔥 [2025-06-01] *Discrete-continuous circuits with multimodal diffusion* - model released on [Hugging Face: huggingface.co/collections/Floki00](https://huggingface.co/collections/Floki00/discrete-continuous-circuits-with-multimodal-diffusion-6839c4e4553e56b957bbd5bf)." + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## The codebase" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "The code contained within this repo allows the sampling of pre-trained diffusion models and includes our pipeline to fine-tune and train models from scratch. Pre-trained weights can be found on [[Hugging Face]](https://huggingface.co/collections/Floki00/) and can be downloaded automatically via our code (see minimal example). For the text CLIP model weights we use the [`OpenCLIP`](https://github.com/mlfoundations/open_clip) library, which will download (and cache) the CLIP model on first usage of our pipeline. In case you prefer reading a documentation, rather than notebooks or code, see the project page under [[Documentation]](https://florianfuerrutter.github.io/genQC/)." + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "This repo inlcudes:\n", + "\n", + "1. `genQC/` a full release of our used diffusion pipeline.\n", + "2. `src/examples/` examples and tutorials to show how to use the library.\n", + "3. 
`src/` the source notebooks for [`nbdev`](https://github.com/fastai/nbdev)." + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Examples" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "#### Minimal example" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "A minimal example to compile the 4-qubit Quantum Fourier transform (QFT) unitary, using parameterized circuits." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "import torch\n", + "from genQC.pipeline.multimodal_diffusion_pipeline import MultimodalDiffusionPipeline_ParametrizedCompilation\n", + "from genQC.inference.sampling import generate_compilation_tensors, decode_tensors_to_backend\n", + "from genQC.utils.misc_utils import infer_torch_device, set_seed\n", + "from genQC.platform.tokenizer.circuits_tokenizer import CircuitTokenizer\n", + "from genQC.benchmark.bench_compilation import SpecialUnitaries\n", + "from genQC.platform.simulation import Simulator, CircuitBackendType\n", + "\n", + "device = infer_torch_device()\n", + "set_seed(0)\n", + "\n", + "pipeline = MultimodalDiffusionPipeline_ParametrizedCompilation.from_pretrained(\n", + " repo_id=\"Floki00/cirdit_multimodal_compile_3to5qubit\", \n", + " device=device)\n", + "\n", + "pipeline.scheduler.set_timesteps(40) \n", + "pipeline.scheduler_w.set_timesteps(40) \n", + "\n", + "pipeline.g_h, pipeline.g_w = 0.3, 0.1\n", + "pipeline.lambda_h, pipeline.lambda_w = 1.0, 0.35\n", + "\n", + "U = SpecialUnitaries.QFT(num_qubits=4).to(torch.complex64)\n", + "\n", + "out_tensor, params = generate_compilation_tensors(pipeline, \n", + " prompt=\"Compile 4 qubits using: ['h', 'cx', 'ccx', 'swap', 'rx', 'ry', 'rz', 'cp']\", \n", + " U=U, \n", + " samples=8, \n", + " system_size=5, \n", + " num_of_qubits=4, \n", + " max_gates=32)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [ + { 
+ "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAABXgAAAEvCAYAAAD/z6lrAAAAOnRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjEwLjMsIGh0dHBzOi8vbWF0cGxvdGxpYi5vcmcvZiW1igAAAAlwSFlzAAAPYQAAD2EBqD+naQAATCNJREFUeJzt3Xl4U2X+/vE7bekGhQIFWihLKTu0DbuIGwgIyOLCojAMOigyouKgoKPOF/cFnRmHwQUVFWd+MlUYFVFxAQREQRhkEcoqBUopUNbSjbY5vz9iK4W0NCFLT/J+XVcvSM55Tj5ZTnJy5znPYzEMwxAAAAAAAAAAwHSCfF0AAAAAAAAAAMA1BLwAAAAAAAAAYFIEvAAAAAAAAABgUgS8AAAAAAAAAGBSBLwAAAAAAAAAYFIEvAAAAAAAAABgUgS8AAAAAAAAAGBSBLwAAAAAAAAAYFIEvAAAAAAAAABgUgS8AAAAAAAAAGBSBLwAAAAAAAAAYFIEvAAAAAAAAABgUgS8AAAAAAAAAGBSBLwAAAAAAAAAYFIEvAAAAAAAAABgUgS8AAAAAAAAAGBSBLwAAAAAAAAAYFIEvAAAAAAAAABgUgS8AAAAAAAAAGBSBLwAAAAAAAAAYFIEvAAAAAAAAABgUgS8AAAAAAAAAGBSBLwAAAAAAAAAYFIEvAAAAAAAAABgUgS8AAAAAAAAAGBSBLwAAAAAAAAAYFIEvAAAAAAAAABgUgS8AAAAAAAAAGBSBLwAAAAAAAAAYFIEvAAAAAAAAABgUgS8AAAAAAAAAGBSBLwAAAAAAAAAYFIEvAAAAAAAAABgUgS8AAAAAAAAAGBSBLwAAAAAAAAAYFIEvAAAAAAAAABgUgS8AAAAAAAAAGBSBLwAAAAAAAAAYFIEvAAAAAAAAABgUgS8AAAAAAAAAGBSBLwAAAAAAAAAYFIEvAAAAAAAAABgUgS8AAAAAAAAAGBSBLwAAAAAAAAAYFIEvAAAAAAAAABgUgS8AAAAAAAAAGBSBLwAAAAAAAAAYFIEvAAAAAAAAABgUgS8AAAAAAAAAGBSBLwAAAAA4CdKSmzVYhsAAMB7CHgBAAAAwA8cOZav7rcu0sKv97q8je83HlbHG/+rnemn3FgZAADwJIthGIaviwAAAAAAuO7E6UJdddtn+nn3CQUHW5Q6s49u7p/g1Da+33hYA//4pXJyixTXIFKr5w1RQnyUhyoGAADuEhA9eLOzszV9+nS1atVK4eHhatq0qaZMmaLc3FxNmDBBFotFs2fP9nWZAAAAfq3YJp08K+UXS3QxANyrTq1Qde8UI0kqKTE0evpyp3rynhvuSlLHxGjFxkR4pFYAAOBeIb4uwNM2btyoQYMGKSsrSzVr1lSHDh2UmZmpWbNmac+ePTp+/LgkyWq1+rZQAAAAP2QzpHXZ0od7pZVZUunInk0ipZuaS8ObSdFhPi0R8AtBQRa99fiVkqR3Pt5VFvKmztRFe/KeH+72u6yxFs3qr4hwv/+6CACAX/DrIRqys7PVuXNnZWRk6IEHHtCMGTMUFWU/xWjmzJl66KGHFBISopKSEp08eVK1a9f2ccUAAAD+4/RZafp6aX12xeuEBUlPd5X6xHmvLsCf2WyG7nh8ld75eJckXXS4BsJdAADMz68D3jFjxmj+/Pm655579M9//vOC5VarVZs2bVJCQoJ++eUXH1QIAADgn/KLpbu+l7advPi6FkkzuxPyAu5S1ZCXcBcAAP/gt2PwpqWlKTU1VTExMXruueccrtO1a1dJUkpKSrnr9+7dq2HDhikqKkp169bV73//ex07dszjNQMAAPiL17dXLdyVJEPSXzbYe/wCuHSlwzXcfkNrSY7H5CXcBQDAf/htwDt//nzZbDaNHTtWtWrVcrhORIR90oBzA96cnBz16dNHGRkZmj9/vt544w2tWrVKQ4YMkc1mc7gdAAAA/Ca/WPpkv3NtCkqkxQc8
Uw8QiCoLeQl3AQDwL377Cb5s2TJJUp8+fSpcJyMjQ1L5gPeNN97QwYMHtXLlSjVr1kySFB8fr8svv1yLFi3SDTfc4LmiAQAA/MA3mdKZYufbLdwnjUl0fz1AoHI48dq05apRI0gFhSWSCHcBAPAHfjsGb9OmTZWRkaGffvpJVqv1guXFxcWKi4tTdna29uzZo5YtW0r6LRBevnx5ufUTExN1zTXXaO7cuU7X0q1bN2VlZTl/JwAAAEwo6sbHVLP/JJfaZk1uJhmcNQW4kyGLTtYcprywLuWuDyvao/o578siF36RAQAAbhUbG6v169e71NZvf6bNzc2VJOXn5ztcnpqaquzsbEVFRSkh4bfJBrZt26aRI0desH7Hjh21bds2l2rJysrSwYMHXWoLAABgNk0Lz6qmi20zjxyVcbbArfUAkBT5pZSQLAX9+hXQMFR4aKkyT+3zbV0AAOCS+W3AGxsbqxMnTmjDhg3q1atXuWWHDh3StGnTJEnJycmyWCxly06cOKHo6OgLtlevXj3t2LHD5VoAAAACRYTh2mxptsI8NW5Q383VACgMaapjUb+TYTnn65/FIjWdoHp1ayuiKM13xQEAAEmXlh/6bcDbr18/paWl6YUXXlD//v3Vpk0bSdK6des0btw4ZWdnS5LD4RvczdXu1QAAAGa057Q0+lvn2w1NjNQTv86RAMA9SidUM36dUC00xKKzxb+O0mcJ1qnoW/XGzD66uX9CJVsBAADVWZCvC/CU6dOnq379+jpw4IA6duyopKQktW7dWj169FDLli3Vt29fSeUnWJOkunXr6uTJkxds7/jx46pXr543SgcAADC1xNpSVxc64o4kXwLcqjTczfk13O13WWPF1A2XJEWGB0uSfeK16cu18Ou9PqsTAABcGr8NeOPj47Vq1Spdf/31Cg8PV3p6uurVq6c5c+bos88+086dOyVdGPC2b9/e4Vi727ZtU/v27b1SOwAAgNlNaieFWC6+XqmrY6WO0R4rBwg4jsLdRbP6lw1PFx0VqttvaC2JkBcAALPz24BXsoe1ixcvVk5OjnJycrR27VpNnDhRubm5Sk9PV1BQkDp16lSuzZAhQ/Tdd98p45zTA9euXas9e/Zo6NCh3r4LAAAAptS5vvRkl6qFvN1ipKe72IcEBXDpKgp3I8J/G6HPYrHorcevJOQFAMAP+HXAW5GtW7fKMAy1bt1akZGR5ZZNnDhRcXFxGj58uBYvXqwFCxbo1ltvVY8ePTR8+HAfVQwAAGA+A5pIr18uXdbA8fL6YdLEttI/L5Mi/HZmCMC7qhLulgoKIuQFAMAfBGTAu2XLFkkXDs8gSbVr19ayZcsUFxenW265RXfccYcuv/xyLV68WEFBAflwAQAAuMxaX5rdS1rYV7r3nNGuLJI+628PeGtwiAW4hTPhbilCXgAAzC8g+0pUFvBKUmJiohYvXuzNkgAAAPxa81rS+NbSK2mSTfaAN4RgF3CbYycLNHjyV06Fu6VKQ15JeufjXWUh7+YF0eqQWNejdQMAgEsXkIfVFwt4AQAAAMBM6keH6+XpPWWxOBfuljq/J+9jd1oJdwEAMImA7MG7bNkyX5cAAAAAAG512/A2alQvQtd0j3Mq3C1VGvIO79Ncw/s090CFAADAEwIy4AUAAAAAfzToyqaX1D4oyEK4CwCAyQTkEA0AAAAAAAAA4A8IeAEAAAAAAADApAh4AQAAAAAAAMCkCHgBAAAAAAAAwKQIeAEAAAAAAADApAh4AQAAAAAAAMCkCHgBAAAAAAAAwKQIeAEAAAAAAADApAh4AQAAAAAAAMCkCHgBAAAAAAAAwKQIeAEAAAAAAADApAh4AQAAAAAAAMCkCHgBAAAAAAAAwKQIeAEAAAAAAADApAh4AQAAAAAAAMCkCHgBAAAAAAAAwKQIeAEAAAAAAADApAh4AQAAAAAAAMCkCHgBAAAAAAAAwKQIeAEAAAAAAADApAh4AQAAAAAAAMCkCHgBAAAA
AAAAwKQIeAEAAAAAAADApAh4AQAAAAAAAMCkCHgBAAAAAAAAwKQIeAEAAAAAAADApAh4AQAAAAAAAMCkCHgBAAAAAAAAwKQIeAEAAAAAAADApAh4AQAAAAAAAMCkQnxdAAAAAAAA5zMMQyos9HUZVRcWJovF4usqAAABiIAXAAAAAFD9FBaqeNR4X1dRZSEfzJPCw31dBgAgADFEAwAAAAAAAACYFAEvAAAAAAAAAJgUAS8AAAAAAAAAmBQBLwAAAAAAAACYFJOsAQAAv1ZSYtOO9FPatOO4Tp05K5vNUER4iNo0ryNr23qqGVnD1yUCAAAAgMsIeAEAgN8pPFuihV+na+5HO7R2y1Hl5hc7XC8oyKL2CXU0emBL3XFTW8U1iPRypQAAAABwaQh4AQCA3zhbVKKX3t2il/+9VUdPFFx0fZvN0NY9J/V/r2zQk3N+0oh+CXpxag/Fx9b0QrUAAAAAcOkYgxcAAPiFjduPqceYRXr0n/+rUrh7vuJiQ/9Z8os63rRQb3+0U4ZheKBKAAAAAHAvAl4AAGB68z7Zpe5jPtGmHccveVunzxRpwoxVuvWh5TpbVOKG6gAAAADAcwh4AQCAqb25YLtu+8tKFRe7t8dt6pK9uvlPS1VUZHPrdgEAAADAnQh4AcBPGIahl97douOnCl3exqmcs5r59mbZbJyaDnNYvGK/7npqtee2v/KAJj3tue0DAAAAwKUi4AUAP2AYhqa8sEbT/vaj+t35hUsh76mcsxr4xyV66OV1mvjEd4S8qPaOnSzQHY9/J2eGyl03f5gOfH2L1s0fVuU2b3+0U4uW73OhQgAAAADwvIAIeLOzszV9+nS1atVK4eHhatq0qaZMmaLc3FxNmDBBFotFs2fP9nWZAOCyrOx8ffjVXknST9uPOR3yloa7azYflSR9vHyfDmSd8UitgLvc+9wPOnws36k2sTGRim9UU7ExkU61u+up1ZfUOx4AAAAAPCXE1wV42saNGzVo0CBlZWWpZs2a6tChgzIzMzVr1izt2bNHx4/bJ2OxWq2+LRQALkFcg0gtnztYfSZ8rqzs/LKQ95s3B6lenbBK254f7taPDtOytwareeMob5QeEApLpKWZ0g9HpTNFUniwlFRXGtJUqh3q6+rM6X/bsjX/i1+8dntZ2fl66d0tenZKN6/dJuBP8oqlLzKkDcfs/48MkbrWlwbG2/8PeMqK7CPq/8O3er5DsqYmtnO4TuinH2hwwzh93PNKL1cXWHackhYfkLLyJENSowj7sVD7aF9XBgDm59eHU9nZ2Ro6dKiysrL0wAMPaMaMGYqKsgcWM2fO1EMPPaSQkBBZLBYlJyf7uFoAuDTtEqKdDnkrCneT29TzZul+yzCkebulf++RTp4tv+zrTOmVNGlYM2lKR3voi6p7NTXN67f51n93aMYfOysslCcLqKpim/T6dunDdCm3uPyyLw9K/9gmjUqQ7morhQTEuYVA4Nl+Upq5Rdp84sJlqXuljtHStCSpU11vVwYA/sOvD6Puu+8+ZWRk6J577tFLL71UFu5K0vTp05WSkqLi4mK1aNFCtWvX9mGlAOAepSFvbEyEpMqHayDc9SzDkJ7aJM1OuzDcLVVos4cek3+Q8osdr4MLnThdqPc/3+P12z16okALvt7r9dsFzKrYJk1bJ727+8Jwt1RusfTOLmn6evv6APzL/7KlO1c7DndLbT0p3bVaWnPEa2UBgN/x24A3LS1NqampiomJ0XPPPedwna5du0qSUlJSyq4rDYR79OihsLAwWSwWr9QLAO5SlZCXcNfz5u2WFu2v2rqbjktPbvRoOX5l2dpMFRSW+OS2P1t5wCe3C5jRy1ulVYertu7KLHtvXgD+IytPenCdlF+Fj+xCm/2Hnoxcz9cFAP7IbwPe+fPny2azaezYsapVq5bDdSIi7OHHuQHv7t27tXDhQsXGxqp79+5eqRUA3K2ykJdw1/MKSqR/7XauzdeZ0j7mtauS/207FpC3
DZjJ8UJp4T7n2ixMl04ylyE8JK+kRNmFhQ7/4Bkfpks5RVVfP69Y+o/3htcHAL/it2PwLlu2TJLUp0+fCtfJyMiQVD7gveqqq3To0CFJ0uOPP67Vq1d7sEoA8BxHY/L2mfC5QkMsWv9rSEW46xnfHJROOfGFptTCdGlqJ7eX43c2pGX77LZ37jul02fOqnYtZscDKrNov1Tk5JALZ23SogPS71t5piYEtid3bNWTO7b6uoyAUVgifVLFM5nO9ekBaXJ7KcJvkwoA8Ay/fdvct8/eZaB58+YOlxcXF5eFt+cGvEFBftupGUAAOj/k3bzzeNkywl3P+f6oi+2OSFPdW4pf2p/l2/M3Mw7nqgMBL1Cp710cS/P7IwS88Iw7mrXUzY2bOlw2aM0KL1fj/7afqngOgsrkFtvH6+3ZwP01AYA/89uANzfX/uUvPz/f4fLU1FRlZ2crKipKCQkJHq2lW7duysrK8uhtAEBlSoIbS7XvkCzB9isMQ5b9r2pw34d9W5ifqnv3ewrr1NfpdnsyshQf380DFfmXrDpTpGDHP0ysmz9MsTGRFbYtHbYkNiZCB76+pfLbyc5T91sXXXD9NX37K7SEz3VXNZq9T5agYJXYShQf7/iHeJhf/T8vUY2mzp+S8MOGzYofNdgDFaHUoeipUlAdHco6pPj4eF+XU6mIoCBts/Zyy7Za1aqlaxs0csu2KtKmTRvl25gtUJJCO/RRvXv+5VLbMbdPVOHGz91cEQBUf7GxsVq/fr1Lbf024I2NjdWJEye0YcMG9epV/qDg0KFDmjZtmiQpOTnZ4xOpZWVl6eDBgx69DQCoUFCElPD738JdSbJYlF2jn5TxN6mE2SzcLez0cYW50K4oL4fPi6qILJSCHS+KjYlUfKOaF91ESHBQldZz5OjhQ1JhpkttITUyDPt/DIPXux+rdeaUarjQriDnJK8LT4sqkYIkW0lJtX+sI4ODJauvq6i6zMxM5ZX4ZhLQ6qZW9H65eo5YduZ+na7mr00AqG78NuDt16+f0tLS9MILL6h///5q06aNJGndunUaN26csrPt4/dZrVaP1xIbG+vx2wAAR2yWMGVHjVNRyK+nJBo2yfLrUDQRzVWjzUOKyXlPQYbjsx3gmpDDO1xqZ2T8rCZNmri5Gv9zNPisKjrrMys7r9K2sTERCgkOUnGJTVnZlb/uK9pWbINaCjZ4nlxW+sO6xcLr3Y9ZMrdJ7Xs73S7o0DZeFx52KDhYNklBwcGKq+aPdYTJhs9r3LgxPXh/ZTl7TMbZAllCw51qZ5QUqU7BYUVV89cmAHjCpeSHfhvwTp8+Xe+//74OHDigjh07ql27diooKNDu3bs1aNAgtWjRQl9++WW58Xc9xdXu1QBwKU7lnNXAPy7Roc32AWHrR4cpOMiiI8cLFBQk2WxSUUhjxfZ6Ud+8OUj16rjS5xSOnCyUBn9tnzDIGf/vTzfK+vSNninKj0x5/gfNen+bw2WOhlQ414Gvb1F8o5rKys5X0/7/cfq2mzSMVMZm1wJ82PVYJNkkBQcFl014C/+TfkYascz5dl89O0lNZ01yf0EoE99vvg4eyVNcbJwyfq7e+6BRUKDiUeN9XUaV7dy5U5Zw5wJNf/b4T9LiA861GdC0hp7b9pNnCgIAP2aun0SdEB8fr1WrVun6669XeHi40tPTVa9ePc2ZM0efffaZdu7cKUleCXgBwNtKw90154S7y94arBoh9rf9mOjwsrFIf9p+TP3u/ELHTxX6rF5/Ex0mDXJyWMMO0VIK891VSdcOMQF524CZtKgl9W7oXJsrG0lNa3mmHgDeNzrBucDBIumWlp6qBgD8m98GvJLUvn17LV68WDk5OcrJydHatWs1ceJE5ebmKj09XUFBQerUyfnJHwCgOqso3E1u81t6WCMkSMvnDibk9aAHO9lD26qICZNe6Pbbmeuo3NXdYn32WPXpHuebGwZM6PHOUrMqDnXdvJY0o7Nn6wHgXe2jpWlJVV9/
Skd+7AYAV/l1wFuRrVu3yjAMtW7dWpGRF860vWDBAi1YsEDbtm0rd5mhFgBUd1UJd0u1S4gm5PWgiBDp1V7S1RcZRqldHentK6W4Cz+OUIHmjaM06Arvz/weHhas3w9r7fXbBcyqbpj01hVSl/qVr9ctRnqrtxQd6p26EFiujmmos0NHaWpiuwrXOTt0lD7ueaUXqwocIxOkx61SzUoGh4wIlh5Jln6X6LWyAMDv+O0YvJXZsmWLpIqHZxg5cqTDy+PHj9e7777r0doAwFXOhLulSkPePhM+V1Z2flnIy5i87lGrhvTXHtKe09KCdGnNUelA7m/L3+wtWevRc9cVk2/poM9XeXfsyDGDE9kvACfVC5Pe6C1tPWF/H/z0nPE4hzWTRrSo+tkOAMxpSDOpb2Ppiwz7e8DPJ35b9lCSfVirWjV8Vx8A+IOA7MF7sYDXMAyHf4S7AKorV8LdUvTk9bzE2tJDydJH1/72wRskqXN9wl1XXXd5E/VMauC12wsLDdb025w4zxRAOR3r2odgOPc98P+shLtAoIgMkW5uIb17Zfn3gZEJhLsA4A4EvADgB0KCLQqtESzJuXC31Pkhb2iNIAUHkTyi+goODtI7T12lsNBgr9ze0/d0VduEaK/cFgAAAAA4IyAD3mXLlskwDF1//fW+LgUA3KJmZA19/soA3XRtC6fD3VKlIe8NfZvry9cHqk4UgyGiemvfMlpP39PVqTZZ2XnKOJyrrOy8Kre53NpQfxrX0dnyAAAAAMArAnIMXgDwRzUja2jh36+9pG20S4jWRy/3c1NFgOc9ML6Tdu0/pTcW7KjS+t1vXeTU9tu2qKOPX+6n4OCA/E0cAAAAgAnwbQUAAJiWxWLRa4/11t2j27t920mt62r53MFqUC/C7dsGAAAAAHch4AUAAKYWFGTR7Ed6ae4TV6q2m2ZqufPmtvpu3hDFNYh0y/YAAAAAwFMIeAEAgOlZLBb94cY2+vm/N2nIVU1d3k7L+Ch9+fp1emPGFapdi3GoAQAAAFR/jMELAAD8RtPYWvp09gCl/XJSr32QpnmLdun0maJK21gs0uArm+ru0e113eVNGG8XAAAAgKkQ8AIAAL/TvmW0Zj3cS399oKe2/XJC/9t2TJt2HNNb/92hvIISRYaH6MnJXdStY4w6t6tPb10AAAAApkXACwAA/FaNGkFKaVtfKW3rS5IWfpOuvII81a0dqgfGJ/m4OgAAAAC4dJyDCAAAAAAAAAAmRcALAAAAAAAAACZFwAsAAAAAAAAAJkXACwAAAAAAAAAmRcALAAAAAAAAACYV4usCAAAAAAC4QFiYQj6Y5+sqqi4szNcVAAACFD14AQAAAADVjsVikSU83Dx/FouvHzK/06JFC7Vt21ZWq1UdOnTQK6+8UuG6s2fP1vPPPy9Jstlsmjp1qjp06KDk5GT16dNHu3fvdtjOZrPp3nvvVWJiolq1aqXZs2eXLXv88cfVoEEDWa1WWa1WjR07ttztPfvss266pwBwaejBCwAAAAAAqqXU1FRZrVbt27dPycnJuvLKK5WcnFxunfz8fP3tb3/Tli1bJEmLFi3S6tWrtWnTJtWoUUNPP/20HnnkEX3wwQcXbP/f//63tm3bpp07d+rUqVPq3Lmz+vTpo44dO0qSxo4dq5dffvmCdhMnTlT79u01efJk1alTx/13HACcQA9eAAAAAABQrTVv3lxt27bVzp07L1i2YMEC9e7dWzVr1pRk7/1dWFiogoICGYah06dPKz4+3uF2U1NTdeeddyo4OFj16tXT6NGjNX/+/IvWExoaqgEDBuj999+/tDsGAG5AwAsAAAAAAKq1LVu2aPv27UpJSblg2bfffquePXuWXR46dKiuueYaxcbGKi4uTkuXLtWTTz7pcLv79+9X8+bNyy63aNFC+/fvL7v84YcfKiUlRX379tXy5cvLte3Vq5eWLl16qXcNAC4ZAS8AAAAAAKiWRo8eLavVqrvuuktvv/22Wrdu
fcE6GRkZatSoUdnl9evX6+eff9bBgweVmZmpa6+9VpMmTXL6tidNmqT09HRt2rRJTz31lEaPHq19+/aVLY+NjVVGRoZrdwwA3IgxeAEAAAAAQLVUOgZvZSIjI1VQUFB2+b333lPfvn0VHR0tSRo/frwGDBjgsG2zZs20b98+9erVS5KUnp6uZs2aSbIHuKV69+6tzp07a/369WU9fgsKChQREeHqXQMAt6EHLwAAAAAAMK3k5GTt2LGj7HLLli21bNkynT17VpK0ePFiderUyWHbkSNH6s0331RJSYmOHz+u1NRUjR49WpLK9c7dtWuXNm7cqKSkpLLr0tLSHA4ZAQDeRsALAAAAAABMa8SIEfryyy/LLk+ePFkJCQlKSUlRcnKyli5dqtdee61sudVqVWZmpiRp3LhxateunVq3bq3u3btr6tSpZSHuo48+qk6dOslqteqWW27RK6+8ojZt2pRtZ8mSJRoxYoSX7iUAVIwhGgAAAAAAQLWTnp5epfWSkpLUsGFDrVu3Tt27d1dYWJjefPPNCtffuHFj2f+Dg4P1yiuvOFxv3rx5FW5j27ZtKi4u1hVXXFGlGgHAk+jBCwAAAAAATG3WrFk6fPiw127vwIEDmjNnjtduDwAqQw9eAAAAAABgaomJiUpMTPTa7V133XVeuy0AuBh68AIAAAAAAACASRHwAgAAAAAAAIBJEfACAAAAAAAAgEkR8AIAAAAAAACASRHwAgAAAAAAAIBJEfACAAAAAAAAgEkR8AIAAAAAAACASYX4ugAAAAAAAACUZxiGVFjo6zKcExYmi8Xi6yqAgEPACwAAAAAAUN0UFqp41HhfV+GUkA/mSeHhvi4DCDgM0QAAAAAAAAAAJkXACwAAAAAAAAAmRcALAAAAAAAAACZFwAsAAAAAAAAAJkXACwAAAAAAAAAmRcALAAAAAAAAACZFwAsAAAAAAAAAJkXACwAAAAAAAAAmRcALAAAAAAAAACZFwAsAAAAAAAAAJkXACwB+ZF9mTrXYBgAAAAAA8A4CXgDwEyvXH1LHG/+rx1/d4PI2Xnxns9oNX6ivvs9wY2UAAAAAAMBTCHgBwA8cOpqnwZO/Um5+sZ54/SeXQt4X39ms6X9fp4LCEg2f8o32ZtCTFwAAAACA6i4gAt7s7GxNnz5drVq1Unh4uJo2baopU6YoNzdXEyZMkMVi0ezZs31dJjzIZkg/n5BWZEnfHZbSya287uRZac0R6dtD0o9HpdxiX1fkX+IaROrpe7qWXXY25C0Nd0s9dqdVCfFRbq0R8LVd+06poLBEknS2qEQlJTYfVwQAQOAwDGnXKckovfzrdfC8FdlHFPrpB/rbnu0VrhP66Qe6Ye0qL1YVmPafsWcSK7KkLcelEvYBuEmIrwvwtI0bN2rQoEHKyspSzZo11aFDB2VmZmrWrFnas2ePjh8/LkmyWq2+LRQecaZI+niftHCfdCC3/DJrPWlEC2lAEynI4pPyAsL2k9J/9kpfHZTOnpOlRAZLg5tKtyRILcgR3eL+cZ0kSX96ca0ke8grSY/f3aXSdueHu0/f01WPTrR6pkjAywzD0MKv0/VqapqWrztUdv3RE4Vqdf2HmjSqnSaOaKe6tcN8WCUAAP6ryCZ9dkD6MF3aceq36w1Jt34rjUiQhjaVwoJ9VCDgYYYhLT0kfbhX+t+x8suaREo3NpdubiFF1fBJefATfh3wZmdna+jQocrKytIDDzygGTNmKCrKniTNnDlTDz30kEJCQmSxWJScnOzjauFuWfnSfWukXyrorbvxuP1v2SHpqS4cUHjCx/ukZzfbe1CfL69EWpAufbpfeqardE2c18vzS86GvIS78GdFRTb9YcZK/XvxHofL0zPP6OGX12vuRzu15LXr1DK+tpcrBADAv50pkh748cJQq9TuHOn5zdIXGdLfekh1Qr1bH+BpxTbpiZ+kLw46Xn4wT5qdJi3aL/3zMqlJTe/WB//h10M03HfffcrIyNA999yj
l156qSzclaTp06crJSVFxcXFatGihWrX5kudPzl9Vrr3h4rD3XMtOyQ9/pPjEBKu+/qg9PSmiz+uhTbp4fXS+mzv1BUI7h/XSX+f1rPsckXDNRDuwp8ZhqFJT6+uMNw91659pzXgriU6ejzfC5UBABAYimzSg+sqDnfPtem4NPVH6deRlAC/YBjSM5sqDnfPtT9XmvyDdKLQ83XBP/ltwJuWlqbU1FTFxMToueeec7hO16728SpTUlLKrluwYIFuvvlmNW/eXJGRkWrXrp0effRRnTlzxit1wz3+3y/SXieesq8z7ePCwj3OlkgvbKn6+sWG/Zd7xuByn4uFvDm5RYS78Gs/bDqitz/aWeX19xzI0fNvb/ZgRQAABJYlGc514th0XPr0gOfqgZRXUqLswkKHf3A/Z1/TGXnSvN2eqwf+zW+HaJg/f75sNpvGjh2rWrVqOVwnIiJCUvmA96WXXlKzZs307LPPKj4+Xhs3btQTTzyhFStWaOXKlQoK8ttM3G8U2exDAzhrQbp0WUO3lxOQlh6yT6rmjPQz9l/3u8V4pqZA5Gi4hqhI+9v+6dyisvUId+GPXk1Nc7rNOx/v1FOTuyoywm8PjwAA8JoP051vs2CvdHNzycIcKR7x5I6tenLHVl+XETAWpDvfZtF+aVI7KZwhJOEkv/0Gs2zZMklSnz59KlwnIyNDUvmA99NPP1WDBg3KLl999dVq0KCBxo4dq++++05XXXWVhyqGu6w5Ih1z4QfIlVnSqbOM++QOi1385f3T/QS87nZ+yJuTV1xuOeEu/FFuXpE+/Gqv0+1OnD6rT1fs1+iBLT1QFQAAgWNvjrTtpPPtdufYJ2JrF+3uiiBJdzRrqZsbN3W4bNCaFV6uxr/lFUvfZDrf7nSRtCpL6t/E/TXBv/ltwLtvn70LZ/PmzR0uLy4u1urVqyWVD3jPDXdLdevWTZJ08GAVBk5xoFu3bsrKynKpLZwXceU41bnV8bAclbFJSul9rYoP7XB/UQEm5v+WKyS2tdPt/rt0teYOG+2BilAn7DKdqjmo3HW185bqtSdn6LUnfVRUgGo0e58sQcEqsZUoPt7xZxQuTXFQtM5G/8mlthPveVgPFHzv5opwLvaBwMbz73uHoqdKQXV0KOuQ4uPjfV0O/FRo+6tU7973XWp7/Zg/qHDzV26uyJwigoK0zdrLbdtrVauWrm3QyG3bc6RNmzbKt9k8ehtmEFy/mRo85dox5eRHnlTe0jfcXBHMIDY2VuvXr3eprd8GvLm5uZKk/HzHE6akpqYqOztbUVFRSkhIqHRby5cvlyS1b9/epVqysrJcDofhvJiTJ1XHxbaHD2epgOfqktUpKnbpzaWwoIB9xVNizkjnzch6OidXp4/weHtbo9LBpg2D17un1CiQol1revrUKZ3O5nnxJPaBwMbzXw1ElUhBkq2khOcAHhMVc1T1XGx77Fi2TvHalCRFBgdLVl9X4ZzMzEzllTBbXmhRDV3YfbBqTp08qSPsA3CS3wa8sbGxOnHihDZs2KBevcr/4nXo0CFNmzZNkpScnCxLJQP8HDx4UH/5y180cOBAWa1Wl2uB94QZuS61M0qKFRNukdGEcyEuVVDOYUnO/yBSI++YmvD4u11OeG+djhxw4YJGwxRVO0q187/1ek0BrfQzx2Lh9e4hNtXQIaNYsjh/mFM3SooM43nxKPaBwMbz73OHgoNlkxQUHKw4ngN4SHCwfUIOwzAq/b59rtJ1oy2FqsVrU5K9B6/ZNG7cmB68kiyhITKKCmWpEeZ021q2XNVgHwhIl5If+m3A269fP6WlpemFF15Q//791aZNG0nSunXrNG7cOGVn26fzrCy0PXPmjIYPH67Q0FC9/fbbLtfiavdquKbYJl3/tfPj8PaND9GLO3/2TFEBZkmG9NgG59u998AodX1ulPsLCmAvvrNZ0/++ruzy0/d0Vc2IkN/G5I3oo6l/mqrH7+7iqxIDTo9F9iFhgoOCy8aCh/v9
/pEV+tdi56YhrlcnTBk/fqaIcL89PKoW2AcCG8+/78X3m6+DR/IUFxunjJ95DuA5t62Sfj5R9dnSLBaL2tSW1v3wJZOs/cooKFDxqPG+LsMpO3fulCU83NdlVAt/2SB94eTbbJ0a0uqP5yiMSdbgJPP9HFRF06dPV/369XXgwAF17NhRSUlJat26tXr06KGWLVuqb9++ksqPv3uu/Px8DR06VHv37tVXX32luLg4b5aPSxASJN3kwpBuI1u4vZSA1TdOqufkZHUto6Qu9T1TT6ByFO4+OtGq+8d10t+n9Sy7/onXf9Ljr7qQyAPV2N2jnT+L4A83tCHcBQDATUa0cK0N4S78hSsZw7BmItyFS/w24I2Pj9eqVat0/fXXKzw8XOnp6apXr57mzJmjzz77TDt37pTkOOAtKirSiBEjtH79en3xxRfq0KGDt8vHJRqbKCVGVX39QU2k7jGeqyfQhAZLDydLVT02qxH06/oczLlNReFuKUJe+LueyQ00cUTbKq/funltPTwh2YMVAQAQWAY2kXo6MQhpl/rSkKaeqwfwtqS60o1OdD5rVlMa7/xc5YAkPx6iQbJPirZ48eILrj9z5ozS09MVFBSkTp06lVtms9k0duxYLV26VJ9//rl69OjhrXLhRrVqSLN7SfetkXadrnzdAU2kv1gJF92tb2NpRmfpqY1SiVHxeuHB0vPd6L3rThcLd0vdP87+/lc6XMMTr/8kSQzXAL9gsVj0yiOXq6jYpnc+3lXpuu0S6mjJa9epfjSnEwIA4C4hQdLM7tL0ddLao5Wv26W+9Nce9o4icL+rYxrq7NDKh8K72HI4z2KRHkqyDyP56YHK102oJc26TIp28kxYoJRfB7wV2bp1qwzDUJs2bRQZGVlu2eTJk/Xhhx/q4YcfVmRkpNasWVO2LDExUQ0auDoPIrytQbg09wpp0X5pQbqUfqb88m4x9lOA+sZJQYS7HjGkqdS2tvRBun3soYJzJlOtFSINbSaNaiE1reWrCv1PVcPdUoS88GchIUGa+8SVGt6nuV5NTdNX35efjbhVs9r646h2mnBjW9WJ4mgaAAB3qxki/aOntOSg/TvZzyfKL29XRxqZYD+jknAX/igkSPo/q3R1rH0fWHPejx1Na9pzieHN7B3VAFcFZMC7ZcsWSY6HZ/jiiy8kSc8//7yef/75csveeecd3XbbbR6vD+4TGSLd0lIanSDtOCWNWykZsg8d8Prlvq4uMLSuIz2aIt3XQer7xW+P/xcDpIiAfAfyHGfD3VKEvPBnFotFw/s01/A+zbU3I0fdx3yiYycL1aBumHYsGqEgfuEDAMCjQoLsHT+GNJV+yZEO59u/EzQMtw+rx5mU8HcWi3RNnP3vYK50w9Lfvhcv7EuHM7hHQMYrlQW86enpXq4G3mCxSO2i7W+gpW+k8K6oGuUff8Jd9zp0NE9PvbGx7HJVw91S54e8M9/drPHDWish3onBrIFqLiE+SuG/dg8KrRFMuAsAgJe1jLL/AYGqSc3y34s5HIW7+O0ka5WpLOAFADOKaxCpJa9dp1qRNZwOd0uVTrwWER6sxf8cQLgLAAAAAIAJBGQfumXLlvm6BABwu8utjbT9k5vVpFFNl7dx/7hOGjkg4ZK2AQAAAAAAvCcge/ACgL9yRzBLuAsAAAAAgHkQ8AIAAAAAAACASRHwAgAAAAAAAIBJEfACAAAAAAAAgEkR8AIAAAAAAACASRHwAgAAAAAAAIBJEfACAAAAAAAAgEkR8AIAAAAAAACASRHwAgAAAAAAAIBJEfACAAAAAAAAgEkR8AIAAAAAAACASYX4ugAAAAAAAACcJyxMIR/M83UVzgkL83UFQECiBy/gQIsWLdS2bVtZrVZ16NBBr7zySoXrzp49W88//7wkKT09Xddcc43q1Kkjq9Vabr1ly5apR48e6tChgzp27Kjp06fLZrNJkg4fPqwePXqouLjYY/fJbFx9Dr799ltFRETIarWW/eXn5zts
d9ttt6lJkyZl602bNq1s2YMPPqj333/fvXcKcIKr+4DNZtPUqVPVoUMHJScnq0+fPtq9e7fDdocPH9ZNN92k5ORktW/fXi+//HK55StWrFD37t3VsWNHdejQQT/88IMkafHixZo4caJ77ihQAVf3gXfeeafcZ0BMTIxuuukmh+1sNpvuvfdeJSYmqlWrVpo9e3bZssr2D/YB73H1dSBJc+fOVevWrZWYmKg777xTRUVFDtvl5eXp1ltvVatWrdSmTRstWLCg3PJXX31V7du3V1JSklJSUlRQUCCp8uOIRx55RO3atVNKSoq6deumL7/8slydzz77rEuPB4DAYrFYZAkPN9efxeLrh82veCKbqEhaWpoiIyN1//33l13H8ZCJGEAA6f6JYXT9xP5vZZo3b2789NNPhmEYRnp6ulG7dm1j06ZNF6yXl5dnJCQkGGfOnDEMwzCOHTtmrFq1yli8eLGRkpJSbt0NGzYYe/bsMQzDMPLz843evXsb77zzTtnyKVOmGHPnznX5vplBVR9/w3D9OVi+fPkFj31Fxo8fb/z97393uOzw4cNG69atjeLi4iptC6gqT78PffTRR0aPHj2Ms2fPGoZhGE899ZQxcuRIh7cxZswY49FHHzUMwzDOnDljpKSkGD/++KNhGIZx8OBBo3nz5sa2bdsMwzCMgoIC48SJE2Vtu3TpYuzcubPK97u6aHLt+4aS3jKaXPu+r0sJWJ7eB87XsWNHY8GCBQ6XzZs3z+jbt69RXFxsHDt2zGjWrJnx888/G4ZR+f5hGObdB3zNmWMBw3D9dfDLL78YcXFxxqFDhwybzWYMHTrUmD17tsPbeOKJJ4zx48eXtWvQoIGRnZ1tGIZhfPzxx8bll19unDx50jAMwzhy5EjZsUFlxxGff/65kZeXZxiGYWzcuNGoXbt2WW2FhYVGy5Yty7bpbbwPAoBveeN7cWXZhCNnz541rrjiCmPMmDHGlClTyq7neMg86MELXETz5s3Vtm1b7dy584JlCxYsUO/evVWzZk1JUr169XTFFVeUXT5X586d1bJlS0lSeHi4rFar0tPTy5bfeuutmjNnjmfuhMk58xy4S8OGDZWYmKivvvrKrdsFXOHMPmCxWFRYWKiCggIZhqHTp08rPj7e4XY3bdqkwYMHS5Jq1qypq666Sv/6178k2XusjRkzRu3bt5ckhYWFKTo6uqztqFGj9NZbb7nzbgIVcvVzYO3atTpy5IiGDRvmcLupqam68847FRwcrHr16mn06NGaP3++pMr3D4l9wBeceR0sWLBAw4YNU2xsrCwWiyZNmlT23J4vNTVVkyZNkiQlJCTommuu0UcffSRJevHFFzVjxgzVqVNHktSgQQMFBwdftNZBgwYpIiJCkpSUlCTDMHT06FFJUmhoqAYMGMCZQgAAp7grm3DkySef1MiRI9W6dety13M8ZB4EvMBFbNmyRdu3b1dKSsoFy7799lv17NnT6W1mZWVpwYIFGjJkSNl1Xbt21ebNm3X69OlLqtcfOfsc7NmzR126dFH37t316quvVrrtf/zjH0pOTtaQIUO0cePGcst69eqlpUuXXnL9wKVyZh8YOnSorrnmGsXGxiouLk5Lly7Vk08+6XC7Xbt21fvvvy+bzaajR4/qyy+/LPvhadu2bcrPz1e/fv1ktVp17733Kjc3t6wt+we8ydXP4rlz52rcuHGqUaOGw+X79+9X8+bNyy63aNFC+/fvl1T5/iGxD/iCM6+Dyp7b81W27rZt27R+/Xr17t1b3bp106xZs8q1rew4otQ777yjli1blrsNXj8AAGd5IpuQ7D+I//DDD7r33nsvWMbxkHkwyRpQgdGjRysiIkKRkZF6++23L/glS5IyMjI0cOBAp7Z7+vRpDR06VNOnT1e3bt3Krg8JCVHdunWVmZmp2rVrX3L9/sCV56BLly7KyMhQnTp1lJGRocGDBysmJkajRo26oO0zzzyjuLg4BQUF6aOPPtKgQYO0
a9cu1apVS5IUGxurbdu2ee4OAhfhyj6wfv16/fzzzzp48KBq166thx9+WJMmTdK///3vC9r+9a9/1YMPPqjOnTurYcOGuuaaa8p6mBUXF2vlypX65ptvVKtWLd1+++2aMWOGXnrpJUn2/SMjI8ND9xywu5TP4tzcXP3nP//RmjVrXLrtyvYPiX3Amzx1TFYVxcXF2rt3r1auXKkTJ07o6quvVsuWLTVkyJCLHkdI0tKlS/XEE0/o66+/LjcuJa8fAEBVefJzMC8vT3fffbcWLFjgcPxkjofMg4AXqEBqaupFByOPjIwsm2ijKnJycjRw4EANHz5cU6dOvWB5QUFB2el8cO05ODccj4+P16233qpVq1Y5DHibNGlS9v8bb7xRDz/8sHbs2KGuXbtK4vmA77myD7z33nvq27dv2XAK48eP14ABAxy2jYmJ0bvvvlt2edKkSerYsaMkqVmzZrJarapbt64k+zAyzz33XNm67B/whkv5LP7www/LJgisSLNmzbRv3z716tVLkn1CkmbNmkmqfP+Q2Ae8yZXXQbNmzbRnz56yy+c+t+crfR3ExcWVrVv6vtmsWTPdeuutCg4OVkxMjAYPHqw1a9ZoyJAhFz2OWLFihW6//XZ9+umnatu2bbnb5PUDAKgqT2QTpfbs2aP9+/erT58+kqSTJ0/KZrPpxIkTmjdvHsdDJsIQDcAlSE5O1o4dO6q07pkzZzRw4EANHDhQjz322AXLDx8+LIvFoqZNm7q7TL92/nNw6NAh2Ww2SfZAffHixercubPDtuf+0rhmzRodO3ZMrVq1KrsuLS3N4ekvQHVy/j7QsmVLLVu2TGfPnpVkn922U6dODtseO3asbFb5n376SR9//LHuvvtuSdKYMWO0fPlyFRYWSpK++OKLcvsD+weqi4o+i+fOnasJEyZU2nbkyJF68803VVJSouPHjys1NVWjR4+WVPn+IbEPVDfnvw5uvvlmLVq0SFlZWTIMQ6+//rpuueUWh21Hjhyp119/XZK0d+9effvtt7rhhhsk2d8LlyxZIknKz8/Xt99+W/a8V3YcsXLlSo0bN06ffPKJw9cJrx8AgDs5k02cKykpSUePHlV6errS09N1//336w9/+IPmzZsnieMhMyHgBS7BiBEj9OWXX5ZdzsvLU3x8vEaOHKlt27YpPj5ef/7znyXZx2j78ccf9d///ldWq1VWq1XPPPNMWdslS5boxhtvVFAQu6Uzzn8OFi5cqKSkJKWkpOiyyy5T//79dfvtt0uSMjMzy/3yedtttykpKUlWq1V/+tOf9OGHH5ZNomIYhpYuXaobb7zRq/cHcNb5+8DkyZOVkJCglJQUJScna+nSpXrttdfKllutVmVmZkqSfvzxR3Xo0EHt27fXXXfdpQ8++KCsB9vll1+uYcOGqXPnzkpKSlJ2dvYF71kjRozw0r0EKnb+PiBJO3bs0MaNG8vC2nOduw+MGzdO7dq1U+vWrdW9e3dNnTpVSUlJkirfPyT2germ/NdBy5Yt9cQTT6h3795q1aqVGjRooLvuukvShccD06ZNU35+vhITE3Xddddp9uzZiomJkSRNnTpVhw8fVocOHdStWzcNGjRII0eOlFT5ccSECRNUWFio22+/vey4b8uWLWW3yesHAOBOzmQT538OVobjIfOwGIZh+LoIwFt6LJJssv+y8aPjCbWddv311+vxxx9X9+7dL2k7V155pd54442yGev9kScef8l9z8G5lixZon//+98Oxy0FLkV1fh+qquzsbPXt21fr169XaGioV27TXeL7zdfBI3lq0jBSGd/c6utyAhL7QGDz1LGA5P3Xgau2bdumu+66S6tWrfLJ7fM+CAC+ZabvxZXheKh6oasgcIlmzZqlw4cPX9I2Dh8+rD/+8Y9+He56kjueg/OdOnVKM2fOdOs2AU/xxD5QmT179uj111/nQA7VBvsAJO+/Dlx14MABzZkzx9dlAAD8DMdDgY1J1oBLlJiYqMTE
xEvaRqNGjTRmzBg3VRR43PEcnM/Rab1AdeWJfaAyPXv29NptAVXBPgDJ+68DV1133XW+LgEA4Ic4Hgps9OAFAAAAAAAAAJMi4AUAAAAAAAAAkyLgBQAAAAAAAACTIuAFAAAAAAAAAJMi4AUAAAAAAAAAkyLgBQAAAAAAAACTIuAFAAAAAAAAAJMK8XUBcMwwDKmw0NdlVF1YmCwWi6+rAAAAAAAAgB8wXTYm+SwfI+CtrgoLVTxqvK+rqLKQD+ZJ4eG+LgMAAAAAAAD+wGTZmOS7fIwhGgAAAAAAAADApAh4AQAAAAAAAMCkCHgBAAAAAAAAwKQIeAEAAAAAAADApAh4AQAAAAAAAMCkCHgBAAAAAAAAwKQIeAEAAAAAAADApAh4AQAAAAAAAMCkCHgBAAAAAAAAwKQIeAEAAAAAAADApAh4AQAAAAAAAMCkCHgBAAAAwA/kFxRrxNSlWrPpiMvb2L3/tK6f/KWOHs93Y2UAAMCTCHgBAAAAwOTyC4o17L6vtfCbdA2YtMSlkHf3/tPqM+Fzfb4qQ9fe+QUhLwAAJhEQAW92dramT5+uVq1aKTw8XE2bNtWUKVOUm5urCRMmyGKxaPbs2b4uEwAAAABcYhj2P0nKyS1yOuQtDXczDudesD0AAFC9+X3Au3HjRiUlJenFF19UVlaWOnTooKKiIs2aNUujR49WWlqaJMlqtfq2UA9ZkX1EoZ9+oL/t2V7hOqGffqAb1q7yYlXedaxAmrtTGrVcsv16nU3SJ/ulgmJfVhYYDEPacEz68/ryj/8TP0nbTvqwMMCLdpySntkkDf26/H7w41G+PHtDQWGx/vXpLl05frEOHc2TJB0+lq/HX92gzCO5Pq4uMGTkSv/YKt3wTfl9YFmmVGyrrCX8gWFIm45Lf/lf+ef/Lxukzcd5H3SXyIgQLZrVX9f2bCzJuZD3/HC3U6u6WvrmIDWsH+HRmgEgUBSWSJ8dkO74rvxn4Rs7pKMFvqwsMARCNhbi6wI8KTs7W0OHDlVWVpYeeOABzZgxQ1FRUZKkmTNn6qGHHlJISIgsFouSk5N9XC3czTCkN3dKb++Uih18cXhqo/3L5ozO0tWxXi8vIGTlS9PXOQ5yPz1g/+sRIz3bTYoO9Xp5gMedKZIe2yB9d9jx8rt/kFrXll7sLsXX9G5tgeKr7zM09s8rlH2i/JFzcYmhJ17/SU+/uVEPjk/Ss/d1U1CQxUdV+q8im/TCZvuPqo4yvOnrpUYR0gvdpE51vV4evCC7QHpovT3gPd8XGfY/az37a6B+uPfr8zelIe+w+77W0rWZZSHvV68P1GUpDR22IdwFAM/64Yj9R82TZy9c9sYOe4e0cYnS3e0lDkfhKr/uwXvfffcpIyND99xzj1566aWycFeSpk+frpSUFBUXF6tFixaqXbu2DyuFuxmG9Let9jdLR+FuqdNF0rQfpW8yvVdboDicL0347uK9dH/Mliaulk47+LADzCy3WJr0fcXhbqldp6U/fCcdpCOp2y1esV+DJ391Qbh7rpISQy+8vVmTnlotg26EblVss//I93EF4W6pw/n2fWWLgwAQ5na80N5TyVG4e66Nx6U7VksnCr1Tl79zpicv4S4AeNbKLOn+tY7D3VIlhvTubum5zZzVAtf5bcCblpam1NRUxcTE6LnnnnO4TteuXSVJKSkpZdetWrVK/fr1U1xcnMLCwhQfH19uKAeYw9JD0vxfqrauTdL/bZAy8zxaUsB59H/2L+1V8UuO9Oxmz9YDeNtLW6Ttp6q27vFCew83DujcJys7T6OnLVdJSdUe1DcX7tC8Rbs8XFVgeW+3tOoiP3CUKiiRHviRoZP8zeM/SRlVPL46kGtfH+5RlZC3uNhGuAsAHpRdID3yP3uAWxUf7bMP4wC4wm8D3vnz58tms2ns2LGqVauWw3UiIuwHL+cGvCdOnFBSUpJmzZqlr776Si+88IK2bt2qXr16KSMjwyu1e0JeSYmyCwsd/vmjqoa7
pc7a7G+mcI+0k/beOM5Ylmkf0gHwB8cLpSUHnWuz/ZTz+w0q9tZ/dyjPybTwH/9vK7143aTYJn2w17k2x89KX3FGjd/YmyN9X/X5vSRJq49I+854pp5AVFHIe7aoRJKUfbKQcBcAPOjjffYfsZ3x/i90+vAkf87G/HYM3mXLlkmS+vTpU+E6pYHtuQHvsGHDNGzYsHLrde/eXW3bttXChQs1ZcoUD1TreU/u2Kond2z1dRlesevUxU8FdOTjfdKdbaTQYPfXFGgWpDvfxiZ7yP7Hdu6uBvC+RfvtY48668N0qXN9t5cTcEpKbJrz4Q6n223cflxrNh9Rr5RGHqgqsHybJWW7cJy8IF0a1szt5cAHFqa73m5qJ3dWEtgcjcl75tchgUps9gSBcBcA3K/YJv3XhU5kO09LP5+Qkuq5vyb4dzbmtwHvvn32Pal58+YOlxcXF2v16tWSyge8jtSvb/+2HRLi2sPVrVs3ZWVlOdUmIihI26y9XLo9R+5o1lI3N27qcNmgNSsueftt2rRRvq16TIMd0Wu06oz7q9PtTpyVWne7QiVH091fVICp/+cvVaNpR6fbvfrRcj3zyjgPVAR4V/QdcxTe5Xqn233+0169N/xKD1QUWEosUcqq+6BLbQfddLdqFa51c0WBp9bwP6vWdZOdbrf1uE3xTZvTdcUP1HvgI4Umdne63btfrtHfBo7wQEWBzaYaCou6VYU1EsuNiR1SfFjZ62aqS8p9PqsNAPxRUJ1YNXxuvUttb773MeWteNe9BZmU2bIx6dLysdjYWK1f79rrxm8D3txc+0/T+fmOz/lOTU1Vdna2oqKilJCQcMHykpIS2Ww27du3T3/+858VGxurUaNGuVRLVlaWDh507lzdyOBgyerSzTnUqlYtXdvAcz2SMjMzlVfi5LkHHtIg/6zquNj26Mkc5Tv5XOFC0cGhquFCu7MKdnpfAaqjCJtFrkwGbwsJYx9wh9CGUl3Xmp7KydepozwHl6ppUYkcD5BVOUtQkDKPHJNxljF7zK62JUShLrQrstTgfdBTwt6TWj0mBf16lGYYKj4wX1lndvq2LgDwQ2G2CDV0se3pgiId5rNQkvmyMcl3+ZjfBryxsbE6ceKENmzYoF69yqf9hw4d0rRp0yRJycnJslgsF7S/+uqry3r4tmrVSsuWLVODBg1crsVZEUHmGh65cePG1aYHb3io84+dYRiyWCyKiQqXrUkTD1QVWIKLcl1qV6OkQE14/OEHQo1KpsmtRFDhGfYBNyixRMq582Z+Ex0VqpqhPAeXKjLItWMCo/isGjfgnER/EFJc4GK7fN4HPaA4qJ6yo25TSdA5P8FbLLK0mKSY0/9SaIl55xoBgOooqFakpN+yBmdE1ZBC+CyUZL5sTLq0fMyV/LCU3wa8/fr1U1paml544QX1799fbdq0kSStW7dO48aNU3Z2tiTJarU6bD937lydPHlSe/fu1YsvvqgBAwZo9erVatbM+YHhXOlebRQUqHjUeKfb+crOnTtlCXelv5r7ZRdI139d9ZkqJclisahllLRu01o5+d4LB+Zsl950oTPI478boBGP8QUD5vfZAWmGC7PB33ZFG91v4gk9q5Put36i9VuznWpjsUgbVvxLCfFRHqoqcGw+Lv3hO+fbXRMfqr+yD/iFebukf6Y53+6hm67Q76bzGnCn3ftPq8+Ez1Xy64RqHVpGq350mFZtOCzDEq6Cxndr0esDdVmKq33NAADnMwzp9yultFPOBQwWSSvefkGNI1/wTGEmY7ZsTPJdPma+KLyKpk+frvr16+vAgQPq2LGjkpKS1Lp1a/Xo0UMtW7ZU3759JVU8/m7btm3Vs2dP3XLLLVq6dKlycnI0c+ZMb94FuCgmXOob53y7ES1EuOsmNzSXgp18LGuGSIPiPVMP4G39Gkt1XDg3+WbHw8bDBXePbu90m8FXNiXcdZOkulKb2s63G9nC7aXAR4Y3k5w9qSosSBrCJHtuVRruZvwa
7nZqVVfL5w7WktcG6tqejSVJOblFGjBpidZsOuLLUgHAr1gs0ogLRwO9qCsaSY0j3V8P/J/fBrzx8fFatWqVrr/+eoWHhys9PV316tXTnDlz9Nlnn2nnTnv3wotNsCZJ0dHRatWqlXbv3u3psuEm41pJNZx4dcdGSIMJF92mUYQ01PG45RUa09Ie8gL+ICxY+n2ic20GNpGaujJoKRy6ZWBLtWpW9YQxONiih/+Q7MGKAovFIk1o41ybjtFSD9dGw0I1FB0m3dzCuTYjE6RoVwbuhUOOwt2lbw5Sw/oRiowI0aJZ/Ql5AcCDrmssNa1Z9fWDLdL4Vp6rB/7NbwNeSWrfvr0WL16snJwc5eTkaO3atZo4caJyc3OVnp6uoKAgderU6aLbOXLkiHbs2KHERCe/rcNnOkRLT3WpWi/SeqHSrMukWq7MCoYKTU+SelXxi/qgeOnOtp6tB/C237eSbqxij9wu9aXHrB4tJ+BEhIfoi1evU5OGF+8CERRk0dtPXKkrurg+5hUudG1j6d4qdqRuUUv6Ww8piDNp/MqUDlKfKu5WfeOke5zveI8KVBbuliLkBQDPCg+RZvWUGlbhbP0gSf9nlaz1PV0V/JXFMAwnRir1D2vXrtVll12mtm3bavv27eWW/e53v1OrVq1ktVoVHR2tXbt26e9//7sOHTqkdevWqVUr7/ycYrZxRkI+mFdtxuA9149HpZe3SjtPX7jMIqlXQ+mhJKmJE7+qoeqKbdIradJ/90m5xRcujw6199y9rTVf6uGfDEP6f3ukf+2RjhVeuDwyWBre3B6ChQZ7v75AkJGVq3ue+16frjggm+3CQ56OidGaObWHBl/p5GkHqLIlGdJr26WDeRcuC7FI/RtLDya5NqwJqr8SQ3pzh5S6V8opunB57RrS6ATpjrbOD+8Ex6oS7p4rL79Yw+77WkvXZkqSomrW0FeMyQsAbnM4X3phi/RdluRo6q3EKOm+DlLvRl4vrdozWzYm+S4fC8iA96233tKdd96pUaNGKTU1tdyy2bNn67333tOuXbtUUFCgpk2bqk+fPnrkkUfUvLn3Bkc024u4uga8kj1g2XJCWnJQOlYgBQdJ8ZHSsGZSPMGuV+QV27/gbzou5ZVItULsp+FeG0eohcBQZJOWH5J+OCKdKZYigqVOde1Dw3D2gHfsP3RGb3+0Uzv3nVJRsU0N60XoloEtdUWXRk7PbAzn2QxpzVH7fnDqrH2s1cTa9uF86lfPwwe4WUGx9FWmtOGY/UffmiFS1/pS/yZSOMcCbuNsuFuKkBcAPO9QnrRov7Q/194Zql6YdF0TKaUe8wFVxGzZmETA61VTpkzRrFmz9Mwzz+iRRx7xdTkOme1FXJ0DXgAAAMDfHTtZIOvIj50Od0s5CnnXvT9MbROiPVUyAACVMls2JvkuH/PrMXgrsmXLFklVm2ANAAAAAKq7enXCdPsNrSU5H+5KF47JO7xPM6cmqwQAAL4TkHPWL1u2zNclAAAAAIDbWCwWPXF3FzWqH6GR/ROcCndLlYa8r6am6U/jOio4OCD7AwEAYDoBGfACAAAAgL+xWCyafEuHS9pGZESIHrwtyU0VAQAAb+AnWQAAAAAAAAAwKQJeAAAAAAAAADApAl4AAAAAAAAAMCkCXgAAAAAAAAAwKQJeAAAAAAAAADApAl4AAAAAAAAAMCkCXgAAAAAAAAAwKQJeAAAAAAAAADApAl4AAAAAAAAAMCkCXgAAAAAAAAAwKYthGIavi8CFDMOQCgt9XUbVhYXJYrH4ugoAAAAAAAD4AdNlY5LP8jECXgAAAAAAAAAwKYZoAAAAAAAAAACTIuAFAAAAAAAAAJMi4AUAAAAAAAAAkyLgBQAAAAAAAACTIuAFAAAAAAAAAJMi4AUAAAAAAAAAkyLgBQAAAAAAAACTIuAFAAAAAAAAAJMi4AUAAAAAAAAAkyLgBQAAAAAAAACTIuAFAAAAAAAAAJMi4AUAAAAAAAAAkyLgBQAA
AAAAAACTIuAFAAAAAAAAAJMi4AUAAAAAAAAAkyLgBQAAAAAAAACTIuAFAAAAAAAAAJMi4AUAAAAAAAAAkyLgBQAAAAAAAACTIuAFAAAAAAAAAJMi4AUAAAAAAAAAkyLgBQAAAAAAAACTIuAFAAAAAAAAAJMi4AUAAAAAAAAAkyLgBQAAAAAAAACT+v/lCJ0tn7jYOgAAAABJRU5ErkJggg==", + "text/plain": [ + "
" + ] + }, + "execution_count": null, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "vocabulary = {g:i+1 for i, g in enumerate(pipeline.gate_pool)} \n", + "tokenizer = CircuitTokenizer(vocabulary)\n", + "simulator = Simulator(CircuitBackendType.QISKIT)\n", + "\n", + "qc_list, _ = decode_tensors_to_backend(simulator, tokenizer, out_tensor, params)\n", + "qc_list[0].draw(\"mpl\")" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "#### Further examples" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "More detailed examples and tutorial notebooks are provided on the project page [[tutorials]](https://florianfuerrutter.github.io/genQC/examples/tutorials.html) or in the directory `src/examples/`." + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Installation" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "The installation of `genQC` is done via `pip` within a few minutes, depending on your downloading speed." + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "#### Method 1: pip install" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "To install `genQC` just run:" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "```sh\n", + "pip install genQC\n", + "```" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Note, this will install missing requirements automatically. You may want to install some of them manually beforehand, e.g. `torch` for specific cuda support, see [https://pytorch.org/get-started/locally/](https://pytorch.org/get-started/locally/)." + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "**Requirements:** `genQC` depends on `python` (min. 
version 3.12) and the libraries: `torch`, `numpy`, `matplotlib`, `scipy`, `omegaconf`, `qiskit`, `tqdm`, `joblib`, `open_clip_torch`, `ipywidgets`, `pylatexenc`, `safetensors`, `tensordict` and `huggingface_hub`. All can be installed with `pip install`. In `src/RELEASES.md` [[doc]](https://florianfuerrutter.github.io/genQC/RELEASES.html) and the [GitHub release descriptions](https://github.com/FlorianFuerrutter/genQC/releases), specific tested-on versions are listed." + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "#### Method 2: clone the repository" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "To use the latest GitHub code, you can clone the repository by running: " + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "```sh\n", + "git clone https://github.com/FlorianFuerrutter/genQC.git\n", + "cd genQC\n", + "```" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "The library `genQC` is built using jupyter notebooks and [`nbdev`](https://github.com/fastai/nbdev). To install the library use in the clone directory:" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "```sh\n", + "pip install -e .\n", + "```" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "#### Test installation" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "You can run the provided `src/examples/Quantum circuit synthesis with diffusion models/0_hello_circuit` [[doc]](https://florianfuerrutter.github.io/genQC/examples/Quantum%20circuit%20synthesis%20with%20diffusion%20models/hello_circuit.html) [[notebook]](https://github.com/FlorianFuerrutter/genQC/blob/main/src/examples/Quantum%20circuit%20synthesis%20with%20diffusion%20models/0_hello_circuit.ipynb) example to test your installation. On a computer with a moderate GPU this inference example notebook should run under half a minute. 
" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## License" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "The code and weights in this repository are licensed under the [Apache License 2.0](https://github.com/FlorianFuerrutter/genQC/blob/main/LICENSE.txt)." + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## BibTeX" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "We kindly ask you to cite our paper if any of the previous material was useful for your work." + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "#### Quantum circuit synthesis with diffusion models\n", + "\n", + "``` latex\n", + "@article{furrutter2024quantum,\n", + " title={Quantum circuit synthesis with diffusion models},\n", + " author={F{\\\"u}rrutter, Florian and Mu{\\~n}oz-Gil, Gorka and Briegel, Hans J},\n", + " journal={Nature Machine Intelligence},\n", + " doi = {https://doi.org/10.1038/s42256-024-00831-9},\n", + " vol = {6},\n", + " pages = {515-–524},\n", + " pages={1--10},\n", + " year={2024},\n", + " publisher={Nature Publishing Group UK London}\n", + "}\n", + "```" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [] + } + ], + "metadata": { + "kernelspec": { + "display_name": "python3", + "language": "python", + "name": "python3" + }, + "widgets": { + "application/vnd.jupyter.widget-state+json": { + "state": { + "146e96bf002d4819bcaa45f1c16b70b6": { + "model_module": "@jupyter-widgets/base", + "model_module_version": "2.0.0", + "model_name": "LayoutModel", + "state": {} + }, + "2c697df188b646e0ab347270d14960ca": { + "model_module": "@jupyter-widgets/base", + "model_module_version": "2.0.0", + "model_name": "LayoutModel", + "state": {} + }, + "3ccea5db1deb417c81b1697a1e0a2d24": { + "model_module": "@jupyter-widgets/controls", + "model_module_version": "2.0.0", + "model_name": "HBoxModel", + "state": 
{ + "children": [ + "IPY_MODEL_da7b00514b3b404cb5647af324e31281", + "IPY_MODEL_ab4ff0b22a664acd9ae8f40a4823082f", + "IPY_MODEL_9a6d5010abde4b17b18078c9460c09b4" + ], + "layout": "IPY_MODEL_8a1674d463224f09b8378fd5c69a42a5" + } + }, + "5e0f4b112a5b49cca8219bb958988f96": { + "model_module": "@jupyter-widgets/controls", + "model_module_version": "2.0.0", + "model_name": "HTMLStyleModel", + "state": { + "description_width": "", + "font_size": null, + "text_color": null + } + }, + "6c66ed03f0664cf4b59efe8b1107fa8c": { + "model_module": "@jupyter-widgets/controls", + "model_module_version": "2.0.0", + "model_name": "HTMLStyleModel", + "state": { + "description_width": "", + "font_size": null, + "text_color": null + } + }, + "8a1674d463224f09b8378fd5c69a42a5": { + "model_module": "@jupyter-widgets/base", + "model_module_version": "2.0.0", + "model_name": "LayoutModel", + "state": {} + }, + "9a6d5010abde4b17b18078c9460c09b4": { + "model_module": "@jupyter-widgets/controls", + "model_module_version": "2.0.0", + "model_name": "HTMLModel", + "state": { + "layout": "IPY_MODEL_2c697df188b646e0ab347270d14960ca", + "style": "IPY_MODEL_5e0f4b112a5b49cca8219bb958988f96", + "value": " 4/4 [00:00<00:00, 800.48it/s]" + } + }, + "ab4ff0b22a664acd9ae8f40a4823082f": { + "model_module": "@jupyter-widgets/controls", + "model_module_version": "2.0.0", + "model_name": "FloatProgressModel", + "state": { + "bar_style": "success", + "layout": "IPY_MODEL_146e96bf002d4819bcaa45f1c16b70b6", + "max": 4, + "style": "IPY_MODEL_c9c084f8e7204cabac0a4ab5f18c0c30", + "value": 4 + } + }, + "c9c084f8e7204cabac0a4ab5f18c0c30": { + "model_module": "@jupyter-widgets/controls", + "model_module_version": "2.0.0", + "model_name": "ProgressStyleModel", + "state": { + "description_width": "" + } + }, + "da7b00514b3b404cb5647af324e31281": { + "model_module": "@jupyter-widgets/controls", + "model_module_version": "2.0.0", + "model_name": "HTMLModel", + "state": { + "layout": 
"IPY_MODEL_db733df6b58e4b54ade60e77ca1bc8e4", + "style": "IPY_MODEL_6c66ed03f0664cf4b59efe8b1107fa8c", + "value": "Fetching 4 files: 100%" + } + }, + "db733df6b58e4b54ade60e77ca1bc8e4": { + "model_module": "@jupyter-widgets/base", + "model_module_version": "2.0.0", + "model_name": "LayoutModel", + "state": {} + } + }, + "version_major": 2, + "version_minor": 0 + } + } + }, + "nbformat": 4, + "nbformat_minor": 4 +} diff --git a/src/index.ipynb b/src/index.ipynb deleted file mode 100644 index fe55add..0000000 --- a/src/index.ipynb +++ /dev/null @@ -1,354 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# genQC · Generative Quantum Circuits\n", - "\n", - "> Generating quantum circuits with diffusion models" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "

\n", - " \"awesome\"\n", - " \"generative-models\"\n", - " \"diffusion-models\"\n", - " \"python-3.10\"\n", - " \"DOI\"\n", - "

" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Code repository for generating quantum circuits with diffusion models. [[Arxiv]](https://arxiv.org/abs/2311.02041) [[Demo]](https://huggingface.co/spaces/Floki00/genQC)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "![](https://github.com/FlorianFuerrutter/genQC/blob/main/src/assets/inference.png?raw=true)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## The codebase" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "The code contained within this repo allows the sampling of pre-trained diffusion models and includes our pipeline to fine-tune and train models from scratch. Pre-trained weights can be found on [Hugging Face](https://huggingface.co/collections/Floki00/generative-quantum-circuits-6550e926c67f60a368b02bc3) and can be downloaded automatically via our code (see minimal example). For the CLIP model weights we use the [OpenCLIP](https://github.com/mlfoundations/open_clip) library, which will download (and cache) the CLIP model on first usage of our pipeline. In case you prefer reading a documentation rather than notebooks or code see [[Documentation]](https://florianfuerrutter.github.io/genQC/)." - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "The repo inlcudes:\n", - "\n", - "1. `genQC/` a full release of our used diffusion pipeline.\n", - "2. `src/examples` examples how to reproduce some figures of the [Paper](https://arxiv.org/abs/2311.02041).\n", - "3. `src/` the source notebooks for [nbdev](https://github.com/fastai/nbdev)." - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Examples" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "#### Minimal example" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "A minimal example to generate a 5 qubit circuit conditioned on a SRV of $[1,1,1,2,2]$. 
You can try it out on your own with our [[Demo]](https://huggingface.co/spaces/Floki00/genQC), no coding required." - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [ - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "18bd4685d5ee43d1912f11ccd3dde9af", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "Fetching 2 files: 0%| | 0/2 [00:00" - ] - }, - "execution_count": null, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "print(f\"Circuit is SRV {srv_list[0]}\")\n", - "qc_list[0].draw(\"mpl\")" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "#### Included examples" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Example notebooks are provided in the directory `src/examples/`." - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "- `0_hello_circuit` [[doc]](https://florianfuerrutter.github.io/genQC/examples/hello_circuit.html) [[notebook]](https://github.com/FlorianFuerrutter/genQC/blob/main/src/examples/0_hello_circuit.ipynb): How to sample a circuit (conditioned on a SRV)\n", - "- `1_editing_and_masking` [[doc]](https://florianfuerrutter.github.io/genQC/examples/editing_and_masking.html) [[notebook]](https://github.com/FlorianFuerrutter/genQC/blob/main/src/examples/1_editing_and_masking.ipynb): Presents editing and masking of circuits\n", - "- `2_unitary_compilation` [[doc]](https://florianfuerrutter.github.io/genQC/examples/unitary_compilation.html) [[notebook]](https://github.com/FlorianFuerrutter/genQC/blob/main/src/examples/2_unitary_compilation.ipynb): Compile unitaries and transpile circuits\n", - "- `3_dataset_and_fineTune` [[doc]](https://florianfuerrutter.github.io/genQC/examples/dataset_and_finetune.html) [[notebook]](https://github.com/FlorianFuerrutter/genQC/blob/main/src/examples/3_dataset_and_fineTune.ipynb): How to create a dataset and fine-tune a 
pre-trained model" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Installation" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "The installation of `genQC` is done via `pip` within a few minutes, depending on your downloading speed." - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "#### Method 1: pip install" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "To install `genQC` just run:" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "```sh\n", - "pip install genQC\n", - "```" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Note, this will install missing requirements automatically. You may want to install some of them manually beforehand, e.g. `torch` for specific cuda support, see [pytorch.org/get-started/locally](https://pytorch.org/get-started/locally/)." - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "**Requirements:** `genQC` depends on `python` (min. version 3.10) and the libraries: `torch`, `numpy`, `matplotlib`, `scipy`, `pandas`, `omegaconf`, `qiskit`, `tqdm`, `joblib`, `open_clip_torch`, `ipywidgets`, `pylatexenc` and `huggingface_hub`. All can be installed with `pip`. In `src/RELEASES.md` [[doc]](https://florianfuerrutter.github.io/genQC/RELEASES.html) and the release descriptions specific tested-on versions are listed." 
- ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "#### Method 2: clone the repository" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "To use the latest GitHub code you can clone the repository by running: " - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "```sh\n", - "git clone https://github.com/FlorianFuerrutter/genQC.git\n", - "cd genQC\n", - "```" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "The library `genQC` is built using jupyter notebooks and [`nbdev`](https://github.com/fastai/nbdev). To install the library use in the clone directory:" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "```sh\n", - "pip install -e .\n", - "```" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "#### Test installation" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "You can run the provided `0_hello_circuit` [[doc]](https://florianfuerrutter.github.io/genQC/examples/hello_circuit.html) [[notebook]](https://github.com/FlorianFuerrutter/genQC/blob/main/src/examples/0_hello_circuit.ipynb) example to test your installation. On a computer with a moderate GPU this inference example notebook should run under half a minute. " - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## License" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "The code and weights in this repository are licensed under the [Apache License 2.0](https://github.com/FlorianFuerrutter/genQC/blob/main/LICENSE.txt)." - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## BibTeX" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "We kindly ask you to cite our paper if any of the previous material was useful for your work." 
- ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "``` latex\n", - "@article{furrutter2024quantum,\n", - " title={Quantum circuit synthesis with diffusion models},\n", - " author={F{\\\"u}rrutter, Florian and Mu{\\~n}oz-Gil, Gorka and Briegel, Hans J},\n", - " journal={Nature Machine Intelligence},\n", - " doi = {https://doi.org/10.1038/s42256-024-00831-9},\n", - " vol = {6},\n", - " pages = {515-–524},\n", - " pages={1--10},\n", - " year={2024},\n", - " publisher={Nature Publishing Group UK London}\n", - "}\n", - "```" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [] - } - ], - "metadata": { - "kernelspec": { - "display_name": "python3", - "language": "python", - "name": "python3" - }, - "widgets": { - "application/vnd.jupyter.widget-state+json": { - "state": {}, - "version_major": 2, - "version_minor": 0 - } - } - }, - "nbformat": 4, - "nbformat_minor": 4 -} diff --git a/src/index.qmd b/src/index.qmd new file mode 100644 index 0000000..57613ac --- /dev/null +++ b/src/index.qmd @@ -0,0 +1,129 @@ +--- +pagetitle: "genQC" +section-divs: false +toc: false +page-layout: custom #custom full +theme: + light: [simplex, webpage/index.scss] +repo-actions: false +description: | + Generating quantum circuits with diffusion models. + +code-annotations: hover + +#hide-description: true +#image: quarto-dark-bg.jpeg +#resources: +# - images/hero_animation.mp4 +--- + +::: {.hero-banner} + +# genQC · Generative Quantum Circuits + +Generating quantum circuits with diffusion models + +::: {.hero-buttons} +[Get Started](./get_started.ipynb){.btn-action-primary .btn-action .btn .btn-success .btn-lg role="button"} +[Tutorials](./examples/tutorials.qmd){.btn-action .btn .btn-info .btn-lg role="button"} +[API Reference](./webpage/api_reference.qmd){.btn-action .btn .btn-warning .btn-lg role="button"} +::: + +::: {.features} +::: {.feature} +

Generative circuit synthesis

+

Synthesis of discrete-continuous quantum circuits.

+::: +::: {.feature} +

Pre-trained models

+

Easy inference of pre-trained model weights.

+::: +::: {.feature} +

Open-source research

+

Fully open-source research code and model weights.

+::: +::: +::: + +::: {.figure} +

"Compile 4-qubit QFT"

+img +::: + + + +::: {.container} +::: {.left} +

Quick start genQC

+::: {.left-grid} +::: {.left-grid-text} +

+To install genQC just run: +

+::: +::: {.left-grid-src} +```sh +pip install genQC +``` +::: +::: +

+On the right-hand side is a minimal example that generates a parametrized quantum circuit conditioned on the 4-qubit QFT unitary.
+

+[View tutorials](./examples/tutorials.qmd){.btn-action .btn .btn-info .btn-lg role="button"} +::: + +::: {.right} + +```python +import torch +from genQC.pipeline.multimodal_diffusion_pipeline import MultimodalDiffusionPipeline_ParametrizedCompilation +from genQC.inference.sampling import generate_compilation_tensors, decode_tensors_to_backend +from genQC.utils.misc_utils import infer_torch_device +from genQC.platform.tokenizer.circuits_tokenizer import CircuitTokenizer +from genQC.benchmark.bench_compilation import SpecialUnitaries +from genQC.platform.simulation import Simulator, CircuitBackendType + +device = infer_torch_device() + +pipeline = MultimodalDiffusionPipeline_ParametrizedCompilation.from_pretrained( # <1> + repo_id="Floki00/cirdit_multimodal_compile_3to5qubit", # <1> + device=device) # <1> + +pipeline.scheduler.set_timesteps(40) # <2> +pipeline.scheduler_w.set_timesteps(40) # <2> + +pipeline.g_h, pipeline.g_w = 0.3, 0.1 # <2> +pipeline.lambda_h, pipeline.lambda_w = 1.0, 0.35 # <2> + +prompt = "Compile 4 qubits using: ['h', 'cx', 'ccx', 'swap', 'rx', 'ry', 'rz', 'cp']" # <2> + +U = SpecialUnitaries.QFT(num_qubits=4).to(torch.complex64) # <3> + +out_tensor, params = generate_compilation_tensors(pipeline, # <4> + prompt=prompt, # <4> + U=U, # <4> + samples=8, # <4> + system_size=5, # <4> + num_of_qubits=4, # <4> + max_gates=32) # <4> + +vocabulary = {g:i+1 for i, g in enumerate(pipeline.gate_pool)} # <5> +tokenizer = CircuitTokenizer(vocabulary) # <5> +simulator = Simulator(CircuitBackendType.QISKIT) # <5> + +qc_list, _ = decode_tensors_to_backend(simulator, tokenizer, out_tensor, params) # <5> + +qc_list[0].draw("mpl") +``` +1. Load a pre-trained Diffusion model. +2. Set inference parameters. +3. Specify QFT unitary. +4. Generate tokenized circuits. +5. Decode tensors to circuits. 
+::: + +::: + diff --git a/src/inference/eval_metrics.ipynb b/src/inference/eval_metrics.ipynb new file mode 100644 index 0000000..f124fd1 --- /dev/null +++ b/src/inference/eval_metrics.ipynb @@ -0,0 +1,237 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "id": "a8980c24-d62e-462b-ba89-3195cfdcc374", + "metadata": {}, + "source": [ + "# Evaluation metrics\n", + "\n", + "> Different metrics used for evaluation." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "a8832bdd-f61c-44e1-8619-a9cb352ba768", + "metadata": {}, + "outputs": [], + "source": [ + "#| default_exp inference.eval_metrics" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "06272f6f-b4e3-4504-a90a-feebbf6ad821", + "metadata": {}, + "outputs": [], + "source": [ + "#| export\n", + "from genQC.imports import *\n", + "from scipy.stats import unitary_group" + ] + }, + { + "cell_type": "markdown", + "id": "27afa544-b435-4ad2-9897-d731ccfc100d", + "metadata": {}, + "source": [ + "## Base norm" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "ccbc4adc-098b-415a-98c8-8b80a0697d34", + "metadata": {}, + "outputs": [], + "source": [ + "#| export\n", + "class BaseNorm(abc.ABC): \n", + " \"\"\"Base class for norms.\"\"\"\n", + " \n", + " @staticmethod\n", + " @abc.abstractmethod\n", + " def distance(approx_U: torch.Tensor, target_U: torch.Tensor) -> torch.Tensor: raise NotImplementedError()\n", + " \n", + " @staticmethod\n", + " @abc.abstractmethod\n", + " def name() -> str: raise NotImplementedError()" + ] + }, + { + "cell_type": "markdown", + "id": "c19c4594-4749-4e91-bfc5-27cf95508de0", + "metadata": {}, + "source": [ + "## Unitary distances" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "1b178cbc-d116-49c9-862d-16f646daa39b", + "metadata": {}, + "outputs": [], + "source": [ + "#| export\n", + "class UnitaryFrobeniusNorm(BaseNorm):\n", + " \"\"\"\n", + " The Frobenius-Norm for unitaries: defined in 
https://arxiv.org/pdf/2106.05649.pdf.\n", + " \"\"\"\n", + "\n", + " def __call__(self, approx_U: torch.Tensor, target_U: torch.Tensor) -> torch.Tensor: \n", + " return Unitary_FrobeniusNorm.distance(approx_U, target_U)\n", + " \n", + " @staticmethod\n", + " def distance(approx_U: torch.Tensor, target_U: torch.Tensor) -> torch.Tensor:\n", + " d = 0.5 * torch.linalg.matrix_norm((approx_U-target_U), ord=\"fro\")**2\n", + " return d\n", + " \n", + " @staticmethod\n", + " def name() -> str: return \"Frobenius-Norm\"" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "90080d81-c872-4da3-9f81-31c8908c1056", + "metadata": {}, + "outputs": [], + "source": [ + "#| export\n", + "class UnitaryInfidelityNorm(BaseNorm):\n", + " \"\"\"\n", + " The Infidelity-Norm for unitaries: defined in https://link.aps.org/accepted/10.1103/PhysRevA.95.042318, TABLE I: 1.\n", + " \"\"\"\n", + "\n", + " def __call__(self, approx_U: torch.Tensor, target_U: torch.Tensor) -> torch.Tensor: \n", + " return Unitary_infidelity.distance(approx_U, target_U)\n", + " \n", + " @staticmethod\n", + " def distance(approx_U: torch.Tensor, target_U: torch.Tensor) -> torch.Tensor: \n", + " \"\"\"Supports batched intputs, can be used as loss. 
Input shapes [b, n, n] or [n, n].\"\"\"\n", + " d = torch.matmul(torch.transpose(target_U, -2, -1).conj(), approx_U) # out [b, n, n] or [n, n]\n", + " d = torch.diagonal(d, offset=0, dim1=-2, dim2=-1).sum(-1) # do partial (batched) trace, out [b, n] or [n] \n", + " d = 1.0 - (d / target_U.shape[-1]).abs().square()\n", + " return d\n", + " \n", + " @staticmethod\n", + " def name() -> str: return \"Unitary-Infidelity\"" + ] + }, + { + "cell_type": "markdown", + "id": "121c092a-e560-4ef0-9831-84344661137f", + "metadata": {}, + "source": [ + "Test the metrics on random unitaries:" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "48c7c0a8-9a2b-4ce9-a1f7-547ce74ddee9", + "metadata": {}, + "outputs": [], + "source": [ + "approx_U = torch.tensor(unitary_group.rvs(8))\n", + "target_U = torch.tensor(unitary_group.rvs(8))" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "a0f84359-e366-432e-b83d-926c9816b564", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Frobenius-Norm\n" + ] + }, + { + "data": { + "text/plain": [ + "(tensor(0., dtype=torch.float64), tensor(8.5523, dtype=torch.float64))" + ] + }, + "execution_count": null, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "print(UnitaryFrobeniusNorm.name())\n", + "UnitaryFrobeniusNorm.distance(target_U, target_U), UnitaryFrobeniusNorm.distance(approx_U, target_U)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "c019a5ce-b3f2-4af9-9228-613447c8f3f3", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Unitary-Infidelity\n" + ] + }, + { + "data": { + "text/plain": [ + "(tensor(4.4409e-16, dtype=torch.float64), tensor(0.9895, dtype=torch.float64))" + ] + }, + "execution_count": null, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "print(UnitaryInfidelityNorm.name())\n", + 
"UnitaryInfidelityNorm.distance(target_U, target_U), UnitaryInfidelityNorm.distance(approx_U, target_U)" + ] + }, + { + "cell_type": "markdown", + "id": "f41f26a8-ac40-4e91-8c0e-1ef07a0fd4f4", + "metadata": {}, + "source": [ + "# Export -" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "a0474216-8e0c-4ba7-9a37-571ac7d8e82c", + "metadata": {}, + "outputs": [], + "source": [ + "#| hide\n", + "import nbdev; nbdev.nbdev_export()" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "python3", + "language": "python", + "name": "python3" + }, + "widgets": { + "application/vnd.jupyter.widget-state+json": { + "state": {}, + "version_major": 2, + "version_minor": 0 + } + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/src/inference/evaluation_helper.ipynb b/src/inference/evaluation_helper.ipynb new file mode 100644 index 0000000..2cc0543 --- /dev/null +++ b/src/inference/evaluation_helper.ipynb @@ -0,0 +1,120 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "id": "e65be610-fea3-4579-b325-80c39c7edd8d", + "metadata": {}, + "source": [ + "# Evaluation helper\n", + "\n", + "> Handy helper functions for model evaluations." 
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "b82820c7-3649-4559-9e1f-04fe3e390ebc", + "metadata": {}, + "outputs": [], + "source": [ + "#| default_exp inference.evaluation_helper" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "68f0714b-e45a-46d8-aa05-d6735d4d0e33", + "metadata": {}, + "outputs": [], + "source": [ + "#| export\n", + "from genQC.imports import *\n", + "from genQC.utils.async_fn import run_parallel_jobs\n", + "from genQC.platform.simulation import Simulator " + ] + }, + { + "cell_type": "markdown", + "id": "338cf491-3528-440e-9079-d4388efaa448", + "metadata": {}, + "source": [ + "# SRV helper" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "f265206e-7f08-4d74-955b-25eb644adc22", + "metadata": {}, + "outputs": [], + "source": [ + "#| export\n", + "def get_srvs(simulator: Simulator, backend_obj_list: Sequence, n_jobs: int = 1, **kwargs): \n", + " \"\"\"Returns SRVs of a given list of backen objects `backend_obj_list`.\"\"\"\n", + " def _f(backend_obj):\n", + " return simulator.backend.schmidt_rank_vector(backend_obj, **kwargs)\n", + " \n", + " return run_parallel_jobs(_f, backend_obj_list, n_jobs)" + ] + }, + { + "cell_type": "markdown", + "id": "ab5d451d-20de-465d-b47e-174e89e3e4de", + "metadata": {}, + "source": [ + "## Compilation helper" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "4cf36492-a8c9-437c-850d-09148fe39eeb", + "metadata": {}, + "outputs": [], + "source": [ + "#| export\n", + "def get_unitaries(simulator: Simulator, backend_obj_list: Sequence, n_jobs: int = 1, **kwargs):\n", + " \"\"\"Returns unitaries of a given list of backen objects `backend_obj_list`.\"\"\"\n", + " def _f(backend_obj):\n", + " return simulator.backend.get_unitary(backend_obj, **kwargs)\n", + " \n", + " return run_parallel_jobs(_f, backend_obj_list, n_jobs)" + ] + }, + { + "cell_type": "markdown", + "id": "227082e1-29c8-4340-a1a3-2e4071e31604", + "metadata": {}, + 
"source": [ + "# Export -" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "298aa915-8684-49c0-843e-101a5d84dc20", + "metadata": {}, + "outputs": [], + "source": [ + "#| hide\n", + "import nbdev; nbdev.nbdev_export()" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "python3", + "language": "python", + "name": "python3" + }, + "widgets": { + "application/vnd.jupyter.widget-state+json": { + "state": {}, + "version_major": 2, + "version_minor": 0 + } + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/src/inference/export_cudaq.ipynb b/src/inference/export_cudaq.ipynb deleted file mode 100644 index 9446ddc..0000000 --- a/src/inference/export_cudaq.ipynb +++ /dev/null @@ -1,364 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "id": "a8980c24-d62e-462b-ba89-3195cfdcc374", - "metadata": {}, - "source": [ - "# Export to CUDA-Q" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "a8832bdd-f61c-44e1-8619-a9cb352ba768", - "metadata": {}, - "outputs": [], - "source": [ - "#| default_exp inference.export_cudaq" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "06272f6f-b4e3-4504-a90a-feebbf6ad821", - "metadata": {}, - "outputs": [], - "source": [ - "#| export\n", - "from genQC.imports import *\n", - "from typing import Sequence, List, Optional\n", - "import cudaq" - ] - }, - { - "cell_type": "markdown", - "id": "d968941a-c24b-4e2e-bcce-edf497476aa0", - "metadata": {}, - "source": [ - "## CircuitInstructions" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "5667d650-74c3-4116-95c5-642bb1e69cd0", - "metadata": {}, - "outputs": [], - "source": [ - "#| export\n", - "@dataclass\n", - "class CircuitInstruction():\n", - " name: str\n", - " control_nodes: Sequence[int]\n", - " target_nodes: Sequence[int]\n", - " params: Sequence[float]" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "95130627-b530-4580-abec-35c77f14d094", - "metadata": 
{}, - "outputs": [], - "source": [ - "#| export\n", - "class CircuitInstructions():\n", - " def __init__(self, tensor_shape: torch.Size) -> None:\n", - " assert len(tensor_shape) == 2 # ... [qubits, time]\n", - " self.tensor_shape = tensor_shape \n", - " self._instructions = []\n", - " self.instruction_names_set = set()\n", - " \n", - " def add_instruction(self, \n", - " name: str, \n", - " control_nodes: Sequence[int], \n", - " target_nodes: Sequence[int], \n", - " params: Sequence[float]) -> None:\n", - " self.instruction_names_set.add(name)\n", - " self._instructions.append(CircuitInstruction(name, control_nodes, target_nodes, params))\n", - "\n", - " @property\n", - " def data(self) -> List[CircuitInstruction]: return self._instructions\n", - "\n", - " @property\n", - " def length(self) -> int: return len(self._instructions)\n", - "\n", - " @property\n", - " def num_qubits(self) -> int: return self.tensor_shape[0]\n", - "\n", - " @property\n", - " def max_gates(self) -> int: return self.tensor_shape[1]\n", - " \n", - " def __repr__(self) -> str: return str(self._instructions)\n", - "\n", - " def print(self) -> None:\n", - " for instruction in self.data: \n", - " print(instruction) " - ] - }, - { - "cell_type": "markdown", - "id": "4adddad8-46fc-4241-ac39-b4ca430ae0dd", - "metadata": {}, - "source": [ - "## CircuitsCudaqBackend" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "b3aed961-03b0-4158-8d3a-3188d92b39ac", - "metadata": {}, - "outputs": [], - "source": [ - "#| export\n", - "class CircuitsCudaqBackend():\n", - "\n", - " BASIC_BACKEND_TYPE = type[cudaq.kernel]\n", - " \n", - " # Has to match with insides of belows kernel\n", - " KERNEL_VOCABULARY = {\"h\":1, \"cx\":2, \"z\":3, \"x\":4, \"y\":5, \"ccx\":6, \"swap\":7} \n", - " \n", - " def _construct_kernel(self,\n", - " gate_list: List[str],\n", - " target_1_nodes_list: List[int],\n", - " target_2_nodes_list: List[int],\n", - " control_1_nodes_list: List[int],\n", - " 
control_2_nodes_list: List[int]\n", - " ) -> cudaq.kernel:\n", - " \"\"\"Construct a `cudaq.kernel` from provided paramters.\"\"\"\n", - " \n", - " num_gates = len(gate_list)\n", - " gate_list = [self.KERNEL_VOCABULARY[g] for g in gate_list]\n", - "\n", - " # Note: `@cudaq.kernel` decorator has a overhead of 20ms, regardless of the for-loop inside\n", - " \n", - " @cudaq.kernel\n", - " def place_gate_kernel(gate: int, \n", - " qvector: cudaq.qview,\n", - " target_1: int, \n", - " target_2: int, \n", - " control_1: int, \n", - " control_2: int): \n", - " if gate == 1: h(qvector[target_1])\n", - " elif gate == 2: cx(qvector[control_1], qvector[target_1])\n", - " elif gate == 3: z(qvector[target_1])\n", - " elif gate == 4: x(qvector[target_1])\n", - " elif gate == 5: y(qvector[target_1])\n", - " elif gate == 6: x.ctrl(qvector[control_1], qvector[control_2], qvector[target_1])\n", - " elif gate == 7: swap(qvector[target_1], qvector[target_2])\n", - " \n", - " @cudaq.kernel \n", - " def kernel(input_state: List[complex]):\n", - " qvector = cudaq.qvector(input_state)\n", - " for i in range(num_gates):\n", - " place_gate_kernel(gate_list[i], qvector, target_1_nodes_list[i], target_2_nodes_list[i], control_1_nodes_list[i], control_2_nodes_list[i])\n", - " \n", - " return kernel\n", - "\n", - " def check_error_circuit(self, \n", - " gate: str, \n", - " num_target_nodes: int, \n", - " num_control_nodes: int) -> bool:\n", - " \"\"\"Check number of connections of given gate. 
Used to check for error circuits.\"\"\"\n", - "\n", - " if gate not in self.KERNEL_VOCABULARY:\n", - " raise NotImplementedError(f\"Unknown gate {gate}, not in `self.KERNEL_VOCABULARY`.\")\n", - " \n", - " if gate in [\"h\", \"z\", \"x\", \"y\"]:\n", - " if num_target_nodes != 1 or num_control_nodes !=0: return False\n", - "\n", - " elif gate in [\"cx\"]:\n", - " if num_target_nodes != 1 or num_control_nodes !=1: return False\n", - "\n", - " elif gate in [\"ccx\"]:\n", - " if num_target_nodes != 1 or num_control_nodes !=2: return False\n", - "\n", - " elif gate in [\"swap\"]:\n", - " if num_target_nodes != 2 or num_control_nodes !=0: return False\n", - "\n", - " else:\n", - " raise NotImplementedError(f\"Unknown gate {gate}, implemetation is faulty!\")\n", - "\n", - " return True\n", - "\n", - " \n", - " def export_cudaq(self, instructions: CircuitInstructions) -> cudaq.kernel:\n", - " \"\"\"Convert given genQC `CircuitInstructions` to a `cudaq.kernel`.\"\"\"\n", - "\n", - " # num_qubits = instructions.num_qubits\n", - " num_gates = instructions.length\n", - "\n", - " # @cudaq.kernel can only take list[int] and no str directly\n", - " # -> we have to map everything to list[int] \n", - " # set default value to 9999 so an error wil be raised if we have a faulty tensor encoding\n", - " \n", - " gate_list = []\n", - " target_1_nodes_list = [9999] * num_gates\n", - " target_2_nodes_list = [9999] * num_gates\n", - " control_1_nodes_list = [9999] * num_gates\n", - " control_2_nodes_list = [9999] * num_gates\n", - "\n", - " for i, instruction in enumerate(instructions.data):\n", - "\n", - " gate = instruction.name.lower()\n", - " control_nodes = instruction.control_nodes\n", - " target_nodes = instruction.target_nodes\n", - " \n", - " if len(instruction.params) > 0:\n", - " raise NotImplementedError(f\"Only support non parametrized gates currently.\")\n", - " \n", - " num_target_nodes = len(target_nodes)\n", - " num_control_nodes = len(control_nodes)\n", - " \n", - " if 
not self.check_error_circuit(gate, num_target_nodes, num_control_nodes):\n", - " return None\n", - " \n", - " gate_list.append(gate)\n", - " \n", - " if num_target_nodes > 0:\n", - " target_1_nodes_list[i] = target_nodes[0]\n", - " if num_target_nodes > 1: \n", - " target_2_nodes_list[i] = target_nodes[1] \n", - " \n", - " if num_control_nodes > 0:\n", - " control_1_nodes_list[i] = control_nodes[0] \n", - " if num_control_nodes > 1: \n", - " control_2_nodes_list[i] = control_nodes[1] \n", - " \n", - " #--------------------\n", - " kernel= self._construct_kernel(gate_list, target_1_nodes_list, target_2_nodes_list, control_1_nodes_list, control_2_nodes_list)\n", - " return kernel\n", - " \n", - " def get_unitary(self, kernel: cudaq.kernel, num_qubits: int) -> np.ndarray:\n", - " \"\"\"Return the unitary matrix of a `cudaq.kernel`. Currently relies on simulation, could change in future releases of cudaq.\"\"\"\n", - " \n", - " N = 2**num_qubits\n", - " U = np.zeros((N, N), dtype=np.complex128)\n", - " \n", - " for j in range(N): \n", - " state_j = np.zeros((N), dtype=np.complex128) \n", - " state_j[j] = 1\n", - " \n", - " U[:, j] = np.array(cudaq.get_state(kernel, state_j), copy=False)\n", - " \n", - " return U\n", - "\n", - " def draw(self, kernel: cudaq.kernel, num_qubits: int, **kwargs) -> None:\n", - " \"\"\"Draw the given `cudaq.kernel` using cudaq.\"\"\" \n", - " c = [0] * (2**num_qubits)\n", - " c[0] = 1\n", - " print(cudaq.draw(kernel, c))" - ] - }, - { - "cell_type": "markdown", - "id": "fecd2687-2986-4a2d-96fd-ea2d55aae235", - "metadata": {}, - "source": [ - "## Decode" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "101024b0-3c9c-40de-9e56-7f55de3b7673", - "metadata": {}, - "outputs": [], - "source": [ - "#| export\n", - "def tensor_to_instructions(tensor: torch.Tensor, \n", - " vocabulary_inverse: dict, \n", - " params_tensor: Optional[torch.Tensor] = None, \n", - " params_4pi_normalization: bool = True,\n", - " sign_labels: dict = 
{\"control_nodes\":-1, \"target_nodes\":+1}) -> CircuitInstructions:\n", - " \"\"\"Convert a given `torch.Tensor` to `CircuitInstructions`.\"\"\"\n", - " \n", - " assert tensor.dim() == 2, f\"{tensor.shape=}\"\n", - " num_of_qubits, time = tensor.shape\n", - " \n", - " instructions = CircuitInstructions(tensor_shape=tensor.shape)\n", - " \n", - " for t in range(time): \n", - " enc_time_slice = tensor[:, t] # contains all bits at time t \n", - " \n", - " for gate_index, gate in vocabulary_inverse.items(): \n", - " \n", - " target_nodes = (enc_time_slice == (sign_labels[\"target_nodes\"] * gate_index)).nonzero(as_tuple=True)[0]\n", - " control_nodes = (enc_time_slice == (sign_labels[\"control_nodes\"] * gate_index)).nonzero(as_tuple=True)[0]\n", - " \n", - " if target_nodes.nelement() > 0: \n", - " params = []\n", - " if exists(params_tensor):\n", - " params = params_tensor[:, t]\n", - " if params_4pi_normalization:\n", - " params = (params+1.0) * 2.0*np.pi # [-1, 1] to [0, 4pi]\n", - " params = params.tolist()\n", - "\n", - " instructions.add_instruction(gate, control_nodes.tolist(), target_nodes.tolist(), params)\n", - " \n", - " break #break on first hit, per def only one gate allowed per t\n", - " \n", - " elif control_nodes.nelement() > 0: # no target but control means error\n", - " raise RuntimeError(\"target_nodes.nelement() <= 0 but control_nodes.nelement() > 0\")\n", - " \n", - " #else we are fine with tensors that have time steps with no action!\n", - " \n", - " return instructions" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "7d81b862-99c2-4f71-ba05-0acfe7f5d20a", - "metadata": {}, - "outputs": [], - "source": [ - "#| export\n", - "backend = CircuitsCudaqBackend()\n", - "\n", - "def genqc_to_cudaq(tensor: torch.Tensor, vocabulary_inverse: dict) -> cudaq.kernel:\n", - " \"\"\"Convert given `torch.Tensor` to a `cudaq.kernel`.\"\"\"\n", - " instructions = tensor_to_instructions(tensor, vocabulary_inverse) \n", - " kernel = 
backend.export_cudaq(instructions)\n", - " return kernel" - ] - }, - { - "cell_type": "markdown", - "id": "f41f26a8-ac40-4e91-8c0e-1ef07a0fd4f4", - "metadata": {}, - "source": [ - "# Export -" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "a0474216-8e0c-4ba7-9a37-571ac7d8e82c", - "metadata": {}, - "outputs": [], - "source": [ - "#| hide\n", - "import nbdev; nbdev.nbdev_export()" - ] - } - ], - "metadata": { - "kernelspec": { - "display_name": "python3", - "language": "python", - "name": "python3" - }, - "widgets": { - "application/vnd.jupyter.widget-state+json": { - "state": {}, - "version_major": 2, - "version_minor": 0 - } - } - }, - "nbformat": 4, - "nbformat_minor": 5 -} diff --git a/src/inference/infer_compilation.ipynb b/src/inference/infer_compilation.ipynb deleted file mode 100644 index 6fec960..0000000 --- a/src/inference/infer_compilation.ipynb +++ /dev/null @@ -1,564 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "id": "a8980c24-d62e-462b-ba89-3195cfdcc374", - "metadata": {}, - "source": [ - "# Inference compilation functions" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "a8832bdd-f61c-44e1-8619-a9cb352ba768", - "metadata": {}, - "outputs": [], - "source": [ - "#| default_exp inference.infer_compilation" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "06272f6f-b4e3-4504-a90a-feebbf6ad821", - "metadata": {}, - "outputs": [], - "source": [ - "#| export\n", - "from genQC.imports import *\n", - "from genQC.util import *\n", - "from genQC.inference.infer_misc import *\n", - "from genQC.inference.infer_gate_hist import get_tensor_gate_length\n", - "import genQC.platform.qcircuit_dataset_construction as data_con\n", - "from genQC.dataset.dataset_helper import check_duplicates_in_dataset, uniquify_tensor_dataset, shuffle_tensor_dataset\n", - "from genQC.platform.simulation.qcircuit_sim import instruction_name_to_qiskit_gate\n", - "\n", - "from joblib import Parallel, delayed\n", 
- "import qiskit.quantum_info as qi" - ] - }, - { - "cell_type": "markdown", - "id": "5ae5d100-cb10-457e-8486-ef7866604f59", - "metadata": {}, - "source": [ - "## Misc" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "505366e1-7aa9-4cde-a521-06236566620d", - "metadata": {}, - "outputs": [], - "source": [ - "#| export\n", - "def split_U_to_tensor(U: np.ndarray):\n", - " U_r, U_i = torch.Tensor(np.real(U)), torch.Tensor(np.imag(U))\n", - " U = torch.stack([U_r, U_i], dim=0)\n", - " return U" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "9b1379df-a6a4-4853-a11c-e78918626175", - "metadata": {}, - "outputs": [], - "source": [ - "#| export\n", - "def get_new_unitary_indices(Us, dataset, silent=False):\n", - " if type(Us) == list: \n", - " Us = torch.stack([split_U_to_tensor(U) for U in Us]) #numpy to torch\n", - "\n", - " if not silent:\n", - " print(f\"- Checking {Us.shape[0]} unitaries for duplicates in dataset, {torch.unique(Us, dim=0).shape[0]} given unitaries are unique.\")\n", - " \n", - " # need to check uniques only \n", - " Us_dataset = torch.unique(dataset.U, dim=0)\n", - "\n", - " # to vecs\n", - " Us = torch.reshape(Us , [Us.shape[0] , -1]).to(Us_dataset.device) \n", - " Us_dataset = torch.reshape(Us_dataset, [Us_dataset.shape[0], -1]) \n", - " \n", - " #---------------\n", - "\n", - " #check\n", - " comp = ( Us_dataset.unsqueeze(dim=0) == Us.unsqueeze(dim=1) ) # gives [num of Us, num of dataset, ch]\n", - " comp = torch.all(comp, dim=-1) # gives [num of Us, num of dataset]\n", - "\n", - " #reduce\n", - " comp = torch.all(comp==False, dim=1) # gives indices that ARE NOT in datset\n", - " # comp = torch.any(comp, dim=1) # gives indices that ARE in datset\n", - "\n", - " #get indices\n", - " comp = comp.nonzero().squeeze(dim=1)\n", - " \n", - " if not silent:\n", - " print(f\"- Checked {Us.shape[0]} given unitaries with dataset. 
Returned indices of {comp.shape[0]} not in dataset unitaries.\") \n", - " \n", - " return comp.cpu()" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "e3de392c-0032-4c12-b33f-ea8d8ffc1d01", - "metadata": {}, - "outputs": [], - "source": [ - "#| export\n", - "def get_new_unitary_indices_batch(Us, dataset, auto_batch_size=32, silent=False, n_jobs=1):\n", - " if type(Us) == list: \n", - " Us = torch.stack([split_U_to_tensor(U) for U in Us]) #numpy to torch\n", - "\n", - " if not silent:\n", - " print(f\"- Checking {Us.shape[0]} unitaries for duplicates in dataset, {torch.unique(Us, dim=0).shape[0]} given unitaries are unique.\")\n", - "\n", - " #----------------------------------------\n", - " samples = Us.shape[0]\n", - " num_batches = int(np.ceil(samples/auto_batch_size))\n", - "\n", - " Us_chunks = Us.chunk(num_batches)\n", - "\n", - " indices = []\n", - " \n", - " if n_jobs > 1:\n", - " f = lambda Us_chunk: get_new_unitary_indices(Us_chunk, dataset, silent=True)\n", - " indices = Parallel(n_jobs=n_jobs)(delayed(f)(Us_chunk) for Us_chunk in Us_chunks) \n", - "\n", - " else: \n", - " for Us_chunk in Us_chunks: \n", - " comp = get_new_unitary_indices(Us_chunk, dataset, silent=True)\n", - " indices.append(comp)\n", - " \n", - " indices = torch.cat(indices)\n", - "\n", - " if not silent:\n", - " print(f\"- Checked {samples} given unitaries with dataset. 
Returned indices of {indices.shape[0]} not in dataset unitaries.\") \n", - " \n", - " return indices" - ] - }, - { - "cell_type": "markdown", - "id": "f28517c0-6105-4e0e-b643-1b1742b5085b", - "metadata": {}, - "source": [ - "## Generation" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "164ccd9c-b696-4595-add0-ad7654690b73", - "metadata": {}, - "outputs": [], - "source": [ - "#| export\n", - "def generate_comp_tensors(pipeline, prompt, U, samples, system_size, num_of_qubits, max_gates, g, no_bar=True, unique=False, auto_batch_size=512):\n", - " #----------------------\n", - " #prepare condtions\n", - "\n", - " prompt = str(prompt)\n", - " c = pipeline.text_encoder.tokenize_and_push_to_device(prompt)\n", - "\n", - " U = U.unsqueeze(0).to(pipeline.device)\n", - " if system_size > num_of_qubits:\n", - " n = 2**system_size\n", - " pad = (0, n-U.shape[-1], 0, n-U.shape[-2]) \n", - " U = F.pad(U, pad, \"constant\", 0)\n", - "\n", - " #----------------------\n", - " #sample and post process to tensor encodings \n", - " \n", - " batch_samples = [auto_batch_size] * int(np.floor(samples/auto_batch_size))\n", - " if samples % auto_batch_size > 0: batch_samples.append(samples % auto_batch_size)\n", - " if len(batch_samples) == 0: batch_samples.append(samples)\n", - "\n", - " out_tensor_list = []\n", - " for batch_sample in batch_samples: \n", - " \n", - " c_batch = c.repeat(batch_sample, *[1]*(c.dim()-1))\n", - " U_batch = U.repeat(batch_sample, *[1]*(U.dim()-1))\n", - " \n", - " latents = torch.randn((c_batch.shape[0], pipeline.model.clr_dim, system_size, max_gates)) \n", - " out_tensor = pipeline(latents=latents, c=c_batch, U=U_batch, g=g, no_bar=no_bar) \n", - " out_tensor_list.append(out_tensor)\n", - " \n", - " out_tensor = torch.cat(out_tensor_list)\n", - " # out_tensor = pipeline(latents=latents, c=c, U=U, g=g, no_bar=no_bar) \n", - "\n", - " out_tensor = pipeline.model.invert_clr(out_tensor)\n", - " out_tensor = out_tensor[:, :num_of_qubits]\n", 
- " \n", - " if unique: out_tensor = torch.unique(out_tensor, dim=0)\n", - " \n", - " if not no_bar: print(f\"[INFO]: (generate_comp_tensors) Generated {'unique_cnt ' if unique else ''}{out_tensor.shape[0]} tensors\")\n", - "\n", - " return out_tensor" - ] - }, - { - "cell_type": "markdown", - "id": "7759fbca-3c32-4bbd-9893-180688df5b61", - "metadata": {}, - "source": [ - "## Accuracy" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "1aaeed42-d97e-42e4-8cc4-c65e47163211", - "metadata": {}, - "outputs": [], - "source": [ - "#| export\n", - "def check_correct_gates(qc, num_of_qubits, gate_pool, max_gates, allowed_gate_clrs):\n", - " tensor = data_con.encode_circuit(qc, num_of_qubits, data_con.gate_pool_to_gate_classes(gate_pool), max_gates)\n", - " gen_gate_clrs = torch.unique(tensor.abs()).tolist() \n", - " gate_corr = set(gen_gate_clrs).issubset(set(allowed_gate_clrs)) # are gates correct?\n", - " return gate_corr" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "5a7bf2d2-d50d-47f5-914d-faeb7d2e00ca", - "metadata": {}, - "outputs": [], - "source": [ - "#| export\n", - "def check_correct_unitary_exact(qc, U):\n", - " is_U = qi.Operator(qc).to_matrix()\n", - " is_U = split_U_to_tensor(is_U)\n", - "\n", - " u_corr = torch.allclose(is_U, U) # is U correct?\n", - " return u_corr" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "e1c12a93-b414-4726-b3be-2fe2ad432fa9", - "metadata": {}, - "outputs": [], - "source": [ - "#| export\n", - "def check_correct_unitary_distance(qc, target_U, norms):\n", - " is_U = qi.Operator(qc).to_matrix()\n", - " is_U = torch.complex(torch.Tensor(np.real(is_U)), torch.Tensor(np.imag(is_U)))\n", - " \n", - " target_U = torch.complex(target_U[0], target_U[1])\n", - " \n", - " d = []\n", - " for norm in norms:\n", - " u_dist = norm.distance(is_U, target_U).item()\n", - " d.append(u_dist)\n", - " \n", - " return d" - ] - }, - { - "cell_type": "code", - "execution_count": null, - 
"id": "c78f2ef0-f735-43d6-9950-09003e337db2", - "metadata": {}, - "outputs": [], - "source": [ - "#| export\n", - "def get_gate_and_U_acc(out_tensor, allowed_gate_clrs, U, gate_pool, num_of_qubits, max_gates, norms=[], no_bar=True):\n", - "\n", - " if isinstance(gate_pool[0], str):\n", - " gate_pool = [instruction_name_to_qiskit_gate(gate) for gate in gate_pool]\n", - " \n", - " #-------------------------\n", - " #decode \n", - " qc_list, error_cnt = convert_tensors_to_circuits(out_tensor, gate_pool)\n", - " if not no_bar: print(f\"Error circuits: {error_cnt}\")\n", - " \n", - " #-------------------------\n", - " acc = [] # combinded acc\n", - " gate_acc = [] # only gates acc\n", - " u_acc = [] # only U acc\n", - " u_norms = [] # list of tuple(norms) for every qc\n", - " \n", - " comb_corr_qc = []\n", - " gate_corr_qc = []\n", - " u_corr_qc = []\n", - " \n", - " #only check circuits that are non-error!\n", - " for qc in qc_list:\n", - " \n", - " #---------------\n", - " # check if in out_tensor only color that correspond to the condtion gate_pool \n", - " gate_corr = check_correct_gates(qc, num_of_qubits, gate_pool, max_gates, allowed_gate_clrs)\n", - " \n", - " #---------------\n", - " # check unitary \n", - " u_corr = check_correct_unitary_exact(qc, U) # true or false\n", - " u_norm = check_correct_unitary_distance(qc, U, norms) # metrics values list\n", - "\n", - " #--------------- \n", - " acc.append(gate_corr and u_corr) \n", - " gate_acc.append(gate_corr) \n", - " u_acc.append(u_corr)\n", - " u_norms.append(u_norm)\n", - " \n", - " if gate_corr and u_corr: comb_corr_qc.append(qc)\n", - " if gate_corr: gate_corr_qc.append(qc)\n", - " if u_corr: u_corr_qc.append(qc)\n", - " \n", - " #average accuracy over sample\n", - " acc = np.mean(acc).item()\n", - " gate_acc = np.mean(gate_acc).item()\n", - " u_acc = np.mean(u_acc).item()\n", - " \n", - " return acc, gate_acc, u_acc, np.array(u_norms), error_cnt, comb_corr_qc, gate_corr_qc, u_corr_qc, qc_list" - ] - }, - { 
- "cell_type": "markdown", - "id": "c43df64d-4e2f-4866-a8d6-81251a7a5a4a", - "metadata": {}, - "source": [ - "## Tests" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "ff118e92-cdba-4d1d-afd5-4e1ec6d8b3a7", - "metadata": {}, - "outputs": [], - "source": [ - "#| export\n", - "def test_comp_acc(pipeline, samples, system_size, gate_pool, num_of_qubits, max_gates, g, str_cond_to_gate_indices: callable, Us, ys, train_dataset=None, norms=[]):\n", - " \n", - " if exists(train_dataset):\n", - " not_dups_ind = get_new_unitary_indices_batch(Us, train_dataset) \n", - " Us = [Us[i] for i in not_dups_ind]\n", - " ys = [ys[i] for i in not_dups_ind]\n", - " \n", - " #--------------------\n", - " acc_s = []\n", - " gate_acc_s = []\n", - " u_acc_s = []\n", - " u_norms_s = []\n", - " uniques_cnt_s = []\n", - " error_cnt_s = []\n", - " \n", - " num_found_distinct_circuits_s = []\n", - " \n", - " for U,y in tqdm(zip(Us,ys), total=len(Us)):\n", - " \n", - " allowed_gate_clrs = str_cond_to_gate_indices(y) \n", - " if isinstance(U, np.ndarray):\n", - " U = split_U_to_tensor(U)\n", - " \n", - " out_tensor = generate_comp_tensors(pipeline, y, U, samples, system_size, num_of_qubits, max_gates, g, unique=False)\n", - " outs = get_gate_and_U_acc(out_tensor, allowed_gate_clrs, U, gate_pool, num_of_qubits, max_gates, norms) \n", - " \n", - " acc, gate_acc, u_acc, u_norms, error_cnt, comb_corr_qc, gate_corr_qc, u_corr_qc, qc_list = outs\n", - "\n", - " if len(qc_list) > 0:\n", - " uniques_cnt = torch.stack([data_con.encode_circuit(qc, num_of_qubits, data_con.gate_pool_to_gate_classes(gate_pool), max_gates) for qc in qc_list]).unique(dim=0).shape[0] #how many uniques in sample (not counting erroro circuits)\n", - " # uniques_cnt = out_tensor.shape[0] - error_cnt #was with unique acc definition\n", - " else:\n", - " uniques_cnt = 0\n", - "\n", - " if len(comb_corr_qc) > 0:\n", - " num_found_distinct_circuits = torch.stack([data_con.encode_circuit(qc, num_of_qubits, 
data_con.gate_pool_to_gate_classes(gate_pool), max_gates) for qc in comb_corr_qc]).unique(dim=0).shape[0] #how many distinct exact solutions we have\n", - " else:\n", - " num_found_distinct_circuits = 0\n", - " \n", - " #--------------------\n", - " acc_s.append(acc)\n", - " gate_acc_s.append(gate_acc)\n", - " u_acc_s.append(u_acc)\n", - " u_norms_s.append(u_norms)\n", - " uniques_cnt_s.append(uniques_cnt)\n", - " error_cnt_s.append(error_cnt)\n", - " num_found_distinct_circuits_s.append(num_found_distinct_circuits)\n", - " \n", - " solved_tasks = np.count_nonzero(num_found_distinct_circuits_s)\n", - " print(f\"Solved {solved_tasks} correctly (at least one qc) that is {100*solved_tasks/len(num_found_distinct_circuits_s):0.2f}%\")\n", - " \n", - " return acc_s, gate_acc_s, u_acc_s, u_norms_s, uniques_cnt_s, error_cnt_s, num_found_distinct_circuits_s " - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "f4219c42-aaf5-4a8c-9c18-017ba9d6cc26", - "metadata": {}, - "outputs": [], - "source": [ - "#| export\n", - "def test_comp_acc_on_testset(pipeline, samples, num_of_U, system_size, gate_pool, num_of_qubits, max_gates, g, str_cond_to_gate_indices: callable, \n", - " prompt_mod: callable, test_dataset, train_dataset=None, norms=[], fix_y=None):\n", - " '''returns: acc_s, gate_acc_s, u_acc_s, uniques_cnt_s, error_cnt_s, num_found_circuits_s, task_qc_len_s'''\n", - " \n", - " if hasattr(test_dataset, \"z\"): # mixed dataset has padding but a z record! 
\n", - " Us, ys, zs = uniquify_tensor_dataset(test_dataset.U, test_dataset.y, test_dataset.z) \n", - " Us, ys, zs = shuffle_tensor_dataset(Us, ys, zs)\n", - " Us, ys, zs = Us[:num_of_U], ys[:num_of_U], zs[:num_of_U]\n", - " task_qc_len_s = zs[:, 1]\n", - " \n", - " else: # not mixed dataset has no padding\n", - " Us, ys, xs = uniquify_tensor_dataset(test_dataset.U, test_dataset.y, test_dataset.x) \n", - " Us, ys, xs = shuffle_tensor_dataset(Us, ys, xs)\n", - " Us, ys, xs = Us[:num_of_U], ys[:num_of_U], xs[:num_of_U]\n", - " task_qc_len_s = get_tensor_gate_length(xs) \n", - "\n", - " if exists(fix_y): ys = [fix_y for y in ys]\n", - " else: ys = [prompt_mod(y) for y in ys]\n", - " \n", - " \n", - " print(f\"Picked {Us.shape[0]} unitaries from test set\")\n", - " print(f\"Sample task: {ys[0]}\")\n", - " print(Us[0])\n", - " print(xs[0])\n", - " \n", - " out = test_comp_acc(pipeline, samples, system_size, gate_pool, num_of_qubits, max_gates, g, str_cond_to_gate_indices, Us.cpu(), ys, train_dataset, norms) \n", - " return *out, task_qc_len_s" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "7f3246d9-9ef9-4d67-82e8-45c1dce8dff2", - "metadata": {}, - "outputs": [], - "source": [ - "#| export\n", - "def test_comp_acc_on_rnd_samples(pipeline, samples, num_of_U, system_size, gate_pool, num_of_qubits, max_gates, g, str_cond_to_gate_indices: callable,\n", - " prompt_mod: callable, # takes a single prompt and returns it modified\n", - " rnd_min_gates, rnd_max_gates,\n", - " train_dataset=None, norms=[], fix_y=None):\n", - " '''returns: acc_s, gate_acc_s, u_acc_s, uniques_cnt_s, error_cnt_s, num_found_circuits_s, task_qc_len_s'''\n", - " \n", - " enc_t, ys, Us = data_con.gen_compilation_rndGates_dataset(num_of_U, num_of_qubits, rnd_min_gates, rnd_max_gates, gate_pool)\n", - " task_qc_len_s = get_tensor_gate_length(enc_t) #should give a complexity meassure, longer circuits have a more complex unitary to compile? 
\n", - "\n", - " if exists(fix_y): ys = [fix_y for y in ys]\n", - " else: ys = [prompt_mod(y) for y in ys]\n", - " \n", - " print(f\"Sample task: {ys[0]}\")\n", - " print(split_U_to_tensor(Us[0]))\n", - " print(enc_t[0])\n", - " \n", - " out = test_comp_acc(pipeline, samples, system_size, gate_pool, num_of_qubits, max_gates, g, str_cond_to_gate_indices, Us, ys, train_dataset, norms)\n", - " return *out, task_qc_len_s" - ] - }, - { - "cell_type": "markdown", - "id": "38a9071d-5cca-4edd-9bb9-a1614bf4c3fc", - "metadata": {}, - "source": [ - "## Plot" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "a53b2487-bc9e-406c-9580-2a99c6c33fc7", - "metadata": {}, - "outputs": [], - "source": [ - "#| export\n", - "def plot_hist_overview(out_tuple, num_of_samples_per_U, rnd_min_gates, rnd_max_gates, max_gates, num_of_qubits):\n", - " acc_s, gate_acc_s, u_acc_s, u_norms_s, uniques_cnt_s, error_cnt_s, num_found_circuits_s, task_qc_len_s = out_tuple \n", - " \n", - " if not exists(rnd_min_gates): rnd_min_gates = \"\"\n", - " if not exists(rnd_max_gates): rnd_max_gates = \"\"\n", - " \n", - " fig, axs = plt.subplots(2, 3, figsize=(13, 6.4), squeeze=False, constrained_layout=True) \n", - " fig.suptitle(f\"Histogram of compilation accuracies (Unitary cnt={len(acc_s)}, samples_per_U={num_of_samples_per_U} {rnd_min_gates=} {rnd_max_gates=} {max_gates=} qubits={num_of_qubits})\")\n", - "\n", - " n = 20\n", - " density = False\n", - " bins = np.linspace(0,1, n+1)\n", - "\n", - " #-----------------\n", - " plt.sca(axs[0, 0])\n", - " plt.title(\"Combined accuracy\")\n", - " plt.xlabel(r\"Accuracy\")\n", - " plt.ylabel(r\"Bin population\" if density==False else \"Accuracy distribution\")\n", - " plt.hist(acc_s, density=density, bins=n*4)\n", - "\n", - " #-----------------\n", - " plt.sca(axs[0, 1])\n", - " plt.title(\"Unitary accuracy\")\n", - " plt.xlabel(r\"Accuracy\")\n", - " plt.hist(u_acc_s, density=density, bins=bins)\n", - "\n", - " #-----------------\n", - " 
plt.sca(axs[0, 2])\n", - " plt.title(\"Gate accuracy\")\n", - " plt.xlabel(r\"Accuracy\")\n", - " plt.hist(gate_acc_s, density=density, bins=bins)\n", - "\n", - " #-----------------\n", - " plt.sca(axs[1, 0])\n", - " plt.title(\"Generated unique circuits\")\n", - " plt.ylabel(r\"Bin population\" if density==False else \"Number distribution\")\n", - " plt.xlabel(r\"Number of unique circuits\")\n", - " plt.hist(uniques_cnt_s, density=density, bins=n)\n", - " \n", - " #-----------------\n", - " plt.sca(axs[1, 1])\n", - " plt.title(\"Generated error circuits\")\n", - " plt.xlabel(r\"Number of error circuits\")\n", - " plt.hist(error_cnt_s, density=density, bins=n)\n", - " \n", - " #-----------------\n", - " plt.sca(axs[1, 2])\n", - " plt.title(\"Absolute number of distinct correct circuits\")\n", - " plt.xlabel(r\"Number of found circuits\")\n", - " plt.hist(num_found_circuits_s, density=density, bins=n*4)\n", - " \n", - " #-----------------\n", - " plt.show()" - ] - }, - { - "cell_type": "markdown", - "id": "f41f26a8-ac40-4e91-8c0e-1ef07a0fd4f4", - "metadata": {}, - "source": [ - "# Export -" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "a0474216-8e0c-4ba7-9a37-571ac7d8e82c", - "metadata": {}, - "outputs": [], - "source": [ - "#| hide\n", - "import nbdev; nbdev.nbdev_export()" - ] - } - ], - "metadata": { - "kernelspec": { - "display_name": "python3", - "language": "python", - "name": "python3" - }, - "widgets": { - "application/vnd.jupyter.widget-state+json": { - "state": {}, - "version_major": 2, - "version_minor": 0 - } - } - }, - "nbformat": 4, - "nbformat_minor": 5 -} diff --git a/src/inference/infer_gate_hist.ipynb b/src/inference/infer_gate_hist.ipynb deleted file mode 100644 index 1bf6d99..0000000 --- a/src/inference/infer_gate_hist.ipynb +++ /dev/null @@ -1,102 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "id": "a8980c24-d62e-462b-ba89-3195cfdcc374", - "metadata": {}, - "source": [ - "# Inference gate distribution" - ] - 
}, - { - "cell_type": "code", - "execution_count": null, - "id": "a8832bdd-f61c-44e1-8619-a9cb352ba768", - "metadata": {}, - "outputs": [], - "source": [ - "#| default_exp inference.infer_gate_hist" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "06272f6f-b4e3-4504-a90a-feebbf6ad821", - "metadata": {}, - "outputs": [], - "source": [ - "#| export\n", - "from genQC.imports import *" - ] - }, - { - "cell_type": "markdown", - "id": "d968941a-c24b-4e2e-bcce-edf497476aa0", - "metadata": {}, - "source": [ - "## Gate length" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "4990eae0-c931-4b6a-b540-82c77c26c992", - "metadata": {}, - "outputs": [], - "source": [ - "#| export\n", - "def get_tensor_gate_length(clr_tensor, padding_token=0):\n", - " '''Careful with padding tokens!'''\n", - " assert clr_tensor.dim() == 3 #[b, s, t]\n", - " \n", - " collabsed_clr_tensor = (clr_tensor != padding_token).to(torch.int8)\n", - " red_clr_tensor = torch.sum(collabsed_clr_tensor, dim=1) # [b, t]\n", - " return torch.count_nonzero(red_clr_tensor, dim=1) # [b]" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "2b55214a-6f4d-41f0-a61f-beddcf037411", - "metadata": {}, - "outputs": [], - "source": [ - "#| export\n", - "def get_circuit_gate_length(qcs): \n", - " lengths = torch.zeros(len(qcs), dtype=int) \n", - " for i,qc in enumerate(qcs): \n", - " if hasattr(qc, \"data\"): \n", - " lengths[i] = len(qc.data) \n", - " return lengths" - ] - }, - { - "cell_type": "markdown", - "id": "f41f26a8-ac40-4e91-8c0e-1ef07a0fd4f4", - "metadata": {}, - "source": [ - "# Export -" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "a0474216-8e0c-4ba7-9a37-571ac7d8e82c", - "metadata": {}, - "outputs": [], - "source": [ - "#| hide\n", - "import nbdev; nbdev.nbdev_export()" - ] - } - ], - "metadata": { - "kernelspec": { - "display_name": "python3", - "language": "python", - "name": "python3" - } - }, - "nbformat": 4, - 
"nbformat_minor": 5 -} diff --git a/src/inference/infer_misc.ipynb b/src/inference/infer_misc.ipynb deleted file mode 100644 index b6a69ba..0000000 --- a/src/inference/infer_misc.ipynb +++ /dev/null @@ -1,141 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "id": "a8980c24-d62e-462b-ba89-3195cfdcc374", - "metadata": {}, - "source": [ - "# Inference miscellaneous functions" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "a8832bdd-f61c-44e1-8619-a9cb352ba768", - "metadata": {}, - "outputs": [], - "source": [ - "#| default_exp inference.infer_misc" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "06272f6f-b4e3-4504-a90a-feebbf6ad821", - "metadata": {}, - "outputs": [], - "source": [ - "#| export\n", - "from genQC.imports import *\n", - "from genQC.platform.qcircuit_dataset_construction import decode_circuit, gate_pool_to_gate_classes\n", - "from genQC.platform.simulation.qcircuit_sim import instruction_name_to_qiskit_gate" - ] - }, - { - "cell_type": "markdown", - "id": "d968941a-c24b-4e2e-bcce-edf497476aa0", - "metadata": {}, - "source": [ - "## Misc" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "a9b6311a-916c-48e9-8f94-7f612884c643", - "metadata": {}, - "outputs": [], - "source": [ - "#| export\n", - "def get_rnd_gatepool_subset(gate_pool, min_sub_gate_pool_cnt=2):\n", - " rng = np.random.default_rng()\n", - " \n", - " n = len(gate_pool) + 1\n", - " c_range = np.arange(n-1)\n", - " \n", - " sub_gate_pool_cnt = rng.integers(min_sub_gate_pool_cnt, n) \n", - " sub_gate_pool_ind = rng.choice(c_range, size=sub_gate_pool_cnt, replace=False) \n", - " sub_gate_pool = [gate_pool[ind] for ind in sub_gate_pool_ind] # pick random subeset of gates\n", - " \n", - " return sub_gate_pool" - ] - }, - { - "cell_type": "markdown", - "id": "ecb1da3e-e9cc-420e-9a05-39d6c033b371", - "metadata": {}, - "source": [ - "## Convertion" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": 
"e0e50913-ebc3-43c4-a5b6-f8e75cb776cc", - "metadata": {}, - "outputs": [], - "source": [ - "#| export\n", - "def convert_tensors_to_circuits(out_tensor, gate_pool, params_tensor=None, place_barrier=False): \n", - " if isinstance(gate_pool[0], str):\n", - " gate_pool = [instruction_name_to_qiskit_gate(gate) for gate in gate_pool]\n", - " \n", - " error_cnt = 0\n", - " qc_list = []\n", - "\n", - " if not exists(params_tensor):\n", - " params_tensor = [None]*out_tensor.shape[0]\n", - " \n", - " #TODO: para this loop\n", - " \n", - " for i,(enc_tensor,p) in enumerate(zip(out_tensor, params_tensor)): \n", - " try:\n", - " qc = decode_circuit(enc_tensor=enc_tensor, gate_pool=gate_pool, place_barrier=place_barrier, params_tensor=p)\n", - "\n", - " except Exception as e:\n", - " error_cnt += 1\n", - " # print(e)\n", - " continue\n", - " \n", - " qc_list.append(qc) \n", - " \n", - " return qc_list, error_cnt" - ] - }, - { - "cell_type": "markdown", - "id": "f41f26a8-ac40-4e91-8c0e-1ef07a0fd4f4", - "metadata": {}, - "source": [ - "# Export -" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "a0474216-8e0c-4ba7-9a37-571ac7d8e82c", - "metadata": {}, - "outputs": [], - "source": [ - "#| hide\n", - "import nbdev; nbdev.nbdev_export()" - ] - } - ], - "metadata": { - "kernelspec": { - "display_name": "python3", - "language": "python", - "name": "python3" - }, - "widgets": { - "application/vnd.jupyter.widget-state+json": { - "state": {}, - "version_major": 2, - "version_minor": 0 - } - } - }, - "nbformat": 4, - "nbformat_minor": 5 -} diff --git a/src/inference/infer_srv.ipynb b/src/inference/infer_srv.ipynb deleted file mode 100644 index f726e51..0000000 --- a/src/inference/infer_srv.ipynb +++ /dev/null @@ -1,737 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "id": "a8980c24-d62e-462b-ba89-3195cfdcc374", - "metadata": {}, - "source": [ - "# Inference SRV functions" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": 
"a8832bdd-f61c-44e1-8619-a9cb352ba768", - "metadata": {}, - "outputs": [], - "source": [ - "#| default_exp inference.infer_srv" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "06272f6f-b4e3-4504-a90a-feebbf6ad821", - "metadata": {}, - "outputs": [], - "source": [ - "#| export\n", - "from genQC.imports import *\n", - "from genQC.inference.infer_misc import *\n", - "from genQC.inference.infer_gate_hist import get_circuit_gate_length\n", - "from genQC.platform.qcircuit_util import get_entanglement_bins\n", - "from genQC.platform.simulation.qcircuit_sim import schmidt_rank_vector\n", - "from genQC.inference.infer_compilation import generate_comp_tensors\n", - "\n", - "from joblib import Parallel, delayed\n", - "import qiskit.quantum_info as qi" - ] - }, - { - "cell_type": "markdown", - "id": "1cfa411d-6574-4e83-875b-40c146b4a8e3", - "metadata": {}, - "source": [ - "## Generation" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "45069ca8-b558-413d-9848-934d631b674e", - "metadata": {}, - "outputs": [], - "source": [ - "#| export\n", - "def get_all_srvs(num_of_qubits):\n", - " srvs = [x for x in itertools.product(*([[1,2]]*num_of_qubits))]\n", - " srvs = np.array(srvs, dtype=int)[np.sum(srvs, axis=1)!=num_of_qubits+1].tolist()\n", - " srvs = sorted(srvs, key=lambda x: sum(x))\n", - " return srvs" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "868fe792-0d99-46e6-ac15-4c2a6e7ef640", - "metadata": {}, - "outputs": [], - "source": [ - "#| export\n", - "def generate_srv_tensors(pipeline, prompt, samples, system_size, num_of_qubits, max_gates, g, no_bar=True, unique=False, auto_batch_size=512):\n", - " if samples==0:\n", - " out_tensor = torch.zeros((0, system_size, max_gates))\n", - " return out_tensor\n", - " \n", - " #----------------------\n", - " #prepare condtions\n", - " \n", - " prompt = str(prompt)\n", - " c = pipeline.text_encoder.tokenize_and_push_to_device(prompt)\n", - "\n", - " 
#----------------------\n", - " #sample and post process to tensor encodings\n", - "\n", - " batch_samples = [auto_batch_size] * int(np.floor(samples/auto_batch_size))\n", - " if samples % auto_batch_size > 0: batch_samples.append(samples % auto_batch_size)\n", - " if len(batch_samples) == 0: batch_samples.append(samples)\n", - "\n", - " out_tensor_list = []\n", - " for batch_sample in batch_samples: \n", - " \n", - " c_batch = c.repeat(batch_sample, *[1]*(c.dim()-1))\n", - " \n", - " latents = torch.randn((c_batch.shape[0], pipeline.model.clr_dim, system_size, max_gates)) \n", - " out_tensor = pipeline(latents=latents, c=c_batch, g=g, no_bar=no_bar, enable_guidance=True) \n", - " out_tensor_list.append(out_tensor)\n", - " \n", - " out_tensor = torch.cat(out_tensor_list)\n", - " out_tensor = pipeline.model.invert_clr(out_tensor)\n", - " out_tensor = out_tensor[:, :num_of_qubits]\n", - " \n", - " if unique: out_tensor = torch.unique(out_tensor, dim=0)\n", - " \n", - " if not no_bar: print(f\"[INFO]: (generate_srv_tensors) Generated {'unique_cnt ' if unique else ''}{out_tensor.shape[0]} tensors\")\n", - "\n", - " return out_tensor" - ] - }, - { - "cell_type": "markdown", - "id": "29ae8e82-dcc4-456b-8422-cd8ffdd25eab", - "metadata": {}, - "source": [ - "## Convertion" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "ab7e53f2-c673-40ae-8378-833fd78be0fd", - "metadata": {}, - "outputs": [], - "source": [ - "#| export\n", - "def convert_tensors_to_srvs(out_tensor, gate_pool, sort_srv=False, place_barrier=False, n_jobs=1): \n", - " qc_list, error_cnt = convert_tensors_to_circuits(out_tensor, gate_pool=gate_pool, place_barrier=place_barrier)\n", - " \n", - " srv_list = []\n", - " \n", - " #---------------------------------------------\n", - " # This is a bottle-neck for more qubits, speed up with async\n", - " \n", - " if n_jobs > 1:\n", - " assert sort_srv == False\n", - " \n", - " f = lambda qc: schmidt_rank_vector(qi.DensityMatrix(qc))\n", - " # 
srv_list = Parallel(n_jobs=n_jobs, prefer=\"threads\")(delayed(f)(qc) for qc in qc_list) #prefer=\"threads\"\n", - " srv_list = Parallel(n_jobs=n_jobs)(delayed(f)(qc) for qc in qc_list) \n", - " \n", - " else: \n", - " for qc in qc_list: \n", - " srv = schmidt_rank_vector(qi.DensityMatrix(qc))\n", - " \n", - " if sort_srv: srv = sorted(srv) \n", - " srv_list.append(srv) \n", - " \n", - " return qc_list, error_cnt, srv_list" - ] - }, - { - "cell_type": "markdown", - "id": "6f247332-0894-4603-bbee-c556bc5820b6", - "metadata": {}, - "source": [ - "## Accuracy" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "8ca4de23-cdb5-4b00-a89e-04b14cffcf7c", - "metadata": {}, - "outputs": [], - "source": [ - "#| export\n", - "def get_srv_accuracy(srv_list, target_srv):\n", - " if not isinstance(srv_list , (torch.Tensor, torch.IntTensor, torch.FloatTensor, torch.LongTensor)): srv_list = torch.tensor(srv_list)\n", - " if not isinstance(target_srv, (torch.Tensor, torch.IntTensor, torch.FloatTensor, torch.LongTensor)): target_srv = torch.tensor(target_srv, device=srv_list.device)\n", - " \n", - " srv_uniques, srv_uniques_cnt = torch.unique(srv_list, dim=0, return_counts=True)\n", - " \n", - " if srv_uniques.numel() == 0: return 0\n", - "\n", - " comp = torch.all(target_srv==srv_uniques, dim=1)\n", - " index = comp.nonzero().squeeze() \n", - " \n", - " if index.dim() == 0: correct_srv_percentage = srv_uniques_cnt[index]/srv_uniques_cnt.sum() \n", - " else: correct_srv_percentage = 0 \n", - " \n", - " return correct_srv_percentage" - ] - }, - { - "cell_type": "markdown", - "id": "1c8e729d-edf2-4228-af86-4f4449acc61d", - "metadata": {}, - "source": [ - "## Tests" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "10594109-3085-4c7d-a92b-d7535ab0ad2e", - "metadata": {}, - "outputs": [], - "source": [ - "#| export\n", - "def true_sample_bin_dist(samples_per_bin, bin_size):\n", - " true_samples = [max(samples_per_bin//bin_size, 1) for i in 
range(bin_size)]\n", - "\n", - " if samples_per_bin-sum(true_samples) > 0:\n", - " true_samples[0] += (samples_per_bin-sum(true_samples))\n", - " \n", - " # assert sum(true_samples)==samples_per_bin\n", - " # assert len(true_samples)==bin_size\n", - "\n", - " # print(f\"{true_samples=}\")\n", - " \n", - " return true_samples" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "61603163-81b9-439c-b827-72fe37a21b88", - "metadata": {}, - "outputs": [], - "source": [ - "#| export\n", - "def test_srv_clr_distribution_bin_samples(pipeline, samples_per_bin, system_size, num_of_qubits, max_gates, g, gate_pool, silent=False, device=\"cpu\", U=None, prompt_mod: callable=lambda c: c,\n", - " only_diag=False, n_jobs=1):\n", - " dist_srvs = get_all_srvs(num_of_qubits) \n", - " cond_srvs = dist_srvs\n", - "\n", - " values = torch.zeros((len(cond_srvs), len(dist_srvs)), device=device)\n", - "\n", - " #---------------------\n", - "\n", - " ent_bins, ent_labels = get_entanglement_bins(num_of_qubits)\n", - "\n", - " i = 0\n", - " \n", - " for ent_bin in tqdm(ent_bins, total=len(ent_bins)):\n", - "\n", - " true_samples = true_sample_bin_dist(samples_per_bin, len(ent_bin))\n", - " \n", - " for ind,srv in tqdm(enumerate(ent_bin), total=len(ent_bin)): \n", - " if exists(U): out_tensor = generate_comp_tensors(pipeline, prompt_mod(srv), U, true_samples[ind], system_size, num_of_qubits, max_gates, g=g, unique=False)\n", - " else: out_tensor = generate_srv_tensors( pipeline, prompt_mod(srv), true_samples[ind], system_size, num_of_qubits, max_gates, g=g, unique=False)\n", - "\n", - " qc_list, error_cnt, svr_list = convert_tensors_to_srvs(out_tensor, gate_pool, n_jobs=n_jobs)\n", - " \n", - " if only_diag:\n", - " values[i, i] = get_srv_accuracy(svr_list, srv)\n", - " else:\n", - " for j, dist_srv in enumerate(dist_srvs): \n", - " values[i, j] = get_srv_accuracy(svr_list, dist_srv)\n", - " \n", - " if not silent:\n", - " print(f\"{cond_srv}: unique_cnt 
{out_tensor.unique(dim=0).shape[0]} error_cnt {error_cnt} acc {values[i, i]:.2f}\")\n", - "\n", - " i += 1\n", - " \n", - " return values" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "bd284fc6-c028-45c1-888d-b2bd8fa6a565", - "metadata": {}, - "outputs": [], - "source": [ - "#| export\n", - "def test_srv_clr_distribution(pipeline, samples_per_srv, system_size, num_of_qubits, max_gates, g, gate_pool, silent=False, device=\"cpu\", U=None, prompt_mod: callable=lambda c: c,\n", - " dist_srvs=None, cond_srvs=None, only_diag=False, n_jobs=1):\n", - " if not exists(dist_srvs):\n", - " dist_srvs = get_all_srvs(num_of_qubits) \n", - " \n", - " if not exists(cond_srvs):\n", - " cond_srvs = dist_srvs\n", - "\n", - " values = torch.zeros((len(cond_srvs), len(dist_srvs)), device=device)\n", - "\n", - " #---------------------\n", - " \n", - " for i, cond_srv in tqdm(enumerate(cond_srvs), total=len(cond_srvs)): \n", - "\n", - " if exists(U): out_tensor = generate_comp_tensors(pipeline, prompt_mod(cond_srv), U, samples_per_srv, system_size, num_of_qubits, max_gates, g=g, unique=False)\n", - " else: out_tensor = generate_srv_tensors( pipeline, prompt_mod(cond_srv), samples_per_srv, system_size, num_of_qubits, max_gates, g=g, unique=False)\n", - " \n", - " qc_list, error_cnt, svr_list = convert_tensors_to_srvs(out_tensor, gate_pool, n_jobs=n_jobs)\n", - " \n", - " if only_diag:\n", - " values[i, i] = get_srv_accuracy(svr_list, srv)\n", - " else:\n", - " for j, dist_srv in enumerate(dist_srvs): \n", - " values[i, j] = get_srv_accuracy(svr_list, dist_srv)\n", - "\n", - " if not silent:\n", - " print(f\"{cond_srv}: unique_cnt {out_tensor.unique(dim=0).shape[0]} error_cnt {error_cnt} acc {values[i, i]:.2f}\")\n", - "\n", - " return values" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "f31d5177-b9a3-4852-8f6f-1bebc69efca1", - "metadata": {}, - "outputs": [], - "source": [ - "#| export\n", - "def test_guidance_dep(pipeline, srvs, 
samples, system_size, num_of_qubits, max_gates, gs, gate_pool, prompt_mod: callable=lambda c: c, U=None, n_jobs=1): \n", - " guidance_dep_out = []\n", - "\n", - " for srv in srvs:\n", - " unique_percentage_list = []\n", - " error_cnt_percentage_list = []\n", - " correct_srv_percentage_list = []\n", - " \n", - " for g in tqdm(gs): \n", - " if exists(U): out_tensor = generate_comp_tensors(pipeline, prompt_mod(srv), U, samples, system_size, num_of_qubits, max_gates, g=g, unique=False)\n", - " else: out_tensor = generate_srv_tensors( pipeline, prompt_mod(srv), samples, system_size, num_of_qubits, max_gates, g=g, unique=False)\n", - " \n", - " #---------------------------------\n", - " #calculate the copy percentage, dataset and sample? \n", - " \n", - " unique_percentage = out_tensor.unique(dim=0).shape[0]/out_tensor.shape[0] \n", - " unique_percentage_list.append(unique_percentage)\n", - " \n", - " #---------------------------------\n", - " #decode tensors, get srv\n", - " \n", - " qc_list, error_cnt, svr_list = convert_tensors_to_srvs(out_tensor, gate_pool, n_jobs=n_jobs)\n", - " error_cnt_percentage_list.append(error_cnt/out_tensor.shape[0])\n", - " \n", - " #---------------------------------\n", - " #record the correct number \n", - " \n", - " correct_srv_percentage = get_srv_accuracy(svr_list, srv) \n", - " correct_srv_percentage_list.append(correct_srv_percentage)\n", - "\n", - " guidance_dep_out.append((unique_percentage_list, error_cnt_percentage_list, correct_srv_percentage_list))\n", - " \n", - " return guidance_dep_out" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "b782437e-a1bb-45e0-835f-e02c173bc798", - "metadata": {}, - "outputs": [], - "source": [ - "#| export\n", - "def test_srv_acc_vs_length(pipeline, samples, system_size, num_of_qubits, max_gates, g, gate_pool, prompt_mod: callable=lambda c: c, U=None, n_jobs=1):\n", - " ent_bins, ent_labels = get_entanglement_bins(num_of_qubits)\n", - " \n", - " ent_ls = []\n", - " ent_accs 
= []\n", - " ent_cnts = []\n", - " \n", - " for ent_bin in tqdm(ent_bins, total=len(ent_bins)):\n", - " ls_acc = dict() #keep track over bins\n", - " ls_cnt = dict()\n", - "\n", - " true_samples = true_sample_bin_dist(samples_per_bin, len(ent_bin))\n", - " \n", - " for ind,srv in enumerate(ent_bin): \n", - " if exists(U): out_tensor = generate_comp_tensors(pipeline, prompt_mod(srv), U, true_samples[ind], system_size, num_of_qubits, max_gates, g=g, unique=False)\n", - " else: out_tensor = generate_srv_tensors( pipeline, prompt_mod(srv), true_samples[ind], system_size, num_of_qubits, max_gates, g=g, unique=False)\n", - " \n", - " qc_list, error_cnt, svr_list = convert_tensors_to_srvs(out_tensor, gate_pool, n_jobs=n_jobs) \n", - "\n", - " lengths = get_circuit_gate_length(qc_list) #work in qc space to check only non errors\n", - "\n", - " if lengths.numel() < 1: continue\n", - " \n", - " for l in lengths.unique(): #range(lengths.min(), lengths.max()):\n", - " indices = (lengths==l).nonzero().squeeze() \n", - " \n", - " if indices.numel() > 0: \n", - " srvs = torch.tensor(svr_list)[indices] \n", - " if indices.dim() == 0: srvs = srvs.unsqueeze(0)\n", - " \n", - " acc = get_srv_accuracy(srvs, srv)\n", - " \n", - " #---------- \n", - " t = ls_acc.pop(l, [])\n", - " t.append(acc)\n", - " ls_acc[l] = t\n", - " \n", - " t = ls_cnt.pop(l, 0)\n", - " t += srvs.shape[0]\n", - " ls_cnt[l] = t\n", - " \n", - " ls = sorted(ls_acc) # sorted keys (l)\n", - " accs = [np.mean(ls_acc[l]) for l in ls] # average acc per l\n", - " cnts = [np.sum(ls_cnt[l]) for l in ls]\n", - " \n", - " ent_ls.append(ls)\n", - " ent_accs.append(accs)\n", - " ent_cnts.append(cnts)\n", - "\n", - " return ent_ls, ent_accs, ent_cnts, ent_labels" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "ac48279f-ad30-474c-98e3-da6d2d94f32a", - "metadata": {}, - "outputs": [], - "source": [ - "#| export\n", - "def test_srv_acc_vs_maxLength(pipeline, samples_per_bin, system_size, num_of_qubits, 
max_gates_list, g, gate_pool, prompt_mod: callable=lambda c: c, U=None, n_jobs=1):\n", - " ent_bins, ent_labels = get_entanglement_bins(num_of_qubits)\n", - " \n", - " ent_accs = []\n", - " for ent_bin in tqdm(ent_bins, total=len(ent_bins)):\n", - " \n", - " true_samples = true_sample_bin_dist(samples_per_bin, len(ent_bin))\n", - " \n", - " bin_accs = []\n", - " for max_gates in max_gates_list:\n", - "\n", - " accs = []\n", - " for ind,srv in enumerate(ent_bin): \n", - " if exists(U): out_tensor = generate_comp_tensors(pipeline, prompt_mod(srv), U, true_samples[ind], system_size, num_of_qubits, max_gates, g=g, unique=False)\n", - " else: out_tensor = generate_srv_tensors( pipeline, prompt_mod(srv), true_samples[ind], system_size, num_of_qubits, max_gates, g=g, unique=False)\n", - " \n", - " qc_list, error_cnt, svr_list = convert_tensors_to_srvs(out_tensor, gate_pool, n_jobs=n_jobs) \n", - "\n", - " acc = get_srv_accuracy(svr_list, srv)\n", - " \n", - " accs.append(acc) \n", - " bin_accs.append(np.mean(accs)) \n", - " ent_accs.append(bin_accs)\n", - " \n", - " return ent_accs, ent_labels" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "f6fc9c5c-41c6-4905-8d2e-76e301cef284", - "metadata": {}, - "outputs": [], - "source": [ - "#| export\n", - "def test_srv_length_distribution(pipeline, samples_per_bin, system_size, num_of_qubits, max_gates, g, gate_pool, silent=False, U=None, prompt_mod: callable=lambda c: c, n_jobs=1):\n", - " ent_bins, ent_labels = get_entanglement_bins(num_of_qubits)\n", - "\n", - " ls = []\n", - " \n", - " for ent_bin in tqdm(ent_bins, total=len(ent_bins)):\n", - "\n", - " true_samples = true_sample_bin_dist(samples_per_bin, len(ent_bin))\n", - " \n", - " bin_ls = []\n", - " \n", - " for ind,srv in tqdm(enumerate(ent_bin), total=len(ent_bin)): \n", - " if exists(U): out_tensor = generate_comp_tensors(pipeline, prompt_mod(srv), U, true_samples[ind], system_size, num_of_qubits, max_gates, g=g, unique=False)\n", - " else: 
out_tensor = generate_srv_tensors( pipeline, prompt_mod(srv), true_samples[ind], system_size, num_of_qubits, max_gates, g=g, unique=False)\n", - "\n", - " qc_list, error_cnt, svr_list = convert_tensors_to_srvs(out_tensor, gate_pool, n_jobs=n_jobs)\n", - "\n", - " qc_ls = get_circuit_gate_length(qc_list) #tensor [qcs]\n", - " bin_ls.append(qc_ls)\n", - "\n", - " ls.append(torch.cat(bin_ls))\n", - "\n", - " return ls #[ent_bins, num_of_non_err_samples]" - ] - }, - { - "cell_type": "markdown", - "id": "717dcd10-5e19-4bf7-a8ac-7e360c9e9e08", - "metadata": {}, - "source": [ - "## Plot" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "7d9df84e-d680-4db0-907d-913fe68fe782", - "metadata": {}, - "outputs": [], - "source": [ - "#| export\n", - "def plot_srv_clr_distribution_hist(values, samples, num_of_qubits, save=False, dist_srvs=None, cond_srvs=None):\n", - " if not exists(dist_srvs):\n", - " dist_srvs = get_all_srvs(num_of_qubits) \n", - " \n", - " if not exists(cond_srvs):\n", - " cond_srvs = dist_srvs\n", - "\n", - " n = len(dist_srvs)\n", - " values = values.cpu()\n", - " \n", - " fig = plt.figure(figsize=(12,12))#, constrained_layout=True) \n", - " plt.title(f\"Generated samples per condition: {samples}\")\n", - " plt.ylabel(r\"Condition\")\n", - " plt.xlabel(r\"Generated distribution\")\n", - "\n", - " #--------------------------------------------\n", - " if num_of_qubits < 6 or 0:\n", - " plt.yticks(range(len(cond_srvs)), [str(b) for b in cond_srvs])\n", - " plt.xticks(range(n), [str(b) for b in dist_srvs], rotation=90 if n>3 else 0)\n", - " else:\n", - " plt.yticks([])\n", - " plt.xticks([])\n", - "\n", - " #--------------------------------------------\n", - " plt.imshow(values, vmin=0, vmax=1)\n", - " # plt.imshow(values.cpu(), norm=\"log\")\n", - " plt.colorbar()\n", - "\n", - " #--------------------------------------------\n", - " #print acc\n", - " x_shift = 1*40 if num_of_qubits==5 else 0\n", - " if num_of_qubits < 4:\n", - " for i in 
range(n): \n", - " plt.text(x_shift+i, i, f\"{values[i, i]:0.2f}\", color='black', ha='center', va='center', fontsize=\"large\")\n", - "\n", - " #--------------------------------------------\n", - " #draw rects\n", - " off = 0.5\n", - " for i in range(2, num_of_qubits):\n", - " w = scipy.special.comb(num_of_qubits, i, exact=True)\n", - " plt.gca().add_patch(plt.Rectangle((off, off), w, w, ls=\"-\", ec=\"white\", fc=\"none\")) #, transform=plt.gca().transAxes))\n", - " off += w\n", - "\n", - " #--------------------------------------------\n", - " #print average acc for rects\n", - " off = 0\n", - " for i in [0]+list(range(2, num_of_qubits+1)):\n", - " w = scipy.special.comb(num_of_qubits, i, exact=True) \n", - " d1 = off\n", - " d2 = d1 + w\n", - " mean_acc = values[d1:d2, d1:d2].diag().mean()\n", - " plt.text(off+2*w/3, off+w/7, f\"{mean_acc:0.2f}\", color='red', ha='center', va='center', fontsize=\"x-large\") \n", - " off += w\n", - "\n", - " #--------------------------------------------\n", - " if save:\n", - " plt.savefig('plot_srv_clr_distribution_hist.svg', bbox_inches='tight')\n", - " \n", - " plt.show()" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "18161ea0-df08-4ba1-8e45-5a5d58bdd73c", - "metadata": {}, - "outputs": [], - "source": [ - "#| export\n", - "def plot_srv_clr_distribution_bin_accuracy(values, samples, num_of_qubits, save=False, plot_percentages=False, trainSet_srv=None):\n", - " values = values.cpu().diag()\n", - " ent_bins, ent_labels = get_entanglement_bins(num_of_qubits)\n", - " \n", - " n = sum(len(srvs) for srvs in ent_bins)\n", - " x = np.arange(n) # the label locations\n", - " width = 0.8\n", - "\n", - " #------------------------\n", - " fig = plt.figure(figsize=(6.6, 4), constrained_layout=True) \n", - " # plt.title(f\"Generated samples per condition: {samples}\", fontsize=14)\n", - " plt.ylabel(r\"Accuracy\", fontsize=25)\n", - " plt.yticks(fontsize=14)\n", - " plt.xticks([])\n", - " \n", - " i = 0\n", - " for 
j,(label, srvs) in enumerate(zip(ent_labels, ent_bins)): \n", - " label = f\"{sum(srvs[0])-num_of_qubits}\"\n", - " incre = len(srvs)\n", - " rects = plt.bar(x[i:i+incre], values[i:i+incre], width, label=label)\n", - " i += incre\n", - " if plot_percentages: plt.gca().bar_label(rects, padding=3, fmt=\"%0.2f\")\n", - "\n", - " ncols = len(ent_labels)//2+1 if len(ent_labels) > 5 else len(ent_labels)\n", - " leg1 = plt.legend(loc=\"lower center\", fontsize=14, ncols=ncols, title=\"# of entangled qubits:\", title_fontsize=14,bbox_to_anchor=(0.5, 1.01))\n", - " ax = fig.add_artist(leg1)\n", - " \n", - " if exists(trainSet_srv):\n", - " if trainSet_srv.shape[-1]==num_of_qubits:\n", - " srvs = []\n", - " for s in ent_bins: srvs.extend(s)\n", - " \n", - " dataset_percentages = [get_srv_accuracy(trainSet_srv, srv).cpu() for srv in srvs]\n", - " xmin = x - width*0.55\n", - " xmax = x + width*0.55 \n", - " ag = plt.hlines(dataset_percentages, xmin, xmax, label=\"Random sampling\" , color=\"black\", linestyle=\"-\", linewidths=2.3)\n", - " \n", - " plt.legend(handles=[ag], fontsize=14, frameon=False)\n", - "\n", - " ymin, ymax = plt.ylim() \n", - " plt.ylim(ymin, ymax+0.04)\n", - "\n", - " if save:\n", - " plt.savefig(f\"plot_srv_clr_distribution_bin_accuracy.svg\", bbox_inches='tight', transparent=True)\n", - " \n", - " plt.show()" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "6feddf06-e127-4cca-bf8e-0c2cc173c839", - "metadata": {}, - "outputs": [], - "source": [ - "#| export\n", - "def plot_guidance_dep(srvs, gs, guidance_dep_out, samples, save=False): \n", - " assert len(srvs) == len(guidance_dep_out)\n", - "\n", - " n = len(srvs)\n", - " fig, axs = plt.subplots(1, n, figsize=(12, 5), squeeze=False, constrained_layout=True) \n", - " fig.suptitle(fr\"Generated {samples} samples per $g$ and SRV\")\n", - " \n", - " for i,srv in enumerate(srvs): \n", - " unique_percentage_list, error_cnt_percentage_list, correct_srv_percentage_list = 
guidance_dep_out[i]\n", - " \n", - " #---------------------------------\n", - " #plot now gs vs the numbers \n", - " \n", - " plt.sca(axs[0, i])\n", - " plt.xlabel(r\"Guidance scale $g$\") \n", - " plt.title(f\"SRV = {srv}\")\n", - " plt.plot(gs, unique_percentage_list , label=\"Unique tensors percentage\")\n", - " plt.plot(gs, error_cnt_percentage_list , label=\"Error circuits percentage\")\n", - " plt.plot(gs, correct_srv_percentage_list, label=\"Correct SRV percentage\")\n", - " \n", - " if i == (n-1): plt.legend() \n", - "\n", - " if save:\n", - " plt.savefig(\"plot_guidance_dep.svg\", bbox_inches='tight', transparent=True)\n", - " \n", - " plt.show()" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "13cdecfb-080b-49c3-a13e-d9a7c1711fce", - "metadata": {}, - "outputs": [], - "source": [ - "#| export\n", - "def plot_srv_acc_vs_length(ent_ls, ent_accs, ent_cnts, ent_labels, samples, plot_dist=True, save=False):\n", - " fig, axs = plt.subplots(2 if plot_dist else 1, 1, figsize=(12, 7), squeeze=False, constrained_layout=True) \n", - "\n", - " #-------------------\n", - " plt.sca(axs[0,0])\n", - " plt.title(f\"Generated samples per entanglement: {samples}\")\n", - " plt.ylabel(\"Accuracy\")\n", - " plt.xlabel(\"Gate number\")\n", - " for i,ent_label in enumerate(ent_labels): \n", - " plt.plot(ent_ls[i], ent_accs[i], label=f\"{ent_label}\")\n", - " plt.legend()\n", - "\n", - " #-------------------\n", - " if plot_dist:\n", - " plt.sca(axs[1,0])\n", - " plt.title(f\"Used samples per l to calculate accuracy, should match gate distribution\")\n", - " plt.ylabel(\"Used samples\")\n", - " plt.xlabel(\"Gate number\")\n", - " for i,ent_label in enumerate(ent_labels): \n", - " plt.plot(ent_ls[i], ent_cnts[i], label=f\"{ent_label}\")\n", - " plt.legend()\n", - "\n", - " #-------------------\n", - " if save:\n", - " plt.savefig('plot_srv_acc_vs_length.svg', bbox_inches='tight')\n", - " \n", - " plt.show()" - ] - }, - { - "cell_type": "code", - 
"execution_count": null, - "id": "05c4ebca-e67c-4d8e-bea1-7e0e2cce5aa9", - "metadata": {}, - "outputs": [], - "source": [ - "#| export\n", - "def plot_srv_acc_vs_maxLength(ent_accs, ent_labels, max_gates_list, samples, plot_dist=True, save=False):\n", - " fig = plt.figure(figsize=(12, 4), constrained_layout=True) \n", - " \n", - " plt.title(f\"Generated samples per maxGates per entanglement: {samples}\")\n", - " plt.ylabel(\"Accuracy\")\n", - " plt.xlabel(\"Max number of gates / tensor size\")\n", - " plt.xticks(max_gates_list)\n", - " \n", - " for ent_acc,ent_label in zip(ent_accs, ent_labels): \n", - " plt.plot(max_gates_list, ent_acc, label=f\"{ent_label}\")\n", - " \n", - " plt.legend()\n", - "\n", - " if save:\n", - " plt.savefig('plot_srv_acc_vs_length.svg', bbox_inches='tight')\n", - " \n", - " plt.show()" - ] - }, - { - "cell_type": "markdown", - "id": "f41f26a8-ac40-4e91-8c0e-1ef07a0fd4f4", - "metadata": {}, - "source": [ - "# Export -" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "a0474216-8e0c-4ba7-9a37-571ac7d8e82c", - "metadata": {}, - "outputs": [], - "source": [ - "#| hide\n", - "import nbdev; nbdev.nbdev_export()" - ] - } - ], - "metadata": { - "kernelspec": { - "display_name": "python3", - "language": "python", - "name": "python3" - }, - "widgets": { - "application/vnd.jupyter.widget-state+json": { - "state": {}, - "version_major": 2, - "version_minor": 0 - } - } - }, - "nbformat": 4, - "nbformat_minor": 5 -} diff --git a/src/inference/sampling.ipynb b/src/inference/sampling.ipynb new file mode 100644 index 0000000..4aac6ea --- /dev/null +++ b/src/inference/sampling.ipynb @@ -0,0 +1,472 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "id": "da4523d0-f8da-4994-9477-3c28ef7be56d", + "metadata": {}, + "source": [ + "# Sampling functions\n", + "\n", + "> Sampling functions for model inference." 
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "e4200f54-7513-4597-973f-b8134853db03", + "metadata": {}, + "outputs": [], + "source": [ + "#| default_exp inference.sampling" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "54fea3fd-4a6f-43cc-a2ff-b2cb35d2d626", + "metadata": {}, + "outputs": [], + "source": [ + "#| export\n", + "from genQC.imports import *\n", + "from genQC.utils.async_fn import run_parallel_jobs\n", + "from genQC.platform.simulation import Simulator \n", + "from genQC.platform.tokenizer.base_tokenizer import BaseTokenizer\n", + "from genQC.pipeline.pipeline import Pipeline" + ] + }, + { + "cell_type": "markdown", + "id": "529e8286-ea0d-4de5-90e7-6ea1c1f1700c", + "metadata": {}, + "source": [ + "## Generation" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "ff3a4d77-0335-4ffd-9190-900b0efe24dd", + "metadata": {}, + "outputs": [], + "source": [ + "#| export\n", + "def get_batch_samples(samples: int, auto_batch_size: int = 512) -> list[int]:\n", + " batch_samples = [auto_batch_size] * int(np.floor(samples/auto_batch_size))\n", + " \n", + " if samples % auto_batch_size > 0: \n", + " batch_samples.append(samples % auto_batch_size)\n", + " \n", + " if len(batch_samples) == 0: \n", + " batch_samples.append(samples)\n", + " \n", + " assert sum(batch_samples) == samples\n", + " return batch_samples" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "8a47a3b2-84a4-44b0-846b-bba6f9bd878b", + "metadata": {}, + "outputs": [], + "source": [ + "#| export\n", + "def batched_sampling(pipeline: Pipeline,\n", + " cond_kwargs: dict[str, torch.Tensor], \n", + " samples: int, \n", + " system_size: int, \n", + " num_of_qubits: int, \n", + " max_gates: int, \n", + " g: float = 1.0, \n", + " init_latents: Optional[torch.Tensor] = None,\n", + " no_bar: bool = True, \n", + " unique: bool = False, \n", + " auto_batch_size: int = 512, \n", + " enable_params: bool = True, \n", + " 
reduce_spatial: bool = True,\n", + " return_predicted_x0: bool = False):\n", + " \n", + " \"\"\" e.g. cond_kwargs.keys = {\"c\", \"micro_cond\", \"negative_c\", \"U\"} \"\"\"\n", + "\n", + " assert \"c\" in cond_kwargs\n", + " \n", + " c_in = cond_kwargs[\"c\"].shape[0]\n", + " if c_in == 1:\n", + " # Same conditions for all samples\n", + " for cond in cond_kwargs.values():\n", + " assert cond.shape[0] == 1\n", + "\n", + " cond_kwargs = {kw : val.repeat(auto_batch_size, *[1]*(val.dim()-1)) \n", + " for kw, val in cond_kwargs.items()}\n", + " \n", + " else:\n", + " # Different conditions for all samples\n", + " for cond in cond_kwargs.values():\n", + " assert cond.shape[0] == samples\n", + " \n", + " cond_kwargs = {kw:val.to(pipeline.device) \n", + " for kw, val in cond_kwargs.items()} \n", + "\n", + " #----------------------------------------\n", + " if exists(init_latents):\n", + " assert init_latents.shape[0] == samples\n", + " init_latents = init_latents.to(pipeline.device)\n", + " \n", + " #----------------------------------------\n", + " \n", + " # Sample and post process to tensor encodings \n", + " batch_samples = get_batch_samples(samples=samples, auto_batch_size=auto_batch_size)\n", + "\n", + " #----------------------------------------\n", + " \n", + " off = 0\n", + " out_tensor_list = []\n", + " predicted_x0_list = []\n", + " \n", + " for batch_sample in batch_samples: \n", + " #------------\n", + " if c_in == 1:\n", + " # Same conditions for all samples\n", + " _cond_kwargs = {kw:val[:batch_sample] \n", + " for kw, val in cond_kwargs.items()} \n", + " else:\n", + " # Different conditions for all samples\n", + " _cond_kwargs = {kw:val[off:off+batch_sample] \n", + " for kw, val in cond_kwargs.items()}\n", + " \n", + " #------------\n", + " if exists(init_latents):\n", + " latents = init_latents[off:off+batch_sample] \n", + " \n", + " else:\n", + " if pipeline.embedder.channel_last:\n", + " latents = torch.randn((batch_sample, system_size, max_gates, 
pipeline.model.params_config.clr_dim)) \n", + " else:\n", + " latents = torch.randn((batch_sample, pipeline.model.params_config.clr_dim, system_size, max_gates)) \n", + "\n", + " off += batch_sample\n", + "\n", + " #------------\n", + " out_tensor = pipeline.denoising(latents=latents, \n", + " g=g, \n", + " no_bar=no_bar, \n", + " # enable_guidance=True, \n", + " return_predicted_x0=return_predicted_x0,\n", + " **_cond_kwargs) \n", + "\n", + " if return_predicted_x0:\n", + " out_tensor, predicted_x0 = out_tensor\n", + "\n", + " out_tensor_list.append(out_tensor)\n", + "\n", + " if return_predicted_x0:\n", + " # predicted_x0 ... [timesteps, *out_tensor.shape]\n", + " predicted_x0_list.append(predicted_x0)\n", + "\n", + " #----------------------------------------\n", + "\n", + " out_tensor_raw = torch.cat(out_tensor_list).to(pipeline.device)\n", + "\n", + " if return_predicted_x0:\n", + " predicted_x0_raw = torch.cat(predicted_x0_list, dim=1).to(pipeline.device)\n", + "\n", + " if enable_params: out_tensor, params = pipeline.embedder.invert(out_tensor_raw, reduce_spatial=reduce_spatial)\n", + " else: out_tensor = pipeline.embedder.invert(out_tensor_raw)\n", + " \n", + " #----------------------------------------\n", + " \n", + " out_tensor = out_tensor[:, :num_of_qubits] \n", + " \n", + " if unique: \n", + " if enable_params: \n", + " raise NotImplementedError(\"We have unique and enable_params enabled, how should we handle that?\")\n", + " out_tensor = torch.unique(out_tensor, dim=0)\n", + " \n", + " if not no_bar: print(f\"[INFO]: (generate_comp_tensors) Generated {'unique_cnt ' if unique else ''}{out_tensor.shape[0]} tensors\")\n", + "\n", + " if enable_params: \n", + " if return_predicted_x0:\n", + " return out_tensor, params, predicted_x0_raw\n", + " return out_tensor, params\n", + " \n", + " elif return_predicted_x0:\n", + " return out_tensor, predicted_x0_raw\n", + " \n", + " return out_tensor" + ] + }, + { + "cell_type": "code", + "execution_count": null, + 
"id": "fe9a3240-a3e6-4901-870d-677a3ea27376", + "metadata": {}, + "outputs": [], + "source": [ + "#| export\n", + "def prepare_prompts(pipeline: Pipeline, \n", + " prompt: str | Sequence[str], \n", + " negative_prompt: Optional[str | Sequence[str]] = None):\n", + " \n", + " # Prepare conditions\n", + " c = pipeline.text_encoder.tokenize_and_push_to_device(prompt)\n", + "\n", + " if exists(negative_prompt):\n", + " negative_c = pipeline.text_encoder.tokenize_and_push_to_device(negative_prompt)\n", + " assert negative_c.shape[0] == 1\n", + " else:\n", + " negative_c = None\n", + "\n", + " return c, negative_c" + ] + }, + { + "cell_type": "markdown", + "id": "aec071d2-8130-4ea8-832c-4555b15da115", + "metadata": {}, + "source": [ + "### Task specific sampling" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "ece2684d-7a76-47f4-8791-73d71867863a", + "metadata": {}, + "outputs": [], + "source": [ + "#| export\n", + "def generate_tensors(pipeline: Pipeline, \n", + " prompt: str | Sequence[str], \n", + " samples: int,\n", + " system_size: int, \n", + " num_of_qubits: int, \n", + " max_gates: int, \n", + " g: float = 1.0, \n", + " init_latents: Optional[torch.Tensor] = None,\n", + " no_bar: bool = True, \n", + " unique: bool = False, \n", + " auto_batch_size: int = 512, \n", + " enable_params: bool = False,\n", + " reduce_spatial: bool = True,\n", + " return_predicted_x0: bool = False,\n", + " negative_prompt: Optional[str | Sequence[str]] = None,\n", + " micro_cond: Optional[torch.Tensor] = None) -> torch.Tensor:\n", + "\n", + " if exists(micro_cond):\n", + " raise NotImplementedError()\n", + "\n", + " # Prepare conditions\n", + " c, negative_c = prepare_prompts(pipeline, prompt, negative_prompt)\n", + " \n", + " cond_kwargs = {\"c\":c}\n", + " if exists(negative_c): cond_kwargs[\"negative_c\"] = negative_c\n", + " if exists(micro_cond): cond_kwargs[\"micro_cond\"] = micro_cond\n", + "\n", + " # Perform sampling\n", + " out = 
batched_sampling(pipeline=pipeline, \n", + " cond_kwargs=cond_kwargs, \n", + " samples=samples, \n", + " system_size=system_size,\n", + " num_of_qubits=num_of_qubits,\n", + " max_gates=max_gates,\n", + " g=g, \n", + " init_latents=init_latents,\n", + " no_bar=no_bar,\n", + " unique=unique,\n", + " auto_batch_size=auto_batch_size,\n", + " enable_params=enable_params,\n", + " reduce_spatial=reduce_spatial,\n", + " return_predicted_x0=return_predicted_x0)\n", + " return out" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "3e25a4f6-c9fc-404c-b415-b7b68be998bd", + "metadata": {}, + "outputs": [], + "source": [ + "#| export\n", + "def generate_compilation_tensors(pipeline: Pipeline, \n", + " prompt: str | Sequence[str], \n", + " U: torch.Tensor, \n", + " samples: int, \n", + " system_size: int, \n", + " num_of_qubits: int, \n", + " max_gates: int, \n", + " g: float = 1.0, \n", + " tensor_prod_pad: bool = True,\n", + " init_latents: Optional[torch.Tensor] = None,\n", + " no_bar: bool = True, \n", + " unique: bool = False, \n", + " auto_batch_size: int = 512, \n", + " enable_params: bool = True, \n", + " reduce_spatial: bool = True,\n", + " return_predicted_x0: bool = False,\n", + " negative_prompt: Optional[str | Sequence[str]] = None,\n", + " negative_u: Optional[torch.Tensor] = None,\n", + " micro_cond: Optional[torch.Tensor] = None) -> torch.Tensor:\n", + " \"\"\"\n", + " Samples tensor encodings from the DM for the given sample parameters.\n", + "\n", + " What kind of unitary padding we have depends on what we used for model training, so it depends on the concrete model weights.\n", + " \"\"\"\n", + "\n", + " if torch.is_complex(U):\n", + " U = torch.stack([U.real, U.imag], dim=-3)\n", + " \n", + " if exists(micro_cond):\n", + " raise NotImplementedError()\n", + "\n", + " # Prepare conditions\n", + " c, negative_c = prepare_prompts(pipeline, prompt, negative_prompt)\n", + "\n", + " cond_kwargs = {\"c\":c}\n", + " if exists(negative_c): 
cond_kwargs[\"negative_c\"] = negative_c\n", + " if exists(micro_cond): cond_kwargs[\"micro_cond\"] = micro_cond\n", + "\n", + " def tensor_pad(U):\n", + " # Prepare unitary condition\n", + " assert U.dim() in [3, 4]\n", + " if U.dim() == 3: \n", + " # [2, N, N] to [1, 2, N, N]\n", + " U = U.unsqueeze(0)\n", + " \n", + " if system_size > num_of_qubits:\n", + " N = 2**system_size\n", + " \n", + " if tensor_prod_pad:\n", + " # Pad with identity tensor product, assume Big Endian \n", + " \n", + " U_pad = torch.zeros((U.shape[0], 2, N, N), device=U.device, dtype=U.dtype)\n", + " \n", + " U_side = U.shape[-1]\n", + " for jj in range(N//U_side): \n", + " _slice = slice(U_side * jj, U_side * (jj+1))\n", + " U_pad[..., _slice, _slice] = U \n", + " \n", + " U = U_pad\n", + " \n", + " else:\n", + " # zero pad\n", + " pad = (0, N-U.shape[-1], 0, N-U.shape[-2]) \n", + " U = F.pad(U, pad, \"constant\", 0)\n", + " return U\n", + " \n", + " cond_kwargs[\"U\"] = tensor_pad(U)\n", + " if exists(negative_u): \n", + " cond_kwargs[\"negative_u\"] = tensor_pad(negative_u)\n", + " \n", + " # Perform sampling\n", + " out = batched_sampling(pipeline=pipeline, \n", + " cond_kwargs=cond_kwargs, \n", + " samples=samples, \n", + " system_size=system_size,\n", + " num_of_qubits=num_of_qubits,\n", + " max_gates=max_gates,\n", + " g=g, \n", + " init_latents=init_latents,\n", + " no_bar=no_bar,\n", + " unique=unique,\n", + " auto_batch_size=auto_batch_size,\n", + " enable_params=enable_params,\n", + " reduce_spatial=reduce_spatial,\n", + " return_predicted_x0=return_predicted_x0)\n", + " return out" + ] + }, + { + "cell_type": "markdown", + "id": "368ff9cc-2515-4056-9dfe-2538380884c3", + "metadata": {}, + "source": [ + "## Convertion" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "fc14c810-6793-4f64-a421-7cc902ec38b8", + "metadata": {}, + "outputs": [], + "source": [ + "#| export\n", + "def decode_tensors_to_backend(simulator: Simulator, \n", + " tokenizer: BaseTokenizer, 
\n", + " tensors: torch.Tensor, \n", + " params: Optional[torch.Tensor] = None, \n", + " silent: bool = True,\n", + " n_jobs: int = 1,\n", + " filter_errs: bool = True) -> tuple[Sequence[any], int]:\n", + " tensors = tensors.cpu()\n", + "\n", + " if exists(params):\n", + " params = params.cpu()\n", + " iter_pack = zip(tensors, params)\n", + " _decode = lambda x, p: tokenizer.decode(x, p)\n", + " \n", + " else:\n", + " iter_pack = zip(tensors, )\n", + " _decode = lambda x: tokenizer.decode(x)\n", + " \n", + " def _f(iter_vars):\n", + " try:\n", + " instructions = _decode(*iter_vars)\n", + " backend_obj = simulator.backend.genqc_to_backend(instructions, place_barriers=False)\n", + " return backend_obj\n", + " except Exception as err:\n", + " if silent: return None\n", + " raise err\n", + " \n", + " pot_qcs = run_parallel_jobs(_f, iter_pack, n_jobs)\n", + "\n", + " if filter_errs:\n", + " backend_obj_list = [pot_qc for pot_qc in pot_qcs if exists(pot_qc)]\n", + " err_cnt = sum(1 for pot_qc in pot_qcs if not_exists(pot_qc))\n", + " assert len(backend_obj_list) + err_cnt == len(pot_qcs)\n", + " else:\n", + " backend_obj_list = pot_qcs\n", + " err_cnt = None\n", + " \n", + " return backend_obj_list, err_cnt" + ] + }, + { + "cell_type": "markdown", + "id": "f7ed6e9d-329c-43f1-b3c6-2fa96ceab9e2", + "metadata": {}, + "source": [ + "# Export -" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "48ea3c5a-896d-478f-9c5d-5ecfa408eae0", + "metadata": {}, + "outputs": [], + "source": [ + "#| hide\n", + "import nbdev; nbdev.nbdev_export()" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "python3", + "language": "python", + "name": "python3" + }, + "widgets": { + "application/vnd.jupyter.widget-state+json": { + "state": {}, + "version_major": 2, + "version_minor": 0 + } + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/src/models/clip/frozen_open_clip.ipynb b/src/models/clip/frozen_open_clip.ipynb new file mode 100644 index 
0000000..82e6eb9 --- /dev/null +++ b/src/models/clip/frozen_open_clip.ipynb @@ -0,0 +1,715 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "id": "a8980c24-d62e-462b-ba89-3195cfdcc374", + "metadata": {}, + "source": [ + "# Frozen OpenCLIP\n", + "\n", + "> Interface to the [OpenCLIP](https://github.com/mlfoundations/open_clip) library." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "a8832bdd-f61c-44e1-8619-a9cb352ba768", + "metadata": {}, + "outputs": [], + "source": [ + "#| default_exp models.clip.frozen_open_clip" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "06272f6f-b4e3-4504-a90a-feebbf6ad821", + "metadata": {}, + "outputs": [], + "source": [ + "#| export\n", + "from genQC.imports import *\n", + "from genQC.models.config_model import ConfigModel\n", + "from genQC.utils.async_fn import run_parallel_jobs\n", + "from genQC.utils.misc_utils import infer_torch_device\n", + "import open_clip" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "ed668754-6e3d-480a-8bce-c12eed6d939e", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "OpenCLIP version: 2.30.0\n" + ] + } + ], + "source": [ + "print(\"OpenCLIP version:\", open_clip.__version__)" + ] + }, + { + "cell_type": "markdown", + "id": "3bca1a5d-2c82-4e97-8181-425a38cfe5ee", + "metadata": {}, + "source": [ + "## CLIP model" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "13bd5df0-fbf5-4f5e-bb82-6cab672c3542", + "metadata": {}, + "outputs": [], + "source": [ + "#| export\n", + "@dataclass\n", + "class FrozenOpenCLIPEmbedderConfig:\n", + " arch: str\n", + " version: str\n", + " #device: str\n", + " max_length: int\n", + " freeze: bool\n", + " layer: str" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "dffe5b29-8362-46fe-9cdc-e83c996ca8b0", + "metadata": {}, + "outputs": [], + "source": [ + "#| export\n", + "class 
FrozenOpenCLIPEmbedder(ConfigModel):\n", + " \"\"\"Loads and freezes the [OpenCLIP](https://github.com/mlfoundations/open_clip) transformer encoder for text prompts.\"\"\"\n", + " \n", + " LAYERS = [\n", + " # \"pooled\",\n", + " \"last\",\n", + " \"penultimate\"\n", + " ]\n", + "\n", + " njobs = 1\n", + "\n", + " def __init__(self, arch=\"ViT-B-32\", version=\"datacomp_xl_s13b_b90k\", max_length=77, freeze=True, layer=\"penultimate\", **kwargs):\n", + " super().__init__(**kwargs) \n", + " \n", + " assert layer in self.LAYERS \n", + " self.params_config = FrozenOpenCLIPEmbedderConfig(arch, version, max_length, freeze, layer)\n", + " \n", + " model, _, _ = open_clip.create_model_and_transforms(arch, device=\"cpu\", pretrained=version)\n", + " self.device = \"cpu\"\n", + " \n", + " del model.visual \n", + " self.model = model\n", + " # self.to(device)\n", + " \n", + " self.tokenizer = open_clip.get_tokenizer(arch)\n", + " assert torch.numel(self.tokenizer(\"test\"))\n", + " \n", + " assert max_length <= 77 # max set by the clip \n", + " self.max_length = max_length\n", + " \n", + " if freeze: self.freeze()\n", + " \n", + " self.layer = layer\n", + " if self.layer == \"last\": self.layer_idx = 0\n", + " elif self.layer == \"penultimate\": self.layer_idx = 1\n", + " else: raise NotImplementedError()\n", + "\n", + " #create empty token, can also be, e.g., A nice picture\n", + " self.empty_token = self.tokenize_and_push_to_device(\"\")\n", + " \n", + " def freeze(self, freeze: bool = True):\n", + " super().freeze(freeze=freeze)\n", + " \n", + " for param in self.model.parameters(): \n", + " param.requires_grad = not freeze\n", + " \n", + " def to(self, device):\n", + " self.model = self.model.to(device) \n", + " self.device = device\n", + " return self\n", + " \n", + " @torch.inference_mode()\n", + " def tokenize_and_push_to_device(self, text, to_device=True):\n", + " if self.njobs > 1:\n", + "\n", + " tokens_list = run_parallel_jobs(self.tokenizer, np.array_split(text, 
self.njobs), self.njobs)\n", + " tokens = torch.cat(tokens_list, dim=0)\n", + " \n", + " else:\n", + " # tokens = open_clip.tokenize(text)\n", + " tokens = self.tokenizer(text)\n", + " \n", + " if to_device:\n", + " tokens = tokens.to(self.device)\n", + " return tokens\n", + " \n", + " @torch.inference_mode()\n", + " def forward(self, c, **kwargs):\n", + " return self.encode_with_transformer(c)\n", + "\n", + " @torch.inference_mode()\n", + " def encode_with_transformer(self, text):\n", + " cast_dtype = self.model.transformer.get_cast_dtype()\n", + " \n", + " x = self.model.token_embedding(text).to(cast_dtype) # [batch_size, n_ctx, d_model] \n", + " x = x + self.model.positional_embedding[None, :x.shape[1]].to(cast_dtype)\n", + "\n", + " if not self.model.transformer.batch_first:\n", + " x = x.permute(1, 0, 2) # NLD -> LND\n", + " \n", + " x = self.text_transformer_forward(x, attn_mask=self.model.attn_mask)\n", + "\n", + " if not self.model.transformer.batch_first:\n", + " x = x.permute(1, 0, 2) # LND -> NLD\n", + " \n", + " x = self.model.ln_final(x) # [batch_size, n_ctx, transformer.width]\n", + " \n", + " return x\n", + "\n", + " @torch.inference_mode()\n", + " def text_transformer_forward(self, x: torch.Tensor, attn_mask=None):\n", + " for i, r in enumerate(self.model.transformer.resblocks):\n", + " if i == len(self.model.transformer.resblocks) - self.layer_idx:\n", + " break\n", + " #if self.model.transformer.grad_checkpointing and not torch.jit.is_scripting():\n", + " #x = checkpoint(r, x, attn_mask)\n", + " #else:\n", + " \n", + " x = r(x, attn_mask=attn_mask)\n", + " \n", + " return x\n", + "\n", + " #--------------------------------------------------------------\n", + " \n", + " def get_config(self, save_path=None, without_metadata=False):\n", + " return super().get_config(save_path=None, without_metadata=without_metadata)\n", + " \n", + " def store_model(self, config_path: str, save_path: str=None, without_metadata=False): \n", + " 
super().store_model(config_path, save_path=None, without_metadata=without_metadata)\n", + "\n", + " @staticmethod\n", + " def from_config(config, device: torch.device, save_path: str=None): \n", + " config[\"save_path\"] = None\n", + " return ConfigModel.from_config(config, device, save_path=None) " + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "c351de75-c3ac-4434-9e74-0472ad849d44", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "[INFO]: Cuda device has a capability of 8.9 (>= 8), allowing tf32 matmul.\n" + ] + } + ], + "source": [ + "device = infer_torch_device()\n", + "a = FrozenOpenCLIPEmbedder().to(device)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "55d98fe4-f697-445d-93e7-39516f2c9f6c", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "tensor([[49406, 314, 272, 267, 273, 267, 273, 316, 49407, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n", + " 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n", + " 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n", + " [49406, 314, 272, 267, 273, 267, 320, 273, 316, 49407, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n", + " 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n", + " 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]], device='cuda:0')" + ] + }, + "execution_count": null, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "p=\"[1, 2, 2]\", \"[1, 2, a 2]\"\n", + "a.tokenize_and_push_to_device(p)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "f8639dcf-bba2-4778-b57c-1bb3e6e7bda9", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "torch.Size([1, 77])" + ] + }, + "execution_count": null, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + 
"a.tokenize_and_push_to_device(\"\").shape" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "ad8a36a9-f409-49b4-aebe-9e0e3be4a7b3", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "torch.Size([2, 77])" + ] + }, + "execution_count": null, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "a.tokenize_and_push_to_device([\"1,1,2\", \"2,2,2\"]).shape" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "9bed0226-be41-462d-b4ce-7afa9001a13e", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "torch.Size([77, 77])" + ] + }, + "execution_count": null, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "a.model.attn_mask.shape" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "aa4393a9-1c98-4ae0-8cad-a096a6b24f73", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "(torch.Size([2, 77, 512]),\n", + " tensor([[[-0.3819, -0.3694, -0.0712, ..., 0.0959, -0.0834, -0.0929],\n", + " [-0.2669, 0.1847, -0.5890, ..., 0.7211, -1.7483, 1.2858],\n", + " [-0.9821, -0.6650, 0.2107, ..., -0.4223, 0.5351, 0.8494],\n", + " ...,\n", + " [-0.0300, 1.3871, 0.3989, ..., 0.2657, -0.1257, -1.3758],\n", + " [-0.0797, 1.4044, 0.3595, ..., 0.2328, -0.0766, -1.3314],\n", + " [ 0.1599, 1.5989, 0.2775, ..., 0.1202, -0.1294, -1.5480]],\n", + " \n", + " [[-0.3819, -0.3694, -0.0712, ..., 0.0959, -0.0834, -0.0929],\n", + " [-1.2507, 1.4711, 0.7264, ..., 1.1489, -0.4983, 0.4494],\n", + " [-1.2645, -0.3412, 0.9422, ..., 0.1529, 0.0271, 0.4574],\n", + " ...,\n", + " [-0.0694, 1.4021, 0.4687, ..., 0.2277, -0.0694, -1.3635],\n", + " [-0.1196, 1.4167, 0.4262, ..., 0.1955, -0.0225, -1.3245],\n", + " [ 0.1381, 1.6182, 0.3528, ..., 0.0775, -0.0853, -1.5246]]], device='cuda:0'))" + ] + }, + "execution_count": null, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "c = a.tokenize_and_push_to_device([\"1,1,2\", 
\"2,2,2\"])\n", + "enc = a(c)\n", + "enc.shape, enc" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "56415466-0a23-405d-a554-8b8be57f7df5", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "'2 , 2 , 2 !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!'" + ] + }, + "execution_count": null, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "a.tokenizer.decode(c[1].tolist())" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "ab965662-8481-4880-9b48-2f8eb5e5762e", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "'2 , 2 , 2 !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!'" + ] + }, + "execution_count": null, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "open_clip.decode(c[1])" + ] + }, + { + "cell_type": "markdown", + "id": "0d1099f7-49e3-4625-a0be-6e69b05dce91", + "metadata": {}, + "source": [ + "## Cached model" + ] + }, + { + "cell_type": "markdown", + "id": "84408ea2-4845-47df-b0ea-0fab27a33de0", + "metadata": {}, + "source": [ + "Model takes now also (batched) scalar int values that are defined to unique conditions like $[1,2,2]=4$. If input is now such int the output is the cached pre-embedded tensor. If a non int, like a token string is passed we just do the normal embedding live." 
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "b06adb4f-56f7-4ca1-bd21-fabe060eba61", + "metadata": {}, + "outputs": [], + "source": [ + "#| export\n", + "@dataclass\n", + "class CachedFrozenOpenCLIPEmbedderConfig(FrozenOpenCLIPEmbedderConfig):\n", + " enable_cache_token_limit: bool" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "b57fe509-5765-422c-a1b5-5acc153e4c9b", + "metadata": {}, + "outputs": [], + "source": [ + "#| export\n", + "class CachedFrozenOpenCLIPEmbedder(FrozenOpenCLIPEmbedder):\n", + " \"\"\"Adds caching support to `FrozenOpenCLIPEmbedder`.\"\"\"\n", + "\n", + " def __init__(self, arch=\"ViT-B-32\", version=\"datacomp_xl_s13b_b90k\", max_length=77, freeze=True, layer=\"penultimate\", enable_cache_token_limit: bool = True, **kwargs):\n", + " super().__init__(arch=arch, version=version, max_length=max_length, freeze=freeze, layer=layer, **kwargs) \n", + " self.enable_cache_token_limit = enable_cache_token_limit\n", + "\n", + " self.params_config = CachedFrozenOpenCLIPEmbedderConfig(arch, version, max_length, freeze, layer, enable_cache_token_limit)\n", + " \n", + " def get_token_count(self, tokens, padding_token=0):\n", + " # tokens .. [b, seq]\n", + " collabsed_tokens = (tokens != padding_token).to(torch.int32)\n", + " return torch.count_nonzero(collabsed_tokens, dim=-1) # [b]\n", + " \n", + " @torch.inference_mode()\n", + " def generate_cache(self, str_list: list=None, tokens=None, cached_empty_token_index=None, b_size=2048, y_on_cpu=False): \n", + " self.cached_empty_token_index = cached_empty_token_index \n", + " if exists(str_list): self.cached_tokens = self.tokenize_and_push_to_device(str_list) \n", + " elif exists(tokens): self.cached_tokens = tokens\n", + " else: raise RuntimeError(\"please provide str_list or tokens\")\n", + " \n", + " # note: we need to split the tokens in batches for forward pass, n gets large\n", + " # cached_tokens [n, 77] ... 
int\n", + " # cached_embeddings [n, 77, 512] ... float\n", + "\n", + " if self.enable_cache_token_limit:\n", + " self.max_length = self.get_token_count(self.cached_tokens).max().item()\n", + " self.params_config.max_length = self.max_length\n", + " self.params_config.enable_cache_token_limit = self.enable_cache_token_limit\n", + " print(f\"[INFO]: - `generate_cache` infered a TOKEN limit of {self.max_length}\")\n", + "\n", + " #self.cached_tokens = self.cached_tokens[:, :self.max_length]\n", + " \n", + " n = self.cached_tokens.shape[0]\n", + " \n", + " n_chunks = int(np.ceil(n / b_size))\n", + " \n", + " in_device = self.cached_tokens.device\n", + " \n", + " last_ind = 0\n", + " for i, cached_tokens in tqdm(enumerate(self.cached_tokens.chunk(n_chunks)), total=n_chunks):\n", + " \n", + " x = super().forward(cached_tokens.to(self.device)) # ... [b, seq, ch]\n", + " \n", + " if i == 0:\n", + " mem = n * x.shape[1] * x.shape[2] * x.element_size() * 1e-9\n", + " print(f\"[INFO]: caching trying to allocate memory {(n, x.shape[1], x.shape[2])} on {'cpu' if y_on_cpu else self.device}, approx. 
{mem:.3f} GB\")\n", + " self.cached_embeddings = torch.zeros((n, x.shape[1], x.shape[2]), device=\"cpu\" if y_on_cpu else self.device, dtype=x.dtype) # alloc huge memory !!\n", + " \n", + " self.cached_embeddings[last_ind:last_ind+x.shape[0]] = x.to(self.cached_embeddings.device)\n", + " \n", + " last_ind += x.shape[0]\n", + "\n", + " if self.enable_cache_token_limit:\n", + " self.cached_embeddings = self.cached_embeddings[:, :self.max_length]\n", + " \n", + " if not y_on_cpu:\n", + " self.cached_embeddings = self.cached_embeddings.to(in_device) \n", + "\n", + " @torch.inference_mode()\n", + " def look_up_cos_sim_cached_index(self, str_list: list=None, tokens=None):\n", + " if exists(str_list): tokens = self.tokenize_and_push_to_device(str_list) \n", + " else: raise RuntimeError(\"please provide str_list or tokens\")\n", + " \n", + " emb = super().forward(tokens.to(self.device))\n", + " c_emb = self.cached_embeddings\n", + " #-----------------\n", + " # do cos sim search\n", + " \n", + " emb = emb.flatten(start_dim=1) # [m, seq*ch]\n", + " c_emb = c_emb.flatten(start_dim=1) # [n, seq*ch]\n", + "\n", + " norm_emb = emb / torch.linalg.vector_norm( emb, dim=1, keepdim=True)\n", + " norm_c_emb = c_emb / torch.linalg.vector_norm(c_emb, dim=1, keepdim=True) \n", + " \n", + " sim = torch.matmul(norm_c_emb, norm_emb.T) # matmul out is [n, m]\n", + " max_idx = torch.argmax(sim, dim=0) # reduce the c_emb dim, [m]\n", + " \n", + " return max_idx \n", + " \n", + " # @torch.inference_mode()\n", + " def forward(self, c, **kwargs): \n", + " in_device = c.device\n", + " \n", + " if c.dim() == 1: c_emb = self.cached_embeddings[c.to(self.cached_embeddings.device)].to(in_device) #list of ints \n", + " elif c.dim() == 2: c_emb = super().forward(c.to(self.device)) #tokenized input \n", + " else: raise NotImplementedError(\"\")\n", + "\n", + " if self.enable_cache_token_limit:\n", + " c_emb = c_emb[:, :self.max_length]\n", + " \n", + " return c_emb" + ] + }, + { + "cell_type": "code", + 
"execution_count": null, + "id": "7311aa65-c8f2-4ffd-b176-3b0d054e59f0", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "[INFO]: - `generate_cache` infered a TOKEN limit of 7\n" + ] + }, + { + "data": { + "application/vnd.jupyter.widget-view+json": { + "model_id": "a04b9237dad34a6f85281456d8d09958", + "version_major": 2, + "version_minor": 0 + }, + "text/plain": [ + " 0%| | 0/1 [00:00 Contrastive pre-training of an unitary encoder" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "a8832bdd-f61c-44e1-8619-a9cb352ba768", + "metadata": {}, + "outputs": [], + "source": [ + "#| default_exp models.clip.unitary_clip" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "06272f6f-b4e3-4504-a90a-feebbf6ad821", + "metadata": {}, + "outputs": [], + "source": [ + "#| export\n", + "from genQC.imports import *\n", + "from genQC.models.config_model import ConfigModel\n", + "import genQC.models.transformers.attention as attn\n", + "import genQC.models.layers as layers\n", + "from genQC.models.position_encoding import LearnedPositionalEmbedding, RotaryPositionalEmbedding, RotaryPositionalEmbedding2D" + ] + }, + { + "cell_type": "markdown", + "id": "811fe91f-e18b-40f7-804d-b8ef3eb1c5d4", + "metadata": {}, + "source": [ + "## Layers" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "909ad61a-11c8-4e5f-a4d3-7dd80fce577a", + "metadata": {}, + "outputs": [], + "source": [ + "#| export\n", + "class RotaryMultiheadAttention(nn.Module):\n", + " \"\"\"\n", + " MultiheadAttention described in the paper: Attention Is All You Need (https://arxiv.org/abs/1706.03762).\n", + " We add a rotary position encoding (RoPE). \n", + "\n", + " The attention core is `F.scaled_dot_attention` from pytorch. 
\n", + " Could be switched to `https://github.com/Dao-AILab/flash-attention` or `xFormers`.\n", + " \"\"\"\n", + "\n", + " def __init__(self,\n", + " in_dim: int,\n", + " embed_dim: int, \n", + " num_heads: int, \n", + " bias: bool = True, \n", + " p_rope: float = 1.0, \n", + " max_seq_len: int = 4096, \n", + " base_rope: float = 10_000,\n", + " enable_qk_norm: bool = False) -> None:\n", + " \n", + " super().__init__()\n", + "\n", + " self.num_heads = num_heads\n", + " self.bias = bias\n", + " self.head_dim = embed_dim // num_heads \n", + "\n", + " self.q_proj = nn.Linear(in_dim, embed_dim, bias=bias)\n", + " self.k_proj = nn.Linear(in_dim, embed_dim, bias=bias)\n", + " self.v_proj = nn.Linear(in_dim, embed_dim, bias=bias)\n", + " \n", + " self.out_proj = nn.Linear(embed_dim, embed_dim, bias=bias)\n", + "\n", + " self.enable_qk_norm = enable_qk_norm\n", + " if self.enable_qk_norm:\n", + " self.q_norm = nn.RMSNorm(self.head_dim)\n", + " self.k_norm = nn.RMSNorm(self.head_dim)\n", + " \n", + " self.rope = RotaryPositionalEmbedding(head_dim=self.head_dim, p=p_rope, max_seq_len=max_seq_len, base=base_rope)\n", + " \n", + " self._init_weights()\n", + " \n", + " def _init_weights(self) -> None:\n", + " nn.init.xavier_normal_(self.q_proj.weight)\n", + " nn.init.xavier_normal_(self.k_proj.weight)\n", + " nn.init.xavier_normal_(self.v_proj.weight)\n", + " nn.init.xavier_normal_(self.out_proj.weight)\n", + "\n", + " if self.bias:\n", + " nn.init.zeros_(self.q_proj.bias)\n", + " nn.init.zeros_(self.k_proj.bias)\n", + " nn.init.zeros_(self.v_proj.bias)\n", + " nn.init.zeros_(self.out_proj.bias)\n", + "\n", + " \n", + " def forward(self, query: torch.Tensor, key: torch.Tensor, value: torch.Tensor, pos_idx: Optional[torch.Tensor] = None) -> torch.Tensor:\n", + " \"\"\"\n", + " Assumes batch first. When `pos_idx` is provided we use RoPE, else NOT!\n", + "\n", + " Shapes:\n", + " query ... [b, n1, c]\n", + " key/value ... 
[b, n2, c]\n", + " \"\"\"\n", + "\n", + " assert key.shape == value.shape\n", + " \n", + " b, n1, _ = query.shape\n", + " _, n2, _ = key.shape\n", + "\n", + " q = self.q_proj(query)\n", + " k = self.k_proj(key)\n", + " v = self.v_proj(value)\n", + "\n", + " q = q.view(b, n1, self.num_heads, self.head_dim)\n", + " k = k.view(b, n2, self.num_heads, self.head_dim)\n", + " v = v.view(b, n2, self.num_heads, self.head_dim)\n", + "\n", + " if self.enable_qk_norm:\n", + " q = self.q_norm(q)\n", + " k = self.k_norm(k)\n", + " \n", + " if exists(pos_idx):\n", + " q = self.rope(q, pos_idx=pos_idx)\n", + " k = self.rope(k, pos_idx=pos_idx)\n", + "\n", + " # scaled_dot_product_attention takes [b, num_heads, seq, head_dim]\n", + " q = q.permute((0, 2, 1, 3)) \n", + " k = k.permute((0, 2, 1, 3)) \n", + " v = v.permute((0, 2, 1, 3)) \n", + " \n", + " # see https://pytorch.org/docs/stable/generated/torch.nn.functional.scaled_dot_product_attention.html\n", + " attn = F.scaled_dot_product_attention(query=q, \n", + " key=k, \n", + " value=v, \n", + " attn_mask=None, \n", + " dropout_p=0.0,\n", + " is_causal=False, \n", + " scale=None, \n", + " #enable_gqa=False\n", + " )\n", + "\n", + " # back to [b, seq, num_heads, head_dim]\n", + " attn = attn.permute((0, 2, 1, 3)) \n", + "\n", + " # pack heads together\n", + " attn = attn.reshape(b, n1, self.num_heads * self.head_dim)\n", + " attn = self.out_proj(attn)\n", + " return attn" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "a6bd03c7-70e4-4264-8a08-b5ee2a294f8c", + "metadata": {}, + "outputs": [], + "source": [ + "#| export\n", + "class FeedForwardBlock(nn.Module):\n", + " \"\"\"\n", + " A small dense feed-forward network as used in `transformers`. 
Assumes channel last.\n", + " Inspired by https://arxiv.org/pdf/2401.11605 and added \n", + " from https://arxiv.org/pdf/2002.05202 a modification to SiGLU structure.\n", + " \"\"\"\n", + "\n", + " def __init__(self, in_dim: int, hidden_dim: int, dropout: float = 0.0) -> None:\n", + " super().__init__()\n", + " self.hidden_dim = hidden_dim\n", + " self.proj_in = nn.Linear(in_dim, 2*hidden_dim) # factor two for GLU part split\n", + " self.proj_out = nn.Linear(hidden_dim, in_dim) \n", + " self.act = nn.SiLU()\n", + " self.drop = nn.Dropout(dropout)\n", + "\n", + " self._init_weights()\n", + " \n", + " def _init_weights(self) -> None:\n", + " nn.init.zeros_(self.proj_out.bias)\n", + " # nn.init.xavier_normal_(self.proj_out.weight)\n", + " \n", + " def siglu(self, x: torch.Tensor) -> torch.Tensor:\n", + " x = self.proj_in(x) \n", + " return x[..., :self.hidden_dim] * self.act(x[..., self.hidden_dim:])\n", + "\n", + " #@torch.compile\n", + " def forward(self, x: torch.Tensor) -> torch.Tensor:\n", + " x = self.siglu(x)\n", + " x = self.drop(x)\n", + " x = self.proj_out(x)\n", + " return x" + ] + }, + { + "cell_type": "markdown", + "id": "2b7cbdb3-f4ad-49d2-9227-e34484610a06", + "metadata": {}, + "source": [ + "## Unitary-text encoder" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "0b4b123f-40cd-4321-ab5e-9427dd907396", + "metadata": {}, + "outputs": [], + "source": [ + "#| export\n", + "class UnitaryEncoderAttnBlock(nn.Module):\n", + " \"\"\"A self-attention block with 2d-RoPE.\"\"\"\n", + " \n", + " def __init__(self, \n", + " ch: int, \n", + " y_emb_size: int,\n", + " num_heads: int, \n", + " dropout: float = 0.0, \n", + " p_rope: float = 1.0, \n", + " base_rope: float = 10_000) -> None:\n", + " super().__init__()\n", + "\n", + " self.self_att = RotaryMultiheadAttention(in_dim=ch+y_emb_size, embed_dim=ch, num_heads=num_heads, p_rope=p_rope, base_rope=base_rope)\n", + " self.self_att.rope = 
RotaryPositionalEmbedding2D(head_dim=self.self_att.head_dim, p=p_rope, base=base_rope)\n", + " \n", + " self.ff = FeedForwardBlock(in_dim=ch, hidden_dim=2*ch) \n", + " self.norm_self = nn.RMSNorm(ch)\n", + " self.norm_ff = nn.RMSNorm(ch)\n", + " self.drop = nn.Dropout(dropout)\n", + " \n", + " self._init_weights()\n", + " \n", + " def _init_weights(self) -> None:\n", + "\n", + " # note a bonus of res-pos-norm is that we can init as identity!\n", + " nn.init.zeros_(self.norm_self.weight) \n", + " nn.init.zeros_(self.norm_ff.weight)\n", + "\n", + " def forward(self, x: torch.Tensor, y_emb: torch.Tensor, pos_idx: torch.Tensor) -> torch.Tensor:\n", + " \"\"\"\n", + " Assumes batch first.\n", + " \n", + " Shapes:\n", + " x ... [b, n, ch1] \n", + " y_emb ... [b, n, ch2]\n", + " pos_idx ... [b, n, 2] or [n, 2]\n", + " \"\"\"\n", + "\n", + " # Self-attention part\n", + " self_out = torch.cat([x, y_emb], dim=-1)\n", + " self_out = self.self_att(query=self_out, key=self_out, value=self_out, pos_idx=pos_idx)\n", + " self_out = self.norm_self(self_out) \n", + " self_out = self.drop(self_out) + x \n", + "\n", + " # Feed-Forward part\n", + " feed_out = self.ff(self_out)\n", + " feed_out = self.norm_ff(feed_out) \n", + " feed_out = self.drop(feed_out) + self_out \n", + " return feed_out" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "b68db894-85a3-4d58-b6c3-1f44075f7e7d", + "metadata": {}, + "outputs": [], + "source": [ + "#| export\n", + "@dataclass\n", + "class UnitaryTextEncoderConfig: \n", + " text_embed_ch: int\n", + " text_encoding_ch: int \n", + " text_attn_num_heads: int \n", + " text_attn_depth: int\n", + " \n", + " unitary_encoding_ch: int \n", + " unitary_downscale_factor: int\n", + " \n", + " main_num_heads: int\n", + " main_depth: int\n", + " \n", + " use_rope: bool\n", + " p_rope: float\n", + " base_rope: float\n", + " dropout: float" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": 
"f61c338b-4f43-4d18-a2b2-0cba26bad9a4", + "metadata": {}, + "outputs": [], + "source": [ + "#| export\n", + "class UnitaryTextEncoder(ConfigModel):\n", + " def __init__(self, \n", + " text_embed_ch: int,\n", + " text_encoding_ch: int,\n", + " text_attn_num_heads: int,\n", + " text_attn_depth: int,\n", + " unitary_encoding_ch: int,\n", + " unitary_downscale_factor: int,\n", + " main_num_heads: int,\n", + " main_depth: int,\n", + " use_rope: bool,\n", + " p_rope: float,\n", + " base_rope: float,\n", + " dropout: float) -> None: \n", + " \"\"\"\n", + " text_embed_ch ... number of channels of the input text encodings `y_emb`\n", + "\n", + " The text channels `text_encoding_ch` are concatenated with the unitary channels `unitary_encoding_ch`.\n", + " \"\"\"\n", + " super().__init__() \n", + "\n", + " self.params_config = UnitaryTextEncoderConfig(text_embed_ch=text_embed_ch,\n", + " text_encoding_ch=text_encoding_ch,\n", + " text_attn_num_heads=text_attn_num_heads,\n", + " text_attn_depth=text_attn_depth,\n", + " unitary_encoding_ch=unitary_encoding_ch,\n", + " unitary_downscale_factor=unitary_downscale_factor,\n", + " main_num_heads=main_num_heads,\n", + " main_depth=main_depth,\n", + " use_rope=use_rope,\n", + " p_rope=p_rope,\n", + " base_rope=base_rope,\n", + " dropout=dropout)\n", + " \n", + " # Text pre-process\n", + " self.text_proj = nn.Linear(text_embed_ch, text_encoding_ch)\n", + " self.text_norm = nn.RMSNorm(text_encoding_ch)\n", + " \n", + " self.text_attn_blocks = nn.ModuleList([attn.BasisSelfAttnBlock(ch=text_encoding_ch,\n", + " num_heads=text_attn_num_heads, \n", + " dropout=dropout,\n", + " batch_first=True) \n", + " for d in range(text_attn_depth)\n", + " ])\n", + " \n", + " # Unitary pre-process\n", + " self.unitary_proj = nn.Conv2d(2, unitary_encoding_ch, kernel_size=1, stride=1, padding=\"same\") \n", + " self.unitary_downscale = nn.PixelUnshuffle(unitary_downscale_factor)\n", + " self.unitary_downscale_factor = unitary_downscale_factor\n", + "\n", + 
" self.use_rope = use_rope\n", + " if not self.use_rope:\n", + " self.unitary_pos_enc = layers.PositionalEncoding2D(d_model=unitary_encoding_ch, freq_factor=1_000) \n", + " \n", + " # Main transformer\n", + " self.encoding_ch = unitary_encoding_ch * (unitary_downscale_factor**2)\n", + " \n", + " self.transformer_blocks = nn.ModuleList([UnitaryEncoderAttnBlock(ch=self.encoding_ch,\n", + " y_emb_size=text_encoding_ch,\n", + " num_heads=main_num_heads, \n", + " dropout=dropout,\n", + " p_rope=p_rope,\n", + " base_rope=base_rope) \n", + " for d in range(main_depth)\n", + " ])\n", + "\n", + " self.norm_final = nn.RMSNorm(self.encoding_ch)\n", + " \n", + " print(f\"[INFO]: Creating `UnitaryTextEncoder` with `{unitary_downscale_factor=}` and `encoding_ch={self.encoding_ch}`.\")\n", + " self._init_weights()\n", + " \n", + " def _init_weights(self) -> None:\n", + " # nn.init.xavier_normal_(self.text_proj.weight)\n", + " # nn.init.xavier_normal_(self.unitary_proj.weight)\n", + "\n", + " nn.init.zeros_(self.text_proj.bias)\n", + " nn.init.zeros_(self.unitary_proj.bias) \n", + "\n", + " def preproc_text(self, y_emb):\n", + " y_emb = self.text_proj(y_emb) # ... [batch, seq_y, text_encoding_ch]\n", + "\n", + " for text_attn_block in self.text_attn_blocks:\n", + " y_emb = text_attn_block(y_emb)\n", + "\n", + " return y_emb\n", + " \n", + " def preproc_unitary(self, U):\n", + " u_emb = self.unitary_proj(U) # ... [batch, unitary_encoding_ch, N, N]\n", + " if not self.use_rope:\n", + " u_emb = self.unitary_pos_enc(u_emb)\n", + " u_emb = self.unitary_downscale(u_emb) # ... 
[batch, unitary_encoding_ch * r^2, N/r, N/r]\n", + "\n", + " # Reshape and permute from image to sentence shape\n", + " b, ch, *_ = u_emb.shape\n", + " u_emb = torch.reshape(u_emb, (b, ch, -1)) # to [batch, unitary_encoding_ch * r^2, (N/r)^2]\n", + " u_emb = torch.permute(u_emb, (0, 2, 1)) # to [batch, (N/r)^2, unitary_encoding_ch * r^2]\n", + "\n", + " return u_emb\n", + "\n", + " def forward(self, y_emb: torch.Tensor, U: torch.Tensor, pool: bool = False, penultimate: bool = False) -> torch.Tensor: \n", + " \"\"\"\n", + " penultimate_output = False ... take all attn layers\n", + " penultimate_output = True ... skip the last attn layers\n", + " \n", + " Shapes:\n", + " y_emb ... [b, seq, text_embed_ch] \n", + " U ... [b, 2, N, N]\n", + " \"\"\"\n", + " \n", + " # Pre-process multimodial inputs\n", + " x = self.preproc_unitary(U) # ... [batch, seq_u, unitary_encoding_ch * r^2]\n", + " y_emb = self.preproc_text(y_emb) # ... [batch, seq_y, text_encoding_ch]\n", + " \n", + " y_emb = y_emb.mean(dim=1, keepdim=True) # ... [batch, 1, text_encoding_ch]\n", + " y_emb = self.text_norm(y_emb)\n", + " y_emb = y_emb.expand(x.shape[0], x.shape[1], -1) # ... [batch, seq_u, text_encoding_ch]\n", + " \n", + " # Main transformer pass\n", + " if self.use_rope:\n", + " N = U.shape[-1] // self.unitary_downscale_factor\n", + " pos = torch.arange(N).expand(N, -1)\n", + " pos_idx = torch.stack([pos.T, pos], dim=-1).reshape(-1, 2) # ... 
[seq_u, 2]\n", + " else:\n", + " pos_idx = None\n", + " \n", + " if not penultimate:\n", + " for transformer_block in self.transformer_blocks:\n", + " x = transformer_block(x, y_emb=y_emb, pos_idx=pos_idx)\n", + " \n", + " else:\n", + " for transformer_block in self.transformer_blocks[:-1]:\n", + " x = transformer_block(x, y_emb=y_emb, pos_idx=pos_idx)\n", + " \n", + " if pool: \n", + " x = torch.mean(x, dim=1) # [batch, ch] \n", + "\n", + " x = self.norm_final(x)\n", + " return x" + ] + }, + { + "cell_type": "markdown", + "id": "6e6dcc3c-6085-4930-943b-80b0cd5cd429", + "metadata": {}, + "source": [ + "## Circuit encoder" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "5d326b93-4ed2-48f2-982c-80471a428751", + "metadata": {}, + "outputs": [], + "source": [ + "#| export\n", + "class SelfAttnBlock(nn.Module):\n", + " \"\"\"A self-attention block with RoPE.\"\"\"\n", + " \n", + " def __init__(self, ch: int, num_heads: int, dropout: float = 0.0, p_rope: float = 1.0, base_rope: float = 10_000) -> None:\n", + " super().__init__()\n", + "\n", + " self.self_att = RotaryMultiheadAttention(in_dim=ch, embed_dim=ch, num_heads=num_heads, p_rope=p_rope, base_rope=base_rope)\n", + " \n", + " self.ff = FeedForwardBlock(in_dim=ch, hidden_dim=2*ch, dropout=dropout) \n", + " self.norm_self = nn.RMSNorm(ch)\n", + " self.norm_ff = nn.RMSNorm(ch)\n", + " self.drop = nn.Dropout(dropout)\n", + "\n", + " self._init_weights()\n", + " \n", + " def _init_weights(self) -> None:\n", + "\n", + " # note a bonus of res-pos-norm is that we can init as identity!\n", + " nn.init.zeros_(self.norm_self.weight) \n", + " nn.init.zeros_(self.norm_ff.weight)\n", + " \n", + " def forward(self, x: torch.Tensor, pos_idx: torch.Tensor) -> torch.Tensor:\n", + " \"\"\"\n", + " Assumes batch first.\n", + " \n", + " Shapes:\n", + " x ... [b, n, ch] \n", + " pos_idx ... 
[b, n]\n", + " \"\"\"\n", + "\n", + " # Self-attention part\n", + " self_out = x\n", + " self_out = self.self_att(query=self_out, key=self_out, value=self_out, pos_idx=pos_idx)\n", + " self_out = self.norm_self(self_out) \n", + " self_out = self.drop(self_out) + x \n", + "\n", + " # Feed-Forward part\n", + " feed_out = self.ff(self_out)\n", + " feed_out = self.norm_ff(feed_out)\n", + " feed_out = self.drop(feed_out) + self_out \n", + " return feed_out" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "1d7888f9-f229-4f75-8599-df4d0e7f202a", + "metadata": {}, + "outputs": [], + "source": [ + "#| export\n", + "class PackingTransformer(ConfigModel):\n", + " \"\"\"\n", + " The first stage packing/unpacking transformers of the CirDiT model. \n", + " Applies a RoPE for time dimension only, not on spatial dimension.\n", + " \"\"\"\n", + " \n", + " def __init__(self,\n", + " ch: int, \n", + " depth: int,\n", + " num_heads: int, \n", + " dropout: float = 0.0, \n", + " p_rope: float = 1.0, \n", + " base_rope: float = 10_000) -> None:\n", + " super().__init__()\n", + "\n", + " self.blocks = nn.ModuleList([\n", + " SelfAttnBlock(ch=ch, \n", + " num_heads=num_heads, \n", + " dropout=dropout, \n", + " p_rope=p_rope, \n", + " base_rope=base_rope)\n", + " for d in range(depth)\n", + " ])\n", + " \n", + " def forward(self, x: torch.Tensor) -> torch.Tensor:\n", + " \"\"\"\n", + " Shapes:\n", + " x ... 
[b, s, t, ch]\n", + " \"\"\"\n", + "\n", + " b, s, t, ch = x.shape\n", + "\n", + " # create pos_idx such that they only depend on the time position\n", + " pos_idx = torch.arange(t, device=x.device, dtype=torch.int32).expand(b, s, -1)\n", + " pos_idx = pos_idx.reshape(b, -1)\n", + "\n", + " # flatten spatial and time into seq\n", + " x = x.reshape(b, s*t, ch)\n", + " \n", + " for block in self.blocks:\n", + " x = block(x=x, pos_idx=pos_idx)\n", + "\n", + " # undo flatten\n", + " x = x.reshape(b, s, t, ch)\n", + " \n", + " return x" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "ef895de4-cdd0-4d4e-81d4-1c0ec3bf1341", + "metadata": {}, + "outputs": [], + "source": [ + "#| export\n", + "class CoreTransformer(nn.Module):\n", + " \"\"\"\n", + " The main transformer of the `CirDiT` model. \n", + " Applies a RoPE for time dimension.\n", + " \"\"\"\n", + "\n", + " def __init__(self,\n", + " ch: int, \n", + " depth: int,\n", + " num_heads: int, \n", + " dropout: float = 0.0, \n", + " p_rope: float = 1.0, \n", + " base_rope: float = 10_000) -> None:\n", + " super().__init__()\n", + " \n", + " self.blocks = nn.ModuleList([\n", + " SelfAttnBlock(ch=ch, \n", + " num_heads=num_heads, \n", + " dropout=dropout, \n", + " p_rope=p_rope, \n", + " base_rope=base_rope)\n", + " for d in range(depth)\n", + " ])\n", + "\n", + " def forward(self, x: torch.Tensor) -> torch.Tensor:\n", + " \"\"\"\n", + " Shapes:\n", + " x ... 
[b, t, ch]\n", + " \"\"\"\n", + " \n", + " pos_idx = torch.arange(x.shape[1], device=x.device, dtype=torch.int32) \n", + " \n", + " for block in self.blocks:\n", + " x = block(x=x, pos_idx=pos_idx)\n", + "\n", + " return x" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "4b1a77ad-c900-4a0b-8b8e-bf7a2fdddaa4", + "metadata": {}, + "outputs": [], + "source": [ + "#| export\n", + "@dataclass\n", + "class CircuitEncoderConfig:\n", + " embedder_config: dict\n", + " \n", + " ch_packing: int \n", + " ch_core: int\n", + " \n", + " depth_packing: int\n", + " depth_core: int \n", + " \n", + " num_heads_packing: int\n", + " num_heads_core: int \n", + " \n", + " dropout: float \n", + " p_rope: float\n", + " base_rope: float" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "7d534173-c74d-480d-bf36-645ed8ccce9b", + "metadata": {}, + "outputs": [], + "source": [ + "#| export\n", + "class CircuitEncoder(ConfigModel):\n", + " def __init__(self,\n", + " embedder_config: Optional[dict], \n", + " ch_packing: int, \n", + " ch_core: int,\n", + " depth_packing: int,\n", + " depth_core: int, \n", + " num_heads_packing: int,\n", + " num_heads_core: int, \n", + " dropout: float = 0.0, \n", + " p_rope: float = 1.0, \n", + " base_rope: float = 10_000,\n", + " embedder: Optional[nn.Module] = None) -> None: \n", + " super().__init__()\n", + "\n", + " if exists(embedder):\n", + " self.embedder = embedder\n", + " embedder_config = self.embedder.get_config(None)\n", + " else:\n", + " assert exists(embedder_config)\n", + "\n", + " self.params_config = CircuitEncoderConfig(embedder_config=embedder_config,\n", + " ch_packing=ch_packing, \n", + " ch_core=ch_core,\n", + " depth_packing=depth_packing,\n", + " depth_core=depth_core, \n", + " num_heads_packing=num_heads_packing,\n", + " num_heads_core=num_heads_core, \n", + " dropout=dropout, \n", + " p_rope=p_rope, \n", + " base_rope=base_rope)\n", + "\n", + " if not_exists(embedder):\n", + " self.embedder = 
ConfigModel.from_config(embedder_config, device=None, silent=True)\n", + "\n", + " self.packing = PackingTransformer(ch=ch_packing, \n", + " depth=depth_packing, \n", + " num_heads=num_heads_packing, \n", + " dropout=dropout, \n", + " p_rope=p_rope, \n", + " base_rope=base_rope)\n", + "\n", + " self.core = CoreTransformer(ch=ch_core, \n", + " depth=depth_core, \n", + " num_heads=num_heads_core, \n", + " dropout=dropout, \n", + " p_rope=p_rope, \n", + " base_rope=base_rope)\n", + "\n", + " self.encoding_ch = ch_core\n", + " \n", + " self.proj_in = nn.Linear(self.embedder.embedding_dim, ch_packing)\n", + " self.core_proj = nn.Linear(ch_packing, ch_core)\n", + "\n", + " self.norm_packing = nn.RMSNorm(ch_packing) \n", + " self.norm_core = nn.RMSNorm(ch_core) \n", + " self.norm_final = nn.RMSNorm(ch_core)\n", + " \n", + " self.qubit_pos_enc = LearnedPositionalEmbedding(dim=ch_packing, max_seq_len=64) #here max number of qubits\n", + "\n", + " self._init_weights()\n", + " \n", + " def _init_weights(self) -> None:\n", + " nn.init.orthogonal_(self.core_proj.weight)\n", + " nn.init.zeros_(self.core_proj.bias)\n", + "\n", + " def forward(self, tokens: torch.Tensor, params: torch.Tensor, pool: bool = False) -> torch.Tensor: \n", + " # Embed the circuits\n", + " x = self.embedder(h=tokens, w=params) \n", + "\n", + " # Pre-process circuit and add pos-encoding\n", + " b, s, t, _ = x.shape\n", + "\n", + " x = self.proj_in(x)\n", + " x = self.qubit_pos_enc(x)\n", + "\n", + " # Pack spatial into tokens\n", + " x = self.norm_packing(x) \n", + " x = self.packing(x=x)\n", + " \n", + " # Downsample, reduce spatial, ... 
[b, t, ch_core]\n", + " x_main = x.mean(dim=1) \n", + " x_main = self.core_proj(x_main) \n", + " \n", + " # Core transformer\n", + " x_main = self.norm_core(x_main)\n", + " x_main = self.core(x=x_main)\n", + "\n", + " if pool: \n", + " x_main = torch.mean(x_main, dim=1) # [b, ch] \n", + " \n", + " x_main = self.norm_final(x_main)\n", + " return x_main" + ] + }, + { + "cell_type": "markdown", + "id": "3bca1a5d-2c82-4e97-8181-425a38cfe5ee", + "metadata": {}, + "source": [ + "## Unitary CLIP model" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "fbd4627e-3176-475a-91f0-899c18e0f724", + "metadata": {}, + "outputs": [], + "source": [ + "#| export\n", + "@dataclass\n", + "class UnitaryCLIPConfig:\n", + " text_encoder_config: dict\n", + " clip_embed_size: int " + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "0b3deb74-e352-438b-9549-a44706596c94", + "metadata": {}, + "outputs": [], + "source": [ + "#| export\n", + "class UnitaryCLIP(ConfigModel):\n", + " \n", + " def __init__(self, \n", + " text_encoder_config: Optional[dict],\n", + " unitary_text_encoder: UnitaryTextEncoder,\n", + " circuit_encoder: CircuitEncoder,\n", + " clip_embed_size: int,\n", + " text_encoder: Optional[nn.Module] = None) -> None:\n", + " super().__init__() \n", + "\n", + " if exists(text_encoder):\n", + " self.text_encoder = text_encoder\n", + " text_encoder_config = self.text_encoder.get_config(None)\n", + " else:\n", + " assert exists(text_encoder_config)\n", + " \n", + " self.params_config = UnitaryCLIPConfig(text_encoder_config=text_encoder_config,\n", + " clip_embed_size=clip_embed_size)\n", + "\n", + " if not_exists(text_encoder):\n", + " if \"device\" in text_encoder_config:\n", + " device = text_encoder_config[\"device\"]\n", + " else:\n", + " device = \"cpu\"\n", + " \n", + " self.text_encoder = ConfigModel.from_config(text_encoder_config, device=device, silent=True)\n", + " \n", + " self.unitary_text_encoder = unitary_text_encoder\n", + " 
self.circuit_encoder = circuit_encoder\n", + "\n", + " self.unitary_text_proj = nn.Linear(self.unitary_text_encoder.encoding_ch, clip_embed_size)\n", + " self.circuit_proj = nn.Linear(self.circuit_encoder.encoding_ch , clip_embed_size) \n", + " self.temperature = torch.nn.Parameter(torch.zeros(1))\n", + " \n", + " self._init_weights()\n", + " \n", + " def _init_weights(self) -> None:\n", + " initrange = 0.1 \n", + " self.unitary_text_proj.bias.data.zero_()\n", + " self.unitary_text_proj.weight.data.uniform_(-initrange, initrange) \n", + " self.circuit_proj.bias.data.zero_()\n", + " self.circuit_proj.weight.data.uniform_(-initrange, initrange)\n", + " \n", + " def forward(self, tokens: torch.Tensor, params: torch.Tensor, y: torch.Tensor, U: torch.Tensor) -> torch.Tensor:\n", + "\n", + " y_emb = self.text_encoder(y, pool=False)\n", + " \n", + " ut_enc = self.unitary_text_encoder(y_emb=y_emb, U=U, pool=True) \n", + " ut_enc = self.unitary_text_proj(ut_enc) # out [b, embed_size]\n", + " ut_enc = F.normalize(ut_enc, dim=-1) \n", + " \n", + " #------------ \n", + " \n", + " qc_enc = self.circuit_encoder(tokens=tokens, params=params, pool=True) \n", + " qc_enc = self.circuit_proj(qc_enc) # out [b, embed_size] \n", + " qc_enc = F.normalize(qc_enc, dim=-1) \n", + " \n", + " #------------ \n", + " \n", + " scores = torch.matmul(ut_enc, qc_enc.T) * torch.exp(self.temperature) #[b, b]\n", + " \n", + " #scores is: I=unitary_text T=circuit\n", + " #--------------------------------\n", + " #| I1*T1 I1*T2 I1*T3 ...\n", + " #| I2*T1\n", + " #| I3*T1\n", + " # ...\n", + " #--------------------------------\n", + " \n", + " return scores" + ] + }, + { + "cell_type": "markdown", + "id": "f41f26a8-ac40-4e91-8c0e-1ef07a0fd4f4", + "metadata": {}, + "source": [ + "# Export -" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "a0474216-8e0c-4ba7-9a37-571ac7d8e82c", + "metadata": {}, + "outputs": [], + "source": [ + "#| hide\n", + "import nbdev; nbdev.nbdev_export()" + ] 
+ } + ], + "metadata": { + "kernelspec": { + "display_name": "python3", + "language": "python", + "name": "python3" + }, + "widgets": { + "application/vnd.jupyter.widget-state+json": { + "state": {}, + "version_major": 2, + "version_minor": 0 + } + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/src/models/config_model.ipynb b/src/models/config_model.ipynb index 4351f3e..9b57250 100644 --- a/src/models/config_model.ipynb +++ b/src/models/config_model.ipynb @@ -5,15 +5,9 @@ "id": "7883f9c7-7102-4dd1-9a85-f3d2f1e653fc", "metadata": {}, "source": [ - "# Config model" - ] - }, - { - "cell_type": "markdown", - "id": "f4e2e8f6-575f-4f5f-81f7-854ad4525351", - "metadata": {}, - "source": [ - "Model base class that handles loading and storing from/to config-files." + "# Config model\n", + "\n", + "> Model base class that handles loading and storing from/to config files." ] }, { @@ -35,8 +29,8 @@ "source": [ "#| export\n", "from genQC.imports import *\n", - "from genQC.config_loader import *\n", - "from genQC.util import *\n", + "from genQC.utils.config_loader import *\n", + "from genQC.utils.misc_utils import *\n", "from datetime import datetime" ] }, @@ -56,18 +50,40 @@ "outputs": [], "source": [ "#| export\n", - "class Config_Model(nn.Module):\n", + "class ConfigModel(nn.Module):\n", " \"\"\"A basic `nn.Module` with IO functionality.\"\"\"\n", - " def __init__(self): super().__init__()\n", " \n", - " #---------------------\n", + " def __init__(self, save_type=None): \n", + " super().__init__()\n", + " self.save_type = default(save_type, \"safetensors\")\n", " \n", + " def freeze(self, freeze: bool = True):\n", + " if freeze: self.eval()\n", + " else: self.train()\n", + "\n", + " for param in self.parameters(): \n", + " param.requires_grad = not freeze \n", + "\n", + " #Todo: add a debose/debug log here\n", + " \n", + " def unfreeze(self):\n", + " self.freeze(False)\n", + " \n", + " #---------------------\n", + "\n", + " def check_save_type(self, 
save_path):\n", + " if exists(self.save_type) and exists(save_path):\n", + " if not save_path.endswith(f\".{self.save_type}\"):\n", + " save_path += f\".{self.save_type}\"\n", + " return save_path\n", + " \n", " def get_config(self, save_path=None, without_metadata=False):\n", " if not without_metadata: \n", " config = {}\n", " config[\"target\"] = class_to_str(type(self)) \n", - " config[\"save_path\"] = save_path\n", + " config[\"save_path\"] = self.check_save_type(self.save_path) if hasattr(self, \"save_path\") and not exists(save_path) else self.check_save_type(save_path)\n", " config[\"save_datetime\"] = datetime.now().strftime(\"%m/%d/%Y %H:%M:%S\")\n", + " config[\"save_type\"] = self.save_type\n", " config[\"params\"] = self.params_config \n", " else:\n", " config = self.params_config \n", @@ -76,44 +92,85 @@ " return config\n", " \n", " def store_model(self, config_path: str=None, save_path: str=None, without_metadata=False): \n", - " \n", + " \n", " config = self.get_config(save_path, without_metadata)\n", " \n", " if exists(config_path):\n", " if without_metadata: save_dataclass_yaml(config, config_path)\n", " else : save_dict_yaml(config, config_path) \n", - " \n", + " \n", " if exists(save_path):\n", - " torch.save(self.state_dict(), save_path) \n", + " store_model_state_dict(self.state_dict(), self.check_save_type(save_path)) \n", " \n", " #---------------------\n", " \n", " @staticmethod\n", - " def from_config(config, device: torch.device, save_path: str=None): \n", + " def from_config(config, device: torch.device, save_path: str=None, verbose=True, silent=False, freeze: Optional[bool] = None): \n", " \"\"\"Use this if we have a loaded config. Maybe within other classes (e.g. 
pipeline and nested models)\"\"\"\n", + "\n", + " _config = copy.deepcopy(config)\n", " \n", - " model = instantiate_from_config(config)\n", - " model = model.to(device) \n", - " print(f\"[INFO]: `{class_to_str(type(model))}` instantiated from given config on {device}.\")\n", + " if exists(device): _config[\"device\"] = device # for loading sub-models\n", + " else: device = _config.pop(\"device\", \"cpu\")\n", + "\n", + " if exists(freeze):\n", + " _freeze = freeze\n", + " \n", + " else:\n", + " if \"is_frozen\" in _config: \n", + " _freeze = _config.pop(\"is_frozen\", None)\n", + " if not_exists(_freeze):\n", + " raise RuntimeError(f\"The `is_frozen` flag in `config` is invalid. Please provide a boolean. `is_frozen` is: {freeze}\")\n", + " else:\n", + " _freeze = True\n", + " #print(f\"[INFO]: `{class_to_str(type(model))}`. No valid `is_frozen` flag in `config`. Model is frozen by default.\")\n", " \n", + " #--------------------------------\n", + " # instantiate model\n", + " model = instantiate_from_config(_config)\n", + " model = model.to(device) \n", + " if not silent: print(f\"[INFO]: `{class_to_str(type(model))}` instantiated from given `config` on {device}.\")\n", + "\n", " #-------------------------------- \n", - " if not exists(save_path): \n", - " if \"save_path\" in config:\n", - " save_path = config[\"save_path\"]\n", + " # load pretrained weights\n", + "\n", + " model.save_type = _config.pop(\"save_type\", None)\n", + "\n", + " if exists(model.save_type):\n", + " if not exists(save_path): \n", + " if \"save_path\" in _config: \n", + " save_path = model.check_save_type(_config[\"save_path\"])\n", + " \n", + " \n", + " if exists(save_path): \n", + " state_dict = load_model_state_dict(model.check_save_type(save_path), device)\n", + " \n", + " m, u = model.load_state_dict(state_dict, strict=False)\n", + " \n", + " if len(m) + len(u) > 0 and verbose:\n", + " print(f\"[WARNING]: missing keys: {m}\")\n", + " print(f\"[WARNING]: unexpected keys: {u}\")\n", + " 
\n", " else:\n", - " print(\"[INFO]: Found no key `save_path` path in config.\")\n", - " \n", - " if exists(save_path):\n", - " model.load_state_dict(torch.load(save_path, map_location=torch.device(device).type, weights_only=True), strict=True)\n", + " if not silent: print(f\"[INFO]: `{class_to_str(type(model))}`. No `save_path` provided. Found no key `save_path` in `config`. No state dict loaded.\")\n", " else:\n", - " print(f\"[INFO]: `{class_to_str(type(model))}`. No save_path` provided. No state dict loaded.\")\n", + " if not silent: print(f\"[INFO]: `{class_to_str(type(model))}`. Found no key `save_type` in `config`. No state dict loaded.\")\n", + " \n", + " #--------------------------------\n", + " # freeze \n", + "\n", + " if exists(_freeze):\n", + " model.freeze(_freeze)\n", + " if not silent: print(f\"[INFO]: `{class_to_str(type(model))}`. Freeze model: {_freeze}\")\n", + " else:\n", + " if not silent: print(f\"[INFO]: `{class_to_str(type(model))}`. No valid `is_frozen` flag in `config`. 
Model is frozen by default.\")\n", "\n", " return model\n", " \n", " @staticmethod\n", " def from_config_file(config_path, device: torch.device, save_path: str=None): \n", " config = load_config(config_path)\n", - " return Config_Model.from_config(config, device, save_path) " + " return ConfigModel.from_config(config, device, save_path) " ] }, { @@ -141,6 +198,13 @@ "display_name": "python3", "language": "python", "name": "python3" + }, + "widgets": { + "application/vnd.jupyter.widget-state+json": { + "state": {}, + "version_major": 2, + "version_minor": 0 + } } }, "nbformat": 4, diff --git a/src/models/embedding/base_embedder.ipynb b/src/models/embedding/base_embedder.ipynb new file mode 100644 index 0000000..0f9d275 --- /dev/null +++ b/src/models/embedding/base_embedder.ipynb @@ -0,0 +1,108 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "id": "7883f9c7-7102-4dd1-9a85-f3d2f1e653fc", + "metadata": {}, + "source": [ + "# Base embedder\n", + "\n", + "> Class for base embedder." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "e71cd37f-025e-411a-b830-16f4d512c617", + "metadata": {}, + "outputs": [], + "source": [ + "#| default_exp models.embedding.base_embedder" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "7db9efd6-abbc-4256-b0d9-7f7da64d81b4", + "metadata": {}, + "outputs": [], + "source": [ + "#| export\n", + "from genQC.imports import *\n", + "from genQC.models.config_model import ConfigModel" + ] + }, + { + "cell_type": "markdown", + "id": "0b1ba64b-f053-42ba-a2a2-5b855d74d80b", + "metadata": {}, + "source": [ + "## Base embedder class" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "c245730f-dc67-4209-8ae5-d5080da0e362", + "metadata": {}, + "outputs": [], + "source": [ + "#| export\n", + "class BaseEmbedder(ConfigModel, abc.ABC):\n", + " def __init__(self) -> None: \n", + " super().__init__() \n", + "\n", + " # Note: While using DDP with huggingface-Accelerate we noticed\n", + " # the 
fixed weights didn't get synced if there is no parameter\n", + " # that requires a gradient. So we add a dummy to make sure\n", + " # all model instances/nodes have the same embedder!\n", + " self.dummy_parameter = torch.tensor(0.0)\n", + " self.dummy_parameter = nn.Parameter(self.dummy_parameter)\n", + "\n", + " def forward(self, *args, **kwargs): \n", + " return self.embed(*args, **kwargs)\n", + " \n", + " @abc.abstractmethod\n", + " def embed(self, x): pass\n", + " \n", + " @abc.abstractmethod\n", + " def invert(self, x): pass" + ] + }, + { + "cell_type": "markdown", + "id": "cdfd7536-56f8-479d-911f-3d62b427ecba", + "metadata": {}, + "source": [ + "# Export -" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "6c9f41df-ba6d-429d-a6a2-a9ff60a1ddbe", + "metadata": {}, + "outputs": [], + "source": [ + "#| hide\n", + "import nbdev; nbdev.nbdev_export()" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "python3", + "language": "python", + "name": "python3" + }, + "widgets": { + "application/vnd.jupyter.widget-state+json": { + "state": {}, + "version_major": 2, + "version_minor": 0 + } + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/src/models/embedding/rotational_preset_embedder.ipynb b/src/models/embedding/rotational_preset_embedder.ipynb new file mode 100644 index 0000000..b8b50e2 --- /dev/null +++ b/src/models/embedding/rotational_preset_embedder.ipynb @@ -0,0 +1,1437 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "id": "05a8ff60-1782-451e-9bc9-ccd9d7a03bea", + "metadata": {}, + "source": [ + "# Rotational preset embedder\n", + "\n", + "> Class for a rotational preset embedder." 
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "779dd562-2fb7-4992-8ff2-36d8a350947a", + "metadata": {}, + "outputs": [], + "source": [ + "#| default_exp models.embedding.rotational_preset_embedder" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "99334d1d-82c6-4f8a-afd7-6d63139b6d32", + "metadata": {}, + "outputs": [], + "source": [ + "#| export\n", + "from genQC.imports import *\n", + "from genQC.utils.math import gram_schmidt\n", + "from genQC.models.embedding.base_embedder import BaseEmbedder" + ] + }, + { + "cell_type": "markdown", + "id": "42d3c11c-d57f-4f9f-b6bf-0e2b873378ae", + "metadata": {}, + "source": [ + "## MultimodialEmbedder" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "5f0229fc-4437-4d3a-8cef-da668275a488", + "metadata": {}, + "outputs": [], + "source": [ + "#| export\n", + "class MultimodialEmbedder(BaseEmbedder):\n", + " \n", + " def __init__(self, zero_sum_space: bool) -> None:\n", + " super().__init__()\n", + "\n", + " self.zero_sum_space = zero_sum_space\n", + " \n", + " h_mean, h_std = torch.tensor(0.0), torch.tensor(1.0)\n", + " w_mean, w_std = torch.tensor(0.0), torch.tensor(1.0)\n", + " \n", + " self.register_buffer('h_mean', h_mean)\n", + " self.register_buffer('h_std', h_std)\n", + "\n", + " self.register_buffer('w_mean', w_mean)\n", + " self.register_buffer('w_std', w_std)\n", + "\n", + " def set_scaling(self, h: torch.Tensor, w: torch.Tensor) -> None:\n", + " self.h_mean, self.h_std = torch.tensor(0.0), torch.tensor(1.0)\n", + " self.w_mean, self.w_std = torch.tensor(0.0), torch.tensor(1.0)\n", + "\n", + " return #disbled; not needed for new emb initialization\n", + " \n", + " x = self.embed(h, w)\n", + "\n", + " if not self.channel_last:\n", + " x_h = x[:, :self.clr_dim]\n", + " x_w = x[:, self.clr_dim:]\n", + " else:\n", + " x_h = x[..., :self.clr_dim]\n", + " x_w = x[..., self.clr_dim:]\n", + " \n", + " self.h_mean, self.h_std = x_h.mean(), x_h.std()\n", + " 
self.w_mean, self.w_std = x_w.mean(), x_w.std()\n", + " \n", + " def scale_emb(self, x_emb: torch.Tensor) -> torch.Tensor:\n", + " # x_emb .. [b, ch, s, t]\n", + "\n", + " # mean\n", + " if not self.zero_sum_space:\n", + " if not self.channel_last:\n", + " x_emb[:, :self.clr_dim] -= self.h_mean\n", + " x_emb[:, self.clr_dim:] -= self.w_mean\n", + " else:\n", + " x_emb[..., :self.clr_dim] -= self.h_mean\n", + " x_emb[..., self.clr_dim:] -= self.w_mean\n", + " \n", + " # variance\n", + " if not self.channel_last:\n", + " x_emb[:, :self.clr_dim] /= self.h_std\n", + " x_emb[:, self.clr_dim:] /= self.w_std\n", + " else:\n", + " x_emb[..., :self.clr_dim] /= self.h_std\n", + " x_emb[..., self.clr_dim:] /= self.w_std\n", + " \n", + " return x_emb\n", + "\n", + " def invert_scale_emb(self, x_emb: torch.Tensor) -> torch.Tensor:\n", + " # x_emb .. [b, ch, s, t]\n", + "\n", + " # variance\n", + " if not self.channel_last:\n", + " x_emb[:, :self.clr_dim] *= self.h_std\n", + " x_emb[:, self.clr_dim:] *= self.w_std\n", + " else:\n", + " x_emb[..., :self.clr_dim] *= self.h_std\n", + " x_emb[..., self.clr_dim:] *= self.w_std\n", + "\n", + " # mean\n", + " if not self.zero_sum_space: \n", + " if not self.channel_last:\n", + " x_emb[:, :self.clr_dim] += self.h_mean\n", + " x_emb[:, self.clr_dim:] += self.w_mean\n", + " else:\n", + " x_emb[..., :self.clr_dim] += self.h_mean\n", + " x_emb[..., self.clr_dim:] += self.w_mean\n", + " \n", + " return x_emb" + ] + }, + { + "cell_type": "markdown", + "id": "9d16c463-4fc5-4e98-bcb6-bef16cb61564", + "metadata": {}, + "source": [ + "## MultimodialPresetEmbedder" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "22c7fbfb-c37e-4f47-942f-29b4433e9a52", + "metadata": {}, + "outputs": [], + "source": [ + "#| export\n", + "@dataclass\n", + "class MultimodialPresetEmbedderConfig: \n", + " clr_dim: int\n", + " num_clrs: int\n", + " params_dim: int\n", + " num_params_per_clr: int\n", + " zero_sum_space: bool\n", + " 
explicit_node_type_embeddings: bool\n", + " channel_last: bool\n", + " parametrized_tokens: Optional[list[int]] = None \n", + " unique_class_values: Optional[list[int]] = None" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "b8cc949e-c462-48ea-a9c0-2d1d3b9f43ce", + "metadata": {}, + "outputs": [], + "source": [ + "#| export\n", + "class MultimodialPresetEmbedder(MultimodialEmbedder):\n", + " \"\"\"\n", + " Embedder class for multimodial discrete and continuous data, e.g. parametrized gates/actions. \n", + " Embeddings are fixed and not trained.\n", + " \"\"\"\n", + " \n", + " def __init__(self, \n", + " clr_dim: int, \n", + " num_clrs: int, \n", + " params_dim: int, \n", + " num_params_per_clr: int, \n", + " zero_sum_space: bool,\n", + " explicit_node_type_embeddings: bool = True,\n", + " channel_last: bool = True,\n", + " parametrized_tokens: Optional[list[int]] = None,\n", + " unique_class_values: Optional[list[int]] = None) -> None:\n", + " \"\"\"\n", + " Note `explicit_node_type_embeddings` means we convert the `+-k` to all postive, but there are often unsused connection types. 
For instance, `1=H` the minus node is never used.\n", + "\n", + " To improve this and reduce the `clr_dim`, we can provide `unique_values` which are the only tokens that actually appear.\n", + " \"\"\"\n", + " super().__init__(zero_sum_space=zero_sum_space)\n", + "\n", + "\n", + " if exists(unique_class_values):\n", + " assert isinstance(unique_class_values, list)\n", + " self.unique_class_values_tensor = torch.tensor(unique_class_values)\n", + " \n", + " explicit_node_type_embeddings = False\n", + "\n", + " print(f\"[INFO]: provided `unique_class_values` ({unique_class_values}), enforcing `num_clrs=len(unique_class_values)={len(unique_class_values)}`.\")\n", + " num_clrs = len(unique_class_values)\n", + " \n", + " self.explicit_node_type_embeddings = explicit_node_type_embeddings \n", + " self.channel_last = channel_last\n", + " self.parametrized_tokens = parametrized_tokens\n", + " self.unique_class_values = unique_class_values\n", + "\n", + " if (num_params_per_clr*num_clrs) > params_dim and num_params_per_clr > 0:\n", + " print(f\"[WARNING]: For `num_params_per_clr` larger 0, we need at least a `params_dim` (is {params_dim}) of\"\n", + " f\" `num_params_per_clr*num_clrs` (is {num_params_per_clr*num_clrs}),\"\n", + " f\" automatically setting `params_dim` to {num_params_per_clr*num_clrs} to inforce this!\")\n", + " \n", + " params_dim = num_params_per_clr*num_clrs\n", + "\n", + " if self.zero_sum_space and ((num_params_per_clr*num_clrs) + 1) > params_dim and num_params_per_clr > 0:\n", + " print(f\"[WARNING]: `params_dim` is set to the minimum `num_params_per_clr*num_clrs`={num_params_per_clr*num_clrs},\"\n", + " f\" but for `{zero_sum_space=}` we need one more dimension, automatically setting it to\"\n", + " f\" `num_params_per_clr*num_clrs+1` {num_params_per_clr*num_clrs+1}.\")\n", + " \n", + " params_dim = num_params_per_clr*num_clrs + 1\n", + " \n", + " if self.zero_sum_space: \n", + " if self.explicit_node_type_embeddings and ((num_clrs*2 - 2) + 1) > 
clr_dim:\n", + " print(f\"[WARNING]: `clr_dim` is set to {clr_dim} and `{explicit_node_type_embeddings=}`,\"\n", + " f\" but for `{zero_sum_space=}` we need one more dimension than the number of tokens `(num_clrs*2 - 2)` (is {(num_clrs*2 - 2)}),\"\n", + " f\" automatically setting it to `clr_dim=(num_clrs*2 - 2) + 1` {(num_clrs*2 - 2) + 1}.\")\n", + "\n", + " # has empty and padd tokens, these only have the plus branch (so -2)!\n", + " clr_dim = (num_clrs*2 - 2) + 1\n", + "\n", + " elif (num_clrs + 1) > clr_dim:\n", + " print(f\"[WARNING]: `clr_dim` is set to {clr_dim} and `{explicit_node_type_embeddings=}`,\"\n", + " f\" but for `{zero_sum_space=}` we need one more dimension than the number of tokens `num_clrs` (is {num_clrs}),\"\n", + " f\" automatically setting it to `clr_dim=num_clrs+1` {num_clrs+1}.\")\n", + " \n", + " clr_dim = num_clrs + 1\n", + " \n", + " self.clr_dim = clr_dim\n", + " self.num_clrs = num_clrs\n", + " self.params_dim = params_dim\n", + " self.num_params_per_clr = num_params_per_clr\n", + " \n", + " self._num_discrete_embeddings = self.num_clrs\n", + " self._num_param_embeddings = self.num_params_per_clr * self.num_clrs\n", + " self.embedding_dim = self.clr_dim + self.params_dim\n", + "\n", + " if self.explicit_node_type_embeddings:\n", + " # use distinct embeddings for +-k and not just +-v\n", + " # has empty and padd tokens, these only have the plus branch (so -2)!\n", + " self._num_discrete_embeddings = self.num_clrs*2 - 2\n", + " \n", + " self.num_embeddings = self._num_discrete_embeddings + self._num_param_embeddings\n", + " self.emb_clr = nn.Embedding(num_embeddings=self.num_embeddings, embedding_dim=self.embedding_dim) \n", + " print(f\"[INFO]: Created `nn.Embedding` with a total of {self.num_embeddings} vectors in a {self.embedding_dim} dimensional space.\")\n", + " \n", + " self.params_config = MultimodialPresetEmbedderConfig(clr_dim=self.clr_dim, \n", + " num_clrs=self.num_clrs, \n", + " params_dim=self.params_dim, \n", + " 
num_params_per_clr=self.num_params_per_clr,\n", + " zero_sum_space=self.zero_sum_space,\n", + " explicit_node_type_embeddings=self.explicit_node_type_embeddings,\n", + " channel_last=self.channel_last,\n", + " parametrized_tokens=self.parametrized_tokens)\n", + " \n", + " self._init_weights(zero_sum_space=self.zero_sum_space)\n", + " \n", + " def _init_weights(self, zero_sum_space) -> None:\n", + " self.emb_clr.weight.requires_grad = False\n", + " \n", + " _dtype = self.emb_clr.weight.dtype\n", + " self.emb_clr = self.emb_clr.to(torch.float64)\n", + " \n", + " # keep spaces ortho with clr\n", + " self.emb_clr.weight.data.zero_()\n", + " nn.init.orthogonal_(self.emb_clr.weight.data[:self._num_discrete_embeddings, :self.clr_dim])\n", + " nn.init.orthogonal_(self.emb_clr.weight.data[self._num_discrete_embeddings:, self.clr_dim:])\n", + "\n", + " if zero_sum_space:\n", + " assert self._num_discrete_embeddings < self.clr_dim, f\"{self._num_discrete_embeddings} < {self.clr_dim}\"\n", + " if self._num_param_embeddings > 0:\n", + " assert self._num_param_embeddings < self.params_dim, f\"{self._num_param_embeddings} < {self.params_dim}\"\n", + " \n", + " # Convert to zero-sum space\n", + " self.emb_clr.weight.data[:self._num_discrete_embeddings, :self.clr_dim] -= torch.mean(self.emb_clr.weight.data[:self._num_discrete_embeddings, :self.clr_dim], dim=-1, keepdim=True) \n", + " if self._num_param_embeddings > 0:\n", + " self.emb_clr.weight.data[self._num_discrete_embeddings:, self.clr_dim:] -= torch.mean(self.emb_clr.weight.data[self._num_discrete_embeddings:, self.clr_dim:], dim=-1, keepdim=True) \n", + "\n", + " # Orthonormalization that conserves zero-sum space\n", + " self.emb_clr.weight.data[:self._num_discrete_embeddings, :self.clr_dim] = gram_schmidt(self.emb_clr.weight.data[:self._num_discrete_embeddings, :self.clr_dim])\n", + " if self._num_param_embeddings > 0:\n", + " self.emb_clr.weight.data[self._num_discrete_embeddings:, self.clr_dim:] = 
gram_schmidt(self.emb_clr.weight.data[self._num_discrete_embeddings:, self.clr_dim:])\n", + " \n", + " self.emb_clr.weight.data[:self._num_discrete_embeddings, :self.clr_dim] /= torch.std(self.emb_clr.weight.data[:self._num_discrete_embeddings, :self.clr_dim], dim=-1, keepdim=True, correction=0)\n", + " if self._num_param_embeddings > 0:\n", + " self.emb_clr.weight.data[self._num_discrete_embeddings:, self.clr_dim:] /= torch.std(self.emb_clr.weight.data[self._num_discrete_embeddings:, self.clr_dim:], dim=-1, keepdim=True, correction=0) \n", + " \n", + " self.emb_clr = self.emb_clr.to(_dtype)\n", + " \n", + " def print_emb_matrix(self) -> None:\n", + " print(self.emb_clr.weight.data)\n", + "\n", + " #-----------------------------------------------\n", + "\n", + " def tokens_to_unique_class_values(self, x: torch.Tensor) -> torch.Tensor:\n", + " if exists(self.unique_class_values):\n", + " self.unique_class_values_tensor = self.unique_class_values_tensor.to(x.device)\n", + " return torch.searchsorted(self.unique_class_values_tensor, x)\n", + " return x\n", + "\n", + " def unique_class_values_to_tokens(self, x: torch.Tensor) -> torch.Tensor:\n", + " if exists(self.unique_class_values):\n", + " self.unique_class_values_tensor = self.unique_class_values_tensor.to(x.device)\n", + " return self.unique_class_values_tensor[x]\n", + " return x\n", + " \n", + " #-----------------------------------------------\n", + "\n", + " def embed_discrete(self, h: torch.Tensor) -> torch.Tensor:\n", + "\n", + " if self.unique_class_values:\n", + " # tokens are already correct\n", + " tokens = h \n", + " x_emb = self.emb_clr(tokens)\n", + " \n", + " elif self.explicit_node_type_embeddings:\n", + " # e.g. 
num_clrs=4: [-2, -1, zero, 1, 2, padd] to all positive [0, 1, 2 (zero), 3, 4, 5 (padd)]\n", + " tokens = h \n", + " x_emb = self.emb_clr(tokens + (self.num_clrs-2))\n", + " \n", + " else:\n", + " sign = torch.sign(h + 0.1) #trick: add 0.1 so that the sign of 0 is +1, else the 0 token would be all 0s. \n", + " tokens = torch.abs(h)\n", + " \n", + " x_emb = self.emb_clr(tokens) \n", + " x_emb = x_emb * sign.unsqueeze(-1) # [b, s, t, ch]\n", + " \n", + " return x_emb, tokens\n", + " \n", + "\n", + " def embed(self, h: torch.Tensor, w: torch.Tensor) -> torch.Tensor: \n", + " \"\"\"\n", + " sample from p(x0|h, w)\n", + " h discrete\n", + " w cont\n", + " \"\"\"\n", + "\n", + " x_emb, tokens = self.embed_discrete(h)\n", + "\n", + " v_p = self.embed_continuous(w, tokens) \n", + " x_emb += v_p \n", + "\n", + " if not self.channel_last: \n", + " # contiguous important for multi-node cluster \n", + " x_emb = torch.permute(x_emb, (0, 3, 1, 2)).contiguous() # to [b, ch, s, t]\n", + " \n", + " return x_emb\n", + " \n", + " #-----------------------------------------------\n", + "\n", + " def get_discrete_sim(self, x: torch.Tensor) -> torch.Tensor:\n", + " #collaps clr to gate ... 
use cos sim\n", + " \n", + " clrs = self.emb_clr.weight.detach()[:self._num_discrete_embeddings] # is [clr_num, clr_dim]\n", + " \n", + " model_device = clrs.device\n", + " x = x.to(model_device)\n", + " \n", + " # to shape [b*space*time, clr_dim]\n", + " x_flat = x.reshape(-1, x.shape[-1])\n", + " \n", + " #normalize for cos sim \n", + " norm_clr = F.normalize( clrs[:, :self.clr_dim], dim=1) #clrs / torch.linalg.vector_norm( clrs, dim=1, keepdim=True) #torch.linalg.vector_norm( clrs[:, :self.clr_dim], dim=1, keepdim=True) \n", + " norm_x_flat = F.normalize(x_flat[:, :self.clr_dim], dim=1) #x_flat / torch.linalg.vector_norm(x_flat, dim=1, keepdim=True) #torch.linalg.vector_norm(x_flat[:, :self.clr_dim], dim=1, keepdim=True) \n", + " \n", + " #matmul out is [clr_num, b*space*time] = [clr_num, clr_dim] x [b*space*time, clr_dim].T\n", + " sim = torch.matmul(norm_clr, norm_x_flat.T) \n", + "\n", + " return sim\n", + "\n", + " @torch.inference_mode()\n", + " def invert_discrete(self, x: torch.Tensor, return_sim: bool = False, finite_temperature: bool = False) -> torch.Tensor:\n", + " #collaps clr to gate ... 
use cos sim\n", + " \n", + " input_device = x.device\n", + "\n", + " if not self.channel_last: \n", + " x = x.permute(0, 2, 3, 1)\n", + " \n", + " #sim out is [clr_num, b*space*time]\n", + " sim = self.get_discrete_sim(x)\n", + "\n", + " if self.explicit_node_type_embeddings or self.unique_class_values:\n", + " #get highest similarity\n", + " if finite_temperature:\n", + " _cat = torch.distributions.categorical.Categorical(logits=sim.transpose(-1, -2))\n", + " scores_flat = _cat.sample()\n", + " else:\n", + " scores_flat = torch.argmax(sim, dim=0) #reduce the clr_num dim\n", + "\n", + " if self.explicit_node_type_embeddings:\n", + " scores_flat = scores_flat - (self.num_clrs-2)\n", + " \n", + " else:\n", + " #get highest abs(similarity) and sign of it\n", + " abs_sim = sim.abs()\n", + " \n", + " if finite_temperature:\n", + " _cat = torch.distributions.categorical.Categorical(logits=abs_sim.transpose(-1, -2))\n", + " max_idx = _cat.sample()\n", + " else:\n", + " max_idx = torch.argmax(abs_sim, dim=0) #reduce the clr_num dim\n", + " \n", + " sign = torch.sign(sim[max_idx, torch.arange(x_flat.shape[0])])\n", + " scores_flat = max_idx * sign\n", + "\n", + " # back to [b, space, time]\n", + " scores = scores_flat.reshape(x.shape[0], x.shape[1], x.shape[2]).to(torch.int64) \n", + " scores = scores.to(input_device)\n", + "\n", + " if return_sim:\n", + " return scores, sim\n", + " return scores\n", + "\n", + " @torch.inference_mode()\n", + " def invert(self, x: torch.Tensor, reduce_spatial: bool = True) -> torch.Tensor: \n", + " \"\"\"sample from p(h, w|x0)\"\"\"\n", + "\n", + " pred_tokens = self.invert_discrete(x)\n", + " pred_params = self.invert_continuous(x, pred_tokens, reduce_spatial=reduce_spatial)\n", + "\n", + " pred_tokens = self.unique_class_values_to_tokens(pred_tokens)\n", + " \n", + " return pred_tokens, pred_params\n", + "\n", + " #-----------------------------------------------\n", + "\n", + " def _prepare_params(self, tokens: torch.Tensor, w: 
torch.Tensor) -> torch.Tensor:\n", + " tokens = tokens.abs()\n", + "\n", + " # w ... [b, nP, s or 1, t]\n", + " \n", + " if self.parametrized_tokens:\n", + " # Force all non parameterized embeddings to all zero or random lambdas !\n", + " pmask = self.get_parametrized_mask(tokens).unsqueeze(1) # [b, 1, s, t] \n", + " rnd_w = torch.zeros((w.shape[0], w.shape[1], pmask.shape[2], w.shape[3]), device=w.device) \n", + " w_m = torch.where(pmask, w, rnd_w)\n", + " \n", + " else:\n", + " # this does not include padding tokens!\n", + " pmask = (tokens > 0).unsqueeze(1)\n", + " w_m = torch.where(pmask, w, 0.0) # ... [b, nP, s, t]\n", + " \n", + " return w_m\n", + "\n", + " def _reduce_params_spatial(self, tokens: torch.Tensor, params: torch.Tensor) -> torch.Tensor:\n", + " tokens = tokens.abs()\n", + "\n", + " if self.parametrized_tokens:\n", + " #check if not param gate\n", + " mask = self.get_parametrized_mask(tokens).unsqueeze(1).float() # ... [b, 1, s, t]\n", + " else:\n", + " #check if not empty token\n", + " mask = (tokens > 0).unsqueeze(1).float() # ... [b, 1, s, t]\n", + "\n", + " # to catch all zero tokens at t, compute how many we have per timestep\n", + " red_mask = mask.sum(-2) # ... [b, 1, t]\n", + " red_mask = torch.where(red_mask > 0.0, red_mask, 1.0)\n", + " \n", + " params = (params*mask).sum(-2) / red_mask # ... 
[b, nP, s, t] to [b, nP, t] average over s, ignore masked positions \n", + " return params\n", + "\n", + " def get_parametrized_mask(self, tokens: torch.Tensor) -> torch.Tensor:\n", + "\n", + " parametrized_tokens = torch.tensor(self.parametrized_tokens, device=tokens.device) \n", + " \n", + " if exists(self.unique_class_values):\n", + " parametrized_tokens = self.tokens_to_unique_class_values(parametrized_tokens)\n", + " \n", + " pmask = torch.isin(tokens.abs(), parametrized_tokens) \n", + " \n", + " return pmask" + ] + }, + { + "cell_type": "markdown", + "id": "53a57b59-09d1-48ae-b846-99f4129b0c70", + "metadata": {}, + "source": [ + "### RotationalMultimodialPresetEmbedder" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "a6af141a-7caa-49ad-a0c0-96657c570b1e", + "metadata": {}, + "outputs": [], + "source": [ + "#| export\n", + "class RotationalMultimodialPresetEmbedder(MultimodialPresetEmbedder):\n", + " \n", + " def __init__(self, \n", + " clr_dim: int, \n", + " num_clrs: int, \n", + " params_dim: int, \n", + " num_params_per_clr: int, \n", + " zero_sum_space: bool,\n", + " explicit_node_type_embeddings: bool = True,\n", + " channel_last: bool = True,\n", + " parametrized_tokens: Optional[list[int]] = None,\n", + " unique_class_values: Optional[list[int]] = None\n", + " ) -> None:\n", + "\n", + " self.channel_last = channel_last\n", + " self.parametrized_tokens = parametrized_tokens\n", + " \n", + " if (2*num_params_per_clr*num_clrs) > params_dim and num_params_per_clr > 0:\n", + " print(f\"[WARNING]: We need at least a `params_dim` (is {params_dim}) of `2*num_params_per_clr*num_clrs` (is {2*num_params_per_clr*num_clrs}),\"\n", + " f\" automatically setting `params_dim` to {2*num_params_per_clr*num_clrs} to inforce this!\")\n", + " \n", + " params_dim = 2*num_params_per_clr*num_clrs\n", + "\n", + " if zero_sum_space and (2*num_params_per_clr*num_clrs+1) > params_dim and num_params_per_clr > 0:\n", + " print(f\"[WARNING]: `params_dim` is 
set to the minimum `2*num_params_per_clr*num_clrs`={2*num_params_per_clr*num_clrs},\"\n", + " f\" but for `{zero_sum_space=}` we need one more dimension, automatically setting it to\"\n", + " f\" `2*num_params_per_clr*num_clrs+1` {2*num_params_per_clr*num_clrs+1}.\")\n", + " \n", + " params_dim = 2*num_params_per_clr*num_clrs + 1\n", + " \n", + " super().__init__(clr_dim=clr_dim,\n", + " num_clrs=num_clrs,\n", + " params_dim=params_dim,\n", + " num_params_per_clr=2*num_params_per_clr, # pass factor 2 to create more embeddings for cos-sin encoding\n", + " zero_sum_space=zero_sum_space,\n", + " explicit_node_type_embeddings=explicit_node_type_embeddings,\n", + " channel_last=channel_last,\n", + " parametrized_tokens=parametrized_tokens,\n", + " unique_class_values=unique_class_values) \n", + "\n", + " self.num_params_per_clr = num_params_per_clr # remove the factor 2\n", + " self._num_param_embeddings = self.num_params_per_clr * self.num_clrs \n", + " self.nP = num_params_per_clr\n", + "\n", + " self.params_config = MultimodialPresetEmbedderConfig(clr_dim=self.clr_dim, \n", + " num_clrs=self.num_clrs, \n", + " params_dim=self.params_dim, \n", + " num_params_per_clr=self.num_params_per_clr,\n", + " zero_sum_space=self.zero_sum_space,\n", + " explicit_node_type_embeddings=self.explicit_node_type_embeddings,\n", + " channel_last=self.channel_last,\n", + " parametrized_tokens=self.parametrized_tokens,\n", + " unique_class_values=self.unique_class_values)\n", + "\n", + " \n", + " def embed_continuous(self, w: torch.Tensor, tokens: torch.Tensor) -> torch.Tensor:\n", + " # take care that v_empty stays that! not apply params to all bits only to a [s,t] pos\n", + " # params ... [b, nP, t]\n", + " # w ... qc=[b, nP, t] mbqc=[b, nP, s, t]\n", + "\n", + " tokens = tokens.abs()\n", + " \n", + " if w.dim() == 3:\n", + " w = w.unsqueeze(2) # to [b, nP, 1, t]\n", + "\n", + "\n", + " w_m = self._prepare_params(tokens, w)\n", + " \n", + " w_m = w_m.unsqueeze(-1) # ... 
[b, nP, s, t, 1]\n", + " w_m = w_m * torch.pi # [-1, 1] to [-pi, pi]\n", + "\n", + " # first pick starting points of indices\n", + " # then add a numerator for all the number of paramters\n", + " # then add a numerator for cos-sin vectors\n", + " \n", + " #Note: .view(-1, 1, 1) introduces some numeric variances in 1e-07 range, but should be faster!\n", + " indices = self._num_discrete_embeddings + tokens * self.nP * 2 # ... [b, s, t] \n", + " indices = indices.unsqueeze(1) + torch.arange(self.nP, device=indices.device).view(-1, 1, 1) * 2 # ... [b, nP, s, t]\n", + " indices = indices.unsqueeze(1) + torch.arange(2, device=indices.device).view(-1, 1, 1, 1) # ... [b, 2, nP, s, t] \n", + " p_clrs = self.emb_clr(indices).contiguous() # ... [b, 2, nP, s, t, ch]\n", + " \n", + " v_p = torch.cos(w_m)*p_clrs[:, 0] + torch.sin(w_m)*p_clrs[:, 1] # ... [b, nP, s, t, ch]\n", + " v_p = torch.sum(v_p, dim=1) # ... [b, s, t, ch]\n", + "\n", + " return v_p\n", + "\n", + " @torch.inference_mode()\n", + " def invert_continuous(self, x: torch.Tensor, tokens: torch.Tensor, reduce_spatial: bool = True) -> torch.Tensor:\n", + " \"\"\"reduce_spatial=True for circuits, False for mbqc\"\"\"\n", + " \n", + " model_device = self.emb_clr.weight.device\n", + " input_device = x.device\n", + "\n", + " if not self.channel_last:\n", + " x = x.permute(0, 2, 3, 1) # to [b, s, t, ch]\n", + " x = x.unsqueeze(1).unsqueeze(1) # to [b, 1, 1, s, t, ch]\n", + " \n", + " x = x.to(model_device) \n", + " tokens = tokens.to(model_device).abs()\n", + "\n", + " #-----\n", + " # params should [b, nP, max_gates]\n", + " # x ... [b, ch, s, t] \n", + " # tokens ... [b, , s, t] \n", + "\n", + " #Note: .view(-1, 1, 1) introduces some numeric variances in 1e-07 range, but should be faster!\n", + " indices = self._num_discrete_embeddings + tokens * self.nP * 2 # ... [b, s, t] \n", + " indices = indices.unsqueeze(1) + torch.arange(self.nP, device=indices.device).view(-1, 1, 1) * 2 # ... 
[b, nP, s, t]\n", + " indices = indices.unsqueeze(1) + torch.arange(2, device=indices.device).view(-1, 1, 1, 1) # ... [b, 2, nP, s, t] \n", + " p_clrs = self.emb_clr(indices).contiguous() # ... [b, 2, nP, s, t, ch]\n", + "\n", + " overlaps = (x * p_clrs).sum(-1) # ... [b, 2, nP, s, t]\n", + " params = torch.arctan2(overlaps[:, 1], overlaps[:, 0]) # ... [b, nP, s, t]\n", + " params = params / torch.pi # [-pi, pi] to [-1, 1]\n", + " \n", + " # now reduce spatial s, average over non empty token s\n", + " if reduce_spatial:\n", + " params = self._reduce_params_spatial(tokens, params)\n", + " \n", + " return params.to(input_device)" + ] + }, + { + "cell_type": "markdown", + "id": "14d1bda6-a435-4e67-9ae3-72f5daad9c83", + "metadata": {}, + "source": [ + "### RotationalMultimodialPresetEmbedderTiny" + ] + }, + { + "cell_type": "markdown", + "id": "472a5ae9-a178-420f-9967-3afc2db95e95", + "metadata": {}, + "source": [ + "Has the same logic as `RotationalMultimodialPresetEmbedder`, but uses the same parameter vector-subspace for all tokens. This makes the parameter embeddings the same for all tokens while reducing the dimesionality of the embeddings, i.e. it is independent of the number of tokens." 
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "86651b06-97c3-491b-b3dc-88a635753351", + "metadata": {}, + "outputs": [], + "source": [ + "#| export\n", + "class RotationalMultimodialPresetEmbedderTiny(MultimodialPresetEmbedder):\n", + " \"\"\"Mostly the same as `RotationalMultimodialPresetEmbedder`, but the param embedding is not depending on the tokens.\"\"\"\n", + " \n", + " def __init__(self, \n", + " clr_dim: int, \n", + " num_clrs: int,\n", + " params_dim: int, \n", + " num_params_per_clr: int, \n", + " zero_sum_space: bool,\n", + " explicit_node_type_embeddings: bool = True,\n", + " channel_last: bool = True,\n", + " parametrized_tokens: Optional[list[int]] = None,\n", + " unique_class_values: Optional[list[int]] = None\n", + " ) -> None:\n", + " super(MultimodialPresetEmbedder, self).__init__(zero_sum_space=zero_sum_space) # call grandparent class\n", + "\n", + " if exists(unique_class_values):\n", + " assert isinstance(unique_class_values, list)\n", + " self.unique_class_values_tensor = torch.tensor(unique_class_values)\n", + " \n", + " explicit_node_type_embeddings = False\n", + "\n", + " print(f\"[INFO]: provided `unique_class_values` ({unique_class_values}), enforcing `num_clrs=len(unique_class_values)={len(unique_class_values)}`.\")\n", + " num_clrs = len(unique_class_values)\n", + " \n", + " self.zero_sum_space = zero_sum_space\n", + " self.explicit_node_type_embeddings = explicit_node_type_embeddings \n", + " self.channel_last = channel_last\n", + " self.parametrized_tokens = parametrized_tokens\n", + " self.unique_class_values = unique_class_values\n", + " # assert exists(parametrized_tokens)\n", + "\n", + " if (2*num_params_per_clr) > params_dim and num_params_per_clr > 0:\n", + " print(f\"[WARNING]: We need at least a `params_dim` (is {params_dim}) of `2*num_params_per_clr` (is {2*num_params_per_clr}),\"\n", + " f\" automatically setting `params_dim` to {2*num_params_per_clr} to inforce this!\")\n", + " \n", + " params_dim 
= 2*num_params_per_clr\n", + "\n", + " if self.zero_sum_space and (2*num_params_per_clr+1) > params_dim and num_params_per_clr > 0:\n", + " print(f\"[WARNING]: `params_dim` is set to the minimum `2*num_params_per_clr`={2*num_params_per_clr},\"\n", + " f\" but for `{zero_sum_space=}` we need one more dimension, automatically setting it to\"\n", + " f\" `2*num_params_per_clr+1` {2*num_params_per_clr+1}.\")\n", + " \n", + " params_dim = 2*num_params_per_clr + 1\n", + " \n", + " if self.zero_sum_space:\n", + " if self.explicit_node_type_embeddings and ((num_clrs*2 - 2) + 1) > clr_dim:\n", + " print(f\"[WARNING]: `clr_dim` is set to {clr_dim} and `{explicit_node_type_embeddings=}`,\"\n", + " f\" but for `{zero_sum_space=}` we need one more dimension than the number of tokens `(num_clrs*2 - 2)` (is {(num_clrs*2 - 2)}),\"\n", + " f\" automatically setting it to `clr_dim=(num_clrs*2 - 2) + 1` {(num_clrs*2 - 2) + 1}.\")\n", + "\n", + " # has empty and padd tokens, these only have the plus branch (so -2)!\n", + " clr_dim = (num_clrs*2 - 2) + 1\n", + "\n", + " elif (num_clrs + 1) > clr_dim:\n", + " print(f\"[WARNING]: `clr_dim` is set to {clr_dim} and `{explicit_node_type_embeddings=}`,\"\n", + " f\" but for `{zero_sum_space=}` we need one more dimension than the number of tokens `num_clrs` (is {num_clrs}),\"\n", + " f\" automatically setting it to `clr_dim=num_clrs+1` {num_clrs+1}.\")\n", + " \n", + " clr_dim = num_clrs + 1\n", + " \n", + " self.clr_dim = clr_dim\n", + " self.num_clrs = num_clrs\n", + " self.params_dim = params_dim\n", + " self.num_params_per_clr = num_params_per_clr\n", + " self.nP = num_params_per_clr\n", + " \n", + " self._num_discrete_embeddings = self.num_clrs\n", + " self._num_param_embeddings = self.num_params_per_clr * 2\n", + " self.embedding_dim = self.clr_dim + self.params_dim\n", + " \n", + " if self.explicit_node_type_embeddings:\n", + " # use distinct embeddings for +-k and not just +-v\n", + " # has empty and padd tokens, these only have the 
plus branch (so -2)!\n", + " self._num_discrete_embeddings = self.num_clrs*2 - 2\n", + " \n", + " self.num_embeddings = self._num_discrete_embeddings + self._num_param_embeddings \n", + " self.emb_clr = nn.Embedding(num_embeddings=self.num_embeddings, embedding_dim=self.embedding_dim) \n", + " print(f\"[INFO]: Created `nn.Embedding` with a total of {self.num_embeddings} vectors in a {self.embedding_dim} dimensional space.\")\n", + " \n", + " self.params_config = MultimodialPresetEmbedderConfig(clr_dim=self.clr_dim, \n", + " num_clrs=self.num_clrs, \n", + " params_dim=self.params_dim, \n", + " num_params_per_clr=self.num_params_per_clr,\n", + " zero_sum_space=self.zero_sum_space,\n", + " explicit_node_type_embeddings=self.explicit_node_type_embeddings,\n", + " channel_last=self.channel_last,\n", + " parametrized_tokens=self.parametrized_tokens,\n", + " unique_class_values=self.unique_class_values)\n", + " \n", + " self._init_weights(zero_sum_space=self.zero_sum_space)\n", + "\n", + " def embed_continuous(self, w: torch.Tensor, tokens: torch.Tensor) -> torch.Tensor:\n", + " # take care that v_empty stays that! not apply params to all bits only to a [s,t] pos\n", + " # params ... [b, nP, t]\n", + " # w ... qc=[b, nP, t] mbqc=[b, nP, s, t]\n", + "\n", + " tokens = tokens.abs()\n", + " \n", + " if w.dim() == 3:\n", + " w = w.unsqueeze(2) # to [b, nP, 1, t]\n", + "\n", + " w_m = self._prepare_params(tokens, w)\n", + " \n", + " w_m = w_m.unsqueeze(-1) # ... [b, nP, s, t, 1]\n", + " w_m = w_m * torch.pi # [-1, 1] to [-pi, pi]\n", + "\n", + " # first pick starting points of indices\n", + " # then add a numerator for all the number of paramters\n", + " # then add a numerator for cos-sin vectors\n", + " \n", + " #Note: .view(-1, 1, 1) introduces some numeric variances in 1e-07 range, but should be faster!\n", + " indices = torch.full_like(tokens, self._num_discrete_embeddings) #+ 0 * tokens * self.nP * 2 # ... 
[b, s, t] \n", + " indices = indices.unsqueeze(1) + torch.arange(self.nP, device=indices.device).view(-1, 1, 1) * 2 # ... [b, nP, s, t]\n", + " indices = indices.unsqueeze(1) + torch.arange(2, device=indices.device).view(-1, 1, 1, 1) # ... [b, 2, nP, s, t] \n", + " p_clrs = self.emb_clr(indices).contiguous() # ... [b, 2, nP, s, t, ch]\n", + "\n", + " # This cos-sin combination conserves mean and variance of the embeddings\n", + " v_p = torch.cos(w_m)*p_clrs[:, 0] + torch.sin(w_m)*p_clrs[:, 1] # ... [b, nP, s, t, ch]\n", + " v_p = torch.sum(v_p, dim=1) # ... [b, s, t, ch]\n", + "\n", + " return v_p\n", + "\n", + " @torch.inference_mode()\n", + " def invert_continuous(self, x: torch.Tensor, tokens: torch.Tensor, reduce_spatial: bool = True) -> torch.Tensor:\n", + " \"\"\"reduce_spatial=True for circuits, False for mbqc\"\"\"\n", + " \n", + " model_device = self.emb_clr.weight.device\n", + " input_device = x.device\n", + "\n", + " if not self.channel_last:\n", + " x = x.permute(0, 2, 3, 1) # to [b, s, t, ch]\n", + " x = x.unsqueeze(1).unsqueeze(1) # to [b, 1, 1, s, t, ch]\n", + " \n", + " x = x.to(model_device) \n", + " tokens = tokens.to(model_device).abs()\n", + "\n", + " #-----\n", + " # params should [b, nP, max_gates]\n", + " # x ... [b, ch, s, t] \n", + " # tokens ... [b, , s, t] \n", + "\n", + " #Note: .view(-1, 1, 1) introduces some numeric variances in 1e-07 range, but should be faster!\n", + " indices = torch.full_like(tokens, self._num_discrete_embeddings) #+ 0 * tokens * self.nP * 2 # ... [b, s, t] \n", + " indices = indices.unsqueeze(1) + torch.arange(self.nP, device=indices.device).view(-1, 1, 1) * 2 # ... [b, nP, s, t]\n", + " indices = indices.unsqueeze(1) + torch.arange(2, device=indices.device).view(-1, 1, 1, 1) # ... [b, 2, nP, s, t] \n", + " p_clrs = self.emb_clr(indices).contiguous() # ... 
[b, 2, nP, s, t, ch]\n", + "\n", + " # Note we dont need to normalize x as this norm cancels in the fraction of arctan2(y/x)\n", + " overlaps = (x * p_clrs).sum(-1) # ... [b, 2, nP, s, t]\n", + " params = torch.arctan2(overlaps[:, 1], overlaps[:, 0]) # ... [b, nP, s, t]\n", + " params = params / torch.pi # [-pi, pi] to [-1, 1]\n", + " \n", + " # now reduce spatial s, average over non empty token s\n", + " if reduce_spatial:\n", + " params = self._reduce_params_spatial(tokens, params)\n", + "\n", + " return params.to(input_device)" + ] + }, + { + "cell_type": "markdown", + "id": "979e66c6-890b-4406-bf16-96be0751ede2", + "metadata": {}, + "source": [ + "## Test" + ] + }, + { + "cell_type": "markdown", + "id": "02f1b232-402c-44f4-9493-36b128f69e8a", + "metadata": {}, + "source": [ + "### Encode decode check: fixed tensor" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "2134bec1-ef0f-4a66-af00-77fcc7b47e97", + "metadata": {}, + "outputs": [], + "source": [ + "s, t = 3, 8\n", + "\n", + "rnd_tokens = torch.tensor([[[ 2, 0, 1, 0, 2, -3, 0, 8],\n", + " [-2, 4, 0, 5, 2, 3, 6, 8],\n", + " [ 2, 4, 0, 0, -2, 0, 0, 8]],\n", + " \n", + " [[ 8, 8, 1, 0, 2, -3, 0, 8],\n", + " [ 8, 8, 0, 7, 2, 3, 1, 8],\n", + " [ 8, 8, 8, 8, 8, 8, 8, 8]]])\n", + "\n", + "rnd_params = torch.rand((2, 1, t))*2-1" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "28570210-5d67-444e-ab6e-af097de3b60f", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "[INFO]: provided `unique_class_values` ([-3, -2, 0, 1, 2, 3, 4, 5, 6, 7, 8]), enforcing `num_clrs=len(unique_class_values)=11`.\n", + "[WARNING]: We need at least a `params_dim` (is 1) of `2*num_params_per_clr` (is 2), automatically setting `params_dim` to 2 to inforce this!\n", + "[WARNING]: `params_dim` is set to the minimum `2*num_params_per_clr`=2, but for `zero_sum_space=True` we need one more dimension, automatically setting it to `2*num_params_per_clr+1` 
3.\n", + "[WARNING]: `clr_dim` is set to 1 and `explicit_node_type_embeddings=False`, but for `zero_sum_space=True` we need one more dimension than the number of tokens `num_clrs` (is 11), automatically setting it to `clr_dim=num_clrs+1` 12.\n", + "[INFO]: Created `nn.Embedding` with a total of 13 vectors in a 15 dimensional space.\n" + ] + } + ], + "source": [ + "unique_class_values = None\n", + "unique_class_values = rnd_tokens.unique(sorted=True)\n", + "\n", + "num_clrs = 9\n", + "num_params_per_clr = 1\n", + "parametrized_tokens = [5, 6, 7]\n", + "\n", + "clr_dim = 1\n", + "params_dim = 1\n", + "\n", + "embedder = RotationalMultimodialPresetEmbedderTiny(clr_dim=clr_dim, \n", + " num_clrs=num_clrs, \n", + " params_dim=params_dim, \n", + " num_params_per_clr=num_params_per_clr,\n", + " zero_sum_space=True,\n", + " explicit_node_type_embeddings=True, \n", + " channel_last=True,\n", + " parametrized_tokens=parametrized_tokens,\n", + " unique_class_values=unique_class_values.tolist())" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "9174e846-9e86-4f31-b735-20c8277648e3", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "(tensor([[[ 2, 0, 1, 0, 2, -3, 0, 8],\n", + " [-2, 4, 0, 5, 2, 3, 6, 8],\n", + " [ 2, 4, 0, 0, -2, 0, 0, 8]],\n", + " \n", + " [[ 8, 8, 1, 0, 2, -3, 0, 8],\n", + " [ 8, 8, 0, 7, 2, 3, 1, 8],\n", + " [ 8, 8, 8, 8, 8, 8, 8, 8]]]),\n", + " tensor([[[-0.0690, 0.7864, 0.9059, 0.3405, 0.9263, -0.5743, 0.6541, 0.8584]],\n", + " \n", + " [[-0.3695, -0.8219, 0.2678, -0.3850, -0.5806, -0.2786, 0.0526, 0.5283]]]))" + ] + }, + "execution_count": null, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "rnd_tokens, rnd_params" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "46130163-667c-4f56-acc2-5dec2ef47b2e", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "tensor([[[ 4, 2, 3, 2, 4, 0, 2, 10],\n", + " [ 1, 6, 2, 7, 4, 5, 8, 10],\n", + " [ 4, 6, 2, 
2, 1, 2, 2, 10]],\n", + "\n", + " [[10, 10, 3, 2, 4, 0, 2, 10],\n", + " [10, 10, 2, 9, 4, 5, 3, 10],\n", + " [10, 10, 10, 10, 10, 10, 10, 10]]])" + ] + }, + "execution_count": null, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "rnd_tokens_cls = embedder.tokens_to_unique_class_values(rnd_tokens)\n", + "rnd_tokens_cls" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "b68914c6-1203-42f1-bc37-10427f216949", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "torch.Size([2, 3, 8, 15])\n" + ] + }, + { + "data": { + "text/plain": [ + "(tensor([[[ 2, 0, 1, 0, 2, -3, 0, 8],\n", + " [-2, 4, 0, 5, 2, 3, 6, 8],\n", + " [ 2, 4, 0, 0, -2, 0, 0, 8]],\n", + " \n", + " [[ 8, 8, 1, 0, 2, -3, 0, 8],\n", + " [ 8, 8, 0, 7, 2, 3, 1, 8],\n", + " [ 8, 8, 8, 8, 8, 8, 8, 8]]]),\n", + " tensor([[[ 0.0000, 0.0000, 0.0000, 0.3405, 0.0000, 0.0000, 0.6541, 0.0000]],\n", + " \n", + " [[ 0.0000, 0.0000, 0.0000, -0.3850, 0.0000, 0.0000, 0.0000, 0.0000]]]))" + ] + }, + "execution_count": null, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "enc_tensor = embedder.embed(rnd_tokens_cls, rnd_params)\n", + "print(enc_tensor.shape)\n", + "\n", + "recon_tensor, recon_params = embedder.invert(enc_tensor, reduce_spatial=1)\n", + "recon_tensor, recon_params" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "2d02d749-36e1-450e-9fb1-6b5a825a8d4e", + "metadata": {}, + "outputs": [], + "source": [ + "assert torch.allclose(recon_tensor, rnd_tokens)\n", + "assert not torch.allclose(recon_params, rnd_params, atol=1e-06) # note decoding puts 0s on all non param times, but we had rnd ones\n", + "\n", + "pmask = embedder.get_parametrized_mask(embedder.tokens_to_unique_class_values(recon_tensor))\n", + "assert torch.allclose(torch.where(pmask.any(1, keepdim=True), recon_params, 0.0), torch.where(pmask.any(1, keepdim=True), rnd_params, 0.0), atol=1e-06)" + ] + }, + { + 
"cell_type": "markdown", + "id": "3013e755-dd52-4d79-bae9-2b3675ef979c", + "metadata": {}, + "source": [ + "### Encode decode check: random circuits" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "6e27664d-bf41-4bc9-ae6f-cb97bed17469", + "metadata": {}, + "outputs": [], + "source": [ + "from genQC.platform.simulation import Simulator, CircuitBackendType\n", + "from genQC.platform.tokenizer.circuits_tokenizer import CircuitTokenizer, Vocabulary\n", + "from genQC.platform.circuits_generation import get_rnd_encoded_circuits, CircuitConditionType\n", + "from genQC.dataset.balancing import get_tensor_gate_length" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "2f5c40c5-61a7-4f32-bd0e-059525685c5e", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "{'h': 1, 'cx': 2, 'ccx': 3, 'swap': 4, 'rx': 5, 'ry': 6, 'cp': 7}" + ] + }, + "execution_count": null, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "g = ['h', 'cx', 'ccx', 'swap', 'rx', 'ry', 'cp']\n", + "\n", + "simulator = Simulator(CircuitBackendType.QISKIT)\n", + "tokenizer = CircuitTokenizer({gi:i+1 for i,gi in enumerate(g)})\n", + "tokenizer.vocabulary" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "679ad014-1bab-4bda-9ec1-aae819e8cb50", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "[5, 6, 7]" + ] + }, + "execution_count": null, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "parametrized_tokens = CircuitTokenizer.get_parametrized_tokens(tokenizer.vocabulary)\n", + "parametrized_tokens" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "31ba6cae-9aee-4287-9b0d-cc5de883f6ef", + "metadata": {}, + "outputs": [], + "source": [ + "def get_rnd_qc():\n", + " tensors, ys, Us, params = get_rnd_encoded_circuits(backend=simulator.backend, \n", + " tokenizer=tokenizer,\n", + " condition=CircuitConditionType.UNITARY,\n", + " samples=b, 
\n", + " num_of_qubits=s, \n", + " min_gates=2, \n", + " max_gates=t,\n", + " min_sub_gate_pool_cnt=len(tokenizer.vocabulary),\n", + " optimized=False)\n", + " \n", + " l = get_tensor_gate_length(tensors, padding_token=0)\n", + " for i, li in enumerate(l): \n", + " tensors[i, :, li:] = 8\n", + " \n", + " return tensors, ys, Us, params" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "124bb90d-35da-481d-bf89-2328c893c486", + "metadata": {}, + "outputs": [ + { + "data": { + "application/vnd.jupyter.widget-view+json": { + "model_id": "5315df66a2274e7680906b4a7f09978e", + "version_major": 2, + "version_minor": 0 + }, + "text/plain": [ + " 0%| | 0/512 [00:002}: mean {a.mean():0.3} norm {torch.linalg.vector_norm(a):0.3} std {a.std(correction=0):0.3}\")\n", + "\n", + "for i in range(embedder._num_param_embeddings):\n", + " a = embedder.emb_clr(torch.tensor([embedder._num_discrete_embeddings+i]))[:, embedder.clr_dim:]\n", + " print(f\"params {str(i):>2}: mean {a.mean():0.3} norm {torch.linalg.vector_norm(a):0.3} std {a.std(correction=0):0.3}\")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "09389ae7-6608-45b7-977c-b7b387391ab1", + "metadata": {}, + "outputs": [ + { + "data": { + "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAesAAAGiCAYAAADHpO4FAAAAOnRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjEwLjMsIGh0dHBzOi8vbWF0cGxvdGxpYi5vcmcvZiW1igAAAAlwSFlzAAAPYQAAD2EBqD+naQAAKjtJREFUeJzt3X9wVfWd//HXTSA3CEn4VZJcDCZ16SI/g0SyEbuFMWsGKZbtWJFSycaW7tpkBTLbQlpDrAgRXNmskpJCi7gzIthOoS5SWDaCfB1BIDEdmVV+LAgZ2ASYllwJJWHvPd8/LPfr/RIgyTk393xyn4+Zzx/33PO5n88ZW955vz+fc47HsixLAADAteKiPQEAAHBrBGsAAFyOYA0AgMsRrAEAcDmCNQAALkewBgDA5QjWAAC4HMEaAACXI1gDAOByBGsAAFyOYA0AQBfs27dPM2fOlM/nk8fj0bZt227bZ+/evbr33nvl9Xr1F3/xF9q4cWOXxiRYAwDQBa2trZowYYKqq6s7df6pU6c0Y8YMTZs2TQ0NDVq4cKG+973vadeuXZ0e08OLPAAA6B6Px6OtW7dq1qxZNz1n8eLFevvtt3XkyJHQsccff1yXLl3Szp07OzVOH7sTdVowGNS5c+eUlJQkj8cT7ekAALrIsix99tln8vl8iouLXAH36tWram9vt/07lmXdEG+8Xq+8Xq/t35ak/fv3Kz8/P+xYQUGBFi5c2OnfcF2wPnfunDIyMqI9DQCATY2Njbrzzjsj8ttXr15V1l0D1HQ+YPu3BgwYoMuXL4cdq6io0LPPPmv7tyWpqalJqampYcdSU1Pl9/v1pz/9Sf369bvtb7guWCclJUmSTtdnKnlAzyyp/+1XxvXIOAAQC/5X1/SedoT+PY+E9vZ2NZ0P6FTdXUpO6n6s8H8WVNak02psbFRycnLouFNZtVNcF6yvlyKSB8TZ+g/QFX08fXtkHACICX/eCdUTS5nJSc7EiuTk5LBg7aS0tDQ1NzeHHWtublZycnKnsmrJhcEaAIDOClhBBWxskw5YQecmcxN5eXnasWNH2LHdu3crLy+v07/BrVsAAGMFZdluXXX58mU1NDSooaFB0ue3ZjU0NOjMmTOSpLKyMs2bNy90/j/8wz/o5MmT+tGPfqRPPvlEP/vZz/Tmm29q0aJFnR6TzBoAYKyggrKTG3en9+HDhzVt2rTQ59LSUklSYWGhNm7cqP/5n/8JBW5JysrK0ttvv61FixbpX//1X3XnnXfqF7/4hQoKCjo9JsEaAIAumDp1qm71iJKOnk42depUffjhh90ek2ANADBWwLIUsPFsLzt9exLBGgBgrO6uO3+xvwnYYAYAgMtFLFhXV1crMzNTiYmJys3N1cGDByM1FAAgRgVlKWCjxXRmvWXLFpWWlqqiokL19fWaMGGCCgoKdP78+UgMBwCIUdG4dSsaIhKsV69erfnz56uoqEijR49WTU2N7rjjDm3YsCESwwEA0Ks5vsGsvb1ddXV1KisrCx2Li4tTfn6+9u/ff8P5bW1tamtrC332+/1OTwkA0EvFym5wxzPrixcvKhAIdPiGkaamphvOr6ysVEpKSqjxxi0AQGcFHWgmiPpu8LKyMrW0tIRaY2NjtKcEAICrOF4GHzp0qOLj4zt8w0haWtoN5zv5gm8AQGy5vqvbTn8TOJ5ZJyQkaNKkSaqtrQ0dCwaDqq2t7dIbRgAAuJ2AZb+ZICJPMCstLVVhYaFycnI0efJkVVVVqbW1VUVFRZEYDgAQo+yuO5uyZh2RYD179mxduHBBS5cuVVNTk7Kzs7Vz584bNp0BAIDbi9izwUtKSlRSUhKpnwcAQEF5FJDHVn8T8CIPAICxgtbnzU5/E0T91i0AAHBrZNYAAGMFbJbB7fTtSQRrAICxYiVYUwYHAMDlyKwBAMYKWh4FLRu7wW307UmuDdZ/+5Vx6uPp2yNj7TrX0CPjfFGBL7vHxwSA3oYyOAAAcAXXZtYAANx
OQHEK2Mg7Aw7OJZII1gAAY1k216wt1qwBAIgs1qwBAIArkFkDAIwVsOIUsGysWRvybHCCNQDAWEF5FLRRJA7KjGhNGRwAAJcjswYAGCtWNpgRrAEAxrK/Zk0ZHAAAOIDMGgBgrM83mNl4kQdlcAAAIito83Gj7AYHAACOILMGABgrVjaYEawBAMYKKi4mHopCsAYAGCtgeRSw8eYsO317EmvWAAC4HJk1AMBYAZu7wQOUwQEAiKygFaegjQ1mQUM2mFEGBwDA5cisAQDGogwOAIDLBWVvR3fQualEFGVwAABcjswaAGAs+w9FMSNnJVgDAIxl/3GjZgRrM2YJAEAMI7MGABiL91nHkAJfdo+PuetcQ4+OF41rBIBIi5UyOMEaAGAs+/dZmxGszZglAAAxjMwaAGCsoOVR0M5DUQx5RSbBGgBgrKDNMrgp91mbMUsAAGIYmTUAwFj2X5FpRs5KsAYAGCsgjwI27pW207cnmfEnBQAAMYzMGgBgLMrgAAC4XED2StkB56YSUY7/SVFZWan77rtPSUlJGjZsmGbNmqWjR486PQwAADHD8WD97rvvqri4WAcOHNDu3bt17do1PfTQQ2ptbXV6KABAjLteBrfTTOB4GXznzp1hnzdu3Khhw4aprq5Of/3Xf+30cACAGBYrL/KI+CxbWlokSYMHD+7w+7a2Nvn9/rAGAEBnWH9+RWZ3m9XN9e7q6mplZmYqMTFRubm5Onjw4C3Pr6qq0l/+5V+qX79+ysjI0KJFi3T16tVOjxfRYB0MBrVw4UJNmTJFY8eO7fCcyspKpaSkhFpGRkYkpwQAgC1btmxRaWmpKioqVF9frwkTJqigoEDnz5/v8PxNmzZpyZIlqqio0Mcff6xf/vKX2rJli3784x93esyIBuvi4mIdOXJEmzdvvuk5ZWVlamlpCbXGxsZITgkA0ItcL4PbaV21evVqzZ8/X0VFRRo9erRqamp0xx13aMOGDR2e//7772vKlCn69re/rczMTD300EOaM2fObbPxL4pYsC4pKdH27du1Z88e3XnnnTc9z+v1Kjk5OawBANAZ19+6ZadJumE5tq2trcPx2tvbVVdXp/z8/NCxuLg45efna//+/R32uf/++1VXVxcKzidPntSOHTv08MMPd/o6HQ/WlmWppKREW7du1TvvvKOsrCynhwAAwFEZGRlhS7KVlZUdnnfx4kUFAgGlpqaGHU9NTVVTU1OHfb797W/rueee0wMPPKC+ffvq7rvv1tSpU7tUBnd8N3hxcbE2bdqk3/72t0pKSgpNPiUlRf369XN6OABADAvYfEXm9b6NjY1hlV2v12t7btft3btXK1as0M9+9jPl5ubqxIkTWrBggZYtW6by8vJO/YbjwXrt2rWSpKlTp4Ydf/XVV/V3f/d3Tg8HAIhhXyxld7e/pE4vww4dOlTx8fFqbm4OO97c3Ky0tLQO+5SXl+uJJ57Q9773PUnSuHHj1Nraqu9///v6yU9+ori42/+xEZEyeEeNQA0AMF1CQoImTZqk2tra0LFgMKja2lrl5eV12OfKlSs3BOT4+HhJn8fMzuDZ4AAAYwUVp6CNvLM7fUtLS1VYWKicnBxNnjxZVVVVam1tVVFRkSRp3rx5Gj58eGjde+bMmVq9erUmTpwYKoOXl5dr5syZoaB9OwRrAICxApZHARtl8O70nT17ti5cuKClS5eqqalJ2dnZ2rlzZ2jT2ZkzZ8Iy6WeeeUYej0fPPPOMzp49qy996UuaOXOmli9f3ukxPVZnc/Ae4vf7lZKSoqn6hvp4+kZ7OhGz61xDj45X4Mvu0fEAxK7/ta5pr36rlpaWiN2Oez1WPPV/vinvgO7HirbL17T2q7+J6FydQGYNADCWUxvM3I5gDQAwlmXzzVmWIS/yIFgDAIwVkEeBbr6M43p/ExCso6Sn15B7eo1cYp0cAJxCsAYAGCto2Vt3Drpqi/XNEawBAMYK2lyzttO3J5kxSwAAYhiZNQDAWEF
5FLSxScxO355EsAYAGCsaTzCLBsrgAAC4HJk1AMBYsbLBjGANADBWUDYfN2rImrUZf1IAABDDyKwBAMaybO4GtwzJrAnWAABj8dYtAABcLlY2mJkxSwAAYhiZNQDAWJTBAQBwuVh53ChlcAAAXI7MGgBgLMrgAAC4XKwEa8rgAAC4HJk1AMBYsZJZE6wBAMaKlWBNGRwAAJcjswYAGMuSvXulLeemElEEawCAsWKlDE6wBgAYK1aCNWvWAAC4HJk1AMBYsZJZE6wBAMYiWKNXKfBl9/iYu8419Oh40bhGAOgJBGsAgLEsyyPLRnZsp29PIlgDAIzF+6wBAIArkFkDAIzFBjMAAFwuVtasKYMDAOByZNYAAGNRBgcAwOVipQxOsAYAGMuymVmbEqxZswYAwOUiHqxfeOEFeTweLVy4MNJDAQBijCXJsmy0aF9AJ0W0DH7o0CH9/Oc/1/jx4yM5DAAgRgXlkYcnmHXf5cuXNXfuXK1fv16DBg2K1DAAAPR6EQvWxcXFmjFjhvLz8295Xltbm/x+f1gDAKAzru8Gt9NMEJEy+ObNm1VfX69Dhw7d9tzKykr99Kc/jcQ0AAC9XNDyyBMD91k7nlk3NjZqwYIFev3115WYmHjb88vKytTS0hJqjY2NTk8JAACjOZ5Z19XV6fz587r33ntDxwKBgPbt26c1a9aora1N8fHxoe+8Xq+8Xq/T0wAAxIDru7rt9DeB48H6wQcf1EcffRR2rKioSKNGjdLixYvDAjUAAHbwBLNuSkpK0tixY8OO9e/fX0OGDLnhOAAAuD0eNwoAMBaZtYP27t3bE8MAAGJMrOwGJ7MGABgrVjaY8SIPAABcjswaAGCszzNrO2vWDk4mggjWAABjxcoGM8rgAAC4HJk1AMBYluy9k9qQKjjBGpFT4Mvu0fF2nWvo0fGknr9GAOEogwMAAFcgswYAmCtG6uBk1gAAc/25DN7dpm6Wwaurq5WZmanExETl5ubq4MGDtzz/0qVLKi4uVnp6urxer77yla9ox44dnR6PzBoAYKxoPMFsy5YtKi0tVU1NjXJzc1VVVaWCggIdPXpUw4YNu+H89vZ2/c3f/I2GDRumX//61xo+fLhOnz6tgQMHdnpMgjUAAF2wevVqzZ8/X0VFRZKkmpoavf3229qwYYOWLFlyw/kbNmzQH/7wB73//vvq27evJCkzM7NLY1IGBwAYy04J/Is7yf1+f1hra2vrcLz29nbV1dUpPz8/dCwuLk75+fnav39/h33eeust5eXlqbi4WKmpqRo7dqxWrFihQCDQ6eskWAMAzHV93dlOk5SRkaGUlJRQq6ys7HC4ixcvKhAIKDU1Nex4amqqmpqaOuxz8uRJ/frXv1YgENCOHTtUXl6ul156Sc8//3ynL5MyOAAg5jU2Nio5OTn02ev1OvbbwWBQw4YN07p16xQfH69Jkybp7NmzevHFF1VRUdGp3yBYAwCM5dQGs+Tk5LBgfTNDhw5VfHy8mpubw443NzcrLS2twz7p6enq27ev4uPjQ8fuueceNTU1qb29XQkJCbcdlzI4AMBclgOtCxISEjRp0iTV1taGjgWDQdXW1iovL6/DPlOmTNGJEycUDAZDx44dO6b09PROBWqJYA0AQJeUlpZq/fr1eu211/Txxx/rqaeeUmtra2h3+Lx581RWVhY6/6mnntIf/vAHLViwQMeOHdPbb7+tFStWqLi4uNNjUgYHABgrGs8Gnz17ti5cuKClS5eqqalJ2dnZ2rlzZ2jT2ZkzZxQX9/9y4YyMDO3atUuLFi3S+PHjNXz4cC1YsECLFy/u9JgEawCA2aLwyNCSkhKVlJR0+N3evXtvOJaXl6cDBw50ezzK4AAAuByZNQDAWLHyikyCNQDAXDHy1i2CNQDAYJ4/Nzv93Y81awAAXI7MGgBgLsrgAAC4XIwEa8rgAAC4HJk1AMBcX3jNZbf7G4BgDQAwllNv3XI7yuAAALgcmTUAwFwxssGMYA0
AMFeMrFlTBgcAwOXIrAEAxvJYnzc7/U1AsEavUeDL7vExd51r6NHxonGNgKuxZg0AgMuxZg0AANyAzBoAYC7K4AAAuFyMBGvK4AAAuByZNQDAXDGSWROsAQDmYjc4AABwAzJrAICxeIIZAABuFyNr1hEpg589e1bf+c53NGTIEPXr10/jxo3T4cOHIzEUAAC9nuOZ9R//+EdNmTJF06ZN0+9+9zt96Utf0vHjxzVo0CCnhwIAICY4HqxXrlypjIwMvfrqq6FjWVlZNz2/ra1NbW1toc9+v9/pKQEAeimPbK5ZOzaTyHK8DP7WW28pJydH3/rWtzRs2DBNnDhR69evv+n5lZWVSklJCbWMjAynpwQA6K2u37plpxnA8WB98uRJrV27ViNHjtSuXbv01FNP6emnn9Zrr73W4fllZWVqaWkJtcbGRqenBACA0RwvgweDQeXk5GjFihWSpIkTJ+rIkSOqqalRYWHhDed7vV55vV6npwEAiAXsBu+e9PR0jR49OuzYPffcozNnzjg9FAAg1lkONAM4HqynTJmio0ePhh07duyY7rrrLqeHAgAgJjgerBctWqQDBw5oxYoVOnHihDZt2qR169apuLjY6aEAADHu+hPM7DQTOB6s77vvPm3dulVvvPGGxo4dq2XLlqmqqkpz5851eigAQKyLkTJ4RB43+vWvf11f//rXI/HTAADEHJ4NDgAwV4zsBidYAwCMFStv3eJ91gAAuByZNQDAXHYfGWrI40YJ1gAAc7FmDQCAu8XKmjXBGrChwJfdo+PtOtfQo+NJPX+NAG5EsAYAmIsyOAAALmf3kaGGBGtu3QIAwOXIrAEA5qIMDgCAy8VIsKYMDgCAy5FZAwCMFSv3WZNZAwDgcgRrAABcjjI4AMBcMbLBjGANADBWrKxZE6wBAGYzJODawZo1AAAuR2YNADAXa9YAALhbrKxZUwYHAMDlyKwBAOaiDA4AgLtRBgcAAK5AsAYAmMtyoHVDdXW1MjMzlZiYqNzcXB08eLBT/TZv3iyPx6NZs2Z1aTyCNQDAXFEI1lu2bFFpaakqKipUX1+vCRMmqKCgQOfPn79lv08//VT/9E//pK9+9atdHpNgDQCIeX6/P6y1tbXd9NzVq1dr/vz5Kioq0ujRo1VTU6M77rhDGzZsuGmfQCCguXPn6qc//am+/OUvd3l+BGsAgLGubzCz0yQpIyNDKSkpoVZZWdnheO3t7aqrq1N+fn7oWFxcnPLz87V///6bzvO5557TsGHD9N3vfrdb18lucACAuRy6dauxsVHJycmhw16vt8PTL168qEAgoNTU1LDjqamp+uSTTzrs89577+mXv/ylGhoauj1NgjUAwFwOBevk5OSwYO2Uzz77TE888YTWr1+voUOHdvt3CNYAAHTS0KFDFR8fr+bm5rDjzc3NSktLu+H8//7v/9ann36qmTNnho4Fg0FJUp8+fXT06FHdfffdtx2XNWsAgLGcWrPurISEBE2aNEm1tbWhY8FgULW1tcrLy7vh/FGjRumjjz5SQ0NDqD3yyCOaNm2aGhoalJGR0alxyawBAOaKwuNGS0tLVVhYqJycHE2ePFlVVVVqbW1VUVGRJGnevHkaPny4KisrlZiYqLFjx4b1HzhwoCTdcPxWCNaAQQp82T0+5q5zDT06XjSuEeiK2bNn68KFC1q6dKmampqUnZ2tnTt3hjadnTlzRnFxzhauCdYAAGNF69ngJSUlKikp6fC7vXv33rLvxo0buzwewRoAYK4YeesWG8wAAHA5MmsAgLliJLMmWAMAjOX5c7PT3wSUwQEAcDkyawCAuSiDAwDgbtG6daunEawBAOaKkcyaNWsAAFzO8WAdCARUXl6urKws9evXT3fffbeWLVsmyzLkzxcAgFksG80QjpfBV65cqbVr1+q1117TmDFjdPjwYRUVFSklJUVPP/2008MBAGIYa9bd9P777+sb3/iGZsyYIUnKzMzUG2+8oYMHD3Z4fltbm9r
a2kKf/X6/01MCAMBojpfB77//ftXW1urYsWOSpN///vd67733NH369A7Pr6ysVEpKSqh19t2eAADYKoEbVAp3PLNesmSJ/H6/Ro0apfj4eAUCAS1fvlxz587t8PyysjKVlpaGPvv9fgI2AKBTKIN305tvvqnXX39dmzZt0pgxY9TQ0KCFCxfK5/OpsLDwhvO9Xq+8Xq/T0wAAoNdwPFj/8Ic/1JIlS/T4449LksaNG6fTp0+rsrKyw2ANAEC3xch91o4H6ytXriguLnwpPD4+XsFg0OmhAAAxjjJ4N82cOVPLly/XiBEjNGbMGH344YdavXq1nnzySaeHAgAgJjgerF955RWVl5frBz/4gc6fPy+fz6e///u/19KlS50eCgAQ6yiDd09SUpKqqqpUVVXl9E8DABCOYA0AgLvFypo1L/IAAMDlyKwBAOaiDA4AgLt5LEseG291tNO3J1EGBwDA5cisAQDmogwOAFKBL7tHx9t1rqFHx5N6/hrhHHaDAwAAVyCzBgCYizI4AADuRhkcAAC4Apk1AMBclMEBAHC3WCmDE6wBAOaKkcyaNWsAAFyOzBoAYDRTStl2EKwBAOayrM+bnf4GoAwOAIDLkVkDAIzFbnAAANyO3eAAAMANyKwBAMbyBD9vdvqbgGANADAXZXAAAOAGZNYAAGOxGxwAALeLkYeiEKwBAMaKlcyaNWsAAFyOzBoAYK4Y2Q1OsAYAGIsyOAAAcAUyawCAudgNDgCAu1EGBwAArkBmDQAwF7vBAaDnFfiye3zMXecaenS8aFxjb0UZHAAAuAKZNQDAXEHr82anvwEI1gAAc7FmDQCAu3lkc83asZlEFmvWAAC4HJk1AMBcPMEMAAB349YtAADQoerqamVmZioxMVG5ubk6ePDgTc9dv369vvrVr2rQoEEaNGiQ8vPzb3l+RwjWAABzWQ60LtqyZYtKS0tVUVGh+vp6TZgwQQUFBTp//nyH5+/du1dz5szRnj17tH//fmVkZOihhx7S2bNnOz0mwRoAYCyPZdlukuT3+8NaW1vbTcdcvXq15s+fr6KiIo0ePVo1NTW64447tGHDhg7Pf/311/WDH/xA2dnZGjVqlH7xi18oGAyqtra209fZ5WC9b98+zZw5Uz6fTx6PR9u2bQv73rIsLV26VOnp6erXr5/y8/N1/Pjxrg4DAECPycjIUEpKSqhVVlZ2eF57e7vq6uqUn58fOhYXF6f8/Hzt37+/U2NduXJF165d0+DBgzs9vy4H69bWVk2YMEHV1dUdfr9q1Sq9/PLLqqmp0QcffKD+/furoKBAV69e7epQAADcWtCBJqmxsVEtLS2hVlZW1uFwFy9eVCAQUGpqatjx1NRUNTU1dWrKixcvls/nCwv4t9Pl3eDTp0/X9OnTO/zOsixVVVXpmWee0Te+8Q1J0r/9278pNTVV27Zt0+OPP35Dn7a2trByg9/v7+qUAAAx6oul7O72l6Tk5GQlJyc7Na2beuGFF7R582bt3btXiYmJne7n6Jr1qVOn1NTUFPbXQkpKinJzc29aHqisrAwrPWRkZDg5JQAAHDN06FDFx8erubk57Hhzc7PS0tJu2fef//mf9cILL+g//uM/NH78+C6N62iwvl4C6Ep5oKysLKz00NjY6OSUAAC9WQ/vBk9ISNCkSZPCNodd3yyWl5d3036rVq3SsmXLtHPnTuXk5HRtULngoSher1derzfa0wAAmCgKTzArLS1VYWGhcnJyNHnyZFVVVam1tVVFRUWSpHnz5mn48OGhTWorV67U0qVLtWnTJmVmZoaS1wEDBmjAgAGdGtPRYH29BNDc3Kz09PTQ8ebmZmVnZzs5FAAAUXmC2ezZs3XhwgUtXbpUTU1Nys7O1s6dO0NV5TNnzigu7v8VrteuXav29nY9+uijYb9TUVGhZ599tlNjOhqss7KylJaWptra2lBw9vv9+uCDD/TUU085ORQAAFFTUlKikpKSDr/bu3dv2OdPP/3U9nh
dDtaXL1/WiRMnQp9PnTqlhoYGDR48WCNGjNDChQv1/PPPa+TIkcrKylJ5ebl8Pp9mzZple7IAAIThRR4dO3z4sKZNmxb6XFpaKkkqLCzUxo0b9aMf/Uitra36/ve/r0uXLumBBx7Qzp07u7RFHQCAzvAEP292+pugy8F66tSpsm7xl4jH49Fzzz2n5557ztbEAADA56K+GxwAgG6jDA4AgMt1881ZYf0NwFu3AABwOTJrAICxnHo2uNsRrAEA5oqRNWvK4AAAuByZNYCYV+DL7tHxdp1r6NHxevr6epSl0Dupu93fAARrAICxWLMGAMDtLNlcs3ZsJhHFmjUAAC5HZg0AMFeM7AYnWAMAzBWU5LHZ3wCUwQEAcDkyawCAsdgNDgCA28XImjVlcAAAXI7MGgBgrhjJrAnWAABzxUiwpgwOAIDLkVkDAMwVI/dZE6wBAMbi1i0AANyONWsAAOAGZNYAAHMFLcljIzsOmpFZE6wBAOaiDA4AANyAzBoAYDCbmbXMyKwJ1gAAc1EGBwAAbkBmDQAwV9CSrVI2u8EBAIgwK/h5s9PfAJTBAQBwOTJrAIC5YmSDGcEaAGAu1qwBAJFQ4Mvu0fF2nWvo0fH8nwU16Cs9NFiMZNasWQMA4HJk1gAAc1mymVk7NpOIIlgDAMxFGRwAALgBmTUAwFzBoCQbDzYJmvFQFII1AMBclMEBAIAbkFkDAMwVI5k1wRoAYK4YeYIZZXAAAFyuy8F63759mjlzpnw+nzwej7Zt2xb67tq1a1q8eLHGjRun/v37y+fzad68eTp37pyTcwYAQJJkWUHbzQRdDtatra2aMGGCqqurb/juypUrqq+vV3l5uerr6/Wb3/xGR48e1SOPPOLIZAEACGNZn5eyu9t665r19OnTNX369A6/S0lJ0e7du8OOrVmzRpMnT9aZM2c0YsSI7s0SAICOWDbXrHtrsO6qlpYWeTweDRw4sMPv29ra1NbWFvrs9/sjPSUAAIwS0Q1mV69e1eLFizVnzhwlJyd3eE5lZaVSUlJCLSMjI5JTAgD0JsGg/WaAiAXra9eu6bHHHpNlWVq7du1NzysrK1NLS0uoNTY2RmpKAIDe5vp91naaASJSBr8eqE+fPq133nnnplm1JHm9Xnm93khMAwCAXsHxYH09UB8/flx79uzRkCFDnB4CAABJkhUMyvJ0v5Rtyq1bXQ7Wly9f1okTJ0KfT506pYaGBg0ePFjp6el69NFHVV9fr+3btysQCKipqUmSNHjwYCUkJDg3cwAA2A3escOHD2vatGmhz6WlpZKkwsJCPfvss3rrrbckSdnZ2WH99uzZo6lTp3Z/pgAAxKguB+upU6fKusVfIrf6DgAARwUtyUNmDQCAe1mWJBvrzoYEa17kAQCAy5FZAwCMZQUtWTbK4KYs3RKsAQDmsoKyVwY349YtyuAAAGNZQct2647q6mplZmYqMTFRubm5Onjw4C3P/9WvfqVRo0YpMTFR48aN044dO7o0HsEaAIAu2LJli0pLS1VRUaH6+npNmDBBBQUFOn/+fIfnv//++5ozZ46++93v6sMPP9SsWbM0a9YsHTlypNNjeiyXFexbWlo0cOBAPaCH1Ud9oz0dADDe1mMf9eh4/stB3XXvp7p06ZJSUlIiM4bfr5SUFNux4n91Te9phxobG8MejX2rR2Hn5ubqvvvu05o1ayRJwWBQGRkZ+sd//EctWbLkhvNnz56t1tZWbd++PXTsr/7qr5Sdna2amprOTdRymcbGxuuPo6HRaDSawa2xsTFiseJPf/qTlZaW5sg8BwwYcMOxioqKDsdta2uz4uPjra1bt4YdnzdvnvXII4902CcjI8P6l3/5l7BjS5cutcaPH9/p63XdBjOfz6fGxkYlJSXJ4/F0qa/f71dGRsYNfyH1Fr39+iSusbfgGnuH7l6jZVn67LPP5PP5Ija3xMREnTp1Su3t7bZ/y7KsG+LNzbLqixcvKhAIKDU1Nex4amqqPvn
kkw77NDU1dXj+9cdxd4brgnVcXJzuvPNOW7+RnJzca//PI/X+65O4xt6Ca+wdunONkSp/f1FiYqISExMjPo4bsMEMAIBOGjp0qOLj49Xc3Bx2vLm5WWlpaR32SUtL69L5HSFYAwDQSQkJCZo0aZJqa2tDx4LBoGpra5WXl9dhn7y8vLDzJWn37t03Pb8jriuD2+H1elVRUXHTtQbT9fbrk7jG3oJr7B1i4Rq7o7S0VIWFhcrJydHkyZNVVVWl1tZWFRUVSZLmzZun4cOHq7KyUpK0YMECfe1rX9NLL72kGTNmaPPmzTp8+LDWrVvX6TFdd+sWAABut2bNGr344otqampSdna2Xn75ZeXm5kr6/O2UmZmZ2rhxY+j8X/3qV3rmmWf06aefauTIkVq1apUefvjhTo9HsAYAwOVYswYAwOUI1gAAuBzBGgAAlyNYAwDgcr0mWHf1dWUmqays1H333aekpCQNGzZMs2bN0tGjR6M9rYh64YUX5PF4tHDhwmhPxVFnz57Vd77zHQ0ZMkT9+vXTuHHjdPjw4WhPyzGBQEDl5eXKyspSv379dPfdd2vZsmUyeR/rvn37NHPmTPl8Pnk8Hm3bti3se8uytHTpUqWnp6tfv37Kz8/X8ePHozPZbrjV9V27dk2LFy/WuHHj1L9/f/l8Ps2bN0/nzp2L3oRjVK8I1l19XZlp3n33XRUXF+vAgQPavXu3rl27poceekitra3RnlpEHDp0SD//+c81fvz4aE/FUX/84x81ZcoU9e3bV7/73e/0X//1X3rppZc0aNCgaE/NMStXrtTatWu1Zs0affzxx1q5cqVWrVqlV155JdpT67bW1lZNmDBB1dXVHX6/atUqvfzyy6qpqdEHH3yg/v37q6CgQFevXu3hmXbPra7vypUrqq+vV3l5uerr6/Wb3/xGR48e1SOPPBKFmca4Tr/yw8UmT55sFRcXhz4HAgHL5/NZlZWVUZxV5Jw/f96SZL377rvRnorjPvvsM2vkyJHW7t27ra997WvWggULoj0lxyxevNh64IEHoj2NiJoxY4b15JNPhh375je/ac2dOzdKM3KWpLC3LQWDQSstLc168cUXQ8cuXbpkeb1e64033ojCDO35/6+vIwcPHrQkWadPn+6ZScGyLMsyPrNub29XXV2d8vPzQ8fi4uKUn5+v/fv3R3FmkdPS0iJJGjx4cJRn4rzi4mLNmDEj7L9nb/HWW28pJydH3/rWtzRs2DBNnDhR69evj/a0HHX//fertrZWx44dkyT9/ve/13vvvafp06dHeWaRcerUKTU1NYX97zUlJUW5ubm9+t8fj8ejgQMHRnsqMcX4x41253VlJgsGg1q4cKGmTJmisWPHRns6jtq8ebPq6+t16NChaE8lIk6ePKm1a9eqtLRUP/7xj3Xo0CE9/fTTSkhIUGFhYbSn54glS5bI7/dr1KhRio+PVyAQ0PLlyzV37txoTy0irr/i0O7rD01x9epVLV68WHPmzOn1bxpzG+ODdawpLi7WkSNH9N5770V7Ko5qbGzUggULtHv37l77yrtgMKicnBytWLFCkjRx4kQdOXJENTU1vSZYv/nmm3r99de1adMmjRkzRg0NDVq4cKF8Pl+vucZYde3aNT322GOyLEtr166N9nRijvFl8O68rsxUJSUl2r59u/bs2WP7nd9uU1dXp/Pnz+vee+9Vnz591KdPH7377rt6+eWX1adPHwUCgWhP0bb09HSNHj067Ng999yjM2fORGlGzvvhD3+oJUuW6PHHH9e4ceP0xBNPaNGiRaEXGvQ21/+N6e3//lwP1KdPn9bu3bvJqqPA+GDdndeVmcayLJWUlGjr1q165513lJWVFe0pOe7BBx/URx99pIaGhlDLycnR3Llz1dDQoPj4+GhP0bYpU6bccMvdsWPHdNddd0VpRs67cuWK4uLC/1mJj49XMBiM0owiKysrS2lpaWH//vj9fn3wwQe95t+f64H6+PHj+s///E8
NGTIk2lOKSb2iDH6715WZrri4WJs2bdJvf/tbJSUlhdbCUlJS1K9fvyjPzhlJSUk3rMH3799fQ4YM6TVr84sWLdL999+vFStW6LHHHtPBgwe1bt26Lr0mz+1mzpyp5cuXa8SIERozZow+/PBDrV69Wk8++WS0p9Ztly9f1okTJ0KfT506pYaGBg0ePFgjRozQwoUL9fzzz2vkyJHKyspSeXm5fD6fZs2aFb1Jd8Gtri89PV2PPvqo6uvrtX37dgUCgdC/P4MHD1ZCQkK0ph17or0d3SmvvPKKNWLECCshIcGaPHmydeDAgWhPyTGSOmyvvvpqtKcWUb3t1i3Lsqx///d/t8aOHWt5vV5r1KhR1rp166I9JUf5/X5rwYIF1ogRI6zExETry1/+svWTn/zEamtri/bUum3Pnj0d/v+vsLDQsqzPb98qLy+3UlNTLa/Xaz344IPW0aNHozvpLrjV9Z06deqm//7s2bMn2lOPKbwiEwAAlzN+zRoAgN6OYA0AgMsRrAEAcDmCNQAALkewBgDA5QjWAAC4HMEaAACXI1gDAOByBGsAAFyOYA0AgMsRrAEAcLn/C92O7BqtAROnAAAAAElFTkSuQmCC", + "text/plain": [ + "
" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "#get scalar produkt of all\n", + "p = torch.zeros((embedder.num_embeddings, embedder.num_embeddings))\n", + "for i,j in itertools.product(range(embedder.num_embeddings), range(embedder.num_embeddings)):\n", + " v1 = embedder.emb_clr(torch.tensor([i])) \n", + " v2 = embedder.emb_clr(torch.tensor([j]))\n", + " p[i, j] = (v1 * v2).sum() / (torch.linalg.norm(v1)*torch.linalg.norm(v2))\n", + " \n", + "plt.imshow(p)\n", + "plt.colorbar()\n", + "plt.show()" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "f4c8232d-0b6e-4f52-94e6-e5e4587ae0d1", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "(tensor(-7.4943e-09), tensor(1.))" + ] + }, + "execution_count": null, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "#before normalization global scale\n", + "enc_tensor = embedder.embed(tensors, params)\n", + "enc_tensor.mean(), enc_tensor.std(correction=0)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "20f48a04-fe8b-4c27-9c54-ce4caed8e8b9", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "tensor([1.])" + ] + }, + "execution_count": null, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "enc_tensor[..., :embedder.clr_dim].std(-1, correction=0).unique()" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "38591b4d-50ce-4a36-befd-08c579466399", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "tensor([1.0000, 1.0000, 1.0000, 1.0000])" + ] + }, + "execution_count": null, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "enc_tensor[..., embedder.clr_dim:].std(-1, correction=0).unique()" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "986eda9a-3450-4acf-8648-6090ec5d30b6", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "tensor(1.)" + ] + }, + 
"execution_count": null, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "enc_tensor[..., :embedder.clr_dim].var(correction=0)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "9801b83b-b448-4c33-8e1f-a91b5249e901", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "tensors.shape=torch.Size([511, 4, 24])\n", + "params.shape=torch.Size([511, 1, 24])\n", + "tensor(5.9605e-08)\n" + ] + } + ], + "source": [ + "recon_tensor, recon_params = embedder.invert(enc_tensor)\n", + "\n", + "print(f\"{tensors.shape=}\")\n", + "print(f\"{params.shape=}\")\n", + "print((params-recon_params).abs().max())\n", + "\n", + "assert torch.allclose(embedder.unique_class_values_to_tokens(tensors).long(), recon_tensor)\n", + "assert torch.allclose(params, recon_params, atol=1e-07)" + ] + }, + { + "cell_type": "markdown", + "id": "00f38b38-3b96-43aa-8bc1-3097bcc2bfce", + "metadata": {}, + "source": [ + "# Export -" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "947b033b-cd7d-4fb7-99dc-12035a0ae647", + "metadata": {}, + "outputs": [], + "source": [ + "#| hide\n", + "import nbdev; nbdev.nbdev_export()" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "python3", + "language": "python", + "name": "python3" + }, + "widgets": { + "application/vnd.jupyter.widget-state+json": { + "state": {}, + "version_major": 2, + "version_minor": 0 + } + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/src/models/frozen_open_clip.ipynb b/src/models/frozen_open_clip.ipynb index 50da002..ed1fccd 100644 --- a/src/models/frozen_open_clip.ipynb +++ b/src/models/frozen_open_clip.ipynb @@ -5,7 +5,9 @@ "id": "a8980c24-d62e-462b-ba89-3195cfdcc374", "metadata": {}, "source": [ - "# Frozen OpenCLIP" + "# Frozen OpenCLIP\n", + "\n", + "> Interface to the [OpenCLIP](https://github.com/mlfoundations/open_clip) library." 
] }, { @@ -27,10 +29,30 @@ "source": [ "#| export\n", "from genQC.imports import *\n", - "from genQC.models.config_model import Config_Model\n", + "from genQC.models.config_model import ConfigModel\n", + "from genQC.utils.async_fn import run_parallel_jobs\n", + "from genQC.utils.misc_utils import infer_torch_device\n", "import open_clip" ] }, + { + "cell_type": "code", + "execution_count": null, + "id": "ed668754-6e3d-480a-8bce-c12eed6d939e", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "OpenCLIP version: 2.32.0\n" + ] + } + ], + "source": [ + "print(\"OpenCLIP version:\", open_clip.__version__)" + ] + }, { "cell_type": "markdown", "id": "3bca1a5d-2c82-4e97-8181-425a38cfe5ee", @@ -48,10 +70,10 @@ "source": [ "#| export\n", "@dataclass\n", - "class FrozenOpenCLIPEmbedder_config:\n", + "class FrozenOpenCLIPEmbedderConfig:\n", " arch: str\n", " version: str\n", - " device: str\n", + " #device: str\n", " max_length: int\n", " freeze: bool\n", " layer: str" @@ -65,7 +87,7 @@ "outputs": [], "source": [ "#| export\n", - "class FrozenOpenCLIPEmbedder(Config_Model):\n", + "class FrozenOpenCLIPEmbedder(ConfigModel):\n", " \"\"\"Loads and freezes the [OpenCLIP](https://github.com/mlfoundations/open_clip) transformer encoder for text prompts.\"\"\"\n", " \n", " LAYERS = [\n", @@ -74,18 +96,23 @@ " \"penultimate\"\n", " ]\n", "\n", - " def __init__(self, arch=\"ViT-H-14\", version=\"laion2b_s32b_b79k\", device=\"cpu\", max_length=77, freeze=True, layer=\"penultimate\"):\n", - " super().__init__() \n", + " njobs = 1\n", + "\n", + " def __init__(self, arch=\"ViT-B-32\", version=\"datacomp_xl_s13b_b90k\", max_length=77, freeze=True, layer=\"penultimate\", **kwargs):\n", + " super().__init__() \n", + " \n", " assert layer in self.LAYERS \n", - " self.params_config = FrozenOpenCLIPEmbedder_config(arch, version, device, max_length, freeze, layer)\n", + " self.params_config = FrozenOpenCLIPEmbedderConfig(arch, version, max_length, 
freeze, layer)\n", " \n", - " model, _, _ = open_clip.create_model_and_transforms(arch, device=torch.device(device), pretrained=version)\n", - " del model.visual\n", + " model, _, _ = open_clip.create_model_and_transforms(arch, device=\"cpu\", pretrained=version)\n", + " self.device = \"cpu\"\n", " \n", + " del model.visual \n", " self.model = model\n", - " self.to(device)\n", - "\n", + " # self.to(device)\n", + " \n", " self.tokenizer = open_clip.get_tokenizer(arch)\n", + " assert torch.numel(self.tokenizer(\"test\"))\n", " \n", " assert max_length <= 77 # max set by the clip \n", " self.max_length = max_length\n", @@ -100,33 +127,37 @@ " #create empty token, can also be, e.g., A nice picture\n", " self.empty_token = self.tokenize_and_push_to_device(\"\")\n", " \n", - " def freeze(self):\n", - " self.model = self.model.eval()\n", - " \n", - " for param in self.parameters(): \n", - " param.requires_grad = False \n", - " \n", + " def freeze(self, freeze: bool = True):\n", + " super().freeze(freeze=freeze)\n", + " \n", " for param in self.model.parameters(): \n", - " param.requires_grad = False\n", + " param.requires_grad = not freeze\n", " \n", " def to(self, device):\n", " self.model = self.model.to(device) \n", " self.device = device\n", " return self\n", " \n", - " @torch.no_grad()\n", + " @torch.inference_mode()\n", " def tokenize_and_push_to_device(self, text, to_device=True):\n", - " # tokens = open_clip.tokenize(text)\n", - " tokens = self.tokenizer(text)\n", + " if self.njobs > 1:\n", + "\n", + " tokens_list = run_parallel_jobs(self.tokenizer, np.array_split(text, self.njobs), self.njobs)\n", + " tokens = torch.cat(tokens_list, dim=0)\n", + " \n", + " else:\n", + " # tokens = open_clip.tokenize(text)\n", + " tokens = self.tokenizer(text)\n", + " \n", " if to_device:\n", " tokens = tokens.to(self.device)\n", " return tokens\n", " \n", - " @torch.no_grad()\n", + " @torch.inference_mode()\n", " def forward(self, c, **kwargs):\n", " return 
self.encode_with_transformer(c)\n", "\n", - " @torch.no_grad()\n", + " @torch.inference_mode()\n", " def encode_with_transformer(self, text):\n", " cast_dtype = self.model.transformer.get_cast_dtype()\n", " \n", @@ -145,13 +176,17 @@ " \n", " return x\n", "\n", - " @torch.no_grad()\n", + " @torch.inference_mode()\n", " def text_transformer_forward(self, x: torch.Tensor, attn_mask=None):\n", " for i, r in enumerate(self.model.transformer.resblocks):\n", " if i == len(self.model.transformer.resblocks) - self.layer_idx:\n", " break\n", - "\n", - " x = r(x, attn_mask=attn_mask) \n", + " #if self.model.transformer.grad_checkpointing and not torch.jit.is_scripting():\n", + " #x = checkpoint(r, x, attn_mask)\n", + " #else:\n", + " \n", + " x = r(x, attn_mask=attn_mask)\n", + " \n", " return x\n", "\n", " #--------------------------------------------------------------\n", @@ -165,7 +200,7 @@ " @staticmethod\n", " def from_config(config, device: torch.device, save_path: str=None): \n", " config[\"save_path\"] = None\n", - " return Config_Model.from_config(config, device, save_path=None) " + " return ConfigModel.from_config(config, device, save_path=None) " ] }, { @@ -173,9 +208,18 @@ "execution_count": null, "id": "c351de75-c3ac-4434-9e74-0472ad849d44", "metadata": {}, - "outputs": [], + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "[INFO]: Cuda device has a capability of 8.6 (>= 8), allowing tf32 matmul.\n" + ] + } + ], "source": [ - "a = FrozenOpenCLIPEmbedder()" + "device = infer_torch_device()\n", + "a = FrozenOpenCLIPEmbedder().to(device)" ] }, { @@ -189,7 +233,10 @@ "text/plain": [ "tensor([[49406, 314, 272, 267, 273, 267, 273, 316, 49407, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n", " 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n", - " 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]])" + " 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n", + 
" [49406, 314, 272, 267, 273, 267, 320, 273, 316, 49407, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n", + " 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n", + " 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]], device='cuda:0')" ] }, "execution_count": null, @@ -198,7 +245,7 @@ } ], "source": [ - "p=\"[1, 2, 2]\"\n", + "p=\"[1, 2, 2]\", \"[1, 2, a 2]\"\n", "a.tokenize_and_push_to_device(p)" ] }, @@ -226,7 +273,7 @@ { "cell_type": "code", "execution_count": null, - "id": "15e7dcf8-5836-48a7-8b21-d118f8f11996", + "id": "ad8a36a9-f409-49b4-aebe-9e0e3be4a7b3", "metadata": {}, "outputs": [ { @@ -247,7 +294,7 @@ { "cell_type": "code", "execution_count": null, - "id": "4902b7cf-1eed-4b82-b4e5-c1ece8bfd416", + "id": "9bed0226-be41-462d-b4ce-7afa9001a13e", "metadata": {}, "outputs": [ { @@ -274,22 +321,22 @@ { "data": { "text/plain": [ - "(torch.Size([2, 77, 1024]),\n", - " tensor([[[-0.3134, -0.4476, -0.0082, ..., 0.2542, -0.0324, -0.2960],\n", - " [ 0.0668, -1.2381, 0.9908, ..., 0.1785, 0.1592, -0.4320],\n", - " [ 0.6988, -0.2168, -1.2912, ..., 2.1063, -0.0302, -0.5666],\n", + "(torch.Size([2, 77, 512]),\n", + " tensor([[[-0.3819, -0.3694, -0.0712, ..., 0.0958, -0.0834, -0.0929],\n", + " [-0.2665, 0.1840, -0.5888, ..., 0.7207, -1.7479, 1.2859],\n", + " [-0.9813, -0.6659, 0.2100, ..., -0.4228, 0.5374, 0.8488],\n", " ...,\n", - " [ 0.4703, -1.4072, -0.4847, ..., -0.1257, -0.1650, 0.1206],\n", - " [ 0.5117, -1.3949, -0.4672, ..., -0.4288, -0.2166, 0.2904],\n", - " [ 0.1480, -2.1998, -1.1187, ..., 0.0823, -0.4157, 0.6237]],\n", + " [-0.0302, 1.3877, 0.3986, ..., 0.2663, -0.1264, -1.3759],\n", + " [-0.0793, 1.4047, 0.3585, ..., 0.2325, -0.0762, -1.3315],\n", + " [ 0.1596, 1.5992, 0.2774, ..., 0.1208, -0.1303, -1.5472]],\n", " \n", - " [[-0.3134, -0.4476, -0.0082, ..., 0.2542, -0.0324, -0.2960],\n", - " [-0.1180, -1.6322, 1.2987, ..., -0.1378, -0.1529, -0.3377],\n", - " [-0.7251, -0.8167, -0.9966, ..., 2.2262, 
-0.2325, -0.0138],\n", + " [[-0.3819, -0.3694, -0.0712, ..., 0.0958, -0.0834, -0.0929],\n", + " [-1.2511, 1.4713, 0.7262, ..., 1.1487, -0.4976, 0.4495],\n", + " [-1.2653, -0.3404, 0.9427, ..., 0.1537, 0.0260, 0.4574],\n", " ...,\n", - " [ 0.3887, -1.3395, -0.5868, ..., -0.1621, -0.0594, 0.1253],\n", - " [ 0.4360, -1.3350, -0.5684, ..., -0.4643, -0.1131, 0.2847],\n", - " [ 0.1691, -2.1725, -1.1441, ..., 0.0633, -0.3175, 0.7041]]]))" + " [-0.0698, 1.4014, 0.4691, ..., 0.2275, -0.0690, -1.3637],\n", + " [-0.1190, 1.4172, 0.4266, ..., 0.1950, -0.0225, -1.3243],\n", + " [ 0.1392, 1.6179, 0.3527, ..., 0.0764, -0.0845, -1.5251]]], device='cuda:0'))" ] }, "execution_count": null, @@ -306,33 +353,7 @@ { "cell_type": "code", "execution_count": null, - "id": "8efcdfb1-b8c4-44c4-b15a-2900df2d3cb6", - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "tensor([[49406, 272, 267, 272, 267, 273, 49407, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n", - " 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n", - " 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n", - " [49406, 273, 267, 273, 267, 273, 49407, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n", - " 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n", - " 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]])" - ] - }, - "execution_count": null, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "c" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "952cf514-2838-4516-8313-51b4507c8cfd", + "id": "56415466-0a23-405d-a554-8b8be57f7df5", "metadata": {}, "outputs": [ { @@ -387,6 +408,19 @@ "Model takes now also (batched) scalar int values that are defined to unique conditions like $[1,2,2]=4$. If input is now such int the output is the cached pre-embedded tensor. 
If a non int, like a token string is passed we just do the normal embedding live." ] }, + { + "cell_type": "code", + "execution_count": null, + "id": "b06adb4f-56f7-4ca1-bd21-fabe060eba61", + "metadata": {}, + "outputs": [], + "source": [ + "#| export\n", + "@dataclass\n", + "class CachedFrozenOpenCLIPEmbedderConfig(FrozenOpenCLIPEmbedderConfig):\n", + " enable_cache_token_limit: bool" + ] + }, { "cell_type": "code", "execution_count": null, @@ -397,9 +431,20 @@ "#| export\n", "class CachedFrozenOpenCLIPEmbedder(FrozenOpenCLIPEmbedder):\n", " \"\"\"Adds caching support to `FrozenOpenCLIPEmbedder`.\"\"\"\n", + "\n", + " def __init__(self, arch=\"ViT-B-32\", version=\"datacomp_xl_s13b_b90k\", max_length=77, freeze=True, layer=\"penultimate\", enable_cache_token_limit: bool = True, **kwargs):\n", + " super().__init__(arch=arch, version=version, max_length=max_length, freeze=freeze, layer=layer, **kwargs) \n", + " self.enable_cache_token_limit = enable_cache_token_limit\n", + "\n", + " self.params_config = CachedFrozenOpenCLIPEmbedderConfig(arch, version, max_length, freeze, layer, enable_cache_token_limit)\n", + " \n", + " def get_token_count(self, tokens, padding_token=0):\n", + " # tokens .. [b, seq]\n", + " collabsed_tokens = (tokens != padding_token).to(torch.int32)\n", + " return torch.count_nonzero(collabsed_tokens, dim=-1) # [b]\n", " \n", - " @torch.no_grad()\n", - " def generate_cache(self, str_list: list=None, tokens=None, cached_empty_token_index=0, b_size=2048, y_on_cpu=False): \n", + " @torch.inference_mode()\n", + " def generate_cache(self, str_list: list=None, tokens=None, cached_empty_token_index=None, b_size=2048, y_on_cpu=False): \n", " self.cached_empty_token_index = cached_empty_token_index \n", " if exists(str_list): self.cached_tokens = self.tokenize_and_push_to_device(str_list) \n", " elif exists(tokens): self.cached_tokens = tokens\n", @@ -409,6 +454,14 @@ " # cached_tokens [n, 77] ... int\n", " # cached_embeddings [n, 77, 512] ... 
float\n", "\n", + " if self.enable_cache_token_limit:\n", + " self.max_length = self.get_token_count(self.cached_tokens).max().item()\n", + " self.params_config.max_length = self.max_length\n", + " self.params_config.enable_cache_token_limit = self.enable_cache_token_limit\n", + " print(f\"[INFO]: - `generate_cache` infered a TOKEN limit of {self.max_length}\")\n", + "\n", + " #self.cached_tokens = self.cached_tokens[:, :self.max_length]\n", + " \n", " n = self.cached_tokens.shape[0]\n", " \n", " n_chunks = int(np.ceil(n / b_size))\n", @@ -418,7 +471,7 @@ " last_ind = 0\n", " for i, cached_tokens in tqdm(enumerate(self.cached_tokens.chunk(n_chunks)), total=n_chunks):\n", " \n", - " x = super().forward(cached_tokens.to(self.device))\n", + " x = super().forward(cached_tokens.to(self.device)) # ... [b, seq, ch]\n", " \n", " if i == 0:\n", " mem = n * x.shape[1] * x.shape[2] * x.element_size() * 1e-9\n", @@ -428,11 +481,14 @@ " self.cached_embeddings[last_ind:last_ind+x.shape[0]] = x.to(self.cached_embeddings.device)\n", " \n", " last_ind += x.shape[0]\n", - " \n", + "\n", + " if self.enable_cache_token_limit:\n", + " self.cached_embeddings = self.cached_embeddings[:, :self.max_length]\n", + " \n", " if not y_on_cpu:\n", - " self.cached_embeddings = self.cached_embeddings.to(in_device)\n", + " self.cached_embeddings = self.cached_embeddings.to(in_device) \n", "\n", - " @torch.no_grad()\n", + " @torch.inference_mode()\n", " def look_up_cos_sim_cached_index(self, str_list: list=None, tokens=None):\n", " if exists(str_list): tokens = self.tokenize_and_push_to_device(str_list) \n", " else: raise RuntimeError(\"please provide str_list or tokens\")\n", @@ -453,13 +509,18 @@ " \n", " return max_idx \n", " \n", - " @torch.no_grad()\n", + " # @torch.inference_mode()\n", " def forward(self, c, **kwargs): \n", " in_device = c.device\n", " \n", - " if c.dim() == 1: return self.cached_embeddings[c.to(self.cached_embeddings.device)].to(in_device) #list of ints \n", - " elif c.dim() 
== 2: return super().forward(c, **kwargs) #tokenized input \n", - " else: raise NotImplementedError(\"\")" + " if c.dim() == 1: c_emb = self.cached_embeddings[c.to(self.cached_embeddings.device)].to(in_device) #list of ints \n", + " elif c.dim() == 2: c_emb = super().forward(c.to(self.device)) #tokenized input \n", + " else: raise NotImplementedError(\"\")\n", + "\n", + " if self.enable_cache_token_limit:\n", + " c_emb = c_emb[:, :self.max_length]\n", + " \n", + " return c_emb" ] }, { @@ -468,10 +529,17 @@ "id": "7311aa65-c8f2-4ffd-b176-3b0d054e59f0", "metadata": {}, "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "[INFO]: - `generate_cache` infered a TOKEN limit of 7\n" + ] + }, { "data": { "application/vnd.jupyter.widget-view+json": { - "model_id": "5a795ff325c540ccbb1407ab880c633f", + "model_id": "12879829a1754f95b66f31f5b0fae6cf", "version_major": 2, "version_minor": 0 }, @@ -486,17 +554,38 @@ "name": "stdout", "output_type": "stream", "text": [ - "[INFO]: caching trying to allocate memory (2, 77, 1024) on cpu, approx. 0.001 GB\n" + "[INFO]: caching trying to allocate memory (7, 77, 512) on cuda, approx. 
0.001 GB\n" ] } ], "source": [ - "a = CachedFrozenOpenCLIPEmbedder()\n", - "p = [\"1,1,2\", \"2,2,2\"]\n", + "a = CachedFrozenOpenCLIPEmbedder(enable_cache_token_limit=True).to(device)\n", + "p = [\"1,1,2\", \"2,2,2\", \"4,4,4\", \"6,4,7\", \"6,4,8\", \"6,4,9\", \"6,4,1\"]\n", "\n", "a.generate_cache(p)" ] }, + { + "cell_type": "code", + "execution_count": null, + "id": "c1ea0bbb-2ed1-43c1-bf61-64f4015acd0e", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "CachedFrozenOpenCLIPEmbedderConfig(arch='ViT-B-32', version='datacomp_xl_s13b_b90k', max_length=7, freeze=True, layer='penultimate', enable_cache_token_limit=True)" + ] + }, + "execution_count": null, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "a.params_config" + ] + }, { "cell_type": "code", "execution_count": null, @@ -506,7 +595,7 @@ { "data": { "text/plain": [ - "(torch.Size([3, 77, 1024]), torch.Size([3, 77, 1024]), True)" + "(torch.Size([3, 7, 512]), torch.Size([3, 7, 512]), False)" ] }, "execution_count": null, @@ -515,13 +604,76 @@ } ], "source": [ - "c_cached = torch.tensor([0,0,1], device=a.device)\n", + "c_cached = torch.tensor([0, 0, 1], device=a.device)#.cpu()\n", "c_uncached = a.tokenize_and_push_to_device([\"1,1,2\", \"1,1,2\", \"2,2,2\"])\n", "\n", "enc_cached = a(c_cached)\n", - "enc_uncached = a(c_uncached)\n", + "enc_uncached = a(c_uncached)#.cpu()\n", "\n", - "enc_cached.shape, enc_uncached.shape, torch.allclose(enc_cached, enc_uncached, atol=1e-5)" + "enc_cached.shape, enc_uncached.shape, torch.allclose(enc_cached, enc_uncached, atol=1e-3)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "582840fe-b9a3-4f30-9d0e-601518c5c0fc", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "(torch.float32, torch.float32)" + ] + }, + "execution_count": null, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "enc_cached.dtype, enc_uncached.dtype" + ] + }, + { + "cell_type": "code", + 
"execution_count": null, + "id": "4f4fbc98-a8fd-48cd-a2a4-7b09888c5709", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "tensor(0.0015, device='cuda:0')" + ] + }, + "execution_count": null, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "(enc_cached[0, :4, :10]-enc_uncached[1, :4, :10]).abs().max()" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "7a970cbc-5448-4464-ba86-3699013fd78a", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "tensor(1.9731, device='cuda:0')" + ] + }, + "execution_count": null, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "(enc_cached[0, :4, :10]-enc_uncached[2, :4, :10]).abs().max()" ] }, { @@ -552,7 +704,105 @@ }, "widgets": { "application/vnd.jupyter.widget-state+json": { - "state": {}, + "state": { + "12879829a1754f95b66f31f5b0fae6cf": { + "model_module": "@jupyter-widgets/controls", + "model_module_version": "2.0.0", + "model_name": "HBoxModel", + "state": { + "children": [ + "IPY_MODEL_190330a62b1846f89f4214f16c0df640", + "IPY_MODEL_150711e3844e4632bb412e9e92e2d870", + "IPY_MODEL_476eb8d8243246b8a3b4c617a1ce72ab" + ], + "layout": "IPY_MODEL_8510ee3948484c5a958363ccea0f6607" + } + }, + "150711e3844e4632bb412e9e92e2d870": { + "model_module": "@jupyter-widgets/controls", + "model_module_version": "2.0.0", + "model_name": "FloatProgressModel", + "state": { + "bar_style": "success", + "layout": "IPY_MODEL_a23d4a4f6992491d8bc7849a11b7614d", + "max": 1, + "style": "IPY_MODEL_8b130d6937f346a9ac5a835a5d3e7618", + "value": 1 + } + }, + "190330a62b1846f89f4214f16c0df640": { + "model_module": "@jupyter-widgets/controls", + "model_module_version": "2.0.0", + "model_name": "HTMLModel", + "state": { + "layout": "IPY_MODEL_4ff4fb43f2a04aa2a47698c90ffb6292", + "style": "IPY_MODEL_7e19f890dbbd4fb5b1fb102aa0fa573f", + "value": "100%" + } + }, + "40dfa85f24d448abbbaf846dc29c4c9f": { + "model_module": "@jupyter-widgets/controls", 
+ "model_module_version": "2.0.0", + "model_name": "HTMLStyleModel", + "state": { + "description_width": "", + "font_size": null, + "text_color": null + } + }, + "476eb8d8243246b8a3b4c617a1ce72ab": { + "model_module": "@jupyter-widgets/controls", + "model_module_version": "2.0.0", + "model_name": "HTMLModel", + "state": { + "layout": "IPY_MODEL_f5435bc893e34c2ba7b9407d9dc65e88", + "style": "IPY_MODEL_40dfa85f24d448abbbaf846dc29c4c9f", + "value": " 1/1 [00:00<00:00, 76.92it/s]" + } + }, + "4ff4fb43f2a04aa2a47698c90ffb6292": { + "model_module": "@jupyter-widgets/base", + "model_module_version": "2.0.0", + "model_name": "LayoutModel", + "state": {} + }, + "7e19f890dbbd4fb5b1fb102aa0fa573f": { + "model_module": "@jupyter-widgets/controls", + "model_module_version": "2.0.0", + "model_name": "HTMLStyleModel", + "state": { + "description_width": "", + "font_size": null, + "text_color": null + } + }, + "8510ee3948484c5a958363ccea0f6607": { + "model_module": "@jupyter-widgets/base", + "model_module_version": "2.0.0", + "model_name": "LayoutModel", + "state": {} + }, + "8b130d6937f346a9ac5a835a5d3e7618": { + "model_module": "@jupyter-widgets/controls", + "model_module_version": "2.0.0", + "model_name": "ProgressStyleModel", + "state": { + "description_width": "" + } + }, + "a23d4a4f6992491d8bc7849a11b7614d": { + "model_module": "@jupyter-widgets/base", + "model_module_version": "2.0.0", + "model_name": "LayoutModel", + "state": {} + }, + "f5435bc893e34c2ba7b9407d9dc65e88": { + "model_module": "@jupyter-widgets/base", + "model_module_version": "2.0.0", + "model_name": "LayoutModel", + "state": {} + } + }, "version_major": 2, "version_minor": 0 } diff --git a/src/models/layers.ipynb b/src/models/layers.ipynb index 5c8eaeb..602e505 100644 --- a/src/models/layers.ipynb +++ b/src/models/layers.ipynb @@ -5,15 +5,9 @@ "id": "a8980c24-d62e-462b-ba89-3195cfdcc374", "metadata": {}, "source": [ - "# Layers" - ] - }, - { - "cell_type": "markdown", - "id": 
"1074866c-9b85-4cd3-ab81-dd09c62348d0", - "metadata": {}, - "source": [ - "Common model layers." + "# Layers\n", + "\n", + "> Common model layers." ] }, { @@ -89,7 +83,10 @@ " self.use_conv = use_conv \n", " self.up_sample = nn.Upsample(scale_factor=kernel_size) \n", " if self.use_conv: \n", - " self.conv1 = nn.Conv2d(in_ch, out_ch, kernel_size=(1,3), stride=1, padding=\"same\") \n", + " if kernel_size==(1,2): kernel_size_conv = (1,3)\n", + " else: kernel_size_conv = 3\n", + " \n", + " self.conv1 = nn.Conv2d(in_ch, out_ch, kernel_size=kernel_size_conv, stride=1, padding=\"same\")\n", " else: \n", " self.convId = nn.Conv2d(in_ch, out_ch, kernel_size=1, stride=1, padding=\"same\") if in_ch!=out_ch else nn.Identity() \n", " \n", @@ -178,14 +175,14 @@ "#| export\n", "class ResBlock2D(nn.Module):\n", " \"\"\"A 2d residual block.\"\"\"\n", - " def __init__(self, in_ch, out_ch, kernel_size, skip=True):\n", + " def __init__(self, in_ch, out_ch, kernel_size, skip=True, num_groups=32):\n", " super().__init__() \n", " self.act = nn.SiLU() \n", " self.conv1 = nn.Conv2d( in_ch, out_ch, kernel_size=kernel_size, stride=1, padding =\"same\") \n", " self.conv2 = nn.Conv2d(out_ch, out_ch, kernel_size=kernel_size, stride=1, padding =\"same\")\n", " \n", - " self.norm1 = torch.nn.GroupNorm(num_groups=32, num_channels=in_ch) #, eps=1e-6, affine=True) \n", - " self.norm2 = torch.nn.GroupNorm(num_groups=32, num_channels=out_ch) #, eps=1e-6, affine=True) \n", + " self.norm1 = torch.nn.GroupNorm(num_groups=num_groups, num_channels=in_ch) #, eps=1e-6, affine=True) \n", + " self.norm2 = torch.nn.GroupNorm(num_groups=num_groups, num_channels=out_ch) #, eps=1e-6, affine=True) \n", " \n", " self.skip = skip \n", " if self.skip: \n", @@ -218,7 +215,7 @@ "outputs": [], "source": [ "#| export\n", - "class ResBlock2D_Conditional(nn.Module):\n", + "class ResBlock2DConditional(nn.Module):\n", " \"\"\"A 2d residual block with input of a time-step $t$ embedding.\"\"\"\n", " def __init__(self, in_ch, 
out_ch, t_emb_size, kernel_size, skip=True):\n", " super().__init__() \n", @@ -321,15 +318,16 @@ "#| export\n", "class PositionalEncoding(nn.Module):\n", " \"\"\"An absolute pos encoding layer.\"\"\"\n", - " def __init__(self, d_model: int, dropout: float = 0.0, max_len: int = 5000):\n", + " def __init__(self, d_model: int, dropout: float = 0.0, max_len: int = 5000, freq_factor: float = 10000.0):\n", " super().__init__()\n", " self.dropout = nn.Dropout(p=dropout)\n", "\n", " position = torch.arange(max_len).unsqueeze(1)\n", - " div_term = torch.exp(torch.arange(0, d_model, 2) * (-math.log(10000.0) / d_model))\n", + " div_term = torch.exp(torch.arange(0, d_model, 2) * (-math.log(freq_factor) / d_model))\n", " pe = torch.zeros(max_len, d_model)\n", " pe[:, 0::2] = torch.sin(position * div_term)\n", " pe[:, 1::2] = torch.cos(position * div_term)\n", + " pe = pe.contiguous()\n", " self.register_buffer('pe', pe)\n", "\n", " def forward(self, x: torch.Tensor) -> torch.Tensor:\n", @@ -351,8 +349,8 @@ "#| export\n", "class TimeEmbedding(PositionalEncoding):\n", " \"\"\"A time embedding layer\"\"\"\n", - " def __init__(self, d_model: int, dropout: float = 0.0, max_len: int = 5000):\n", - " super().__init__(d_model, dropout, max_len) \n", + " def __init__(self, d_model: int, dropout: float = 0.0, max_len: int = 5000, freq_factor: float = 10000.0):\n", + " super().__init__(d_model, dropout, max_len, freq_factor) \n", " self.ff = FeedForward(d_model, d_model) \n", " \n", " def forward(self, t: torch.Tensor): \n", @@ -370,9 +368,9 @@ "source": [ "#| export\n", "class PositionalEncodingTransposed(PositionalEncoding):\n", - " def __init__(self, d_model: int, dropout: float = 0.0, max_len: int = 5000):\n", - " super().__init__(d_model, dropout, max_len) \n", - " self.pe = torch.permute(self.pe, (1, 0)) # [max_len, d_model] to [d_model, max_len]\n", + " def __init__(self, d_model: int, dropout: float = 0.0, max_len: int = 5000, freq_factor: float = 10000.0):\n", + " 
super().__init__(d_model, dropout, max_len, freq_factor) \n", + " self.pe = torch.permute(self.pe, (1, 0)).contiguous() # [max_len, d_model] to [d_model, max_len]\n", " \n", " def forward(self, x: torch.Tensor) -> torch.Tensor:\n", " \"\"\"\n", @@ -393,20 +391,20 @@ "#| export\n", "class PositionalEncoding2D(PositionalEncodingTransposed):\n", " \"\"\"A 2D absolute pos encoding layer.\"\"\"\n", - " def __init__(self, d_model: int, dropout: float = 0.0, max_len: int = 5000):\n", - " super().__init__(d_model=d_model//2, dropout=dropout, max_len=max_len) \n", + " def __init__(self, d_model: int, dropout: float = 0.0, max_len: int = 5000, freq_factor: float = 10000.0):\n", + " super().__init__(d_model=d_model//2, dropout=dropout, max_len=max_len, freq_factor=freq_factor) \n", " self.d_model_half = d_model//2 \n", " # self.proj = nn.Conv2d(d_model, d_model, kernel_size=1, stride=1, padding =\"same\") \n", " \n", " def forward(self, x: torch.Tensor) -> torch.Tensor:\n", " \"\"\"\n", " Arguments:\n", - " x: Tensor, shape ``[batch_size, gate_color, space , time]``\n", + " x: Tensor, shape ``[batch_size, gate_color, space, time]``\n", " \"\"\"\n", " \n", " p1 = self.pe[None, :, :x.size(2), None] #space encoding\n", " p2 = self.pe[None, :, None, :x.size(3)] #time encoding \n", - " \n", + " \n", " x[:, :self.d_model_half] = x[:, :self.d_model_half] + p1\n", " x[:, self.d_model_half:] = x[:, self.d_model_half:] + p2\n", " \n", @@ -425,8 +423,8 @@ "#| hide\n", "#| export\n", "class PositionalEncoding2DSpaceOnly(PositionalEncodingTransposed):\n", - " def __init__(self, d_model: int, dropout: float = 0.0, max_len: int = 5000):\n", - " super().__init__(d_model=d_model, dropout=dropout, max_len=max_len) \n", + " def __init__(self, d_model: int, dropout: float = 0.0, max_len: int = 5000, freq_factor: float = 10000.0):\n", + " super().__init__(d_model=d_model, dropout=dropout, max_len=max_len, freq_factor=freq_factor) \n", " \n", " def forward(self, x: torch.Tensor) -> 
torch.Tensor:\n", " \"\"\"\n", @@ -443,23 +441,73 @@ "execution_count": null, "id": "18df9b32-740e-4be5-a999-f89a1e3d264e", "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "torch.Size([1, 40, 16, 500])\n" + ] + } + ], + "source": [ + "d_model = 40\n", + "\n", + "a = torch.zeros((1, d_model, 16, 500))\n", + "l = PositionalEncoding2D(d_model=d_model, freq_factor=1_000) \n", + "\n", + "l_pos = l(a)\n", + "print(l_pos.shape)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "01047bc4-ac25-47d8-9c01-c2e6b73addb3", + "metadata": {}, "outputs": [ { "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAzYAAABuCAYAAAD1eoZGAAAAOnRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjEwLjMsIGh0dHBzOi8vbWF0cGxvdGxpYi5vcmcvZiW1igAAAAlwSFlzAAAPYQAAD2EBqD+naQAAq0xJREFUeJzs/Xe0JNd533t/994VOqeT45zJOWCQIwkCYBApUhRJZVqS05Us0pYoW6/ou65t+fq1RNv3SnddSV62HCRLpkQFUiRFMAEkQIDIM4MBJsczZ04+p0/nylX7/aMPBqBIy6BNviLN+qw1a2F6Fbprqn71PLW7dlcJrbUmlUqlUqlUKpVKpb6Lyb/uFUilUqlUKpVKpVKp/1npwCaVSqVSqVQqlUp910sHNqlUKpVKpVKpVOq7XjqwSaVSqVQqlUqlUt/10oFNKpVKpVKpVCqV+q6XDmxSqVQqlUqlUqnUd710YJNKpVKpVCqVSqW+66UDm1QqlUqlUqlUKvVdLx3YpFKpVCqVSqVSqe966cAmlUqlUqlUKpVKfdf7tg1sfuu3fouZmRkymQy33347zz333Lfro1KpVCqVSqVSqdT3uG/LwOZjH/sYH/rQh/in//Sfcvz4cQ4fPsxb3vIWVldXvx0fl0qlUqlUKpVKpb7HCa21/la/6e23386tt97Kb/7mbwKQJAlTU1N88IMf5Jd/+Zf/yv83SRIWFxcpFosIIb7Vq5ZKpVKpVCqVSqW+S2it6XQ6jI+PI+VffU3G+FZ/eBAEHDt2jA9/+MM3XpNS8uCDD/L0009/3fK+7+P7/o2/LywssG/fvm/1aqVSqVQqlUqlUqnvUtevX2dycvKvXOZbPrBZX18njmNGRka+5vWRkRHOnTv3dcv/6q/+Kr/yK7/yda//0u+/GztnfqtXL5VKpVKpVCqVSn2X8J2Qf/X+T1AsFv+7y37LBzbfrA9/+MN86EMfuvH3drvN1NQUv3DLKUpF9de4ZqlUKpVKpVKpVOqvU7sT86/gdf1E5Vs+sBkcHEQpxcrKyte8vrKywujo6Nctb9s2tm1/3evveeQHMbJf/3oqlUqlUqlUKpX63hC5PvCR17Xst3xgY1kWN998M48++ig/8AM/APRvCPDoo4/ygQ984HW/z8HaKnbO+lavXiqVSqVSqVQqlfou4TsBT73OZb8tU9E+9KEP8ZM/+ZPccsst3HbbbfzGb/wGvV6Pn/7pn37d7/Evb3k8nYqWSqVSqVQqlUp9D2t3Yv7d61z22zKw+eEf/mHW1tb4J//kn7C8vMyRI0f43Oc+93U3FPir/MRj34+ZTkVLpVKpVCqVSqW+Z4WuD/w/r2vZb9vNAz7w
gQ98U1PP/rKCEWCa6XNsUqlUKpVKpVKp71VhGLzuZf/a74r23/Lv7/08peJf/RCeVCqVSqVSqVQq9b+udifhT17nst+xA5sPPvUQVnrzgFQqlUqlUqlU6ntW4ATwOn9l8x07sFly8hg6/Y1NKpVKpVKpVCr1vap/u+fX5zt2YPNHb/pkele0VCqVSqVSqVTqe1i7EzP0Opf9jh3Y/IsX70qfY5NKpVKpVCqVSn0P850AuPy6lv2OHdh8dWUSI5v5616NVCqVSqVSqVQq9dckcr3Xvex37MDmzx/8s3QqWiqVSqVSqVQq9T2s3YmZfJ3LfscObH7v4iEyefOvezVSqVQqlUqlUqnUXxOvFwIXXtey37EDmz+4tA+VTkVLpVKpVCqVSqW+Z8X/S0xFe+jjFNOpaKlUKpVKpVKp1PesTidm9+tc9jt2YPP48hS5TjoVLZVKpVKpVCqV+l7l9ELg1Ota9jt2YPNrL96JTKeipVKpVCqVSqVS37MS1wM++7qW/Y4d2PzxA5+kUEinoqVSqVQqlUqlUt+rut2YW17nst+xA5u5bpH8d+7qpVKpVCqVSqVSqW+zXjd63ct+x44cfv6ZB5GZdCpaKpVKpVKpVCr1vSrxPOCx17Xsd+zA5j/c+zD5wnfs6qVSqVQqlUqlUqlvs1434s2vc9nv2JGDrWIySvx1r0YqlUqlUqlUKpX6axKp+HUv+x07sPkbj70jnYqWSqVSqVQqlUp9D+tPRXv2dS37HTuw+bVbHyeXTkVLpVKpVCqVSqW+ZzndiB99nct+x44c9lbXKaa3e06lUqlUKpVKpb5ndcxv01S0X/3VX+XjH/84586dI5vNctddd/GRj3yE3bt331jG8zx+8Rd/kT/6oz/C933e8pa38Nu//duMjIx8Mx/Fex55Nyp9QGcqlUqlUqlUKvU9K3Y94MXXtew3NbB5/PHH+bmf+zluvfVWoijiH//jf8yb3/xmzpw5Qz6fB+AXfuEX+MxnPsOf/MmfUC6X+cAHPsAP/uAP8tWvfvWb+kf8g/0vkM2b39T/k0qlUqlUKpVKpf7X4fZC/v7rXFZorfX/6Aetra0xPDzM448/zn333Uer1WJoaIiPfvSjvPe97wXg3Llz7N27l6effpo77rjjv/ue7XabcrnM/JmdDFU0Ak2YKNzYQvoaEWtIEhJLIQywrAhTJEigm5joUEAoIIxASjAkwk4wZEJORfhaEsWSMDAQYYJINEhJYgImFA0f0CQIeqENEchAg9ZoKUhsScYMMWSCgcZJDMJYIXwBcQJJgrYMpKlRZkxWxCQIvEQRBxIiIIxBSVASbWsyKsIWMSGCMDIIIoX0Y4QGhCC2JcpIyBkBAk2iJd3IRgQaGWuINdoQaFNiWyGG1Bgk9BKTOJbgC4g2L+OZCmFppErIy5AISZgIgsBEhBoRxaAUWkFiCQqGjxRJ/98QWkSxRPkJaNACEltiGRGmEWOgCbTCjQ2kD8Qa4gRt9veVYcfYIkFubrcklCSRQASb+2pze5gqJicjQgRRIvEDExls7ishSCwBBhRMH4FGI+i+sq9C+vtACrQlscwIpRIskeDGBmGiwHt1X2EaYGqkmZCXEQkQaEnoG+gYRBAjpEQrQWILsirEkhExgiA2CCID6SeIBEAT2wrDSLDNEANNrAXd2EIGICLQcQyGRBsS044wZYJJgqMN4kiSBBIRbu4rQ4GlkUZCQYZECGIt8HwLIo2Mkv72MATaFORNH9VfEXqRTfzKvko0WoC2FIYZYxoxFgmBVniJAk/2t0ecgKlAgbQTsjJGovG0JAoVSSQRftTfHlKQZASWisjKkARBmCi8cHNfbf4TEksgDMhaAYp+qelEr+4rHSWg+tk1rBhDJmRkjJsYRIkk8SREr9lXhgZTU1QhGgi1wA9MdAQy7G+P/nEqyBoBpuyviBebBLGB9DRC94/nxFYoI8E0IywSYiS92EQE9I/TKO7vAyWQmRhLJFgixtOKKFLEoUQE
/fcXSpLYIJWmoHwSBLGWOIGFCDUy0iC4sa9ypo8SGommG9skkUAG9I8XQNsKZcYYKiEjYgKtCBJF4m/uqyjpr5sBWJq8ipAkBFoShAZJLJF+3N8e4tV9lVEhQL+mRps1NdncHpZEKMhYIYbQCKATm5v1VEC0eZyaErlZU7MywksUUSKJfIUI+3kTxmZNNaBk+GggRuAGVn9fBa/sK0gsSdYMMTbz5sYmQbK5rxKNThK0bSCNBMOMyWzWIycx0L6A+JWa2t9XZPoZskS8We8VUagQfowAkJLYFiiVkDd8AGIt6YX2q/tKaxJDginIWAFKaAw03dgijgUieLWmakuhzARpJP26pSVBIol8AxFt7ivV31eJBQUjQKKJEXihSRK9dl+9WlNtFSHRBImBG5vfsP+ZVoQtEsRf0f+wE0zVX7dvuv+FIMPX9D9L9vNxo/+ZhLH8hv1PGjE5udn/tCT21V/qf4LEhpyKMEVMhCCIDMJI9Wuq1jf6n2EkZDf7X6wlvchGBpvbY7PHaENg2SGmeE3/i+Tm9ni1/2Hp/r6XISGS6Eb/SxCRBiVJDNDmq/1PI3BC69WaqnX/uLL7PcZUr/Y/LzYQHpDoV2uqAcqOyYgYCThakQTq6/tfRmPJmOwr/S+W+OHX9z9hQP6/1/9MiWVHKJlgiwQnNggTCb7sZzfRr+l/mrwMSaCf383+J4MYXtP/ckaA+co5TWQSxgrl9c8HQBNnFIZ6bf+TdGOzX9siIN6sqYZA2RHWX+5/r6mpKIW2Neob9D8R6f6x9Vf1v0iigv7x8tr+Z6j+eYivFf436n8GCEuTk9Fr+p/R31d/qf/ZKiLzl/ufn/TP3V6pqd+o/4X98wGi/rGgDYlh/6X+F0viV84HEg2GAZYGQ1NUwWaPEXh/uf+pfkZe2//c2NrcV7p/PoDu9xgjwTQiLJH0+1Vi9PMR66/pfyLT70PmN+h/N2pqRmComPyNdev3Pxnq/nGFJjElGIKs9Ur/g25s3eh/Ok5AgDYVhhWjVH97BIliva2Z3nuBVqtFqVT6K8cR/1O/sWm1WgDUajUAjh07RhiGPPjggzeW2bNnD9PT0//NgY3v+/i+f+Pv7XYbgP/r7AHef8sxBmTIV9Z28G/OPsDUHzvY1xz00hqtB7aSPSR56zuf5Q25dYZkyC/O3UP36QrR01nMp86QDFeI9k+R+ZE1bpta5EOjL/LZ7hAvL43w6GduofT4LPZ8Fzk5xtobbJJbY/79LR9DqpBGZPN/PP4OvBcNRr9YR/sB3liGlbdP8mP3fIWbJua4ye7wG9eP8MVrOxj8fQNxbRW9tkH3rQcYuLnN1nuv8/cHz7EWZfhPGzuY/dRWvFOa3GPn0dsnibeVaP54wN+ZOsH3D13kKa/ME2f38uVjBxn7kysYnoZsluvvG2T73lV+6ba/oCBiFpwqP//Cexh81Kf8kkuyuELvyAj+rQO88wee4mhlndszTf7367cxd26A5DNl1PFLoBOCO3aTe8sGk3vrfGTqWZ52qzzbGuSLf3obxokG+eNLiC3jdA9kWH2Lya8f+nOmC3XWYovfeu5ezp8ZY+qPlyGMiLOChR/awpuOnOKhfSc5aHX5wvpO/t/Z2xn+qMC40oUr8/TetIvMQcmBd53nh8rXGDU8fnXlEEtPjdJ8vkTuC6dgsIrePkbnJ1zum57lF6ee5WmvyktLE/zJl+5h8PPXyS44iGKBlQfKqKMJ/+aeP6JoBISxwc89+x7iYwYjX3bQq+t4YzmaD2zhrW95jkPjS7wlv8pvLh/mq9en4KMV1PlFxMoG/hsOkLmtR+XuOv9y4nnqicFnumN89VOH6b0M5UcuIiZGCbbkmf8xm3+w4wkeHD7PfJzh0+cO8rmXDzP9hwuobgQkLLx3ip37V/gbdz7GLtNh3qnx82ffRu2zkDsVoC/M4h6eIDlS45Yfepn7qsu8Mb/KR1YOMXt2mLVHRsk8fhapBcmB7Xjf7zK1p85vbP8Sp4IC
p9tVfu8vHiT77AbFE+uIaoXWoRzr9+f5V3f+CdOFDRSaf3LiQS6dHWH6T9roZovI1Ky9eyd33n6WW/dd4vvyazy8Mc3vLe3D+IMy4lILde4a3j17MfdLqu9c4h8Mn2bM6vK7zRlefmIHSyeqVP7iLKJaRo/XmH+/wVu3neODW59iNspwbGma33nmQcY/tURmwQOtWXtwmMzhhH/44KeYshysRPK3XnobPGdRfSKBC7P440V6d89w6w+c4tD4Kj9WvsZ/rO/i2PwYK38yiXXiGmq5RXLzHqK7Qsw7u/zbHY/S1JIX3DIfe/gNOC9B7ZE5RKWMN5Vl7n0VfuHgF7l75BIAv3vhJj574SBbf7+BbDgkrsvqD+5g+6FVHrr7GA9kN5jzKvwf1+7G+lQOdTpCnrhAcHgLel+F0R+d412DszxUnOd3mts4d2qC809spfTZsxiJgi3jrLxHsm33Gr++7zNcjzNcapf5N4+8k/KTTUovtUAIWkfKtO8p8eE3fYKtxQajKuQfnH6IubMDjH0iRl9fIjISWm/bw/77rrBn7wJ/p3qFz7an+OTaDOsfnUScaWCdmiO+eTfJQUX89i7/55anGLPbPOIM8MgTR5l9aZChT1xE5vPEA3lm31/h+3ed5G/seJoIwRNLu/jNlx5g+k8a2AsuutWm/pZpCoc0P/7WL3NTpkUOwc9efJDk6TzWUwp5/DzhRIXgpi1MvG+WWyaW+ZnBc3ysPc3L8yO8+Mm95J+6irXchV0zNN8gkbf6/KeDn8ARMOvn+L+/+P34L8LQl1ZBSdzJLEvvHOFnbv8iR0evMWl4/MaVe3nk8m6mfj9ALG2g220a79zH1NF1Dt99ifeXrjHnl/iN1YO0/3yY+HSM/dQ54v0zJDtL8ONNfmrsNA9VZvlUd4hjp3bw/HO7GPzEBYxYIgYqzL2vyO49y/zzI5/G15KrnQH+8VPvZejRDqVTPXS7Q/voEN7tFf7m27/AvmKDPVaPv3/5fpbO1sh92kacuUKiwH3jHsYeWmTb7lU+PPISj3ZHeXxjjJMf2486uU7mpQXYuw33kEnzLQn/z57PMpptcT7M8V+evJ8rp4cY/7M5kIq4ZDH3Y2O8/cAJ3rHrOGPK53PLe/itS/cw9VEfa66HXlql9dAOCocS3vCOE7ytsEJJJPzy9dvpPFkleD6L9ZXTJOMDJHvGUT9W597Jef7+6Eke7g3y8vUxHvnCzVQeuUpmxUWMDbPyQBbzpoDfvu1jIGMaYYYPf+VdJMdg5LEmuufgTuVYf/MkP/amxzg0usgtdpuPzN3Ck7NbqPxXC3l1Gd1o0XvzfgZuazJ55yK/NHSK62GeP2rOcO4TO/FPReQfvwg7pwi3F6j/SMzPzzzDm2pXOeaXeOT0QZ54cR/jfzyL8gHbZO6HR9i/d4EP3voFKjLkcmeYf3TyXYx81qdw2kPPL9G9ZZz4aIXve8/T3Fpa52a7xYfnb2Pp9AD+FyqYz54HJYlu3on5jgYzu9f5tamnecqtcLwxwGc+cRf282vkX15DTo7SOpKh/iaT//fonzKabdFMDP71M29h9swgkx9fA88nzEmW3zfDgzef5J7d57jNbvLJtT38x7mbGPgDA3W1DVcX6L1pD7mDmu3vuMJPVy9RUQG/vr6PuccmaZ4okP/8acTIAMnMEBvvD/m+LRf5wOQLPOeXOHF9mj9+8h6GP3OdzIqHyGRYekuN3JGQ/++9f0xJhQSRxc88/06MZwVDT3roxRXc6SLt+6Z5y9uf4+DoKm/Pr/Abywc5NjdO+LEaxuk5RL1DcM8+rLt6lG9r8K+nn2IhtnmsO8Qjn7wV7+WI8peuIsZH8LblmH9fhl/e8yh3D15hNbb42Jlb+NLp/Wz5wyVkN0TriMUfmmHf/gXed/tTHLS6XOwN8csXH6D2aYl9JoCzV3Bv2QIHyxz84bO8ubLAXbk1/tXqQWZfHmH58VHyj5xGKBN2b6HznoAdu1b5N9sf5aWgyNnm
AP/ps2+m+NV1iqcbiGKBjaMFWvfk+Mi9H2M818JG8w+Pv43F01Um/7yLXm8QZgX1d+7krrtPc3j3NX6wsMzHN7byx0s70X9QQ17YQF2Yx79nL8ZBKHzfOh8eO0HVdPmj9iTHvryX5ZMlKn9xDlmrEo+VmfsbNj+47SX+1sxzLEYWT81v5T8deyOTf7aMvexBHLH61nFKh0I+8MDDbLN6iNjkb5/+PsyvmpSeTtDnrhJsqeLeNsmRHzzD0dFVfrQyy7+v7+LluVHmPzFN5rkrqPUe+qZdePeFZG7t8u93fZ61xOBlt8TvfuZBgpMRtS8vIkoF3Jkc8z9Q5R/e9DA3D1/DIuG3z93GYxd3M/MHLUS9Q+K5rL9nN7uOLHLvHS/ztvwqF5wBfnX+VuQnSsgzAer4BYKbtsHeIrUfWeBHhy5xX3GJ/9DcypmT05x/eprKX5xFSQsxMczCD5ns27XEr+37LAuxzcVmlX/12LsY+PIGxTMdSBKat1Rx7yjyj97852zLtxhVIT97+s2snq4w9GkNV+eJMoL2Q7vZ98Bl9uxa4n+rXuLTrWkeXhoELryuscn/8BWbJEl45zvfSbPZ5MknnwTgox/9KD/90z/9NQMVgNtuu43777+fj3zkI1/3Pv/sn/0zfuVXfuXrXv+1L76VofE27tkKl66Ueez4CNW5PEImeGMtbrujx/jWHtV9S+j1Ms5agS88OUBwNUe8kKWb6zC1tcfBwx0Kt7QxMwLLt5g9mWN1Nsv5ZwYQQYxlRey6o0N1X0hpa8BgeZVg2aI7m+XhL08TrNsUNkyMXW1q0z6H7vSoTC1hmgHR5RpPvlTj0uUihfNlvKxLWOnx0IMNyjNdctvaqHqV9aUsL79UoH6igtdUBNJl75Euo9s9ync45IwIM05YPpbjyvkyF85UyLQF+VpIdXeP7Xf5lEd71IaX8C4VWbte4JOPT5JfyZJxFN50k137XHbtc6nsX0S6FslamS98tUbreg59rkgv36U46HL3vU1y+7rYgyG2m2PxUoblKzbnnhgicjVSx+y5s01lW0j1UEC1uAFdTftcniefHWVlvkBpzcKc6pGb9LjljS7lsTr5WgP/wgBnL5d56qUaxYslEmI61Tb33tNidKtD6eA6slnC3cjxzNNlWhcLOPNZPNljapvLjv09Snf2yGRjsiSsnMyxfDXH8WeGsHqCjBUzclOH8UMBAzM+w6PzBEsZetfz/NmXptGrWYrrNv50k+EtHrfc7lDeuYxhxiTzNZ5+scLcbB7xUgXXdtEFl/sf2KC4zSG7zcHq5Wku28yeznD5mQHcpkJHEduPdBnYFjB4p0cp18HWPo2TRU6dqXH2XJXSmoVV87G3OBy5z6U61qEyuUR4pcryfIFPf3WU/Fweo6doDzc5cLDLzj0O1ZtWEG4W3Szy1afLNK7l6J4t4imHypDHkdvbFI44ZAYi8mjqF23qsxmefXwUugIr0ozf0mFgR8jYgYDB4QXogXupyOeemmB9Pk9lMUc03iEz5nHv/V1KU3WyAx2S2SHOXipy4mwJ9XKZQMc4+S733tdkaMahcKiF4eTxmzZnX8izeKpMYz6DjgLGt7uM73IZvMcjX/AoWD2aL+e5fqXI08+NkG8Y2GZCbk+HnUd9Rra4DMxcJ1rO0Vss8KdfnkAs5sitZGiPNhmfcbj1li6lA2sYJoi1KsdeLLI0m6VzvIqvPFTO5643Nijs9Mhu9cklgu6SydqFDC88OYzXMDBdGDvUpToTMHWnT6W6ji1cuqfKPH9qkLMXq9Su59BVD8Z63HN/j+pEm9KWVeK5QZbn8zzyXA3jYhHdUbTLbQ4fabN1l0Pplg1UmIFullPP5lm9nGflbIkk9qkMBey8qUvlVo/cYEA106V1IUt9LsuXvjyB2TbIBILiwRbjuwKm9wcMTs9BTxFcK/LJJ8ZpLeQpXSvQG2qTG3G5//4Whe0N7JqLWB7kwsU8Fy7k6B6rEcQRoelyx31Nqls9
CoccMrEkbBssvpjl7Ika6/NZTEczuN1hYJvH9H0BpUqLYr5J91SJS5fLPHlslMpyBmXE6C1tbrrdZXTGobZznmSlRHelyKe+Moyey6GWs7TKTbZs73H4SJfiTRsYpsToFjh7PM/qtSzzz9eIkwA7G3Do7jbFvQH5mf72cBdNWlcyfOnLEwQNi2xHUdrXprolYPcdPpXRJSzp41+o8MSLQ1y8UqZ6uUhQcogHHB58sEV5uk1hpgFLgywv5Hj2xRLey2XCjsSxexw+2mZ8h0vptg6WlkhfMft8nuvni1w7V8JwE0pDAeP7HMbv9CkOudQqq/QuFliby/OZx6fIr9vYvoCdTbbvDdi+16O2aw66FuFSib94coTufA7rcpFOuUVxxOW++5rkd7ewaiFGs8LsxSzzlzNcf3qQKIjR0ueme9qUtgcUD/iULIekLWiczvHcc8OsLeTItRTFrQ7FaY99b/AoDW2QLzXxzlY4e7HCUy8NUZ0toFWEO9Lhrrs7jM30KO9bgbUKvbU8jz5Vxb9cIFy06WW7zOzosedAl+JtbSxLYIUmsyf6/e/ccwNILyaTjdh6a5fBgyHlaZ/B2jL+gk3nWo5Pf3kavZYh3zSRO1sMbfE5dJtHZesCpowJr9Z4/ESNa1cL5M6VcfMOScXhwQc3KM10yc50URtV1hYznD+dZ/n5Kl5bEOGz75YuQ9t8Krc75M0AI4xZPVHg/JkKl86XybUkuaGA8rYeu+71qYx0qQ0v4V4ss3S9yKefnKC0mMXyJM5kk30HHXbscagdXIRulni9xBefrNGZyxFfLNLNdqmOuNx2Z5P8wS5WLSbj2yydz7A6m+HUE0MknsbQMTvu6FDdEVDbHzBQXkO3BZ0Leb701Dgbi3lKqzZqS5fchMdtb3Qojq+TK3cIL9V4+WKFY2cq5M+WiGREt9LljW9oMDTjUNy/gWyWcTayvPBskcbZIt0lm1C4bNnpsmWPQ/kuh2w2IiciVl/MsXAlz/Hnh8h0JJlsxOCBDlM3BQxMewyNzxMsZOnMF/jTL02jVrIU6hbudJPxrR5Hb+5R2ruMIYClKk8er7A4myN5ud//ZNHljW/aIL/DIbPFxXZzNBYsFs5nOffVAby2QgQR227uUtsaMHS7T6XQwkx8WieLnDg1yIWLFcorNsaghzXlcMsbXapjbUrjK4RXqixcL/K5Z4fJXS0gXUlroMnRmzps3eVQuWkV4eZImgWeebpE40qe9oUCvnSojfgcuKVD6WaHTC2iICLq57Osz2Z4+vFRVE9ixZqRmzsM7woY3RswNDaPbkncqwU+/cQk7cU8lYUswUSH3JjLvW/sUppZJ1NxSOaGOHWhyOnzRThZwSfEyznc98YNajMu+QMdLDeH27C4dDzHtZMVmos2RBETOx1Gd3gM3eNRLPb6/e+lAlcvlXjm+AjFNRMrE2Pv7LDvNo/h6c3+t1ikvVjgzx4bw1jIYa9laI/0a+qRI13Kh9dQQiE3yrxwvMjqbJbmiQq+9LALPre9oUVht0d2OqBAQmfBpH45wzOPjxI0DSwHRo50qM4ETN8WUB1cwdQ+zpkyX31xmMtXytTmcsQ1F0Yc3vBAl8pUi8JknWRukPn5PE8cryLPloh6gm6xw9Gb20ztcCje0sAMMuiezZlnCyxfLLByoYAOQ2qjPjMHewzc4ZOveVRzbVrncqzN5Xj08QkyTYNMJMjubzG9N2B6j8/g1jl0y8SfL/LnXxmnt5CjMFegO9iiNOZy330tijs3sCoBYmWAcxfyXLyg+Ivf+M/f3is2P/dzP8epU6duDGr+R334wx/mQx/60I2/t9ttpqam2F7r8EhnG+ef2A0vdxh/5gp6/zb8fRl676nypm0nOVBc5nm3wicvHubkyWnG/+tllHCQhZjmT45x5OA53nb4OSoq5LmNGf7Zpbcx83CP3KUetcXrNO+fITlSYtsPnufe/DpblccHrtxH83iV6Mt5rCdPYVQLBEe2ob7fZ2pb
gw9MPs9fOKO8uDLMo0/eSukrcwxcWUdssenclqV3d4533vIIGcOnHlr8k7N30zqRYfzTayi/jTFssfaeGbbc8yS3bb3CLdkW/27+Zj5+bS+jn5So2TpDS9dofd9B1E0Ogw8t8/7aBZzY4DfX9nLtqS30TihqX3gZtk8R7ajR/aEqB7dc50fHTvGEX+WZuZ184dmjjH/8MrlOF3Ix6w8MUT0o+L67PkeemDWnxN899qNUvhxQe84lu7hC98AQvbvG2fLeSxwZWOeN+Tq/NHcXl64MwacrGCcuMeh38O7Zj3gopHy4y89MHuOEW+aJ1hCff/x2ON6m8vQcYotNb1+WjXePc9/hF9hbWWIlsfid+T2cPL2FmY8tYgY9CqZD/ce2MnD4HHcfeYlDRpcn6zP8i4tvYOSTYF7oUr54jd79uwkP2wz88BLvqM6x0+7wf64eZvHsCI2vVil//iWolEh2TuK9u8jYthZ/d8szPO1UOLk8yh985S5qX5gnd7WDrBg03lAmvLPEm9/4Raq2C0j+3pkfwT2RYeLhLtnVdcRQhrW372D0wRc4NL3AO3Mr/ObiYT5+fT98vII8t0z1+nX8Nx3CuN3AesDhJ6ZO4Qn4RHecZ04cpH3SYOTh8zAxQjBdpPnDI+zfcYV3j73M9SDDZ68d4ePHb2P6o/MYzYBMsk79h6cZONTg7jc+wQ7LYcMv8jOn30ntcUHxRED17FWcA2P4t44w8CNL3DOwzNvzS/zzpSNcvDjMyqcmyDxxlrLvEh/Zhf9GKB+K+dt7nuVcmONUr8p/eeFmzOfaVJ5Zhpok2Jen8bZJ7rr9GXaVVzC05lfOfz8vnZlk5g8a6EaHrOyy8iN7KN1+kUM3XeH7i6t8uTnFby8cwf5iEXW+w+hLV3Dv3kNyMIv1vjZvH7zELrvFv6tv4/TL27j23DCVT5xClmKYyNB6+yBHdl7mJ3Z9ldk4y8nVSX77xVsZ//Qyuas9CqzRe9MI8c1F7nrzV9hi9ShoeP/x9xE8n2HgCzHmxVn0cI7OA7sov+sUB2aW+Zu1q/z79d18ZXEbq1+YwnxxntFr80S37yO4PYu+D35s+1MEJDztVPjYC4dovmgw+OkriJrEn8xTf+cU+w49ykNTZ5BS84eXbuNPz97E9j9rYKz3qHQXWXrvLqxDDQ7e/yJvsDdY94r8wqX7sR7NYryYUH7hLMGBKfyDwwz+SIvbR5Z4d3WWf9vYzvn5Mc5+eQflh88y4LvobVmcN5UpHtC8f/9nWYxsrnTK/MvnbiH3ZJvq8xtYsol3qETzgWlufugFdlXXmDI9fvH02zl3bpSpP43g+jIV3aL+zgPk777GtsOr/Ez5Kl9qj/OHKwdY/+wknG4xePwK0W17iHZliN+X8NYtL7MrX+ezzhCPX9rBxRNjjPzJeUQmJh7QNN8+yq27z/DuPV8lTCTPLm/lIyceYPrP6mSvOuTay9QfmkIcLXPgXSe4KddkWEX87fNvJTheIPuYQeH4OcKhIu5tO7Df02bndIOfHzzNf9nYynOLkxz/7D5yT1+jOreK2FfCPVQkvDfDe498GVTAbJjl18/eS/dFk5GHF8mqJoxnab5jip23fom7Ji8xLT3+7ZU7+fjFg2z5qI9YbFCsL9B4zwGqR5tMPzjL+ytz1MMsv7p8mO7Tw8QnYfSJ00R7thDtrpH8eMAtY9d5b+0iH2+PcuzaFE8+uZ/BPztPxusghkza3zdI5cA677ntMwQIFpwKv3T2fmqPOdSOdSh2VmgdGqB99wR73n6K/eU6h80OHzj/Zq6cG6Twxzbi7FUKuovz5oNUH1pk9HCPD4y9yBO9Ib7QmObUE/sRLzYYe/4ayb4dOLttWu+yeWjnC+zIr/OyV+APzx/hzEvjTPzhVaTsYRZCGj89wW0HX+LNB55nzPT5ytp2fu3yA0x/xidztUft+jU2HtpJfMhg+t2XeWt+lXHp8/Ozd9F6vor/ZJ7cV06RDFUID2zBeHfAlpkN
fm7ieR52hnl5cYTPPXkH5S/NUr2+gZ6w2bizQHh7nnfd9nlMGdIKLf7R6XvwjpuMfW4Dw2mixrLUf2CGLfc/ztHpOe7ONvj167fx+dkdDHzSQlxZZXBljtZbDyNv7VB5wzo/NXiajcjiP9e3c/7JHTgnBYOPnIUdU0Tby7R/ZICbtl7lB0bOcjwo8tiVCb74whHG/+wqttND2xHLPzzG8P5V3nLHc1RFyGK3ys+c+CGGPh9QftElN79M76ZRnDvGmPqhK9xWW+O+3Dq/dP1Orl0aJPiLAYznz1OLu3i370O8NaK8v8fPTj3Ps26VZ5uD/MVjd2K8sEHxhSXkVAbnUJb6O0rcf/NTbCut0taKXz/1fZw9Pc6WP1rG8Lrk7R5zP7qTwaMvc+eBU9xmtvn82g5+/crtDH5CYVzqUrk0R++BvUSHJZX3rfDu2hWmrB7/em0/c6cnqD9bpfbZl9ADVeJt43R/sMLEzFX+9vTTPOuWeXFhgt9/4k6GHp4jd93BKCo2Hqqibinw4ANfoGZ6CC35u+d+jPh5k9FHPLILyzCWp/7Qdkbf9jyHJ5d4d26Zf7N4hCfm9hF+fAB1ZoHq0gLB/YcQd0nMN3j81JaTNLXkc71hvnz8JnonBUOfv0AyLvG2FKj/0DiHd3+Zt4ycYyU0+fiVW/nUS0eZ+YNFVHuz//3oVsYOrXLHG57ikN1h0a3wD869jepjkuzJkNrpS/SOTOPfPETlR1e4r7bIm/NL/IulQ1w+P8zC5yfJPXqaUuIRH9yO86BgYH/I397zNOfDHGc7VX7nhVvIPt2gdGwdXZQ4h4s03jTJ3fd8la2FdbIk/H/O/wAXTo+w5Y866HqTrNlj7X17qNx9joOHrvKe4iKfa8zwe0uH4As1xPkWI6eu4t69n/hgFvluh3cNX2LKavNfmlMce3EPc8cGqH3iNKKi0ZM2je8f5s5dZ/jxHV9lMbZ4YWmGf3vyViY+tUz2uks+XKPzlnHEkSJ3vuVJtpkO2UTwEy++B/20ReVxjXXuKowX6d27jdJ76hycWubv1C7x2+t7eGp+K/Nf3Ib5/FWGFxcIj+7Hv0Mh75L8xM4naAMnnBJ/8Pxhei8KBj4/hygrwukc6++a5sDRz3Hv+AVyMuI/XriHT587zLY/bSHWu5R6Syy9dy8jh9fYe+9pvi+7yrxb5n+fvQv5SBHxckz1+XMER7YS7K9Q/PEOd48s8I7KNf5Dc4Yz16c49fgOKp86QyX2SaZztB+sMbh3jffvf4qlyORyq8Y/f/6tVB5vkj+2/rrHFf9DV2w+8IEP8MlPfpKvfOUrbN269cbrX/rSl3jggQdoNBpUKpUbr2/ZsoWf//mf5xd+4Rf+u+/9ym9sHvrozzG61eX7rZeY1B7TkaZuRMxR4Avs5PLyCO31PNZpG87HiLkAeXYOhADTJDo0BdMKsSsh3u8yWG1zaHCBN8TzjGqXXKw4Z2S4Igo83N2Pt1AkXMqRnDBQsz2MKy2S1TpkLORgDf9ojWRSIfcFVGcajJQ3eGv2NFvDmFqS0JQhL4oRjusxXlyYIlmysWZNkuMaFnuoy8sA6HyGZPckyT6BntYk+122D66yrbTO25MrFJMEKxacMjOcjYd4wttB73KVeMlEnjWQZ1qI5R56cRVRKUO1gHvLCGJrjNoSUNu7xs7cCjdn5jgc+Jg6pkfEl+UWLgaDHFudInPFQi0oohMCObuBXG5CFKGHKujJQcLbTfREDNt9Dk7Osd2ucz+zlEOF1oLzSvFEuIOz3hjNiwOo6xI1J9AvtGG9DWtNZLVCPFLE2zuEOuhjjvsUdzW4tTDLTnODW4MesY7pafi8nOFsb5hzrRHyZzKwIIkvSNSpJUTbQbsezIyjx4p4t2RgW4A97nJw6zUOqyUOsMKIr2gjWJCCPwuPsNyu0ZgvkzknkQsJyUkHFteh56OKRYKtA4RbSohbfTKTDvnJDm+oXmCH6HIwdAjjgDVsnpGjPNnaxkqrQu5c
Bn1ZoK/HyBeX0T0XghC5bYpgSw5vTxZ10KUy0mH71CJvUJeZ0j3GPcmKiFkkw8fio6yvlems5SmekiSzEfHlEHF5EZGAsG2Cg5PEUxb6UER2R4fqYJt7hi5yKOowGbvIMOaiKHKWCn/R2k+4lsOas+FlYMFDn9uAZhukQk2O09tfJNxiYRzpMT6+zuRgnbdYZxmJEgYDwRIB55JBvpzs4srCEPG6RemCJDgTkSz6yIvLCCkhYxEemiHeIYi3a/IHWmyprXGwOs89YZ1SHGIGCcdlmfNxlc90D2DOZVDLBsbLmuRql2S+CysNRCGPHKjSunUIPZ1g7HbZvnOR6UKduzKX2eoLslHCWuzxVbZxKhznpcVxsguKwrzAOxHDYgcx30Bo0OUCyY5x3JsUTMVk97U5MjTH9vw6b4hXsMMEQjguihwLp3ja3YpxMYuxJLCuaIKX2yTrDmKliapVSQaKtI+OIHZ7WFMe2/cusDe7yF57hQOuII4DWnHAZ/Q+LrlDvFQfZ/CixFyE8FQEVzdgo4cIIvT4EHqiRvtOG2PaJzvT457p8+wwGhzW6xRdcBLFeW3xGf8As84Q0ZU8uasaYyHGfbEL611Eq4eqVgnHy7g7B9C3uBTGe0xsX+eO4kVmZIf9vsYJXRqx5HPs5qX2OFdbg4yekYjrCXo2Ijq9Bj0f4YWwfYpwokDrpiyZvR0qox3u2nKRQ2KVLbrLgKtYSWAuMfiv/u00G0W85SzVMxq9EOGed5HzDUQQI/M5vN2jRNN5glsDhrY0GR5r8sbaabbGITNRjOP2uKaLPK8neaSxi14jz+BliTwfwaKPf7qOcHxEopFbp+nuyNPbYZM92mJqZJ39owvcbcwxFIcMuAazcczluMifBzezsVIkWLcZOZPgXw0I5gPElTWEkAjLwj8yQ7jFwN+fML53jYlanbsGzrM/DKlGEZHrclIPcS4e4BONQ2SWDYqLAvvlkGjBxZvtIBtdhJ1Bjg7TvKlKOC0wj7Q5NHWdbZV17rJmGfIlhUAxG7ocj6Z4OtrJ1fkhrBXF4GyCe8olXvHQ1zaQUkE+S3hgBmefIJ5JGD+0yr7qPAdKC9wculhBSOJHPJ0McyoY4fPdvVSuSjIrmsLZkN6lLuGai1zvIcslqJXYuH0YtoaoHQ63777Mtuw6B+15trgWhAnLvsMX4gNc8Mc4vTTO0LWE4lKC/5JLvNQhWe2htCQZLJNsGaFxu4kx5TOwu8EbR8+yzW5wKHYwvAg/EDwXV/mKu52XepMULhvkFmJycxGtUx2Shofc6KFqNaLhIu3DwyQHXXKTPW7ZfY2D2TlmjA12uBm6gUs9CPlkdDMXu8Ocbwyz5VyMtRyhL3j4V5oknaD/G60t40TjJdZuz1DY3qY61eJt0y+zVXbYqh3yXViPDM5FGf60dzOrnQryukXtUoi5FNE83UWs9xBOhKqU8WequNsq+Lf7jEw02DW9wn3lM0wQMOVbNJw2S1GGL0b7eX5jirVmma0XItQ1H7Hg0zm7gfASZKQR26dwpnI0DuQoHVlnYrjO/ZPn2SdaDCUBpZ7kSii5Gmb5qHMXznqWeN1i/HRItODjzDqw0EEkApnN4h4Yx5/O0Ls5Zsf2ZbYMrfNg7SUmIsVAKGl1W5yNhjgRTvFwfS9y3WTsekTmrEuy7NO+3EH1QoRUiC0TtPYWcGdMsrfWOTi6wOHBBW42lqmEgpJncMaNOBMO8VnvCM3FEnJdMXkhxL3cI1hxiec6SMNEZDJ4h6dxtivc3Zp9B+fYVVnijvJFtocGdhjj9RyeDiY5EwzzqY2DDC4klFdjyqccegsuzpKLavrIQgGGqtRvrRFvSZAHujwwc4EdxTWOWIvUPBvlSy46Lk8FuzjhbWV2fpjKsmZoPsQ/3SJa8wiWexjSQpfzhLsm2DiiENMhOw/Nc2ftIntyq+yMBHghgRfxmDfBCXeSJzs7GL6SkF+NqFzy2LjYJWz4qEaAHKiS1Aps3DZEssvH
nHF42+4z7MysstNaYdQp4PgRi67Hp7xbudwb5uLKKFuu+hRXApIzLdwlh6AVYmqDZHSAaLLGyp02xS0dRrev8a7xF5k2ekwnGtkLaQUGz7hVHunu42pnhOKsoDoXkF8MWD/bQrdCRDdC1aoEY0W6e2q4t/oMjLe4b8dlbsldYUJ2GfaKrDsOKx58wr2Ni60Rrm/U2H3Bw1pykdd6tK52SVyNEUuSmTH8iTxrt2QZ2LPO6Hid90weZ1KEDOkYu6O55luc83J8rHUH3VYetWQyed5FrHi0LrXQ9QBCgSr1e5W3JU/rjojdU8vsH1/kofJLDCaCcpBjrdPmql/my95enlnbitvIsv2Ki321i1jxWL/URYUCHYZ8qfeH3/orNlprPvjBD/KJT3yCxx577GsGNQA333wzpmny6KOP8p73vAeA8+fPMzc3x5133vnNfBQHi6sMZrt4GcX1qMyyU8RbD9noWPjLFp05Tbsek5+PyLRBonB2VRH5GFXSbNuxQm4YMuMQlwMKhS6j2RaLcYH1sITRy7HSEKy1LLxrkvZSjL/mU17W4AnCWhZ3rIZdSqiO+Gzfdh17EPSgQbFQp5Dt4GYNLooK0rcJ1kKWGkX8DYvOJU1Sj7FXJfmmAMPE3VFBV2KyZc3WmWXyUxpjUBOVI4bzdQYzLS4nFZRrIfwMy9c19WaBYBlaV0J0U1NcBRIDPZDFGR2kOASFqseenZdRIwoxJCjk1qhmu4S25KVkBFxJ3IrZWCnhNCx6VxOCpQSjKSi2JGHBxt9eJq6G1AYThkdXKc8kiEFNVE6YztYpmC6XqGIEBWLHZH09or2SIaxrmpcC7IZBtqkwlEk0WsCfUJTGoTrQZWy6jTGhUAMRVq5JJeMRGJIXkjHoQtgVtBYLOKsm3nJCeC3G7kCuBcFAjnjYICpmGZ90KQ+6VGdi4mGQ5ZhJu46WCdeosOBWcLvQbsUE1w2CekxzLqS4bGJ1wVQm3pYSiRlSGNdMjG5QHVnHGFOoqoeZcyjYAW1t8YIuQkfQ6Ro013J41wV+PcKbjcg3FVZPEIzkiWyLJBcxtbVLabhNeTwmHJRkiw5D1gY9ZXAtrrCgK/QaIc2OQl8HbymmvRrBgoXZkxjCwNlZRuUS7AHN9m1L5AbBHJOIskMm51AwfRbJsaZLKMdktWnSaNpEV2LctZDusqC0ZiJ8RTyYI5hWmIWYsek226YaZAY1cU1RzjcoWU0aZoZukuGqLtBd91lq5YlXBe7lGL8Ro5ckdkchlIWzu4xR0dhlze4d17FHBcYw6KJDLdOmYHhc0mVUpFCuxfV1k2Yzi7gW0F2Q6I2E8qqFDk2SgRzuJBRrUBrqsWtHCzUoSAYVtewGJatH28hwOihBZOLWA+rrJYINiXcxJK5DuGGQayvibIZgewk5lFCsaianFsjOaNRAQlIIGLPb2EbAWWpIz0K7Jgsrku66jbHq07qsMFoGhQ0TLW3iYYE/KamNQaXWZf/2JowrxEBCxd4gb4b0lM1xqsSeJmjHdBZz+HVBdN2ntWBidRT5tsKvZIlqEmoxI6M+A8OLFGc0eihBFyMGjS6R0lxIqqgoj99TrKxDuCgR9YDWFYtow8DumAjTJpiEaKvFwBQMDTUYnFiHcQOj5pOzG5gqoSkyPKdLJL0EpyvoLmYJlzXJqsvGrI3dlWS6Jv5QjmTSQhcjtmxtUxxsUdyiiQYTzIJPRfVoYXEpqTIblGm3NY22RlxLiFdDOksmctnEcEykSnC2F5GZmPKYZvvUMoVBjRozsModbLuLkLCU5FlPMsTdhHrbpl3PwdWAuC6pz1vkmwrDtQlG8sR5G1lImNnWYOvoBrnRhLCmKeU7VJTDKjmaicGVsEijHrPathDzEcF8iFOXbCxaKMdESkFvVwmrpMnWNHt2zGEPgTEmMYptinYXjdjMrgEdmN/I0WplMS85BGsWG2smpXUTHfS3T7DVIFfWjEy02DWzgTEIUVUwnGmTVx4rFFiP8xBk
aa0F1DfK6PWE6KJP1DCprxtYHYvEEni7SmQGNbmqZtf2q5jjAjUIMt9m0OwRIzid1BCBhK5iaSVLr2GTvd7FuZ7BbyqS9QyxiIkHJf6MQW0YqoMd9u1swJAgqQnGrDamClnXBZphldiV9DYCOss5oo0EfblHZ80kaEmyPZugmBCWJPZIQnXQZ3T8GvY2iRyI0HmXnApwMTgVDyA8g7CnWF62CVck9rpD53KWsKUIWhliMyIaUwRbTcamEkoDTQ7N1EkmFaoaMmQ2iQWsJUWaQY2gE+N0EnoLFvFqhLHYpTlvYXUlmXYWbyAiGQ0wBhLGp1pUBlsc3QbJUIjM+5giYSPJ4sVZlJul21UsbyiYi1HrLt05kHUT0zHQVow/I0kyIaPTCeOjq5RHl9GjBtlKj6K1gatN5uMMy34Jv12l2THoLZnI6z5GvUN9ziLT7b9fMBJCPkaWErZur5MbqpOd1MS1kFzOQYmEpbhAPRIoJ0+9KVhrGahZD70i8FY19WUL6YFQ4O4EsxBTHYGZrdfJDgKjBrlCg7zZoR1ncYMcV/0MQbPKYjNPd8Mmc7lLWLfYWDEpbGQhMAiHE4JShFVKmNlWZ9f4OtZQQliNGcx0UCLmelRmybdRvRzLazH1Zg616JNccwkbJmsrJtLJoA2FuxtyA5pCTbNr5xXUsESOCHK5OmXl0IpznPYLCE8RtxKWV0s4TZvC5RbhikmzYZDUC0SJIhpSeDssygMwMtZk3/YNxIAmqmoGrS6ahGtBlQWnQOJa1NdCWmt5qIfIi13chsF6w8Bw8kQ5A3+HpDimKdYCJmYuo6YUaiDGyjb7+YhynPTy0BXEXcHaUpFgXZFfauHO2kRtRdwsEBqaaMTE2REwNh5RGWxyaEedZBSoJZSMLp6WzAU1FntVgh60mxHOvAkbIdZsi86qid8xsNwSflURDwWoMc3QSIeB0RZHtyvkQIDM9Yi0YiUq0AxsREfhdhVrqzn0QoK93sG9mkG0FG43R2jHRJMhcTZkamtAcXCdyuQK0bjErniUVJtGlMHXNnPdKk4zot0Bf14hlz0yK00aCxZWz8bsGrjDApGJYEAzPbNOYXAda6tEDLrYORdXW1wPs6yGBrJlUm8brDUsjKseah3CRZv6uonyJLGV4G/zUfmEsemQmYkFcsMJ8aiiWGpRMtoshWXqgYVyijj1CqvtLO6KgT3bIa6HbCxaZDp5pJ8hGBWocoLO+PDI6xs/fFNXbP7e3/t7fPSjH+WTn/zk1zy7plwuk81mAfjZn/1ZHn74YX73d3+XUqnEBz/4QQCeeuqp1/UZr1yxuXx2ml4u4ffrt3NubYyXrk5TOynILQSUzjaIl1fB8xHZDMnUMNFYmdWbsoitLpktXX5y5hkOZTe4yW4ikazEJi/4RT42fwuzqyM4L1Wpng/ILngY566jHRcdRajxMfzJEu5MifWboDbRYu/2BX504Dm22122Gharscv1KMPHmkc5dm0L1xcHqR1X5Gd75K51iOfmERqEaZLsmsYbz9HYmyE50GN0dIP3zhzn7swqW0wHC8WF0Oacn+d3rt/LxvUKztUiQ8dDrMUO5uwqcbuNMBRqcABn1xDuRJb1mwQz21bYOrrCTw8/yRYTRpVkJXZ42Rvi8d52Pnv+AO5CjuJZg+rpFsZyF319AaSCjA07pulsz9KdNvGOOhwZvc69Y5d4R/4qNZmQlSbHfIOXnUE+tnwba+cGiRZshk5EWLPrqNUWcaOBzOcR5RLOwXG60wbtHYLtBxfYPzDPDwwc54AlyIqERuLymLOVF7tjfOL8EcwrFtlrksHjLcRKA71a719xKxcREyPUDxXwJgXOgYA3bznLoeoi7yycpywtJCYv+PBEawdfbezkyouTWHOC8qWE3OlFaHZJmi1kuYSuFvH3jNHYYxBNxkwcWeKBgbPcWbrKrXZMTEQvifnz7g5ONCb40vXdZE5myc4nVM/0YG4J2j1IEsTIIMlwmfpNVZytMdFM
wDv3vMQthXnuzc0yKHN0NVwMY/54/VbO1Sc4f3qS6nlN7npI/uUlkmYL7biocol4tEY4XWP1qIU55VLducFPjD/DvlyDo5aHT0Q9Nvl8b4pHlvZwZmUc43ie4mxIcdZFXJhD+wHEMXJqnGC0QHtPmeaBmNJEhzftOsdDxfPsteuMqTxLccj1UPKf1u7h8uII12aHGTwB2XmH3KUG0coKJAkqlyfcMUYwVmD9iE1mR5uBiSZ/a/pJ9tlddpgeETGXwwLHvAE+Onsr60tV9Nk8A6c97MUe8uI8OghACuTkOM7WEs50nvWbY7ZOrHJwap4fqz7PhBExqmyuRR4X/DIfbxzlxMUtNBdLDLwoyF9uYS22iecXEaaJyGaJ90zTm7Jp7TCxjrTYMbzMO8dPck92gyEVIBEc84ucdqv87rU7ca8WSa5lGD7uYyw0kIvrJL0eIpdFDQ7SPjCIM2mysV9zeOccewaX+NHas0waJkWhmIsdnult4enONh49vRdx3aJ4SVJ5sY5cb5MsryJsGwo59PZJGnsyuFMSjnS4b+widwzM8rbcAjkJAsWTXp5jnQk+s3KQ1ssDqHnFwKkQ8/ISYqND0ukgq1WoFukcHqWzVeJuSTh8+Cq3lq/yYPksu00LTcx67PHp9n5eao3zyIW9FM4Z5OY11RMbsFonabYRyoChKnpikPWbCgRTMfFul/duPc7h4goP5BbICBNPK573TT5bP8CJ+gwrJ4YpzGqKsxH2qevQ6ZE4DmpwkHioiLNriPoBiTHhsXP/Am+vneRwbpnDlqKb+NRjzZ+2D3BsZYpjCzMUj1vkrgeUL3RJri2A54OQMD1GNFqifriAvzPAmnT4oV3HuTW3yE2ZFUoyw2osuRhKfnfpbq6tDrN0foiBUwnZBZfM2SWSdhsdRqhajWC6hj9VZvWoojzdYWTLOj899lV22j32mAntxOd6mOMLvW18bnYf88sD5E5mKF10yc076EvX+ndJkgKxdQp3Mk9nR57OTQETY3XeNHOO7yteYNrsMSBzXIoSLvsZfm/1buZmh6lfrzB8PCFzvY091yBaW0MoA1ko4O+dwB3PUN9vMLC3ztTIGj85/hT7zZAxFeHogJP+AMfcYf7rpVvxrhcwLtoMnuxhLLdhdql/d6mMhZwcp727hDNp0TwacWRqjsMj8/xI5QRDSlCWNudCn5POKA83D3Hq9BbChQy105A7v46x2iZeXUNms1DIEeybojNj0d0qKd+0ztGBa7xl6DR3ZRxyIiLUMY+5I7zUGeGPr92MPp/DmDcYOu4gF9YRaw0S30eWS4ihGo0jAzhTks6uhPv2nOdAZZEfqJxkVGVQSK5EHo+093KstYVnTu0ke1VRnNWUTq5CvUVS30Dm8+hqkXjbGOsHbKLJBPtgi3eMvcTNpQXuzzYQaHwt+KIzxNMbW/nKyk6CExUy1zWVCwHGxXlo90hcFzkyjB4s0j44RGuXJpkKuPvARe4uXeLO/FW2mVmcJGI1CfjDxi2cWR/n+ctbqb4syc2HlE9tEK+uoR0XadskE0PEY1VWj+bQW3zMbV3eP/MsB3J1brPrGEKyEZs87xf4s8WjXFwbo/NSjcrFiNx1H/PMXP/8IwhQY6ME4yXcbRXWj0BposvenfP80MDz7M402W7Y1BOPpcjgY82bODY/zaWFUarHDPJzDvmrXZK5BUhihGGgt0/hj+WpH8gS7XWpjbX4ka0vcEd2hR1mm4wwuRqZnPOz/Mf5e1ldqNG+XGboRIi92MW8vELc6YAQGIMDuDsH8SbyrN0kmNi6zpbxVf7myBNsNWMmDcla4nDOr/Fodzufu7if9lKR4imT8tkO9tLmugkBlgnbp+nO5OjO2LhHHfaMLvGG8Yu8s3CZIRWSExYvBZIzbpX/unwnKxcH8eezDB2PseY2MJabxPUNRDaLLBdxD4zTm7Ro7pbMHFhk1+Ai7xt6gf0WlGXCRuLwVWeKF3tj/OmFo8hZm8xVg8ETbeRK
E720BlpDMY+YHKVxoIgzqegeCrh/+gJHagu8u3SWijQxMTkeJDzd3spjjT1cOjmJmjeono/JnllGNrrEGw1kqQjlAt6+CZo7Ff60ZuzwMvcNXOC+6iVusyMEEY4Oebi3lReb4zx8bT/26Sz2PAy81IPry9DsoOMIOTSAHqqycbRKb4vG3xby9j0vc6S4wAOFSwzLLJ4WXAwjPrFxlFMbk5w+NU35IuTnIgovLaObLZJuF1UqE49UCKcHWbvJRE4GlPc0+JGx5ziYX+M22yUippUoPtub5PGVHZxYmUYeL1K4FlK84iMvXEO7HjqKkJPjRMNFWvsrtPYlZMYd7t9zlgeKFziUWWHCyLEWx1yPNL+3dhcXl0e5dHWMwRchu+CRP79BvLrar+H5PNHMCOFYkbWbMljbu6hKg+Pv/fXXdcXmmxrYCCG+4ev/+T//Z37qp34KePUBnX/4h3/4NQ/oHB0d/aYGNm/9L38PYxgKxYCS8hiRHXZFq9Rin5Egwo5AaYgFdA3oGJIrVoVFWWJZFKl7eULXIupaWHUD3ZVEGxJn2SZuCtRiiFrvIjseut7s34ZX634RLWbR5TzBZBEqEjWsyY24qGIMNYGuuMhcSLHoMyi7VIXD7nCVgTCkGsVk+3dUJUHTsgRNw2TeLHBRDtHQOep+jrhjoR0Dq24StwRxW9JdzMJGjFiPMJfa0HWh1UVv3mZVZrPoWomklCEYLyCHBbKqyY/1ECUQRY0ecLCyIblcwKhqMah7bI02GAtC8rEmG0JMQiSgbcGKkWNV5TijRmlFGTpRhrBpI3sKs2WQrAnCjsJZycBKhGiGGEvt/om+46HDsH+yaVvo4RpxJUs0mIUpgVGOyA66yAEQhRiqLvmCT84KGFEtJpI2I3GXKS8gE4MVQURCIPv7dNYqsSYLXJSDbIR53NAkappYbRPVVSRrGr9p4TdM9PUI2fRQdQfqbQgCdBQhbBuRsWGoSjicI65YJJOSTM3HqgSo4QRRCBHFgHLZoaA8aqrHjmidWhwwFkRkQlBx/xaJXaXpKsEVq8KCrLAgyqwERSJPkfQUmbqF6Ap0Q+OsZohakngxxlh3kG0P1lvoMOwPRDIZKOShXMCfLJJUDZIRSX7UwShGqIEEUfExciGlUo+K4VAWHruiNQajkIEwJhcAiSbRCS1D01Imi1aR83KYus6zEhWIuwocRbZuIdqgO5reYpaoAfF6jLHURXRdRLOH9n0QAmlZUCujizmC8SLRiIIqFMYdVDlGlWLkoEcm61PIewyaHWraYTpuMOaHFCJNPoBEJ8RomiphzcyxpvKcUmNsxDkacY6opTB6imzLRNQT4q7EWbP7VxpaMeZSB5o9hOOhPR9hGAjThKEaSSVLOJgnnFTISkJ22MMaCJGFGFnzKRYdcrbHkNlhLOkwFPeY8kOyoSYTCZI4IhD9fTpnlVmVec7LYVbjIk5oEbUlmbaJ2ZPItYSwZRI0DdzrGtEKUHUXudYGP0AHAcK0EBkLMVAlHMwTV2y8aRN7IMCqBGRGAmQxQhVDqpU2ecOjbLpsjevUon7W8gEYsSaJY3oyoScFszeyVuFaVCH0FUlPUtgwUT2BaiR4axZhW+Isglp3kG0fud7pD3qjqL9u+SyiVCCYKBNVTYIRg+y4h1kKsQcDVCXEzIXUKi1KpktRemyN6gxGEQNhTHHzlr5JEtORMW1lsmgWOS9HWKXA9ahC4giE2183o6URHXBXTIKmwK8L1GIX0fGRzV5/iqnW/RP+ahldzBKOV/CHDeKqJDvpYZUDzHKAMRiSy3uU8l0G7S5lXMbiFhNBRDFKKHiCJI6JdUxbxqwbOdZVnpfVBKtJnrW4QNSRWI4k11aYdY3uCvx1A2dFELcS5HIP2XQRPR/dc0BKhFLIwRpJOUc0kMeZthGVBHskIDPkYhQizIGIcrFNwfYYsLsM6S4DicO0F5ILIRsKkjAgIKErYhbMMmuqwEU5wlxcoR3bxF1Brq3I9CTW
miZqS6KWpL0goRki6h5yvQteAK6HUAosqz/ld6hAVMngTNkYgxFGLSQ30sMoxpilmIHKBgXLo2q5jCcNqnHAmB9RDCRmpEnCEEfEuALmzDLzssq8qHIxHiAIDGJXUN5QmD2B1dQE65KwI+kuS1jzEC0fuba5bmGIkBKRyyEKeaKJKmHVwh2xMCcCzHJIdsjBrEbY+ZDBygYVy6VoeIzHTQbCkIEopuIpRBSTRCFdIjrSZNkocF6NskyJK3GNyJUIT1LZEJgdMDoQrkq8psDdkOhFp5+1hguO1/9iSoj+dL18lmisijdiEVYVajrErnhkyj7WUEgh71IttBnI9CgKlyHdYcIPKUaasqfQYUiURHQJWVc56irHaTXBki6xmBQJugaWKyi2IVMH0RPEdeiuSsI2JEsesukgun6/zwNCSmS1gi7niWsFetMZdAXEaExuuItVDLEHImqlNoWMw3C2TVU7lLXLtB+SDwT5UJL4fv9mMDpi0djsoWqEK0mNZpzB7xkUO5BzILsu0G1N0obWoiJuxcT1ALXWRbghOF7/vM8w+l/EDZaJKxl601n0YIKoRRTHOpjFGLsUMlLboGi71OweQ7pNKQ4ZD0JKvoEVapIgwNEhLpoFo8SCrLIga5yKh3EDk9BX1Boa24FsQ6DrCVFH0F41iNZ8knaIWu0hXB+CCAGIbAaRzxOP1QgrFt6ITTIZo8oh+eEemVpINh8wVG1Ss7sUTZch3b5R16qegQhjdBjSSXy6wmBV5bmoxliixPlkCM8z0L5koBGT6QisrkCvxvgtidOQBEs+tANkw0X0PIj7t3yWhQLkMiRjA3jDNkHNIJpOsKse2apHbsinmHepFjqM5poUpEeFLmNBSDGEimf0pzPHEe3EY0NmqYssZ4wp5nWJ+aRMz7WwXSh1YgobEqOrYSOmu67w2+AtBcimj+wGiI7Tvx0/9L/0LeXRtRK9qSxRVRKOJRRGeljFgNygz2CpQznrMJrboCRcitpjwg/Jh/3pttILCeIQJwlYVAXqosBlY5QLyQAbcY6Oa1PqxuSdhGJdIdsxtBPaywZBK8HfiDb3Z4jo+f1jVClksYAeKBOXczhTOcIhjZ/1ePn//qffnqlo/z2ZTIbf+q3f4rd+67e+mbf+OiUjImNoRqwmZcthyG6Tkx6ahGZiYUQZZKTQvsYNQtxQk6wrhCuQrqC1YdPuWbRbNuaGgXIEdkeS6QpML0E5CYmURAWTMFsgMRK00lgFiZ2LyRZ6FAY8jIJAlSDJShJbEJkCaYSYRsCI2WDQ7lA2XGzlEyaSlrbpBdn+M0tCTc8LcQNJ0lKojkB7gmbDptWy8bsmxobC6klMR5Bta5Svka4mNhVJ2SYqQmwmCBPMnCRfCrBzEfmai1EGUQCdUcQWxCYII6JgulTMDuPZJkXlYUm//7yBxKAbZtCBJok0PS/AcywSX2G0NVFP0uqarK9nSHoK1VZkmhLTE2S6GuVqCCCxFdFAhmTAIDZjlC1QtqBU87DzAdlSB6MC5AU6IwktgTYSpBFRNXqULIeJbIOSdDFFQDs2cWITFVtoXxOGMZ4fEbZNtC8xuxqvJWk6JvV6DtlWqJ4k05RYrsB0wewm6FAQ24poNEsiLBIjwcgKzIykUHUYKPuYeYFZAV2QJBlBaAqEESKNkBGjQ8lyGLI7FKSLJKGdmPQ2syYC+lkLEuKmgXDBdBPcDdnPWttG1RWGI29kTXkau6dJEkGcMQjHsiTKRhsaMy+xc5pswWGo5mMUBEYFklx/3aLNdTONgCGj28+a6ZJVLrGWtBKDXphDRAJC6Lo+vVASNRVGVyPdBLcpaTRtvJ6JUTcwb2RNoLwY29PEUhLnTaJslsS0wQAjJ8iXIuxcl0LNQ5VBFkBnFbG9mTUVkjd8KkaX8Uw/axnp95/ZlJg4YRZC0JGm4/q4vkUcKMy2hh54XcHaK1lrKTItieGJ/nZzIoxQkyhJVLFIypLYspGWwMgIilUfOx+SLfUw
BzazlpVEtiAxEzBCKsqlZDhMZFqUpIMtA7qxgZeYGJGN8CGMYlw/xO+ZJIHC6iYkbY3TE6zVs6/JmsJywXQEVjeEEBIl8QdtEmGQmBnUK1mreFRLAWZe9tetKNFZsXkcRAgj6K+b2c9aUbkYJHS1wgszyNhA+OAFIW6YEHRNcAWGG5M0NJ2uotWyURsGhiOxOpJMR2D4mkwvJAkEiaHwhzMk0uwfBzlJJiex8x6DA1E/a2VICpIk288aRohp+JSVy6DZpWw65JSH1oJOIvHC/I2sOV4/a2HHQHY1hpeQNBMazQxuz0RtGFjd12YtwfYi4kSSZBThkE1s9J9LpLKCfCnBzrsUaiFGGWQedF4RZ/rPQtJGQF75lJTPkNmhqDzyyiNIFO3EwI1yEPSz1vN8HN8kdAyMToLqxehuTH3dJu4ZyHY/a6b7atbMoP98sKhgEOchNhTSEigbitUIO98lW3IxB0HkBTrfz5o2E7QKKMqAovIYsrqUpUNWerh5RRhbdGMbfIjCGDcI8RybKFCoboxsRSSOwVo9h2gbyFey5oDlCoxuCKEg0ZKwYpIISaxMVFZg2IJCJSBXbmHlO/2slfrrFloCbUYIFVKQAWXlMWB2KSkXi4ReQRJGGWRkQABBEOKFCb5jkbgC5caoRoTfVTTaNht1A+X265rdERiexuxF/awJSVCzSJQiUTZGTmDnJHY+ZGBgA6MgMMugS5IkJ4g296ehAvIypKxcKoZDMeMhNTiJIIqyiEhCCK4X4AQC3zOhC4YXYzRDWs0Mbs+gXjcxewLTkWTbAsNLMLyYxJfEhiKomcTDsv8smSzkigI7F1CorWOWBaoASbF/HMSb65ZTAVkZUjYcSsqlqDyijKCrFUGYu1HXnM2sBa6J7GgMJ8LshtTXLaKeYq1lYN/ooQKjFyODCBEKooxBYmmiEYW0QNqCYkVj5x1yJQ9zQCDyAoqSMCPQVkJiBGRlQE6Gm+vlkpc+fkYQxxZObIOfI4pifD/EcW1C10D2Esx2iHIU7XWbTlttZk1iOQLTFZjdqP+8mbB/HCQF0T8OMv26ZpcTSuU2Vr6DNSigKNCFV2puDEZIRobkZEBxc5tliPGygjiykLEBfh4/CPHDBM+xiD2FdGPsRoDTFXTaBt0Nq1/X2oJMR2L4GrMXI/0IEcWEJYOkIkiMBJUT2FlFthBTqDUwX8laVZLkxeYxGqKMAHtz3QrKp6g8pA2uFsQ5+2uzFoLnmSSt/nGQafl0m5JeV9HdyGJ2X82a8hIMN0Z5IQmKsGQQ17JggMwIckWJnYso1uqYZdHvoeX+usW26O9PFWDJkKwMyBs+ReWTmAJHS8KcjQqz6Ahcz6fnK7zAhIbGdCIyXZ/GuoHTM2i0rf75miuwOwLTiRFBhOlFRIYkLBtEtVw/axYUqoJM3idbqmNtZk1XBHFWktiaRAVkZIAlg/5+VQE5GRCagm6scGMbFeSIwxg/CHFcE9+3YCPBageYjsKvm6y1TdZ7kkyjnzVrM2sEIWYQE2YUSRbiQYHM9OtaviLIlXvYeQdroJ+1QAW8/DrHD/9TD+j8dnjlis3q+S1kCpqVOGI1znI1rHGit4UFp8K5xjDd9TxR2yKzrMjWNVYrIb8UoFouquWgGy0S3+9/A601KIXMZPrP4MhniYaKeIMmQUnRG5ME1YS4ElMe6zBebDFT3ODmwlXGzRZbjAajSpGVEluYhDrG0zHLcchsWGUxKnO8O8O1bpX5dpXGchHVNLA2JLlljdWOya4GGOsdRM9DN9tor3/5Dq0RloWwbeRAlbiYJS5nccZs/LLEGxR4IzGyFFIY6rGzusZEtsnR4jVmjA1GjB7jSmEJAwNFQEQniaknEWeDERaCKmedcc42Rqh387RWi9jrErMlyS8l2I0IqxlirrSh56C7PRLH7T8QUWtkNovI2IhyiWigSFS0cMZsvJogKIM7lpCpeuTLLgcHF5nObrAru8xea4WaihiSCluYCCAkYj2O
2Egk54NhLnnDzLqDnKyP02nlcJpZMksKqwWZdU1uJcBoB5j17uY280kcp78/hUDmcoh8DvI5opEyYdnEGzDpjUrCksYfSsgP9agUe+wbWGZvbolJq8E+a5maEpSFJCOs/hUsHbOcBKxtZu1kb4p5t8rZxgittcKNrGU2NFZLk1/0MVouquWSbDTR3yhrlc1vBgeLeIPW12QtqiQUxzqMFdtMlza4uXCNCbPJdnP967Lm65iVOOBaVGUxLHGsN8O1To3rnSobyyVkS2FtKPJLm1lbDzHWOoiuh260vnHWqhWSUpa4nKM3ZhOUJd6AwB2JkeWI/FCPHdU1JnItbipcY8asM2L0mFQKW6gbWesmEY0k5nwwyHxY5bQzwdnGCBvdAhurRax1hdXqHweZjRCrFfWz1nWg6xD3ejeyJjKZ/rpV+lmLizbOqI03IPA3s2ZXfLIVl0NDS0xn6uzMrbDXWmZARowo+TVZq8cRzURwLhzisjfMNW+AF9cnabVzdBtZMksGVruftfxyiNEJMNe76FYH7Xokvd6rWcv0vxWkkCMeLhOUTfwBi96oICyBN5SQG+5RLjrsqa2yJ9/P2gF7kUEJFfn1WVuPM8yGVU72plnwKpxtjtBYKxK2Lexlg0xdY7f0a+qaCxvN/nHgea9mze4fn+SzREOlb5i13GiXsVKbLaUGR4uzr8maICckWWndyNp6HDAXlVmKijzX3ca1Xo35ToW15TKyZWC+NmtrEeZm1mi2SNz+VVy07l/J3byqkJSyxJU8zmg/a+6AxBtNoBSRHeqxo1pnPN/k5uI1tph1RlWXSQMywsAUBpGO6SURG0nIhXCQhbDCGWec040x1rsF6qslrLrCbElyS5pMI8JqRVjLm1nr9L5x1kpF4leyNmbj1SR+WeCOxZiVgEzFY//QEtPZDXbmVthvLTKgQkYVZIWFRBCRsJEENGI4Hw5y2Rtm1h/kZH2CZjtPu5Ejs2hgbn57fyNra11otv8bWevXtmi4TFi28Gv9uhZsZi0z7FAquuwZWGFPfpkpe4MD1iKDSlORgpyw0WgiYlZin7XY5lpU4SVnigW3yunmKBvrRYK2hb1sYr+StcUAo+V9bdZc92uzVipCPks8+ErWjH7WapqwHJMd6zFaajNVbHJLaZYJs8E2a50JxY2sRToh0DGrsc98XGQpKnKst5XZbo3r3SorS1XEZtZySxqrnZBdj7DWuv2sNVokjvNq1gyjn7VahaSYIynlcMYz/R46IHFH+lmzh122V9cZz7e4qTjHNnONcaPzdVlzdEQzCTkf1PpZcyc40xxlrVtkbbWEWTc2e6jGbkTYzQhrufNq1rrdV7Nm2QjbQpZLxLUiccnGHcngDkiCssAZTzAqPnbFY+/QCls2s3bAXmBQhYwp/TVZayQBrQTOBzWu+MNc8wc4Xp+i0c7TauSwl8zNuvZK1sJ+1lpttOOR9BzQ/QdZStvuX13L54hHKgQlE79m0RuTm3VNYw25FIsOuwbX2Jtf6mfNXmBYaaqSr8naWuxTTyzmohIv9qaZ96qcaY6xXi/it22sZZNMfbOHLoWYTQ/VdKHe2KxrPiT9h4JKy7pR15KBEt6gvVnXFH6t/xtga7zHSLHDVLnBLaVrTJgNtptrjBsJeaHIvyZr64nPfFRgOSryQm8rs70ac90ay8sVaBmYGwa5JY3dTvp1bb2H7Hqw8ZqsJcmrWauWSQo5knIOdyyLv1nX3NEESjHmkMu2Wp2xXIujpTm2W6uMGR0mVX+av/2XsnYxrLIUljnrjnOqOcZar8jyahlz3cBoS/KLkGnEWM0Ie6UDHQe6PZJO98aMoxtZKxX7WStmcEczeAOSoNTPmqoEmBWfvcMrTGfr7Mqtsj+zwKD0GTMS8sJGbWatlQS0EzgXVrniD3HNH+TExiQbrQLNZh5ryezXtXXIr0QY7QBrrdefOue6JN3Nmnsja1lELkc8VCYs2zeyFmxmzRx2KRRddgyss6+wxFRm
gwP2PMMqRvZiZvYsfeunov3/wysDmx/7g7+Dqii0HaANDSYIJRBCYxKTjwOySchA5FKKPPJxSCkIsSONFSeYoUYmGrn5BFgtBImE0JBESuBYkq5h4ipFw8rSVjZdadFSGWKhSBAkEf2nWkegPBMZKZSnED4QaCJX43smoW/Q62YIHAPfVegGCDdBdWNUJ0B6EbLr9y9RBiHaD9BxdGOHoxRCKUQ2A7YFGYu4lCHJGv1v0ssGOisQRcgUQ6xMTL7gYWUCTCvGyAmwBNqCJBuhjQhthWD1p68LBQYJho4pxD6VuL+9qoFLPorJxjG5QGPE/T8ifvVJwokSxFL0t5kp8ZWkbVm0DZueslg3cjjSwpUmIapfLxONCECEAhWYGJ5ChgLhQexp4gA8x8LzLHzPxGmaJK5A9zSqFaHcGNkLUV0f4YfQ8/qDhijun6BveuXECcuEQo4kY5LkTOKyTZyRxEUDyqAyCdlSSDbnY9kh2VyAsvvfWuhs/5swbcVo2wdDI0yNVCCFRumEfBKQiUMGIodiFJCLIypBhBUlWJHGDPtPs3/lCe6vZC0wBKESuKaia5g4hkHdzNFRGbrSoqkyREhiJDoCEWtUpFGeiYokxmuyFrsa3zNuZM13DQJHETdBOAmqF6HaAdKPkL2wn7UwRHt+v+i9NmuG8WrWsnY/axmDuGAQlfpZo9TPmpmJyOc97Gy4mTXA6v9JshEYEcIKkZYG2c+bQYyp+8doOfLIJwHVwCcXxWSjhFzQn9qn4uRrshZJbhyjjqHwDUXLtGkbNl1lsWYUcKSJJwxCFGjQsUYGGhVKjEBhegYiFEhPE/sJcSDwHBPf3cxayyR2IemBakVIL0b1QmS7nzXh+P3fBUXRN8yasC10IYvOWCR5i6hkEWf733BS1qisJlMMyeZfyZqPsZk1stzImrR9hJH0syZBigSTmGwckklCBkJ3M2sxpSDu5ywCI0wQSXIja4mAWEKg+lnrmQZdw8YxTDbMHC2ZoaMsWvLVrIlYI2ONDBNMz0BFEtMzkL6+kbXAU4SBQbezmTVXETVBugmyF/ez5kXIXoBwPAii/uD+L2dNGYhcpv+bvoxNXLL7da1gEZbUjazZm1krvFLX7BhzM2vCArIhGBHSDJFWglQaKfWNupaLAsqbda0cBOSihEykyQa6n7Xo67P2ynZzTANfKRpmlraR6WdN5XGkiSNMos26JhKNEcSoSGBtZk0G/awlviYONJ5j4bsmvm/Sa5nEDsTOZtbcaLOuBf265vj9aYxR1P/NDIAQ/WmD1ub03kIOvVnXorK9mTWFLoPMajKvqWu5vIeyQW1mTVgxWDHK9pBGgjQSlNKbde2VrEXUQpdCFJKNYkqBxoo0VgRGtPk09iQh0f2nt8cCfEMQSoljmrQNG8ewqBs52ipDR9o0lU2EItain7M4QQUa21cYkcT0FPJGXYsJPYMwUHRfU9fCpka8krVOiPA3s9bzIQzhRtb6g0GhVH/glcuCbaOzFnEpu5k1k7CkSLICXe5nzcpE5AoemUyAaUeYOd3PmQUyGyKNCGkGGFaMkP1+YOgYY3O7lSKffBxSCQKykSYTQdbfzFqs0VGE1v3py5HQxFIQGIKeYeIpg6aZ62dNWqwZeXrSwhUGgTb6/TPRWEGMEYEdKExPoTbrWuIlJAGbdc3E9w26bauftR7IVoj0YqQTojoBwo/A3cxaHEMcQf958V9T1yjmN3uoRVS2iLOqf4yWNDKrsYsB2byPbYdk8x6GDcrSiCwIK0FaEabtbmYtRimQaAxiMlGAnURUQ49CFJGNEkpBv6aZkcYIvzZrCZpIagIlCZWkZ1h0zMzmOUeBlrLpSpuNzbqWaNnPWZxgBgkZX2JEEusvZ81XhL7a7KEK31EELW5kTb5yvub0ewFB1J/W/kpd28yaMAzEZtZe6aFx1ujPACpvZq2ksYpRv4cWPTIZH+s1WROWRmVDlBGizADTivq9QPafp6R0QjYOKEUBuTik
HERkQ7AjTS4QyDhBRgk6jtE6QWtNKPq1zVcCx7DwlEnTzNEysnSlzZqRpyv6dS1E9e8+GSdYQYwZQSbob7O/nDXfMfA8E98z6LVNIgciB+RmD5VOhOz4iCBCOMGr2+y1WbOsr89a3iIsb/bQoiIpgcgm2Jt1zc7065phJ8RhwB/9zd/59j7H5ttt2S1gZAwMEWATkVMeJdMlZwRUrP7l4awMKCuXnAiwRUQWjaEVJhJDW0gtkYlEJAKtNUmcECYRUZJgxwlWpPAShRFG2EFINrZJPEkntHFCm3UnixuY9DyTwDHQgUS6CuUKZCgwHDB8gQrBdEEFGivUGG6MCGMIIwj78yM1CXFGoC1FkrfQwgQ0WmmEFKAEhqVRZoSyEuxshDQVhq2QUiG0QISiPwCIBQkGbmTiWII4EcR2f3qZJkZZEYYIKSqPrAwoKYey5ZKTAWXDoSA9sjKkIAIsNBZgaxOlFYY2EEn/80j6D2KNk4QwDrHiBD8BMzKw4gzZ2EI4mlYYI0PNmpenG5h0fJu2YxP5ishTCEciQ4nhCJTH5vYSqGDzv50EESbIMOkX4c2TSp3EJFKTZCU6Y6FJ0MJCSw0ChCFQpkSaGjPjYZr/v/bePeaWrKwb/D1rraq938u5dEPTTYsoExmdHoQoCOk4iYl0QMMYvHz/EPINUROiNhlQY4KTCH/S0YmJGAZNTMTMJOKHGTQSMfYANiFCCw0dELQ/ZwaFUZqmabrPe9m7qtZaz/zxrFvVrr3f9zR9eQ9Zv5N9atW616qnntt6qt4BeqFhlmuoVkEpBVgCegKfErwn+GGBa34JP0BCC9mDA0NdqBYtBuzrDleaE+ybHleaVUFrp9iDxYIc9gEY1tDQaHwDYgViDfIAe4b3Xv6QovdYOEbjNPacxPIu+gFLu4TrSGita/HN0yXWg8HpuoE9MeBBQZ1KaJbqCWa1SWtNz1iuZXsegxXl0jl4OPASQmv7LZgYTAAUi3PAqExrC8ZyaaFaBb3U0FoBIMAq8JrAHvBosHYtTlvAMcEPDGcZIAvTWrQkoQd7qsdls5YQJtXjaqC1JUVaIzQAFtxCsZZfQWvWOVk3P6B1HTpH0LZD45ZY2gX4BLhmlzgaFvjG6gAnfYNr3QLHJy1sr+FXGmqlwnoRdAfoIa4doHvArD2awUP1ToRWMJa9t/Dag/cJfs8A0PCqhQRKA6pRUI2CNoxm2UG3FnrRwewbUENQOmjKA+DXBA8CDy2O/CLQGuCYwc6BvMdSNVhgwIFe47I5xb7pcVNzmp9ZFWnNYwkltMYahhvhBT7QGjOsc+idRe8ZjevQuD7RWjNYLE4X8B3hqF/gtG/wrdUS695gtWrgTjWoV1CrSGuAWZGsV0lrg4SSRVrjfpDnEw5+AXCrwAcFrWmhNaUVzBLQzQDdeuztDYHWDJRR8iKxI3BHYCY4Muhcg9UAOAa8ZXjHIBqE1iDhQfuB90dau2xWOCShtX2yQmtMaLiFhobyakRrzjtYL+vWuh6dV6DBorE9lm4BPgWuDQvha6t9nPQNjtctTlctXK/gVxr6dEJrPaFZI9Adw6w8zOChBgd0FnAWPFh4Z+GVF+Vnz4ChwQqAFseAahSUUdCNR7O3DmunYA5WoFaBjAI8ARbwawVPBO8aHHMLZwEf3qMk50DeYV8ZLDHgUK9xWZ1iX3e42qxwqNfYox5X1CkW5NCCsQcFzQYm/OAJxAbs5D0+5z06ZzE4RuN6aDtg4RtgcGh6i8ZauDVwNCyw6lp863SJrhelyJ9qUE/QKwWzIqhhSmsMNTBMz2jXAxBpzQ7C19iCFwxuFfxBCygEWgOUJpAhNAtAtwNM47G/n/kaNQpQQms+0Jonjd7tY20JloP8dB6KBjStxRIiL/dUjytqhavNaZCnKxxQhyVZHJBFA4JhhZZb6MDXyBsJ3ffyfNogDxpnsfYKNDhZO7eAPwGawUINS5yuDE66Fiddi9VpA98rcNQ5Aq2ZTmSnPJtCa3rt
YQYHinzNOXgruodXDn4/0BppsGrkoxiB1nSjoFuRobrtoRdr6H0DakVWgAls5Rl1RGDb4gQLeMtCb3Ag56C8xaHSWKLHgepwyRS0plbYUwMu0RpLcmjA2GMNDQPFIkPBJOsW5OfgLTrnMHgLYwdoZ9G6Br5n6H5AYy2GFeGob3HUGzx+uoe+N+hXBjjV4lhd0XZa6z304HHQWWBwgB3AQ6Y1v2BwS2A0smbEgCZ5PjWhWTJ0K3xtP/A1tdSgVgGaAE/wA8ET4LXG4PfRWYIFg63oHYYGtO2AJXpc1SfYox6X1HqT1mCxRx4NKzRQaHgh+i0rkNdgrwAP9M7Ceoe1c2icw9L3oMFBWYvG9XAnDN0vQYPHo6v9oN+2WJ824F4BidYwprW1yAHdA3rl0FqHxeCATtaLXeRrDrzP8PsGiLSmAVIE3WqhtYbR7Ae+tlhD74c1awhA0G87glcE7xucoIW3jN5157YfLuyOzSP//N/g0iWFHjaFfh0zsGKNb7klHvMHOHJ7eHi4gm8Oh3jC7uHh1WVc6xY46pY4Ol7Cdxq8MtDHCnodDJFTQHeM5pjRnAaGcGKh1ha0tuJ97Aag7+FXnSjYwThBXCoi+dsIWoNMAxgNWi5l56BpwIdL+IWBXzYYDjXcQmHYVxgOALcg2EPA7jHcEnCHDmrpoJcWlw7WOFx0uLJY47a9a7jSSKz7bc0TuKxXeI4+xhXVYY88LitgSRoNabTUpPXzomqkLc6OGcdMeMwvceIXeNRdwqP2ENfsPr7WXcFj/T6e6Pfw6MkBuq7Bet2Ajw1Up6BPFcyxCOrmBDCn8g5Qe+RkV6Vz4kXrBlmz05Ws1WDhux7wTqx2WTR5CVFpkJa/K0GNrBf294BFA142sAetvMOzr9GHONThgGD3AbcA7CHD7Xnw0sMcDthb9thf9rhl/wSXmzVubk/w/MUTuGJOcYs5wnP1MQ5Uj6uqw6EiLIiwpAaGNAwUGACHfz1seunyyCucssG3/B4ec4c4cks8PFzBo/0hHrf7+PrqEo66BU7WC5wcL+A7A6w0zDFBdwRzkmmtPWaYUw/deZhjC7UWDyTiDl4/hHAxJ78prUGBjE7rRYuF7La0jfxdpIWB2zMYDnRYL4XhgGS99gG7z/BLhrsktGYWFlcPV7i0WOPqYo3nL5/AZSMx+M9v5AXGm/UJrqoe+8rjkBRa0jCkNmjNwqFnhxVbrBg48hqP+b1Ea98YLuNxt4eH11fweL+Ha/0CjwVa61YN6NiAOjF6zQmg10BzwonWmmMPvbLQawd10gmddYO8eG5FwfZ9D3gPjjsFkda0kWd0uShobQluA60dNvALBbun0R8quAUw7BPsQaY1v+fAS4/2sMf+Qmjt1v1jXGlWeM7iBLe18mw+z1zDTfoUB9TjqhpwoBSWpLAkA0UKekJrax7Qw+GUHa55jVNv8E1/gMfcIa65Jb7W34THhgM8MSzx8OllHHctTroF1scL8FqBVhrmWEF3QmfNSeZrZuWE1o4G4WmdvAScaU283eI88InWiFR4YV4+0ECNAfaWQNgV9QeLQGsaw6ERWtsnDIcE1wL2INDaguEPHcye7MLcfHiKS22HK+0Kt+8FWjMlrZ3ishqwR4xDpYXWoGFIJ1qzcIHWLE7ZY82Ea17jm+4Ax36JR90hvjFcFjnQXcbj3R6u9Us8drKPYd1gWBvQkYHqKNNal2nNrBnNsZNdlS7SWi+0tu4CXxtSOMqI1pQCGZNCtRBp7WAPvDDwiwbusAlyQMt6jWiNYQ8ZvOeAhcfyUoeDRY+DRY/b9q/hSuBrt7eP45Je4WZ9EvjagKvKYUkKbUFrCmokCzoe0LHHij0e9w1OuMG33D6+4S7jCbeHr/dX8M1eaO0bp5dw3LU4XS/QHbfAWpx5wtcAswKaYw5r54WvhV1Pinwt0VoPv85yYJbWTCO7U8aA9vbAiwZYtHAHC/iFFr52qDKtRb52ANh9L8rnoUO7N6BdDLj54BRX
FmtcXayCHFjhluYIzzVHuKTWeI4+wSXyWBLjQBksyEAXazaltWveY8WEa77Bt/wBjrzIgW/aQ1yze/jaWmjtqFvi8ZM92LWBXRvoYw21DobI8ZjWdOfRHDvolZX3Ro47oB9kzcIXprgfhOY4r1mitaaRZ3TRAq3s7vH+En7RgJcGQ6A1ux/lAGE4QKa1Awb2HWjhsH+4xuGyx+Giw217R7jcrHBzc4LvSrR2jJv1CgdkcUU5LJVGC4WlakDhHwC4sGZrtuiZccoej/kFTnyLb/l9fMNGWruMx7oDPDHs4ZHTQ5ysW6zWLexJC1qLM685Dor1qaxZ4munonOYY6E1dAOwWoe1ipEwIkNlzQKtKZPWDW0TvqwZ5MBC+JpbarilxnBJwwZaK3UOu8/gBQOHAxZL+T334BSX2zVuak9x+/JxXDErPNcc4RZzhAPqcLNe4VAxlgQckoEhPaE10TksO/RwuOYZp6xwzbf4pjtMtPbocCnrt/0Sx90CTxzvwXUafhVorRP9tgm0Zk4YzamH7hjm2EKvLKizEslR6LdI+q3bpLUoB4wWnaPNOofwtUBrS4LdU+gPhdbk2Yx8zYP2LNTS4eCgw6WF/G7ff0JkaHOC25rHcVmvcbM+xk1qhX3lcFlxCA1VaMmAQDg68rjlB/6fGzsU7X/+L/8Z7WEDb5x4t5QTjyoBTgdrDiqHjYHgmRC0B2jnYTyj8Q57zqLxHgtnsbRO8qxF6zyM81hYD+MY2kvIgvKShvUglpA2MCPf8uApIgITgRXB63BUCoNRcFrCtzqjYZVCbxTW2mBQCitj0GuDXimstIHVClYpeCX9EUkYFIGh4aHYQTFDe5mb8gTtxAOpvIKysitFTrx48CweNuvhHWAtMFgD6xS6oUE/NBicxrpr0Q8aw6DRrbV4F3oCrQCyDNXJjwYP1XmozoW0Ey/kIC+nYbDBuzaE7UeXYj4xJS+l8nauNoARI4eNARoNXjRgo2S3YaHBRl429q2Cbwh+qeRl4gbAHkO38msXFk3j0DYOy7ZHYxwWpkfbWBjt0BgPbcSzpwyBNAGKwEZ2zVgxvHHggtY8Ab64L1buhnjjWRw48AzjPIz3aL3H0gqt7bkBrXNonMfSObRW6LEd5KhcpLVwT52sFe2gNZmHvLTuFcmL9UbBKsKgFTqjMWiFXmusjdDaWjfotE50NygFSwqsSXa9SMKgFDMUPAxLqJPyLPNkQDsN5WT3UzstdOYgH8jw4uX0A8N5wFpCbw1coLVuaDBYg3XXYBg0hkGhL2ltzVADQD1DrxzIBlrrvaTXVryQdkxrUcEc0doUSskfFjUmharQogUbDTQG3BpwI7TmlgZsCH6h4RcFrS0ANARaMlTroRvGcjmgMQ5ta7FoBzTaYtEMaI2F1h6N8TCGoDTE46mRaA2KwdrD60Br4cgKcErBk0q0JhwA8J5AzIAHmkhrzmHfDYmvLayD8Sz8LdKazaEx2joJO/A51JR4QmsUxiMGlAKHEFRWCl4TOq3gtIItaG2tjfA1rbHWBp3S6JXGyhg4FTy8OoQiEKAp8FT2aNhBcaA1BygWvqadhvJU8DUAlgOtMdzA8IHWOtvAOo2+N0JrzmDdmUBrGsNKwQ8E7gFaAzRw4GVhl7gPfC3S2iBpdIMwTuvGPM37TVqjQGdKiTGYaK3Z4Gu+1cLPGiV0tlBgQ3B7JN7hRkLMdOthGo9FpLXGYdl2aIxDY4TetPJoDEMb8YgqIy/IQ6Hgax6sbaAxD69lt9srgiMFphAKy3LvmSXkjjzQOBdozWPflrQmNLiwHk2gr9Z6oTMPqCBDKdEap1+5ckwAqyA3ieA0wWkloc+BvoSvCd/qtAl8TQe6C7SmhdZ8kMVEAFGUnR6aJVxRMcN4D+0IyhOMU9C+oDUHkA+05mSn0A7y4ZfBkshQq7EeWvSDwWDlRe6+17CDwrBW4B7gnqDWXmitZ0nbQGd9oLsu0poLtBZozEqIuvC0GDo5oTWtAR2M
aSNh7Cj4ml8asAkycxllqIJbKHBDIXIEEna3FDozrcdiYdE0Fm3jsNd0MMahNQOaxsEoj6Zh2RnTEFrTkJ1GE+nMAYGfQTm4uKOmSPQ0CjvZHH4hbF0xJ1m5CLqZ8R6tdVgGHa21jDaEyjc2hNIGfY08y7svQXbGUNi0dEEt9CrI0MDXvFZwSiUdbYhyUwda00Jrp4UMXWsd+DRJ6DWE1gwkdEyxPBuKPRrP0A7QTDDOCN05BWVJ/hi3kw9QsM+05jzBWoV+MLBOY9236ApaG3oFNygMKwIPBB5CePDAoJ4DL5Oj6oTuqKS1fobWvM/hw2nNKH0VkpQSh6rRgDFi3BgNNBp+2YCbwNcWWuRm4Gu+Eb4moesMvfTQjdDb3nJAYyzaxmLZ9jBaaK1tHLTyME3YhVUEaghQjG7d493/6f+4sUPR/v6R74I+XYBbD9IeqvHQxkFrj9ZYLBqLRjksmwFLPWCpLPbDly8WymJf92jJhq90dGjIYUEWCxrQwGFJFg15mBCKZcQvDg0VfhriKw8WdnSgR+MJDM+c42jZw7Ns1w/MsMywDHSeYJnQscbaN+hZY+UbdL5B7w2O3ULyncFx36JzDdbO4Ni26J3B2hmcDg0GpxKBW6tgBy3bw1aBBgJZ+amBQBZQFpJ2gBrC+UaaQ5qxHEQIKSuMl7zEvJINcczWBgHvJaQiCHjvHJhFInDDYEMAK3Dw5MSDMJcQdheMNygGKQ9lBpC2IK2gVSe7YV6BBi2KDWvQoISZr8KWpSZwqwBDopC2BGgNZwyO2yVYA96IouANwA3BN5D8BmADeMPgNigAhkGNBxkPbTyaxmZaMwNa7bA0QmutcjjQHRY60JqSUMiFGrCkIdBdjxYODXksyKIBwwQ60+mnoQKNRXojpkxnQmaJ1nygsUxrjJ49LAMDA2uvYFklGhvYYOUarFlo7cQtsPIN1rbBSdeiC/R1Ylv0TmNtDVbB6O0GDWs1nCO4XmgNToH6QGdWvGrkIOkhpgONhbQewrtDA+QvcjvG3iBHcm5Ma0N478xlwS7MV7y93nsAoqChRaAxDWY1ojMAYGLZtSExYkkxSDko0wuz1ApKh9VPtCZ0h0GBtALWQl/QSpROQ+BAS2waDKZB1+4HmgplmuDbQHMa8G1UMgHfMhC+vqhaB6UZ2rhkfC8ai4Ue0GiHPSNfyWm1xYHusVAWrbLYV13ma9SjIYsFWbTkYODRkkcDhgbQjPiZKriZ0FlJb5G/MTNciNdmMHrv4AP9rT3DMWAZWLPG4DXWbAJfE1pb+RZdoLW1N+i9wdFqkWjteFiiC7TWWYPBKfS9gbUK3in4XgFWCT9LvCzSFwl99Zm+lBW6073QV+JrTpSKppd3a8ix8LL4XldSIMU4lrTwNWYP9g5MXpQ2LYTFrOWFxQlfQ1RySMJ7SHmht8aD1BD4mpZyr6EGeZmMnITLoKA1NoGvaQS+psFao2sIq2Zf8gyEHnXkbYAP9MUm0FsjtAbDUI3ITW2ExkzkZWZAoxz2zZB42YHphJ4SrTm0ZJMMXZJFAydyk3ywoyjRWuRlido474SUtBbpq+RrPTu4IDsHD1gm9EF2Cq01WAW5ufKZ1o7tUujJGxzbBTqn0TmDJ4Zl4Gsa3SCOvWHQcIOGdwTuhc7gJMyrpK+UHnbJzUxrjWUs4ntclkGDTXwt0VdUKDk4ZAJPY7bi8CAWjSzKz6nsjHQWnINKC61BMZTxEgmhNYwKsqSUoV4DQwiRWomBA02Blwnv8q2G1xqrhnDSHGZZ2VDiZZmvcTaeWwbpIDtbF5w7Fq2xiZctTKAp02OpQ1r3gdaGLENpwFL1aOCF3uCgidGAA40RDAldqcTXgqY2KzsDrTHDskv01rMPtAZ0DAxeYWCFNZsNWjv1LVYu8rUW677B2hscDwv0TmPlDFZ2D73V6KxBN2g4JzqasxIOz0PQ0wKtRb6leoIKTsJSbsa0DrRGjoW/BWfVchAaEnoLx+iA8RwcMVZk
qBN+5lleVUBw5KKhICnUmKepwNMoGBaKQJpBOrxvZhyUUiBNaFTQnJ0SHc1LOtHaWgX6El0spw0G06BvgGuBvqY6mqQ56GsM26/PsBoyLuyOzf/3pf8Wly9p+BS84WEhDNDBY5A3CcSIgDDBUzboWWOAxqlv0bNBxw2O/QIdG6zcAsdO0qeuxco2wgwHEby9M1j14gEcrIYdNLxVYKuAXoEcgXrKBkMvQpcsxDMTDYqOA7FKTCJZiR1WgwdZBg2yCwLngzc6MMDBBkHrwUHJ4+AxZJffBWHPYO+QXGwlGEihOJE447a/UulDBdEahzEiWONRa3CjwVolDyNrEg9jI95X14pw9VrCAqKAde2YKL1hEboNZ2HbeJBm6NbBGAdjPPZaUeYWxolxqi0OzBAMVYsD3QkTVDa8HxTyVReYoSh6TRC2y+C4bEhBkxishlTaBh79PaYiKa8sItMaPAYOtMaMAUDPhAGENZtAXwan3KLzDVbc4MQv0XmDlW9x6tpkvK5sMGSHFr01whCHBtYGA6LXYKeAgeQvjTuhMzVEphfTgabcHK1xojUKxoPqhL7IcYi9DrTWF7QWQpLYBeEb6S4Yr8y8SWuFO4yid4fCblz0XofwnLhrAiXGKZpGjAWtZNck0lqjgmEQdkw0Eq0J3WXB6tponEaBW9CaZqD1olhqhmktGuPRGIe9VozUhbY4aAKt6T7R2qHusK+Frg71GkslhupBErxWYsTJoyVGSwRNJLQGgg50FsM01BZas3CJs1mI0BkCXxNhS+ihYJmw5gZdoLVjv0x0dxxo7cQtsuC1LdauQecMTgZxjgxOY90buOAU8b0O7wMRaFBj+nJisIrCBqgu0hoHfhf4XO9DO/FMU3SIxJeABxc8g4GXBWOVrRdD1Qae5jntuDGztN3gZxBPNRDoS2c6i/RV8LXEy5SSnTkjHu7I14SXicD1DcElvibOEF8ocZm+hN5cG4zUJhoPwtNUIy/nt42VXRXtsd/2idYOA63t6QGXzDo5REa0RgNaVdKaw5IcDAGGGG2gsYaUKHcgaNJjGivoLEtOTmFpLvxdKQuPLsjOgUkMVYih2rFBzw2OAn2tfYsTv0h87cQKrZ3aFisnTrnToUVnRW72g4F3Cs5qidsPyl0yHnqMaS3RVJCVFoGXBTrsAy8bOHieOctNX/I1Bx5cMlbZ2iQ3OewccfJQ8xZ+hsTH0k7cNlrTCjBNkJvC1yIvE1qjEa25hhJf8w0l+trgZVqMVVfIzUhr1HjoYKi2rcXCODTGYb8ZsNBWaM30WOgB+5HWyGJfdzhQfTIiEq1Rjzbws6XyQW4CDYnMbEIYrQoh0WfRmoU4Qhw8BnZJRxsAOBDWrAJ9BacIi+Pt1C+CwdriyEW6a3BiF5nWbJPk5hCcIkNv4K2CtwroNeDEISK0FgyEIRqpXMjTQHcDRvQV0/GdXzie6Ggu7eJy5G8x7b28OxN0NHgfZOcOWlMaMXQi7Y4kXU3CguMOMNJucOZlMDrQlxLe1Siho8TXEOiukJt6ytc40KDQG1ofPmzjYYw4RJaB1lptE60ttcWh6bDUYqQe6C7wr07SZDOtBRnakhdaIwS+RjAkTpGGRIdQQY5O6ezakcML7vivN/aOzZ/8y0uxODThZVSJ+/Eh7cmHY85jAizCdjRkm91DvrFvoSRsjWXbXcLWFBwLQS2NMARmiOefATCl7XQJx5EtU+Ug29lxW9tLiFgKZeO4Jc8hxMKH9j7l6RB6RCzWN3lJx7wYkjTaWo158aFgPxt9EyH8h1IYk/wADkpoDAMAqRQOEEMCvKbwZS9JeyVfRnNayVeYQriADyFQjnLaU/yKmoIjCZNyOoca+BgGoYJHKnwFJIYPxDA8ELCiBh00TqmBDlvNJgSDaUiIgdxZhmZZG8UMzZS80soHJdOTvGgX0hTuMXwIKUs/lggADlvEzPCeEfiUfLiFCdYrOC80NTgjR68l7RUGJ15G55UYyU7DhrRzBOcU
yCoYR1BOtpQ5hXgF5mpD2iMZKsKsvYRMBC+O5HNQGuUlWLLhCzPRi+g5KZrRk5i2oKNiGc8DzSVaPI/vIyoAQSGAIkA7CdeMTDowbUQFVMuL0KTEyJGwLRImrSXtjSidrIJ3UYXzhsAqeLUNATooBCHNBmG3haCaRr6ipRnKNGDNGLTHiXZYa49T7dFoB60cWu1glPwabWGUly+WaQtDHlp5aMXQiuUSVdy1p+CxD8ZM+AZDfO5ASB+9ACHxMBDDK3HfTPmaD6FhmZflEDWPgq9BwXMI8yBCoyV0ZKmHEPaBEPYhNC58CIkX6RB2KCEUwpNMDEd0wut05GUeiecpH/hiwdci39IF/yLnR18NJC/8S5TLEBIHTp78eYaGFM/PiZdluhO+lkOD5SihpBx4VQxDcUqlcObM1yQ8JYY0DVqncquE3zkl/MwrEr5WhFBFfuoDPYAArXjE0xwpnKJFRzqss3zFScEnvqZYwloUMzRE9igWL7QOu2zyUZyQ9kp23yI/SyGyBU/zPOJn8ngzIlvwHrBeBb4mPMt5hd41sKxgvcbgNJzX4tm2kccpccp4wmA1OPCyxiqwC/xsEB6LwJPIZeMl868Q/jaEcsdBkcwh1RTSka/FY8nX5Nyli0r8LPAyDrws8rXt8NmwibJTidyMP5ZPGaYQVygCTHQYZn5HRkIEWSloI7yNFSQENvK1yOMMMr8LvIwDL2P5nglgSN5H0gxtdOJr1jiwZljl0RsrYYpa+JlWDo12aFQI81HC17TyaJSFUgxNmaelyyThZSnKInnyEfhb+FAIYYOXMfmCr2UdzRMSLxPeJTqag+xSOShYzq8XOCgQMfaCseaZwIsQxhbonIIM144zr0rh1D7zuKiDBR6XdbSQ5znwM2zV0cjlL6BSoZcl4yXqaAWNceBts2yNgo6GoI8F/sZhzaNeFuVoDIv3Op/bqLsVvGwodDThW5GHBf4X+FpsX6a9ynoah3BCEeksO9IkX1YEgo5GBsdoof0+lBfdzHDW08owdwXR0UzgWQpZR0t8jUl233zYgQv3ujsZAPzXHc9txoU1bP73//sO6IOleF8Vy4OkfdiKDVufSj75qZRPR1E6PIz20OQlNlQ76KCUtErSrXZYKCvbnI1DQw6G5OE35KHJYaksDMl5S6JGGJItUg3Ji+UaXtKhjgrvxxjikGYYQkgjbKCKxRpfwTMYe3rDa+MS91zkUQwa3QXOCZ/2ZBG8dzmELqZjvmOGhL4yHJLuAZFDBAeCDUahB6FjE9IqpIVJ9d7IkTUcy6c/ezYYWMOxwto3sKwldMpLnvUanTdBmIY+QnrwenR0PhsX3hOs0/LVMy9GA7N8XYOdxPPCJa4q74h45Ph9n2NeKTJKJw9eiruelCuHxFSVi8pE2D1JdbnoIxgoBeNchDz4IPQZIV44K4Tx058ovYzl+0vJEMlpTkpj2OGLjDbs8vmRFynuyAQtSIU4eB1pbOKhi6AJAUZtjqSMooYVdw2DJkoOgVNS2LIutrtVshKSMB2no9Sl9BOjKJzLSxyi4IYvDTKJ4oDIpLUBlAgRpwGrCGslXiwExRQ66IhRwaCgXKQ+kL7G5IPDjUO8eSoPPCum5atX4agYCO86yNfDvCyTjnyMobUXvkYsxlVQRNrAy4ySnyaPJhhkmjwWqk88rFHBGCMvoWokdZZqEMdAyNck/K9B6DvwMU0eBtJX5msQXobw2WBEG45ggvEx5mWIwW+SDrtZqqCrMtT3bH5W7KwWikPkYSm8CSx7YomPBb4GCaUTXgbhL5D3M21yfgmvciznPRtYKAysYQP/Glin88HHtELngzEQ+Jr1odxrDCFvcHrE15ynYDgIX3WBv3mv4ByBQ5pDmn3kZ4F3TflZeM5GvKzgX0kZDHxLjAlJq7KdDfyrrOt5g5+RR3ifNeeJwhc83sxy9JGfSf6If0W+5qf5wfvNmf9xdMKEdvmjBAUvYyRexLo0ZGiGp/FmVsnPQjkFDVqO
0hf55LWQdHRwUE5Dq7SrXeaJUY4xT9NqKy8DRaMn5GuIgUUAGwNWCO9bBj5EYiyBJLoCI/4U+Fl8T0YFXrZRJ/JGHvE+0csCnwvpyNck7JcLfiafGScKepnyYkhpn5xFkX81wamkidEq2SE32qMtdLEm6lwUdTDR12LahLToaEH3Q9bbNDE0HDSCQYf4AzRx4HEkl0QUg8QDL8s6WAqCm+hlREWI7zn4GRd8a5oW/QwpPaefyWvVFPQ0CrqWTg4vywoDdOBLwrMsVNK/LGvhcZGvBX2sZyM8LORFvawLjlzLCqeulXZeJX1M0qKPCV/LepnnwM+cSryMXfBSOciOW+BvJf+CB/xqDeDPd69rwIU1bP7yrv8Tly6V8cyFgl6cecrBavkfkpCLAR+ZGAgeYQ2DQHPBA+qj0g6VjiL4FAY24kFghVPfJuEW63a+SYIwCjkbPFypbkwn5TwI0pC2XokwS8q5KO1ZoKmknCdFnbPggp8ItKjIB8GEQlGPAmqjPAkuKVOOUx0kwZXTIrgQ5hSU8PjElT+WcoQXlzmV+ZTmIo3gYZTzAcwDFIsx0EaB5rLSzj4oOVHJB1JeKNhUnnacU3wzMPmJIW4LYDPMr4hHDRJwFJoFVXj/VG4b/lhO8sxA5foilHTIzzsViAo3FYp2Om4q32NhVAgrNS7fUM5Tu/COQcrnJAjjy/DJ8ZAEGhdOB/FamyDkktNBidEvTgeeKO0S+6+J0ajgdFBBSCmfHA1GeTTh/RIdBF0T0iOnQ/yBxUFB0dHgRZcAiwOCxCkaN1wkjFHut4EKzspCkBVhGTFUgxK/KshpQ2mSUk7/B45GhWBLnCw7HXxR5jnyuIKXAcnJ4ECBP1HiW9EbOkDLjjUIAxsMvsVREHAOqlDUdRJ+jik5GpzPzojIz1xQ0r2npLQnXuaz08FHp4MnCb0MfAuFcMt8a1O4lUo2RryON3hcyb/ABS9L5QWvY9khTR7XksdFnuQmvMz7wPei8o0U8oTIu0o+lkKhBnGCeI8FC89auqy0I+RNw0C55GHXwc8ohnUwkHZVk9JehC2XaRUUtREvCw9HDDnVKqUjj9vgZ/GcSMJnIn9TFJTpyO8m/Cw6D5SEbiX+FXnWLr42KceonCf52dkwcjpEfqbDrpvmkdNBB6dDdKRqFXiZ8jCKxYmghH+1akh8qwnOiEXhdBBHRHCgquxcaMkmnhb5WlTcozNVeBujJdnz0CS8LJpvJtpLiUcBJoR1UgjTJiqdDkHiBdpQKX02PwOy3sXJWGRMNbSsk0VelnU2X/KyoKTLzg0lRX2A8B4f+Fd0SFiWHeyVb3HEeyNdzEGh8yY5WQefedvgs6PBeimPDgnP4jR1LPzLJl6mwi8r52MHavg5yI7qjAN16oBA6WjwY16VHAa+qF/wvRE/C/wp88PAA5OTFAU/i/wt87Kkn6XwTYR3w6Q+xxA7H50KEoanPaCZ0RTOVGZkHS+eR4dUTEenwxn8zPoB/y/Ohwtr2Nz38Hdj/1g+LctJEcieFS6UANmBDyFryOlk4FBWItIOJlGxWx/COMIxfr0j5+Wt0vj1tbhtGsPaLERgx/IU7gYheMcqjxU0Jwp/KE3F2FavkuNcjmG3Jk2UwlY60vYrsYSUxDCsmEeByCiEfsQyFbZicx8h3MH72f5UENiSRjG+tEu7EqFuTKewubiFi1g3COSYz8XWbSrnMcF7Djwy5gsZpE+uFnVzvVJpRO7rvEiCfpzHZXkR4hfTHCcxUgQohc+M0jEkEHG7GXnbmVCE1SCFBsYQGw51fDKEALdRF+FLQTGvaFeUe8qKhZyjGDf2G9thko7z5JSfpKqKmpWEG0YZSUq8/XFLOy6TJi954OQwtYhfCfMYSIMgbVIIYhDw8kSGLyGBoYIhQ5AvcEl0o4QyStdhnKjDbEw5hjOWacqCnwsPXcib//DD+EiT8zJMgSPjR/bScVByuSj3RVoeJQqGDqU0B0+dDy8iO87ht451qJfT
0UjxQcDHcxdClHxQBpgpGytMaYfBe8n3HPidU8HJTln4B+NGBCeBg+CNAh9JWCMZKDFUNDtekB0ssT6zGD6RH83WyX3HXdG8Qxr7yPlJCYh8KbXJeXHXgUY7pz4ZImW63GEY7bbKjZddDCDzR5Q7qdg8PlmMdlY3jZxoiCSjPe40jJw52eAZO2qycUPJuCna6GKHIhgtKjpuksGD7MghpLzs4JE5iXES62d+BZ3DZ7BRZ1oXme9Gp1E0bkJdinwr7kQQktMmOW8CE/HhoxGWPHzIt8pjSEaQHGUXVhw3igojKRg6inK0iQJDB2Mn8TXK/cSvp8a6Kjho5PbmtCLKUWTF/Y1GDVGUdXJkCjyNkGXazDHqZpt6GRB39WNeTCdDh7LTGUGPi2wgqDuBs2Okn0VdjIGgg6mx/obsqPasEl900GlnQ15NoMTbSt1P5LMYpwyC1i6H9BbHONEylD3qVaIzZR1L9Kogd0Z6V9bZVOAvpV6lRvpV1OPGfVCp/83lFyG/4/lE/oUcJlzoYzQtD4opeaR0dCKPdLOCb8Xwz8jT0hGxyjhvF9a9xSf+7MxqAC6wYXPPg3eCDpaBUhCYTk6n/CKPUt3MhBAUKArlpHheuQoPaGRUiWmEow7eXUVhexXCqCIDiZ6TXFeOFJSv6IlRJJ5iQvBkB8VMBSYXlbVYJ9eXMhOZWqhXKngKuYzAaMgViiCH9i4pdZEppn7Bs2UEFHkhjfCQEnI63Jq4hRuR88UDHrmloqw05rplWiXDIm4FY1K/xLSvpwMe+aEFgl5WPJQ+sGkAyRMl9QrmDhRhNNFTxdHeG42VDHHODF8UWwpCIF/vUDJozgIhMX3O6ezZVyPGnuKbOQsJy3rU3gXPmAgLlbxn8dyyTn2Kcl0Y+pz7Y0Yaz6V6SDuZFmE3E6Is+/QrhdJYQKVr8eO8qHDnI8DBkRDzOCrcHJRvLtJBIY/vMaSjL9OBQkeOCEwE4FxecYy7sGW+36yXlPwoTGfy8jmP2qf5YdJHEFjkJ+MkQVfOhyfz5OBMkT6aaFwU5VHJlzyf86PgRU7L3H0SeuRz+5GSX8a3F4KSSkMi1B19BCN5kIrzad88MS4wUzc8l6m/6S5xEuLTPlCAsflu0aROUiivl7/FG12ex44pcPeiWqDhNHTazYmn0bmTjZaiMMhclccM2nT6QiGKNpT7T/lF3jg994vzCMfRjvi0bZ4Lp511ZKMqlkejJ9alol5qo1IdDgKPg7OSIU57Gw2mNEZY5iAMuShnlW7HOD/pPch9jc7LMZDGSGmaHOfyo3AmhPdkkHbBMKrLs3VzH5yFPzgI/ViHA0mEY6GPxbQ4vrLOFfOi/hXzASRDkMr0REfTUXdDbp8Nx6yjEYCFcqJ7RV2r0K9Sm6AbmdSO045bqXeZcC6fgPYjPUqThL9Fx1wyVqPeFfJ36WJRV4xOOyGvQh9LeWO9LEKHZzjeqhhGnB5HxIiESJI5BC/WUcWL/WN9q3D8FciByN8erh05/C83umHzZ6/+Sxwe6glDDpis0XjrM1cZKZMETNXMsn3s36NQPynX3Kyf65Y/yc8zjh4H+dFmPmdB4Ysfj8rzDlGZV/79Hkb4Oys+Kq6FVyOlRaGNSqUotSqdl32VymipMGbvb7kTFc+R27IaKaM5XzilD3XTN+2Tp1kVnpHCO1KcRw6elE1QeJkwPD5BOCelc5IeK3Y0UeDKNM3kbaZLxXGzbNquVNzGRDMqmyOqyTgbdUOdsl1WsifzzTpj1Myy5QRkBTOMPfKuhHG46A8c60hbLuYhTTnkx7zYpqhXeHlSf2mcqPzldtFLJe+hjfsrFcUUJsQMeag5XzuKunGc1DAec1/5OkNfZb0SvMGmttbbaBe96GUdmgiM5EqNp1FZy3U26s/1MdqZnGk/zS88vWX7XG/mGHcvi/YbymlSyrICySaHInua1kFSRqOSJvyaijSSsonouQ/tSgUvKYRTRfA85XGMSArlXHYeeb5u6G9z
/kB6/42QryWNzRvtksc8KptRmwl1E5lFx97kGB2F6WVhQnICEsUw06A8JUchkiIW02NHIbJSScExFurrYtch794iOBA5OQ5jHxo+j4FSMZU5RSU1pRHCTQunYZlOechKr6JibOQ6VDr5yKN0/FGZTkpnkSaktiVZg6ioWyiNFG4bCCpXTo7B/Jzm55s2nvXNuql+cV7W33bOG3k8Wy/Cw0/aJ8ZasFAObG76ysE0tK3Mk6xSR8tiruxj149mzgs9qciL6dLxB0x2lFgFMTrWwSzkneENJyPn/sq/JxV3msq0Y5XkaelMLHW4Uh+bOvuSPlakRzpacYz9cdE262NjHQ0pjcQwc3ruOE3L+VhPoiItR3vaAfhfNwlsBhfWsPnq8SUcwGSmPsXkQRmFHc08RCN/OWHj4YnwRX2ktR31PnmAyjqThyS1Hxs38SjjUehzTOjjhyo8KMGwyQ9T9LbH9kW4W2GIpP4YuQ7nLV0fFjmOX/ZRPogAUh9pR6B4CBAfhjR+4Q1HQfyIxlycN0TxiOtCKJyd8SGJYTphEozI7dNCR/k+ekCKPCr5afqFFS3qZw84h7AiHnvFgWLLNvebwvRSvYmnfHpeeG6VH/eBUD43/1GdLX3mduO6s/VCqF9pWIE5rUUyqIq1z+vGqT6K+ijmnrjxKJ/H/U3yuJjvqP20bUSqV/RX9JX6wGY6XhtP+8y5m+M83Zj1zGdFJyotHP+fMWzGnnaIh7fsahKWlIeYpCk/l4jevGRQjPtIK1MYLfFrP+P+IG7F8jyFgZTleSzeGBcpFDLoGDkcMgyVDCKi5O8Y1YvplB+uVRU8vOgzjl0aPGWdPHYYbzLGyGACjc9H1zTfLsmkjfFyu3gtZR7KPqb9FX1QYjLFDxxuFadbnYyfIp0U9dCHLFU2fpIxkAyDrPyX+dFICNQzmrMHhTxCcGkgvu+mwLCUnwwdykEowlWzsUJAinigYqyybpSi2TWY82I6e92RJFuW1OP0tl9E9KDH5Y6lVOSnulzk+Fg+rp/Ak3MU9bexM8akBebrjvJyC2JgpJuNDjxq54vzsj6H89JRFUNxy+E5jZVEQNYxkvgo9DAOzx8w0qFifowSSEp9KJ8aJGUbKVOpXTaONh2802OMPCjrl0bP6HWGiYHioqEB2jRWEEKBi3bZiTyZS5GXRGmRn9bBUxKTHHd4mcS5DACxPDHQ8sZRvjlRLxmd00ZeTLuumSG+eVxYw+Zt998FtVzGp3vElEuGu8GoAaBkzqn+NI+z3J+UR2Y9SiPK8KCYhfGzA3KcHudlIZC8YCkv14ljlMw+CQCa5s3XUVOBUQqNcr7l+Jh4mDY8TzxqH7/2loVZrpO3QnNI1iisrZhP9meUws0XYW5+LBiKNvKXpbkYw2chQ0U+CmFZBG7lvFwe1yX3kYUUoRS2QPSnjPNkjOl1z/UBIMx/Wp7vj8L4OtI40+ub1Mljx/wscEePCWXbcCpgc7tctxSMhNw255Vb2ONwxHG/mwJ2Wwjhtj70xui7xpjv46x5bhX4FwiMsZNmd92pi2aM+IXE+Xbz/U3HjopImeFn2k/blZuF8Z2ibeW+zOfN25R3zGlSlyZ9UNE3Ff1Ras+Ql5hT3VEfRT7GjqNcLkie1CIvKUJFfVEyMm27QtFBmPNoZz3NFclzW+ZxGCMqQanuVLHauJaxAjfub9yurJPXaOwYy/2V65IdYOPxIC9pF/lxkUbK6EhBxGiMkUI6yY8Xs61uzttxTI6+cTtM6sU6GNUp8wBEmir6RZxrWT89IGVbGTOX5fJCvGzJCzx91HamrMwr66a8zb6n9WjSz2ZekB3TPkbzuc4xyjZz/e1sz6PyM+e2a41Gc+WtdefTXKq5s+PTxJIbza0sH/UtefGdUp2uj0fXmv7Uw1x/of7mNfqNcdK1pPkVHcbzjTF41M66Dl/C+XBhDZs/+h/+GgeHk+kVz88Gtukus/nbd3jGbcdhaBvVivJpGY8oba4sFm2Gugk/
KeuMZ0zTXgpFlzf+3xxzXDeMx0hGm8/Z6SrKdJ759jo5Hc2syZgpXQjNpH7H9tlTktoUXpetdZCFTaIZ3qwDyBYxGFmgTOYx7W86HmGs7IzahvwomMu2zGIslN6g8fXR5NkP68jT+ZR1y3Uq5jJTr1QWyvlN17kU1KP8mfJSsCOVxbmG89B2456Uwn2iyKR0cY/A42tNdMSTch73vcFEWAQ4F+exzpwQ3+gbuYzOqLOtv01BSDP9bauLiVDY3hYTITVfZ348Hu3ATcfPdTmMc56+R+2nAnTb9c4eeee8Zo+75jKpw+V6lku9q9+QpKKPublxcZ3lEuxqF9d4NNbM9fDkGkYFo2eat4xRdFnUKccakdZEWRmXTfraKAtSNM1t7rrGYaIMvzmn4nqS+TR9HjAeA0D2Nsf2O9diro/IQycdF/NBUbfsc7QeG4nJxY1440ydKT3P9bdRttuxs0WFuu5x6Kw2EwfTfP05F1wumaam46SQ2y0tRznlfGbmmFIxLHjS97n6iGme1I19caFpldPc0m4UPjx7eWV+2YecT+7A5nymfe0835I3dSQmkqZ8Pg1xBsDKTjvfigtr2Cy0w1Jvszgm2Kg2/wSNefeWp2zGCNlal7d7S0etxhzobC/ntH2Ylk/T40m3pQKfH4L0HmuhQG+OuznFkZdzYpTk+jRpOzZQyrTfVR6ND84ey3Lscf3o/dscYzyf7CWM3XmeqxPXhXb3VxoGmHpQi7625Rdjl57T0Xgj42PTWPOFIZENkWiUlPMLyxmNKh6vdfZMT/PH46U15uJex/SWvHjNcW6lhzjPEdgwbDaUk03P5qicQ51Ye6J8oBhvTLSbRg2w2yDZ7X3Lx7KPqVI/ez7Tz7ZzOrMOb4y9aw5ZYdjilZycp5WemcPu8Xi+flm3uIbd/fJo/M3xeLZ/mo6zbU5nrAnH/JLGtsw9ofDObngvJ3kjD20sj/1thFxixpNazq/oY7b9lF42+yrHzWvKG9e3kZ72MUnT1najjrEBmoxNk3pl+bTudBdyZlcyua5j+bTKefoI+dOSs/7A8dY/SpvG2d1+53xuKMwpxnN1dleiSTkX/8+1n9YvCs7Oo0neRvkZfWxNp/8meefoozjnsp9dBk/5vuO0bkjz3PgbY83NB5M6NHstYsxwKudJ+77vgc/hXLiwhs3/9Hf/YwhFKzKpeMQn+eM0b+YDkxtTpnlL/mRS56o3M/YkPTIxtsyDdlzTaLhCktNMX5u0PN8PTa4tlRUhTaO5j9rzhJY3r2NkskzWYFqfZvIjFM23U4VWU/atirWjYlyarMk0RK9EOXcinp3fZh95/acm5WgeRT8b6Um76bUr5Bcop6FzmNyP8fw3x0tjbFzr7rq5LJvA0/Elryif9FOakTG8caN8EvK3uS752qdibS40Mfc9s86j8Tb707vmP3N9Zchlzhu/UEsxb6PdGGWIZxx7bj021mBmTc83z/O22wTN9KfnrIMtfUzHTvk00++2ujv7mJvzJj3E/Lngx7n7vdnflnZbW8VnaovChXGY6Hz5rtLz9YEwhzPr4Pxfo6RQ/3x1y5flz4/yXZMnA3Xuq7mO+TyVHVZcF/zTYPQxxl9Avf72u8OCZ8e7zus4zwhz4cS75rDLCC/DhZ/snHb1cXTk8cf/5YwBAi6sYXPPj9yH/WkoGnA2J55RqoqSJ9HfddSbCLgzyWpnn5vGz3kfhPKdnXIuUQk+a/zZ8LhYfeucp98hKeeyu/ZojC1hdfPzPM91bYbwjUu3jw/IrhdN+qOU2gZOu9Mbc5sb88wrnZvntvlJf34y6rY557Fpsk4z8ykKOKz+yFqdme28+jjte9sKUR5nR/25dSvHFXqmjXsRd63iPY+7ZYQpDV7feFOI+BursTEEctTHZLdv6qLemH84S9cXfQ6RX8zs0m6bs9+47knfo5mM6WXjOkJ5JhfK4VzBmz7dIU11S7ZH0Xk+ty7jUM28WkW/nPvYdQ83172owzQqme76
xkdgbmwUeZtjJ6Y+7m9UqVyjmD+mgw0TNtLA5J7xpD/phEbjS70tJipPzscXg431nY43c11z894om+xu755nuOcb6zi3tvNtZ82u6XO07aGam9Okz7T2s/Wk7tw4Z4Zx7epn25hPdoyCdke552m/pc71Xh/tqnNG2231rn/+vCV/fizaVv2c6zgX5jhbl0eH+TVL+eMvhG7U25BR2+tMPxIxo4ZuXPe0n7nLidfYrXsAfzJTYxMX1rD57256FJcO9fkbXDCPyPVa5OfB+Szr+TrntcrLXq53/n4sgs9R99uvk0fcdt1ntZ0JHSjKz25//eVzkQ65/jwhb5uH32I5luF/5xlDlLFxvp/WLfgWT4Tb1JDKTTaVaz/60seuOYlSPVXmy/4I43ecynplrTLccFRnJGzm6sRRtvUt7coQxLny6TrOrsvI0BgbQnNrNAoT3ViDzRe6t/c3nj+IR2GLI9qYMZqSUVV0Ud7jMiRUzoGpYRNPNteupMvyYwBTZXv7eswZZhshozxXd3OMuRDVcsy5a9rUeSahm2f0F/uctp3WyfXiumPrtUzDTlPbLXQ8jhSbM5rCGMVEtq7jzJrOjjOz/rnvbXOb9HuO9tg6t3kFfroWucJ5+izPN+vTudpNxy/cHrvaT/ufmeeZBkNZZ7pWDGxwNQ5lo/ZTzRsjuhtRfalwTw3HoJHPKdDl2PPjjWa+0X7bfTiP8bO17cY4PN/ndC7TvHPc1+ufP8/X2TX/adnM3ObGozEjOld/BGDous3OtuDCGjY/93/9DPTecrNgqwEzt4Jbqm7hd2e1mc/f2tN197XtHb5NjnOOMTbqbZnnzn4329CZba5j7C1taFebHWs0W7RjrrP+7B3rORcal/uay5+vv30Nt1/DNt/79vDg65wrNkPwzuwL138ttKO/6x1/LmzwrDZpDlERGBVs33fZFm60fW5nzWH7GsyGQj2J+xbHmb0P57jWuRluW/O8plvmsXU83hputCvEC9gMrztfO6QvP5Z9gbaHrUmj3eXb12XOhC3nsnvd5sI6wWdf4zRkcTynLTSG3XQh92pOcQ3tz7qWM56Hnet/jvZnjb1LVE7DQMd9736W50I4p2OfhTP7AO+UJ2etXepjG9859zy3ubBC+Q66K+dxltpy1v2UOk/FeGfT3ZMZ73xzemquL9Y9V3/nDJ/bSg9zKtNMaPA2nOfej+vmfk+OPO79387X9sIaNm/97z+DvYPxd6unxv+3hevt5ykd93xE8KTHfgrmymD568JPRfdPZj7nYLLn2lHapvg+A2PvNAK39Dqjw4DPUF53jr+Rmee9XX09Rz87Crd+RfCM8eau8ax13naN8+uYz67bT7BF2d8lMHflbL+f83PbvgpnCIotTpydyvbO8TD7fMQ57Gq31ZjD9P6E/CfFx7avR/74yjymyutoN+QMpX1byfZ7mVfsrDnNhRSe9YXy8yiOcztZcxS+WedJongu569pZuzigrbfV9pqwGfE53e+1lwY5rh8s7cyXHPbfWZMgwLn6uV+5sqiC2bX9V2nRvGkcdY4/syZ7o6CAPLmwa7nP+76bltbHu0Aza/rtvJtajoBIN58bq4XcW5zxvJ0B3a3HJvMDdsjNc4/t91jb+NomYfvlvqzrbco9jR5Zle9BfBv8xOf4MIZNvHlpNc//wu4fLj7b1VUPH1g/vZejnsmcL3hdU/t2M/M2viZr+w8VXg6wiWB+b9b8mSx84tB1wEJq/v2+tru7zxPW85fKXySYz8Zinuyc5a/IfLkxnsymIYYntUZYzO87uwxgOsR+oztIZazfZ9zPu46FY/rUaTie2Lnweyaz/R33jWYvg/1lIx9jmvhM/6mVa53vvvp+Oz+zqPAC42e3Vekh7n1iGGsu9Zql0JZIv7dnnmc7z5Pv+o5W35GH9Nwys0+zrPn8e3NY9d9OYtOznp38TxrtH3s3W3Putfx66+7yneNvfu6zx57Nv+MZ+WsZzzeq/5kCPM4mzou
nGFzdHQEAPieH/7XZ3ciFRUVFRUVFRUVFRUXAkdHR7hy5crOOsRPlUv0KYL3Hg899BDuuOMOfPWrX8Xly5ef7SlVfAfj2rVr+O7v/u5KaxVPOyqtVTxTqLRW8Uyh0lrFMwFmxtHREW6//XYotXsn9MLt2Cil8F3f9V0AgMuXL9cHpeIZQaW1imcKldYqnilUWqt4plBpreLpxlk7NRH1JZaKioqKioqKioqKihse1bCpqKioqKioqKioqLjhcSENm8VigXe+851YLBbP9lQqvsNRaa3imUKltYpnCpXWKp4pVFqruGi4cB8PqKioqKioqKioqKiouF5cyB2bioqKioqKioqKioqK60E1bCoqKioqKioqKioqbnhUw6aioqKioqKioqKi4oZHNWwqKioqKioqKioqKm54XDjD5j3veQ++93u/F8vlEq961avwD//wD8/2lCpuMHz84x/HT/3UT+H2228HEeEv/uIvRuXMjHe84x14/vOfj729Pdx11134l3/5l1Gdxx57DG984xtx+fJlXL16Fb/4i7+I4+PjZ/AqKm4EvOtd78KP/MiP4NKlS3je856Hn/7pn8ZDDz00qrNer3H33XfjOc95Dg4PD/FzP/dz+PrXvz6q85WvfAWve93rsL+/j+c973n4jd/4DVhrn8lLqbjgeO9734uXvvSl6Q8h3nnnnfjwhz+cyiudVTxduOeee0BEeNvb3pbyKr1VXFRcKMPmz/7sz/Brv/ZreOc734nPfvazeNnLXobXvva1eOSRR57tqVXcQDg5OcHLXvYyvOc975kt/+3f/m28+93vxh/8wR/g/vvvx8HBAV772tdivV6nOm984xvxxS9+Effeey8+9KEP4eMf/zje/OY3P1OXUHGD4L777sPdd9+NT33qU7j33nsxDANe85rX4OTkJNX51V/9VfzVX/0VPvCBD+C+++7Df/zHf+Bnf/ZnU7lzDq973evQ9z3+/u//Hn/yJ3+C973vfXjHO97xbFxSxQXFC17wAtxzzz144IEH8JnPfAY//uM/jte//vX44he/CKDSWcXTg09/+tP4wz/8Q7z0pS8d5Vd6q7iw4AuEV77ylXz33Xenc+cc33777fyud73rWZxVxY0MAPzBD34wnXvv+bbbbuPf+Z3fSXmPP/44LxYL/tM//VNmZv7Sl77EAPjTn/50qvPhD3+YiYj//d///Rmbe8WNh0ceeYQB8H333cfMQltN0/AHPvCBVOef/umfGAB/8pOfZGbmv/7rv2alFD/88MOpznvf+16+fPkyd133zF5AxQ2Fm266if/oj/6o0lnF04KjoyN+8YtfzPfeey//2I/9GL/1rW9l5srXKi42LsyOTd/3eOCBB3DXXXelPKUU7rrrLnzyk598FmdW8Z2EL3/5y3j44YdHdHblyhW86lWvSnT2yU9+ElevXsUrXvGKVOeuu+6CUgr333//Mz7nihsHTzzxBADg5ptvBgA88MADGIZhRG8/8AM/gBe+8IUjevvBH/xB3HrrranOa1/7Wly7di154ysqSjjn8P73vx8nJye48847K51VPC24++678brXvW5EV0DlaxUXG+bZnkDEo48+Cufc6CEAgFtvvRX//M///CzNquI7DQ8//DAAzNJZLHv44YfxvOc9b1RujMHNN9+c6lRUTOG9x9ve9jb86I/+KF7ykpcAEFpq2xZXr14d1Z3S2xw9xrKKiogvfOELuPPOO7Fer3F4eIgPfvCDuOOOO/Dggw9WOqt4SvH+978fn/3sZ/HpT396o6zytYqLjAtj2FRUVFTcyLj77rvxj//4j/jEJz7xbE+l4jsU3//9348HH3wQTzzxBP78z/8cb3rTm3Dfffc929Oq+A7DV7/6Vbz1rW/Fvffei+Vy+WxPp6LiunBhQtGe+9znQmu98VWNr3/967jtttuepVlVfKch0tIuOrvttts2PlhhrcVjjz1WabFiFm95y1vwoQ99CB/72Mfwghe8IOXfdttt
6Psejz/++Kj+lN7m6DGWVVREtG2L7/u+78PLX/5yvOtd78LLXvYy/N7v/V6ls4qnFA888AAeeeQR/PAP/zCMMTDG4L777sO73/1uGGNw6623VnqruLC4MIZN27Z4+ctfjo985CMpz3uPj3zkI7jzzjufxZlVfCfhRS96EW677bYRnV27dg33339/orM777wTjz/+OB544IFU56Mf/Si893jVq171jM+54uKCmfGWt7wFH/zgB/HRj34UL3rRi0blL3/5y9E0zYjeHnroIXzlK18Z0dsXvvCFkTF977334vLly7jjjjuemQupuCHhvUfXdZXOKp5SvPrVr8YXvvAFPPjgg+n3ile8Am984xtTutJbxYXFs/31ghLvf//7ebFY8Pve9z7+0pe+xG9+85v56tWro69qVFSchaOjI/7c5z7Hn/vc5xgA/+7v/i5/7nOf43/7t39jZuZ77rmHr169yn/5l3/Jn//85/n1r389v+hFL+LVapX6+Imf+An+oR/6Ib7//vv5E5/4BL/4xS/mN7zhDc/WJVVcUPzyL/8yX7lyhf/u7/6Ov/a1r6Xf6elpqvNLv/RL/MIXvpA/+tGP8mc+8xm+8847+c4770zl1lp+yUtewq95zWv4wQcf5L/5m7/hW265hX/zN3/z2bikiguKt7/97Xzffffxl7/8Zf785z/Pb3/725mI+G//9m+ZudJZxdOL8qtozJXeKi4uLpRhw8z8+7//+/zCF76Q27blV77ylfypT33q2Z5SxQ2Gj33sYwxg4/emN72JmeWTz7/1W7/Ft956Ky8WC371q1/NDz300KiPb37zm/yGN7yBDw8P+fLly/zzP//zfHR09CxcTcVFxhydAeA//uM/TnVWqxX/yq/8Ct900028v7/PP/MzP8Nf+9rXRv3867/+K//kT/4k7+3t8XOf+1z+9V//dR6G4Rm+moqLjF/4hV/g7/me7+G2bfmWW27hV7/61cmoYa50VvH0YmrYVHqruKggZuZnZ6+ooqKioqKioqKioqLiqcGFecemoqKioqKioqKioqLiyaIaNhUVFRUVFRUVFRUVNzyqYVNRUVFRUVFRUVFRccOjGjYVFRUVFRUVFRUVFTc8qmFTUVFRUVFRUVFRUXHDoxo2FRUVFRUVFRUVFRU3PKphU1FRUVFRUVFRUVFxw6MaNhUVFRUVFRUVFRUVNzyqYVNRUVFRUVFRUVFRccOjGjYVFRUVFRUVFRUVFTc8qmFTUVFRUVFRUVFRUXHDoxo2FRUVFRUVFRUVFRU3PP5/HS/fSTtXw6IAAAAASUVORK5CYII=", "text/plain": [ - "torch.Size([4, 3, 4])" + "
" ] }, - "execution_count": null, "metadata": {}, - "output_type": "execute_result" + "output_type": "display_data" } ], "source": [ - "a = torch.zeros((1, 4, 3, 4))\n", - "l = PositionalEncoding2D(d_model=4) \n", - "\n", - "l(a)[0].shape" + "#plot for a fixed space position, and show the vector depending on time position\n", + "x_pos = 0\n", + "plt.figure(figsize=(10, 5))\n", + "plt.imshow(l_pos[0, :, x_pos]) \n", + "plt.show()" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "694e9c20-b0d3-4de2-aaab-cf3dd0cd75c7", + "metadata": {}, + "outputs": [ + { + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAzYAAAFhCAYAAACiWz8nAAAAOnRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjEwLjMsIGh0dHBzOi8vbWF0cGxvdGxpYi5vcmcvZiW1igAAAAlwSFlzAAAPYQAAD2EBqD+naQAAIoBJREFUeJzt3X1wVfWdP/DPTUICYogG5CElID5UqwLWByi1tbgwYtayun1Y7VpLaUf7ELTKrrXsFKl9SrU7/VFbfrjtTos7v6K2u8V23anWpSr1Vx8WWFrttBQslSgCrVsTCBJicn5/9GdoFJDA93Jzktdr5sxw7z18ztt7zpnxzbn33EKWZVkAAADkWFmpAwAAABwuxQYAAMg9xQYAAMg9xQYAAMg9xQYAAMg9xQYAAMg9xQYAAMi9ilIHeLWurq7YsmVLVFdXR6FQKHUcAACgRLIsix07dkRdXV2UlR34mkyfKzZbtmyJ+vr6UscAAAD6iObm5hg7duwB1+lzxaa6ujoiIsYu+nSUDR582PN+/u5vHfaMPzf53z6UbFbKbClzRch2KBxrh0a23nOsHRrZes+xdmhk6z3H2qEZCNlad3bF+LN+190RDqTPFZtXPn5WNnhwkmIzrDrt14hSZHpFymwpc0XIdigca4dGtt5zrB0a2XrPsXZoZOs9x9qhGSjZIuKgvqLi5gEAAEDuKTYAAEDuKTYAAEDuFa3YLFmyJI4//vgYPHhwTJ06NZ544olibQoAABjgilJs7r777pg/f34sWrQo1q5dG5MnT45Zs2bF9u3bi7E5AABggCtKsfnKV74SV111VcydOzdOO+20uP322+Ooo46Kb30r7a38AAAAIopQbPbs2RNr1qyJmTNn7t1IWVnMnDkzHn300des397eHq2trT0WAACA3khebP7whz9EZ2dnjBo1qsfzo0aNiq1bt75m/aampqipqele6uvrU0cCAAD6uZLfFW3BggXR0tLSvTQ3N5c6EgAAkDMVqQeOGDEiysvLY9u2bT2e37ZtW4wePfo161dVVUVVVVXqGAAAwACS/IpNZWVlnH322bFy5cru57q6umLlypUxbdq01JsDAABIf8UmImL+/PkxZ86cOOecc2LKlCmxePHiaGtri7lz5xZjcwAAwABXlGJz2WWXxe9///u46aabYuvWrXHmmWfGfffd95obCgAAAKRQlGITETFv3ryYN29escYDAAB0K/ld0QAAAA6XYgMAAORe0T6KdriWvfP2OLr68HvXvOfeniDNXlOnrk826zcdbclmVY3bmWxWRMSurj3JZnUd83KyWRERnVlXslldQ9LNSi0blJU6wj5l5aVOcAB9+Z9qCqUOcAB9ORsAHKS+/L8BAAAAB0WxAQAAck+xAQ
AAck+xAQAAck+xAQAAck+xAQAAck+xAQAAck+xAQAAck+xAQAAck+xAQAAck+xAQAAck+xAQAAck+xAQAAck+xAQAAck+xAQAAck+xAQAAck+xAQAAck+xAQAAck+xAQAAcq+i1AH2Z2T5nqguP/zetXrxmxOk2WvJ529LNmvx9hnJZp055rlksyIinu3sSDZryLDdyWZFRLRnL6cbVtWVbFRnlm5WRERWkSWdl0xZH80VEVmh1AkOoC9nA4B+wBUbAAAg9xQbAAAg9xQbAAAg9xQbAAAg9xQbAAAg95IXm6ampjj33HOjuro6Ro4cGZdeemmsX78+9WYAAAC6JS82Dz/8cDQ2NsZjjz0WDzzwQHR0dMSFF14YbW1tqTcFAAAQEUX4HZv77ruvx+Nly5bFyJEjY82aNXH++een3hwAAEDxf6CzpaUlIiJqa2v3+Xp7e3u0t7d3P25tbS12JAAAoJ8p6s0Durq64rrrrovzzjsvzjjjjH2u09TUFDU1Nd1LfX19MSMBAAD9UFGLTWNjYzz11FNx11137XedBQsWREtLS/fS3NxczEgAAEA/VLSPos2bNy/uvffeWLVqVYwdO3a/61VVVUVVVVWxYgAAAANA8mKTZVlcc801sWLFinjooYdiwoQJqTcBAADQQ/Ji09jYGMuXL48f/OAHUV1dHVu3bo2IiJqamhgyZEjqzQEAAKT/js3SpUujpaUlpk+fHmPGjOle7r777tSbAgAAiIgifRQNAADgSCrqXdEAAACOBMUGAADIvaLd7vlwNaz6WJQNGXzYc05e/niCNHud/eXKZLPes3pSslk3vuM/ks2KiFi7e/+36O6tkcN2JpsVEbEz60g2q3zwy8lmdUXij2GW99GPdfblfw4p9NH3DAAour78vygAAAAHRbEBAAByT7EBAAByT7EBAAByT7EBAAByT7EBAAByT7EBAAByT7EBAAByT7EBAAByT7EBAAByT7EBAAByT7EBAAByT7EBAAByT7EBAAByT7EBAAByT7EBAAByT7EBAAByT7EBAAByr6LUAfbnjYt3RkV5x2HPeXnapARp9nr25Z8mmzXqkXS98i0X/TbZrIiIb/zhHclm1Q1tSTYrIuLFrnSzKitfTjarI+tMNisiIiqyZKM6s3RvWlaWLldyhVIH2L+sD2fry+8bABwsV2wAAIDcU2wAAIDcU2wAAIDcU2wAAIDcU2wAAIDcU2wAAIDcK3qx+dKXvhSFQiGuu+66Ym8KAAAYoIpabP7rv/4r/umf/ikmTUr7WzIAAAB/rmjFZufOnXHFFVfEN7/5zTj22GP3u157e3u0trb2WAAAAHqjaMWmsbExLr744pg5c+YB12tqaoqamprupb6+vliRAACAfqooxeauu+6KtWvXRlNT0+uuu2DBgmhpaelempubixEJAADoxypSD2xubo5PfOIT8cADD8TgwYNfd/2qqqqoqqpKHQMAABhAkhebNWvWxPbt2+Oss87qfq6zszNWrVoVX//616O9vT3Ky8tTbxYAABjAkhebGTNmxJNPPtnjublz58app54aN954o1IDAAAkl7zYVFdXxxlnnNHjuaFDh8bw4cNf8zwAAEAKRf+BTgAAgGJLfsVmXx566KEjsRkAAGCAcsUGAADIPcUGAADIvSPyUbRD0bWpOboKgw57zra7T0iQZq///cJbk80a/rPnk82aMKgr2ayIiMe3jks2a9bYXyebFRGxtXNosllDqvYkm9URnclmRUQUKtLu02QKpQ5wAH05GwBQVK7YAAAAuafYAAAAuafYAAAAuafYAAAAuafYAAAAuafYAAAAuafYAAAAuafYAAAAuafYAAAAuafYAAAAuafYAAAAuafYAAAAuafYAAAAuafYAAAAuafYAAAAuafYAAAAuafYAAAAuafYAAAAuVdR6gD7s+2DZ0Z51eDDnvN/z/lKgjR7TXygMdmsN/5ubbJZNWVDks2KiPifZ49JNmvciS8kmxURsa
Xj2GSzqqv2JJvVnnUlmxURUVaRbl5XZMlmRVnCWakVSh3gAAp9+H0DgH7AFRsAACD3FBsAACD3FBsAACD3FBsAACD3FBsAACD3ilJsnnvuuXj/+98fw4cPjyFDhsTEiRNj9erVxdgUAABA+ts9//GPf4zzzjsvLrjggvjRj34Uxx13XGzYsCGOPTbdLXoBAAD+XPJic8stt0R9fX18+9vf7n5uwoQJqTcDAADQLflH0X74wx/GOeecE+9973tj5MiR8eY3vzm++c1v7nf99vb2aG1t7bEAAAD0RvJi89vf/jaWLl0aJ598ctx///3xsY99LK699tq444479rl+U1NT1NTUdC/19fWpIwEAAP1c8mLT1dUVZ511Vnzxi1+MN7/5zXH11VfHVVddFbfffvs+11+wYEG0tLR0L83NzakjAQAA/VzyYjNmzJg47bTTejz3pje9KTZv3rzP9auqqmLYsGE9FgAAgN5IXmzOO++8WL9+fY/nfvOb38T48eNTbwoAACAiilBsrr/++njsscfii1/8YmzcuDGWL18e3/jGN6KxsTH1pgAAACKiCMXm3HPPjRUrVsSdd94ZZ5xxRnzuc5+LxYsXxxVXXJF6UwAAABFRhN+xiYh45zvfGe985zuLMRoAAOA1kl+xAQAAONIUGwAAIPcUGwAAIPeK8h2bFK748I9j8NGHH+8/XxqRIM1ewx+pTDar7Kijks3qyDqTzYqIOGpzukOjvvKFZLMiIp56qT7ZrKMr25PN2p1lyWZFRJSXdyWb1RXpZkVZ2v/OlLJC380GABSXKzYAAEDuKTYAAEDuKTYAAEDuKTYAAEDuKTYAAEDuKTYAAEDuKTYAAEDuKTYAAEDuKTYAAEDuKTYAAEDuKTYAAEDuKTYAAEDuKTYAAEDuKTYAAEDuKTYAAEDuKTYAAEDuKTYAAEDuKTYAAEDuVZQ6wP58pOZ3Maz68HvXxNvnJUiz1/E/+32yWS+feXKyWc933p9sVkTEsGe6ks16Q3lLslkREf+x+8xks4YN2p1s1u6skGxWRERFRWeyWZ1ZlmxW6n8O6czSHWuRdhek1YezJT50AaAkXLEBAAByT7EBAAByT7EBAAByT7EBAAByT7EBAAByL3mx6ezsjIULF8aECRNiyJAhceKJJ8bnPve5yFLelQkAAODPJL/d8y233BJLly6NO+64I04//fRYvXp1zJ07N2pqauLaa69NvTkAAID0xeZnP/tZXHLJJXHxxRdHRMTxxx8fd955ZzzxxBOpNwUAABARRfgo2lvf+tZYuXJl/OY3v4mIiJ///OfxyCOPRENDwz7Xb29vj9bW1h4LAABAbyS/YvOpT30qWltb49RTT43y8vLo7OyML3zhC3HFFVfsc/2mpqa4+eabU8cAAAAGkORXbL773e/Gd77znVi+fHmsXbs27rjjjvjHf/zHuOOOO/a5/oIFC6KlpaV7aW5uTh0JAADo55JfsbnhhhviU5/6VFx++eURETFx4sR45plnoqmpKebMmfOa9auqqqKqqip1DAAAYABJfsVm165dUVbWc2x5eXl0dXWl3hQAAEBEFOGKzezZs+MLX/hCjBs3Lk4//fT47//+7/jKV74SH/rQh1JvCgAAICKKUGy+9rWvxcKFC+PjH/94bN++Perq6uIjH/lI3HTTTak3BQAAEBFFKDbV1dWxePHiWLx4cerRAAAA+5T8OzYAAABHmmIDAADkXvKPoqXy3g0XRcXQw78N9PFf+2WCNHt1tu5MNmvLDVOTzXpyz4hksyIijt68O9ms2vKOZLMiIja3HZts1rihf0w2a0fXoGSzIiLKy9PdSbAr0s0qlGXJZiVXKHUAAKBUXLEBAAByT7EBAAByT7EBAAByT7EBAAByT7EBAAByT7EBAAByT7EBAAByT7EBAAByT7EBAAByT7EBAAByT7EBAAByT7EBAAByT7EBAAByT7EBAAByT7EBAAByT7EBAAByT7EBAAByT7EBAAByT7
EBAAByr6LUAfZn921jomLQ4MOeM/ToLQnS7FV4aXeyWYPe+j/JZv3fHW9MNisiovK5PyabVVNWnmxWRMTzO6qTzZpU81yyWW1ZZbJZERGDyjuTzeqMLNmsKEs4ayAplDoAAPRvrtgAAAC5p9gAAAC5p9gAAAC5p9gAAAC5p9gAAAC51+tis2rVqpg9e3bU1dVFoVCIe+65p8frWZbFTTfdFGPGjIkhQ4bEzJkzY8OGDanyAgAAvEavi01bW1tMnjw5lixZss/Xb7311rjtttvi9ttvj8cffzyGDh0as2bNit27090mGQAA4M/1+ndsGhoaoqGhYZ+vZVkWixcvjk9/+tNxySWXRETEv/zLv8SoUaPinnvuicsvv/zw0gIAAOxD0u/YbNq0KbZu3RozZ87sfq6mpiamTp0ajz766D7/Tnt7e7S2tvZYAAAAeiNpsdm6dWtERIwaNarH86NGjep+7dWampqipqame6mvr08ZCQAAGABKfle0BQsWREtLS/fS3Nxc6kgAAEDOJC02o0ePjoiIbdu29Xh+27Zt3a+9WlVVVQwbNqzHAgAA0BtJi82ECRNi9OjRsXLlyu7nWltb4/HHH49p06al3BQAAEC3Xt8VbefOnbFx48bux5s2bYp169ZFbW1tjBs3Lq677rr4/Oc/HyeffHJMmDAhFi5cGHV1dXHppZemzA0AANCt18Vm9erVccEFF3Q/nj9/fkREzJkzJ5YtWxaf/OQno62tLa6++up48cUX421ve1vcd999MXjw4HSpAQAA/kyvi8306dMjy7L9vl4oFOKzn/1sfPaznz2sYAAAAAer5HdFAwAAOFyKDQAAkHu9/ijakVL147VRURh02HN+/b/ekiDNXif/n2OSzbr+lPuTzfrW5rclmxURMeQPLySbdVShMtmsiIiW1qHJZtVUvJRs1o6utN8jq6zoTDar6wAfH+2tQlm6WckV+nA2AKCoXLEBAAByT7EBAAByT7EBAAByT7EBAAByT7EBAAByT7EBAAByT7EBAAByT7EBAAByT7EBAAByT7EBAAByT7EBAAByT7EBAAByT7EBAAByT7EBAAByT7EBAAByT7EBAAByT7EBAAByT7EBAAByr6LUAfanfdZZ0Tlo8GHP+bdLv5ogzV4f3HR9slnvOXpLslmLnh2RbFZExBt3PpNs1qBCebJZERGdLYOSzaop35Vs1o7OIclmRURUlXcmm9URWbJZhbJ0syIiuhJmi0K6Ucn16Wxp9ykAlIIrNgAAQO4pNgAAQO4pNgAAQO4pNgAAQO4pNgAAQO4pNgAAQO71utisWrUqZs+eHXV1dVEoFOKee+7pfq2joyNuvPHGmDhxYgwdOjTq6uriAx/4QGzZku62xgAAAK/W62LT1tYWkydPjiVLlrzmtV27dsXatWtj4cKFsXbt2vj+978f69evj7/6q79KEhYAAGBfev0DnQ0NDdHQ0LDP12pqauKBBx7o8dzXv/71mDJlSmzevDnGjRv3mr/T3t4e7e3t3Y9bW1t7GwkAABjgiv4dm5aWligUCnHMMcfs8/WmpqaoqanpXurr64sdCQAA6GeKWmx2794dN954Y7zvfe+LYcOG7XOdBQsWREtLS/fS3NxczEgAAEA/1OuPoh2sjo6O+Ju/+ZvIsiyWLl263/WqqqqiqqqqWDEAAIABoCjF5pVS88wzz8RPfvKT/V6tAQAASCF5sXml1GzYsCEefPDBGD58eOpNAAAA9NDrYrNz587YuHFj9+NNmzbFunXrora2NsaMGRPvec97Yu3atXHvvfdGZ2dnbN26NSIiamtro7KyMl1yAACA/6/XxWb16tVxwQUXdD+eP39+RETMmTMnPvOZz8QPf/jDiIg488wze/y9Bx98MKZPn37oSQEAAPaj18Vm+vTpkWXZfl8/0GsAAADFUPTfsQEAACg2xQYAAMi9ov2OzeE66totUTH08H/fZkR5R4I0e+1+245ks8oLhWSzBm9OfGOGPv
yRwkEt5clmHVO+K9msFzuPSjYrIqKq/OVkszoT7s+yQt89NiLdKQUA5IwrNgAAQO4pNgAAQO4pNgAAQO4pNgAAQO4pNgAAQO4pNgAAQO4pNgAAQO4pNgAAQO4pNgAAQO4pNgAAQO4pNgAAQO4pNgAAQO4pNgAAQO4pNgAAQO4pNgAAQO4pNgAAQO4pNgAAQO4pNgAAQO5VlDrA/tx90v0xrPrwe9dJP/5EgjR7zZ/yn8lm3ds2PNmsozdnyWZFRJQNHZpsVkfWmWxWRERlSyHZrOryl5LNembPiGSzIiIGlad73zqSTYoolKU91rqiK92wQtpsKWV9OBsA9Aeu2AAAALmn2AAAALmn2AAAALmn2AAAALmn2AAAALnX62KzatWqmD17dtTV1UWhUIh77rlnv+t+9KMfjUKhEIsXLz6MiAAAAAfW62LT1tYWkydPjiVLlhxwvRUrVsRjjz0WdXV1hxwOAADgYPT6d2waGhqioaHhgOs899xzcc0118T9998fF1988SGHAwAAOBjJf6Czq6srrrzyyrjhhhvi9NNPf93129vbo729vftxa2tr6kgAAEA/l/zmAbfccktUVFTEtddee1DrNzU1RU1NTfdSX1+fOhIAANDPJS02a9asia9+9auxbNmyKBQKB/V3FixYEC0tLd1Lc3NzykgAAMAAkLTY/PSnP43t27fHuHHjoqKiIioqKuKZZ56Jv/u7v4vjjz9+n3+nqqoqhg0b1mMBAADojaTfsbnyyitj5syZPZ6bNWtWXHnllTF37tyUmwIAAOjW62Kzc+fO2LhxY/fjTZs2xbp166K2tjbGjRsXw4cP77H+oEGDYvTo0XHKKaccfloAAIB96HWxWb16dVxwwQXdj+fPnx8REXPmzIlly5YlCwYAAHCwel1spk+fHlmWHfT6v/vd73q7CQAAgF5JfrtnAACAI02xAQAAck+xAQAAci/p7Z5TWvriCTH45cOP96ZbWxOk2ev9P/5VslkNT34g2axhz+xJNisiomzkiGSzdmVps1W2HPx3vF5PddnuZLP+5+WhyWZFRAwu70g2qyPdWxZlZQmHpXZwvwsMAPRDrtgAAAC5p9gAAAC5p9gAAAC5p9gAAAC5p9gAAAC5p9gAAAC5p9gAAAC5p9gAAAC5p9gAAAC5p9gAAAC5p9gAAAC5p9gAAAC5p9gAAAC5p9gAAAC5p9gAAAC5p9gAAAC5p9gAAAC5V1HqAK+WZVlEROze+XKSeS93tieZ84odO7qSzepsS5ft5Zd3J5sVEfFyV7psrQnfs4iIzj3p/lvbEmZr39mRbFZEREfbnmSzdqY8bnelPdZSHh9dLw2MbKnPqb6arWt3H96fAyRb8mOtj2YbKPszou9mc6wdmoGQrXXnn+a80hEOpJAdzFpH0LPPPhv19fWljgEAAPQRzc3NMXbs2AOu0+eKTVdXV2zZsiWqq6ujUCjsd73W1taor6+P5ubmGDZs2BFMyCvsg9KzD0rPPig9+6C0vP+lZx+Unn1QPFmWxY4dO6Kuri7Kyg78LZo+91G0srKy121jf27YsGEOoBKzD0rPPig9+6D07IPS8v6Xnn1QevZBcdTU1BzUem4eAAAA5J5iAwAA5F5ui01VVVUsWrQoqqqqSh1lwLIPSs8+KD37oPTsg9Ly/peefVB69kHf0OduHgAAANBbub1iAwAA8ArFBgAAyD3FBgAAyD3FBgAAyD3FBgAAyL3cFpslS5bE8ccfH4MHD46pU6fGE088UepIA8ZnPvOZKBQKPZZTTz211LH6tVWrVsXs2bOjrq4uCoVC3HPPPT1ez7IsbrrpphgzZkwMGTIkZs6cGRs2bChN2H7o9d7/D37wg685Jy666KLShO2nmpqa4txzz43q6uoYOXJkXHrppbF+/foe6+zevTsaGxtj+PDhcfTRR8e73/3u2LZtW4kS9z8Hsw+mT5/+mnPhox/9aIkS9z9Lly6NSZMmdf+6/bRp0+JHP/pR9+vOge
J6vfff8V96uSw2d999d8yfPz8WLVoUa9eujcmTJ8esWbNi+/btpY42YJx++unx/PPPdy+PPPJIqSP1a21tbTF58uRYsmTJPl+/9dZb47bbbovbb789Hn/88Rg6dGjMmjUrdu/efYST9k+v9/5HRFx00UU9zok777zzCCbs/x5++OFobGyMxx57LB544IHo6OiICy+8MNra2rrXuf766+Pf//3f43vf+148/PDDsWXLlnjXu95VwtT9y8Hsg4iIq666qse5cOutt5Yocf8zduzY+NKXvhRr1qyJ1atXx1/8xV/EJZdcEr/85S8jwjlQbK/3/kc4/ksuy6EpU6ZkjY2N3Y87Ozuzurq6rKmpqYSpBo5FixZlkydPLnWMASsishUrVnQ/7urqykaPHp19+ctf7n7uxRdfzKqqqrI777yzBAn7t1e//1mWZXPmzMkuueSSkuQZqLZv355FRPbwww9nWfanY37QoEHZ9773ve51fvWrX2URkT366KOlitmvvXofZFmWveMd78g+8YlPlC7UAHTsscdm//zP/+wcKJFX3v8sc/z3Bbm7YrNnz55Ys2ZNzJw5s/u5srKymDlzZjz66KMlTDawbNiwIerq6uKEE06IK664IjZv3lzqSAPWpk2bYuvWrT3OiZqampg6dapz4gh66KGHYuTIkXHKKafExz72sXjhhRdKHalfa2lpiYiI2traiIhYs2ZNdHR09DgPTj311Bg3bpzzoEhevQ9e8Z3vfCdGjBgRZ5xxRixYsCB27dpVinj9XmdnZ9x1113R1tYW06ZNcw4cYa9+/1/h+C+tilIH6K0//OEP0dnZGaNGjerx/KhRo+LXv/51iVINLFOnTo1ly5bFKaecEs8//3zcfPPN8fa3vz2eeuqpqK6uLnW8AWfr1q0REfs8J155jeK66KKL4l3veldMmDAhnn766fiHf/iHaGhoiEcffTTKy8tLHa/f6erqiuuuuy7OO++8OOOMMyLiT+dBZWVlHHPMMT3WdR4Ux772QUTE3/7t38b48eOjrq4ufvGLX8SNN94Y69evj+9///slTNu/PPnkkzFt2rTYvXt3HH300bFixYo47bTTYt26dc6BI2B/73+E478vyF2xofQaGhq6/zxp0qSYOnVqjB8/Pr773e/Ghz/84RImg9K4/PLLu/88ceLEmDRpUpx44onx0EMPxYwZM0qYrH9qbGyMp556ynf7Smh/++Dqq6/u/vPEiRNjzJgxMWPGjHj66afjxBNPPNIx+6VTTjkl1q1bFy0tLfGv//qvMWfOnHj44YdLHWvA2N/7f9pppzn++4DcfRRtxIgRUV5e/pq7fGzbti1Gjx5dolQD2zHHHBNvfOMbY+PGjaWOMiC9ctw7J/qOE044IUaMGOGcKIJ58+bFvffeGw8++GCMHTu2+/nRo0fHnj174sUXX+yxvvMgvf3tg32ZOnVqRIRzIaHKyso46aST4uyzz46mpqaYPHlyfPWrX3UOHCH7e//3xfF/5OWu2FRWVsbZZ58dK1eu7H6uq6srVq5c2eMzjhw5O3fujKeffjrGjBlT6igD0oQJE2L06NE9zonW1tZ4/PHHnRMl8uyzz8YLL7zgnEgoy7KYN29erFixIn7yk5/EhAkTerx+9tlnx6BBg3qcB+vXr4/Nmzc7DxJ5vX2wL+vWrYuIcC4UUVdXV7S3tzsHSuSV939fHP9HXi4/ijZ//vyYM2dOnHPOOTFlypRYvHhxtLW1xdy5c0sdbUD4+7//+5g9e3aMHz8+tmzZEosWLYry8vJ43/veV+po/dbOnTt7/IvPpk2bYt26dVFbWxvjxo2L6667Lj7/+c/HySefHBMmTIiFCxdGXV1dXHrppaUL3Y8c6P2vra2Nm2++Od797nfH6NGj4+mnn45PfvKTcdJJJ8WsWbNKmLp/aWxsjOXLl8cPfvCDqK6u7v7OQE1NTQwZMiRqamriwx/+cMyfPz9qa2tj2LBhcc0118S0adPiLW95S4
nT9w+vtw+efvrpWL58efzlX/5lDB8+PH7xi1/E9ddfH+eff35MmjSpxOn7hwULFkRDQ0OMGzcuduzYEcuXL4+HHnoo7r//fufAEXCg99/x30eU+rZsh+prX/taNm7cuKyysjKbMmVK9thjj5U60oBx2WWXZWPGjMkqKyuzN7zhDdlll12Wbdy4sdSx+rUHH3wwi4jXLHPmzMmy7E+3fF64cGE2atSorKqqKpsxY0a2fv360obuRw70/u/atSu78MILs+OOOy4bNGhQNn78+Oyqq67Ktm7dWurY/cq+3v+IyL797W93r/PSSy9lH//4x7Njjz02O+qoo7K//uu/zp5//vnShe5nXm8fbN68OTv//POz2trarKqqKjvppJOyG264IWtpaSlt8H7kQx/6UDZ+/PissrIyO+6447IZM2ZkP/7xj7tfdw4U14Hef8d/31DIsiw7kkUKAAAgtdx9xwYAAODVFBsAACD3FBsAACD3FBsAACD3FBsAACD3FBsAACD3FBsAACD3FBsAACD3FBsAACD3FBsAACD3FBsAACD3/h999ZUebOHklgAAAABJRU5ErkJggg==", + "text/plain": [ + "
" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "#plot for a fixed time position, and show the vector depending on space position\n", + "t_pos = 0\n", + "plt.figure(figsize=(10, 5))\n", + "plt.imshow(l_pos[0, :, :, t_pos].T) \n", + "plt.show()" ] }, { @@ -487,6 +535,13 @@ "display_name": "python3", "language": "python", "name": "python3" + }, + "widgets": { + "application/vnd.jupyter.widget-state+json": { + "state": {}, + "version_major": 2, + "version_minor": 0 + } } }, "nbformat": 4, diff --git a/src/models/position_encoding.ipynb b/src/models/position_encoding.ipynb new file mode 100644 index 0000000..9646a58 --- /dev/null +++ b/src/models/position_encoding.ipynb @@ -0,0 +1,389 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "id": "2dfc3ea0-afd2-46e8-a65c-bc0b08dde884", + "metadata": {}, + "source": [ + "# Position encodings\n", + "\n", + "> Implementation of special position encodings." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "61b2a7e9-d562-4c87-97b6-f7e2a6829f59", + "metadata": {}, + "outputs": [], + "source": [ + "#| default_exp models.position_encoding" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "9c2e0ff4-ebe6-4718-8b7b-071532ea4a7f", + "metadata": {}, + "outputs": [], + "source": [ + "#| export\n", + "from genQC.imports import *" + ] + }, + { + "cell_type": "markdown", + "id": "3826cecb-e003-41a4-ad86-f75ff2e80269", + "metadata": {}, + "source": [ + "## p-RoPE" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "5388ae18-acfc-4dd1-88bf-238477598cc3", + "metadata": {}, + "outputs": [], + "source": [ + "#| export\n", + "class RotaryPositionalEmbedding(nn.Module):\n", + " \"\"\"\n", + " This class implements the Rotary Positional Embeddings (RoPE),\n", + " proposed in https://arxiv.org/abs/2104.09864.\n", + "\n", + " Code adjusted from https://github.com/pytorch/torchtune/blob/main/torchtune/modules/position_embeddings.py\n", + " > Copyright 
(c) Meta Platforms, Inc. and affiliates.\n", + " > All rights reserved.\n", + "\n", + " Additionally adds p-RoPE from https://openreview.net/pdf?id=GtvuNrk58a\n", + " Note: p=0 coincides with NoPE, while the case p=1 with RoPE\n", + " \"\"\"\n", + "\n", + " def __init__(self, head_dim: int, p: float = 1.0, max_seq_len: int = 4096, base: float = 10_000) -> None:\n", + " super().__init__()\n", + " self.head_dim = head_dim\n", + " self.p = p \n", + " self.base = base # max_wavelength; the lowest frequencies rotate at roughly 1/base radians per token; i.e. we can resolve 2pi*base tokens\n", + " self.max_seq_len = max_seq_len\n", + "\n", + " self.rope_angles = int(self.p * (self.head_dim//2)) #division factor two is for cos-sin split\n", + " self.nope_angles = self.head_dim//2 - self.rope_angles\n", + "\n", + " timescale = self.base ** (2.0 * torch.arange(0, self.head_dim//2, dtype=torch.float32) / self.head_dim)\n", + " timescale[self.rope_angles:] = torch.full((self.nope_angles,), fill_value=torch.inf, dtype=torch.float32)\n", + "\n", + " theta = 1.0 / timescale\n", + " \n", + " self.register_buffer(\"theta\", theta, persistent=False)\n", + " self.rebuild_rope_cache(self.max_seq_len)\n", + " \n", + " def rebuild_rope_cache(self, max_seq_len: int = 4096) -> None:\n", + " \n", + " # Create position indexes [0, 1, ..., max_seq_len - 1]\n", + " seq_idx = torch.arange(max_seq_len, dtype=self.theta.dtype, device=self.theta.device)\n", + "\n", + " # Outer product of theta and position index: output shape [max_seq_len, head_dim//2]\n", + " idx_theta = torch.einsum(\"i, j -> ij\", seq_idx, self.theta)\n", + "\n", + " # cache includes both the cos and sin: output shape [max_seq_len, head_dim//2, 2]\n", + " rope_cache = torch.stack([torch.cos(idx_theta), torch.sin(idx_theta)], dim=-1)\n", + " self.register_buffer(\"rope_cache\", rope_cache, persistent=False)\n", + "\n", + " def forward(self, x: torch.Tensor, pos_idx: Optional[torch.Tensor] = None) -> torch.Tensor:\n", + " 
\"\"\"\n", + " Shape:\n", + " x ... [b, seq, n_heads, head_dim]\n", + " pos_idx ... [b, seq] or [seq]\n", + " \"\"\"\n", + " \n", + " seq_len = x.size(1)\n", + " xshaped = x.type_as(self.rope_cache).reshape(*x.shape[:-1], -1, 2) #split head_dim [b, seq, n_h, head_dim/2, 2]\n", + " \n", + " # extract the values based on whether pos_idx is set or not, shape [seq, head_dim/2, 2]\n", + " if exists(pos_idx):\n", + " rope_cache = self.rope_cache[pos_idx]\n", + " \n", + " # reshape the cache to [b, seq, 1, head_dim/2, 2]\n", + " rope_cache = rope_cache.view(-1, seq_len, 1, xshaped.size(3), 2)\n", + "\n", + " else:\n", + " rope_cache = self.rope_cache[:seq_len]\n", + " \n", + " # reshape the cache to [1, seq, 1, head_dim/2, 2]\n", + " rope_cache = rope_cache.view(1, seq_len, 1, xshaped.size(3), 2) \n", + "\n", + " # out has shape [b, seq, n_h, head_dim/2, 2]\n", + " x_out = torch.stack(\n", + " [\n", + " xshaped[..., 0] * rope_cache[..., 0] - xshaped[..., 1] * rope_cache[..., 1], # x cos - y sin\n", + " xshaped[..., 1] * rope_cache[..., 0] + xshaped[..., 0] * rope_cache[..., 1], # x sin + y cos\n", + " ], dim=-1)\n", + "\n", + " # flatten to shape [b, seq, n_h, head_dim]\n", + " x_out = x_out.flatten(3)\n", + " return x_out.type_as(x)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "733bc135-cd2d-474b-9e16-f971ffaac54b", + "metadata": {}, + "outputs": [ + { + "data": { + "image/png": 
"iVBORw0KGgoAAAANSUhEUgAABM8AAACXCAYAAAAChXxHAAAAOnRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjEwLjMsIGh0dHBzOi8vbWF0cGxvdGxpYi5vcmcvZiW1igAAAAlwSFlzAAAPYQAAD2EBqD+naQAAuWlJREFUeJzs/Xmwffld1/8+P8Oa93jmc75Df/vb6XR3hu6MBKLkF64YRb2IlgVqYSGWV6sYihKHAusiUKJYeqEUCBTcEr3ovVcsCi84AT+BK9efgcwhnZ6H73jms8+e1vwZ7h/7dCdN0iYEmnTCeqRO7+zzPWfV2muv92e/zufzWZ8lvPeeTqfT6XQ6nU6n0+l0Op1Op/Np5Bd6BzqdTqfT6XQ6nU6n0+l0Op1Xq67zrNPpdDqdTqfT6XQ6nU6n03kZXedZp9PpdDqdTqfT6XQ6nU6n8zK6zrNOp9PpdDqdTqfT6XQ6nU7nZXSdZ51Op9PpdDqdTqfT6XQ6nc7L6DrPOp1Op9PpdDqdTqfT6XQ6nZfRdZ51Op1Op9PpdDqdTqfT6XQ6L6PrPOt0Op1Op9PpdDqdTqfT6XReRtd51ul0Op1Op9PpdDqdTqfT6byMrvOs0+l0Op1Op9PpdDqdTqfTeRld51mn0+n8L/zsz/4s3/iN38j999+PEIJ3v/vdX+hd6nQ6nU6n0+l8jn7xF3+Rt7zlLcRxzNWrV/ne7/1ejDGf9fdu3LiBEOIzfv27f/fv/gD2vNPpvJroL/QOdDqdzqvZT/zET/ChD32It7/97ZydnX2hd6fT6XQ6nU6n8zn6r//1v/J1X/d1vPvd7+ZHf/RH+fjHP84P/MAPcHx8zE/8xE98Ttv4S3/pL/Gn/tSfesn3vuIrvuKV2N1Op/Mq1nWedTqdzv/Cv/k3/4ZLly4hpeQNb3jDF3p3Op1Op9PpdDqfo7/zd/4ODz/8ML/yK7+C1qs/fQeDAf/4H/9jvuM7voMHH3zws27jLW95C9/4jd/4Su9qp9N5lesu2+x0Ol/Uvu/7vg8hBE888QRf//Vfz2AwYH19ne/4ju+gqqrf8/avXLmClF1T2el0Op1Op/P77ZXMcY899hiPPfYYf+Nv/I0XO84AvuVbvgXvPT/3cz/3OW8rz3Oapvk97U+n0/ni1s0863Q6XxK+/uu/nmvXrvGDP/iD/OZv/iY/8iM/wvn5OT/zMz8DwGw2o23bz7qdOI7p9Xqv9O52Op1Op9PpdC68EjnuIx/5CABve9vbXvIze3t7XL58+cV//2y+//u/n7/7d/8uQgje+ta38o/+0T/iPe95z+/m5XU6nS8BXedZp9P5knDvvffyC7/wCwB867d+K4PBgB//8R9/cbr+n/2zf5b//t//+2fdzjd90zfxr//1v36F97bT6XQ6nU6n84JXIscdHBwAsLu7+2k/t7u7y/7+/v9yW1JK3vOe9/Dn/tyf49KlSzz33HP88A//MF/zNV/DL/7iL/Kn//Sf/l2+yk6n88Ws6zzrdDpfEr71W7/1Jc+//du/nR//8R/nv/yX/8LDDz/MD/3QD3F+fv5Zt7O3t/dK7WKn0+l0Op1O5zN4JXJcWZYARFH0aT8XxzHz+fx/ua2rV6/yy7/8yy/53l/5K3+F173udfztv/23u86zTucPma7zrNPpfEm4//77X/L8vvvuQ0rJjRs3AHjrW9/6BdirTqfT6XQ6nc5n80rkuCRJAKjr+tP+raqqF//9d2NtbY1v/uZv5p/8k3/CnTt3uHz58u96G51O54tT13nW6XS+JAkhXvJ8Mpl8Tgu9JknCcDh8pXar0+l0Op1Op/NZ/H7kuBcu1zw4OODKlSsv+bmDgwO+7Mu+7PPatxe2NZlMus6zTucPke4Wcp1O50vC008//ZLnzzzzDM45rl27BsCf//N/nt3d3c/69R3f8R1fgL3vdDqdTqfT+cPrlchxb3rTmwD44Ac/+JJt7+/vc+fOnRf//XfrueeeA2Bzc/Pz+v1
Op/PFqZt51ul0viS8973vfcmdj370R38UgK/5mq8B6NY863Q6nU6n03mVeiVy3Otf/3oefPBBfuqnfoq/+Tf/JkopAH7iJ34CIQR/4S/8hRd/djabcXBwwO7u7osz105OTj6tg+zu3bv89E//NA8//PBnvBFBp9P50tV1nnU6nS8Jzz//PF/7tV/Ln/yTf5L3ve99/Nt/+2/5y3/5L/PII48An/+aZ7/xG7/Bb/zGbwCrEJXnOT/wAz8AwLve9S7e9a53/f68gE6n0+l0Op0/pF6pHPfP/tk/42u/9mt5z3vew1/8i3+RRx99lB/7sR/jr//1v85DDz304s/9h//wH/jmb/5m/tW/+lf81b/6VwH4e3/v7/Hss8/yx/7YH2Nvb48bN27wkz/5k+R5zr/4F//i9/yaO53OF5fuss1Op/Ml4Wd/9meJoojv+q7v4j//5//Mt33bt/Ev/+W//D1v99d+7df4nu/5Hr7ne76H4+Njbty48eLzX/u1X/t92PNOp9PpdDqdP9xeqRz3Z/7Mn+Hnf/7nmUwmfPu3fzs///M/z9//+3+f9773vZ/1d9/znvcghOC9730v3/It38JP/dRP8a53vYv3ve99vPvd7/4971un0/niIrz3/gu9E51Op/P5+r7v+z6+//u/n5OTEzY2Nr7Qu9PpdDqdTqfT+Rx1Oa7T6Xyx6GaedTqdTqfT6XQ6nU6n0+l0Oi+j6zzrdDqdTqfT6XQ6nU6n0+l0XkbXedbpdDqdTqfT6XQ6nU6n0+m8jG7Ns06n0+l0Op1Op9PpdDqdTudlfFHMPHvve9/LtWvXiOOYd7zjHbz//e//Qu9Sp9PpdDqdTuf3QZfzOp1Op9PpvNq96jvPfvZnf5bv/M7v5Hu/93v58Ic/zCOPPMKf+BN/guPj4y/0rnU6nU6n0+l0fg+6nNfpdDqdTueLwav+ss13vOMdvP3tb+fHfuzHAHDOceXKFb7927+d7/qu7/qsv++cY39/n36/jxDild7dTqfT6XQ6XwK89ywWC/b29pDyVT/W+EWry3mdTqfT6XT+oH0+OU+/wvv0e9I0DR/60If47u/+7he/J6Xkq7/6q3nf+973GX+nrmvqun7x+d27d3nd6173iu9rp9PpdDqdLz23b9/m8uXLX+jd+JLU5bxOp9PpdDpfSL+bnPeq7jw7PT3FWsv29vZLvr+9vc0TTzzxGX/nB3/wB/n+7//+T/v+v/j/vpUHA0FbBfzMM/dzczIg+M0McSix5yf4Jmf8SJ/sckj/zVOS1yx5JJzzkF7w6NmQj50OOXp6jVsf3UKe5IQ3z4jXNMPrMe4yNH/UcXkw40/vPIGylqOziONpn194/yPMT2PWPrYkKAx6ZJA9UH8kJrgkeOcDj3J17Zi0kAS15OeeeYD/4+4VBo9J+k9LTDPHtHPS+xMGj6SsXzvn0sMH7MiWh3zJjXmf/3jzEsujhPn/XMfPWrh7SBDB+iM9gk2Q7yrorVX88cEd1mXFJ477nCwjPvD++9i/MyJ9Zkp0XJFsK6KxxLxVYR9SvH3nWd6x/QymUJhlwAduX+H/89gbiO861n/bgK/xQY7e1qR/pMdoK+dNb3qcYVRzpfIsyoh//dSDHJ5lRL+ZIU/ATI/xtmLtrX3SvYDR285Ir+a8OZhzXRd8+HjEJ86GHDy+zsGjG6ijOcHdCclWyPDeGHfN0b7Tc6034U9uPYlt4PAs4e7ZkP/8/jdSn2pGH18S1IZgzSIGAvWumHjH8ZUPfIy9wYQkl1Br/l+Pv54PHe4y/m1J76bAtFNMu6D3hoze61K27jvh8hsO2PWG17qax8+H/KfblyjvJOS/NcbPGzg4JOwJ1h/uo7c9+iuXDEcVf7x/h4yW3z4ccrqI+cD7rnN22CN56pzwvCa9FBCOBObLNO5+wR/ZfYo3b9ygWWraPOD/9/y9/NITD9K7ZRl/ogHZQFSgLgUk78zY2Jrz5ocfZ6haLlVwmqf8yycf5Pw0JXlfgjj
3tNMjhGhYf3ufZFczfscJvd2CtwZzLsmK9x2s8+S0z8HHNjl5cow+PEcdTMn2Iob3xtjXOMzbLQ/0T/nqjWcpS8nBJOHW0Zhf/sDrsaeC4ccXaOcI1y1iJBDvTsg2Gr7qwY+ymc2IFoq2Cvg3n3iYx443WfuIILsLxsywrqD3poz0tTGXHjjg0gNHXHaGe13Lh07X+KU7l2huJpQfGMK8xB8dEY0164/0ULuG8I8sWe+V/PH+Pso6Pnw45HSW8qH3XWd+FJM8OSHIG7LLq2PdvlPjr8FX7T3O68d3qOYBTRHwa0/fz68/fR+D5w2jJxoIaogq1L0RyTtSdrfPePMbHqeHY6eE2/M+/+rJB8lPErL3xYiFpZkdI0PL+pf1iXcEm+84pr9V8DY1Z52W/76/xbPTHgcf2mT6fB+1P0Edz+nfE9O/EmFfZ3FvsrxhcMRXjW8yzQPuTmKe39/k1z70IOLYMvzEHC094YZDbEh4V8xgo+KPPfBhxnFOOFcUZcS/+u038/zpmI0PQ3LkMWaKo6L3toz0esw9r7/DzvVjrlrDJWv4n0db/Lf9PewzCc1H+vjFEndyQroVsvZwhtozxO+csZ2W/J+yQ9pW8KGDIafnGR9533WKE03y5Bm6bulfDdFjSftHJeKy509c+gSv6R+Rz0PKIuCXH3uA33r+HoZPtwyeaxBhg4hr9GsiwrclXNk74k0PPUXfOjZLeOp8zP/jyQcxxxHZb0WQG9rZMTpzrH35gHjLs/MVh/THBW+XS3re8r/f2eHmNOPoA1ssbqWoO6fI6ZLh9YRsN8K9yeBeb3hrf5+vGN3lbBFw9zzlqZvb/I+PvobgyNB/YoEOPNGGhx0JXxmxvr7kj7/2w/R0RTDTTIuEn/7YW9g/HbL5QU905jB+ilcN/Xf0iK8G3PfGW2xdOeWaMWxby3/b3+M3DnZwT6S4RzPcYoY7OyPbixi/MSO4WpO8Y8ZeVPDu7Jh5qfng0Yiz0x4f+z+u0ZwJ4idPCZxlcD1CjQXNuyTBjuVPXfo4V9MJ82lEXmj+y8dfz8du7TF6oqF3q0EmDSJq0K+PCB5OuO/KXR6+/2n6LayVgo+ebvD/fOpBOAjpvz/AVw3t4phgJFh7x4Bkx7D35fsM+xVvlUsCA//l9h53pxnH79uk3I+Qd06Qi5zRazPSnRD/1hb/QMuXD+7y1v4Rh9OIu9OEJ57d5f2/fS/RQU329IIghWhTwCWJ+8qQ3fGUr37NR0hEi5ppjhY9fvqjb+X8NGPzg45wbjHMIDT0v6JHfEnxwJueY3P7nGvGsmYc/+n2VX7zeBM+nuGfiHHzKW46oXctYfxQSni9JHnbnGthzh9JTjnNI95/OOLkqMcn3ncP7syhntrn/Tf/Lf1+/xVIOB3ocl6X87qc1+W8Lud1Oa/LeV3O++LJea/qzrPPx3d/93fznd/5nS8+n8/nXLlyhWwguHetxFpDQoyY9JFPJqhzQXjuYWJYrg2ZinVG9yf03JxeWvHQqGQ2TLi55yiVpDdLiGxB9myJLzPMdMBipNl3IZHUXOm1RLJBJw7fj1C3e0BC0BaEC6CMcLFif7qFHQe8M3yG8dBxdX3JUBreF95PbyOhv7T0jx3tCbQTT3He4/T8Cs1mwtiXxHrG65ITRKhJ0DQ6Qo76UNcEc0BLzidbEGk0U1y05IHNp7kez2jWIa4sn5hFyHBIcLsgqCrcaUKdJ5xcjjnfjXjT6C6XZIkfSOxQ8oz02HoN7xrUk3N0ZQimjjZWnNbrSBfS70m2MseD0ZJZa4hkjD8dIJ+KUUsIngMWltnWmHM3oH6tZtAuGA9yXj8oOEj7PLcriduIaJGRVkv6N0ooQvx0wLKQHCPZDBvuGdU472EgqPp95N0hoAir5eoY5CG2F3BnsoXqwbv0kwx7nr21nFg4tqQn24oZTiyDqcUcW+zUMJv1OFpcwjeKNVHSi2a8Lp6QhymRDDAuRvV7kOeEE48vQ07OdiEWKDGFaM5
DW0+yFeSUa4q0gKcmIXmaEd+co1uHnaQUVcLJJGGxCHjnxk2uqJJ2KGgHknELrlyDvESrOaJxiKWiyWIO601CLxj1Jbux4ZFozn5tSHTE9DhDPJUga0f6vIBaMNnfxPsUMxe063O2+jPekBY8F65xaztAzzNEMSRZzOnXJbJIcNMReeWZKs9rkyWvXc9ZOoVbF8z7wOEY8ETVElUAy5BmFnB4uk0atQTyMdYSz/qwwHnFwGvCrYzBYc1waXDHLX5eM11scVZsEzvDWlgxCKa8LppxGA4JdYSrY1SaImeG4Mji6piT7T2IPMLPUOGU124+S6YbztcD4oXk2dMQlyZkNxx61tJOhyzblKN5Qlkqvto/yz1BTTmuaYaCQQ6+HCPOcrR3+MLjZ45i2OOo2iLGMurDbljzcLRkUHh6UUi7HyOfThG2Jbkp8F5xeriNIELkDl/P2Vub8JpoyWNqg4ONAH3Sw5djopMFcdWilwPa6Yii9uTaEWcTHt6acmQjzDac9cCdraF8TVQvkY3HLwPqZcjB6zZYC3JSodiMLMO9msJaojaG0yHpnZx+3iCPDZQts7zHUbXGulxik5z1cMaDQc6zsSdIE8hT5JMZ6rxG7bf4NuN4/RIiMmjbI9MTXrN5EyccR+sRahowPI3QacDguRa1rGlnQyqfcKfMaFvB1+onuSetWQQt5UjSmyp8PULdXRA1Bldq/BnMxkPOiy1S3zLowV5Y84ZwgVpohnFIdTsifi4ED/o22DLg8HgPrRRhaVD9OVeHZ+zoko9c2uJ4rFdtbzEguDNDLxxmlrFMR9RNS6Nb+oMj3rZ9zu31GHXJcZJuYefraFcQfmyJLCW+CCnbkIPTEZUOGXjPdtDQ262ZtCCLHmY8InhuSVw2BKcGYVoWRZ9lPeBqMCHI5uxGFQ8EOR+NQQ4zmKfwfA99VqDvNljf52i0i04aoiZjIz3lNWu3mXvDjXWLOfX0jmKaFMbPNKja4GYDjIi53WTgLWn8GFf7NbPYsmw1ySTEmxHquSlR2eBzBTbifHON8+UmY1uQZpKdoOKhcEG+kTBIIvwgYPC8wJ9Bs+9ofcTd6SXCxBO3hog592Zn9ETL+5znbKBQz/SQRUr43AQ99ZSTAWU4om0rrGpYG+zzjq1znl1PiJqWk2gHWY6QdkHw+BK50NgmJpcRh2cDhJasecM4rEi3PeFAo+YDbG+AfDJHVQ3xxCOEJy9GLJuYB8NDev051+MlV3XF+xJgo4eYZIj9hOA0R+1brI45yHaJ+gVJFXM1OuGh0R3u9uHxEeRjTXjUg7hh8FQD0F0K+CrT5bwu53U5r8t5Xc7rcl6X87qc94XIea/qNc+apiFNU37u536Or/u6r3vx+9/0Td/EdDrlF37hFz7rNubzOcPhkP/bb/1Rni3vY5r3uP3YJuVZSP8DFcFRi799AIslfmsNBim8LoGrEf17SrJLNcnakmR9yZV2zmvrCWlpGc4M5zLkOZ1xU4z5VflawgruPS4JFpbguYZm5rn9pKfNITq3SBT20jp2EHP6toR2V/KmNzzDpe0z3j14htfGJyzqDcp2yPTUMj93fGyyw0fOdliWI2b5BqOqZHc5p5/P2Do9IDeKW3VGK1JauYcJJct1CwNH8kbLcK3k3Q88yqX+gq/KpqzplmdbODWK/3j7TTx5vsvJxzOWd0P6h5J4IrBmiTMFO8EpO/oUMejBeMjdbI0nxnvIsCXMllyOJ/zR0ROsZZbrO4I4aeiNTqiE4tFqj8NywH986vVMznsMf0sRHjvix/eR0wITgFcQX4/QmwHjewTZtuDutuJoQ3FNTblXTbjeTHioPIZQ46OQO3HGB3vrFE3E+ayHn2vE8wHtmeT8Ixo3NQTPHiNah+z3cVnI/K3b2K2A0ZtOSTdy3nLpNrv9OWPXkHhDfyKJl5Jnc8WdQvFRe52P2uv0KsMoN2yc51zdnzFdWG4dttg6xC36OK2xg4R2pJk9khFutNz7trvsDWb
81Y0PsR3kOHo0NuADR2MO8pjffOYKd88GtIcD7CxZjVLXlsvVDbbrfbySeKk4jPe4k9yLiTxtCsmwYLw34dJ4xpffe5utrOLhjSVSGnJfc7ce8H8//HKOpkNm71/HnUiGjy7QswbyElxLNC7RqeHSXsRgFPDc7oDDUYrNPC52vFk8w1vEM2wlmktZwKJnmYwb5j7hyI04nfV48tY21UnM8qMj5ElD9pF9ZNnimgYfKZqHdpBrAeM3FYRrBnW9QvVbdtIpA13xxlnOTtVQ1yVt2/KR6CpPhLucNBmnTcbOieHSYcv82HFy02ImFnOrRXiJEgFmLaF4wzbtFpTvqNgaL/hL936UnWjJ1WBO4DznZwPyKuTR/YSTZcQHJ6/hoBwj8xDZKB48v8VefoZZLLF5wbHb4dhtQxBDlFCtO/I9x/pezr2vP+eB4TF/ZvcTDLRhUwqOTcJ/mu1xdz7gfzz6IPkkQj3qkQtPOhXo1hJxl0AUXM4MWQR3NveY9oac9FLmUcilbJ/N5JSHxye8YXRKMlak6xqlCnQw4/F8m185f4DJ4ZCbH7tMcGhY/+Acuajh+AyvoN6JCfqCKw9BOBIU1xVtT3C2EeBDx3vaJ7hmJ2xbS+Y8RwPNNNE8pTe5pUZwEuNPEpZ3Q2Y3Y7hVwuNLRFHBdAnjAf76JcyOYvl2RbZe8doHDhmnS944fp4ehrV5iqqhOJmR1/A/mgc4cGOeqHaZm5SHT0/YyXPE/gzOS+6eJZzMYiKfELqEajOk3ImorwnKN0reuH2b//NrPsqVoOTt0YyFDXi8GHCyzPjwrT1OZykffWqTOg9Jz3uEjWdcHJCYgt3qmNA3HCbbLIMe5+kmZZhRDXJMWvGGq2fcuz3jvu1z7t08Zy/KuRoteKbt8+F6zG8fXea/Pv0weh8GH7IEM0N2p8C6llLnxJHh+m5O2JPYe8aUWcSHNzepIskj0R225Jwv18+yo+aE631kFjHpNSwiyzOzPe7mG+zfXOPo9oj4aUvymIHjCdw+hCxeHe+dCP9wn3jdMn4gR/dbgssLBrLkkeaAQduwPTVgBbdVwERE/FLwEEd+wM5pQ1ZY4hstauLYfxbOjyE8NwRLi18b4Ed95q9NmD2UsnPPKdcevMsbskP+2OgpnIupmjXqHKZHhqM85b8fXmFa97k7v4JsJbuLBVlVceX4NrqquF1EFFZTi0sYmVEMHW3qkQ945K7nrfc9z2v3DvjydMKb0xkT5zm1ng9O7uG/HT7E2d2UO48OCaeC/h0JTYuvF6Qi52pwizAQ6PURRZzxsY17KcIA0auIw4p3jp9gJ5nz4G7LOLMMxjPCpOZWO+LUZPzy7dfyweNLJI8FxE9popsT4ucnWBxWWNS6Jrgnorcu2LxPshhKnr8ckIYN16MJm27JI6dP8xf+6m8zm80YDAavXNj5Q6zLeV3O63Jel/O6nNflvC7ndTnviyXnvapnnoVhyFvf+lZ+9Vd/9cVQ5ZzjV3/1V/m2b/u239W2Wqd4Ml/jdD4gmoaEU0HQGrRvMBpsIFF1i5hVtHczWpNwQsLCwXV/zGuyikFW8MDmXTKlGKqQY1fRmIJyKegd1lRFwpNHO+gzx/DJAjGrkTcPCVuL0CE+VPg4wCchwguoHad5Dzv3nOmUpdDsBAXjuGY/sxxfsdyZ9QmnivAuhM8ZmEny51OWZ479GyXeOnBA6lFXHDZUtDsZjB3x3gQ1qNiNZ1ySc4SDxgaElGTSMBotWYvmzJYZPk0QWJRz6Fst4qBgVmvOqzXEeITc3KDciQhe43HbnuaqQA4cm5cr9sKKh5MSKSxLak5tzH6bctBkmFahDOjIolOH7ClkGyBnS1zTYglxRwHPlgOqaUJpDJU03LtzyvbuhKvRnAeSHITHS0/U9jmqJM/P1nlsvg2TgP4dhT4zpCfnsKxprcEDQoJAoJceH8CtozWcGTLKSqwUvKZ/k/ujCfFQEwiFbVOMiRmeVITHFfZOyPyoT70fMH0
qwM1zzOEZQrbIqIRBihlluDVFMxDIzBN6Q2ANeSMokGxoh1Qt92ydkhjNJ8Jt1EJhnlSII0HylCU5bjk/iDk9WUdIhZAKv5mS7lnyPcViU6O3NOIBSS9reM3mAWvaMRIBjRccOUnuIVENWdSsQrUQqO0IHUj8dAaLgvbE0Xp49ngEowHTZUSxq4nuWxKOa4ZrFZc2Si4HnvtCx9K3nPuKx6sd3j+/ynEx5Ob5FnoqGBUW1bSIwOONx9UGWk98YnCl4vZgk3YqMUmJrmuuXTnhWnLG/Ws5e0FNRUuD5bSccl5FnN1NODsbUB3D2fMCfbAkeO4cCgOzJSKOEeM+xBFOgfVQLzVLFXB7meGt5cHeOSPVsL1dUaM4397CN57gpsOfQ/ykI5zA2Y0+i0ONOYux8yUqDUiTimY3pF4PadY9xVXPxmbB2mjOKMlJnCWw0KAReC6n53jhCK7WyJFGFilqAuF8iS4bmjuOuhA8FfUROqC9d4Bdy6hfKzEDh7giCC8pNvo51/uHDHXAmgo5dPC81ZyamMNqSNUmRNIQhg491ECLPbXQOuI7bvWHht/DDCMmPsCueTb7pwzTgp3dc66mp1wNFUMlGbuGc2+5O+1RLLc4mY84Ox7TO7D0bxs4sfg8B+NwgUIIgaotZqloD0OmreQj65vs9ANenz1PL6x5aHs1un92T8nMKR6b5SzKiPTplvrMcXQ0ZnY0RD0XIY+X2LwkrhaoNQ2jHjYLadcT7MBAWOPw1HVA4wyNComF5aH0hHFUcDtOaXOPCC/DNCL8uCNoLO0NiZ0HPD7p4VqDHieIJMTeL2DT026F1Dua4Moxo+059yQT3pCcIoWgdgG10zRe4TVE/Ro1DpCXQmQE8gzE0hLfXgKCm4dr+DSiWm7RjgKqAORaS3xvTTasuDxcci2c09cVgdA8ZkIaozlsUj662ETMYuRUoUtD4BuscthIIrxDLkqcVtjnQiZzxTPxgHSt4vrQkGYV924dsaFrdl4T44VAGUtmQtaPr5DnMfuzMeY0YvBcTXzc4m6dkEwW4AUOAVpBGq3amMayLEJuz9ZY8zmTOGRDWx5OzyhTy8F6jW5G9K5coZ5D8ITFT6B5OsKdSx5/eohfRvh61dbKXYseGNzVlHZTI/dy1E7FqL/kHj0ho6UxIYKGRLQM05Kt7XMaIfHtJhwJVOmRZw36RomvDM/Nh6AD9NYWdhDhH1LoNSivCeQI+tcMm72a16YTdnSFEZYW+ITV3GoTFm0ArUQpT5AZdE8iByF+WeCmS6gT3DLmbCPlVjOm3oBZaNkazxntztmJ51y/vPx9TDSdz6TLeV3O63Jel/O6nNflvC7ndTnviyXnvapnnsHqFubf9E3fxE/+5E/yZV/2Zfzzf/7P+ff//t/zxBNPfNoaGZ/JCyOSP/7Bd7AVxWgrcFNoa8ljx2NOlzGfuLnOyTQhOwwI54pg0aJKS61KWlnRXxMMNgTZpmd02WPXwFyGYW/BlY1DEixr3iJbiZyFFJXm6ZOU02XE+2/sUCxDotshqpAk5w7ZWCq/wImWbAvCnmDrqqO35rGXFH5DsLZxxmh8zlhUjGWFqiJ0njJdSg7PJbdnfd5/Zwc7C3C3Y4JCkJ0JvDXU7Qy0JVx3hBlcuheigUDcK5F9z2DvlCgruRydM1QVOu+j6pjzmWOx9Hz8eIsnz9apjiLKw4h4LsnOFc5UtPUSIofvG/pD2LtHEAwEwb0S1TeEV2YkUcX17IhYOOKiD61keW4oSsEHDvY4WvQ4vtkjn4b0TzTRUuLLAt/UmLjFRob1LcHGtiDcgviSgHELexVryYJ7B4eETpHWCb72NFPLtIz4yMEW58uYp58f0y4D+gcBugS5KMEa6qDEa8vmJUE2FAyueqI1sJcsbs1zaXTMdn9CZiWZlfhC4BeCg0XCM2cD9icZn7i1jloo0v0AVXnCWYv3hlIuUZFnvCeIep7N6x7dB3NNIHuWK9sH9OKcdVoS7xCLCCrN7Un
MySLg0YN1njkZoU8C9ElAtPREc4dxFbVfEmSQbEA2gu17PWIgcPdIol7Nxt4Rqa7Z0wtCB+Gij20U+6eCWan5n7f2OF5k2FsRfqbJziVhAU27wNoKNXDInmdjFzZ2BWJTIPck8Tgn2zlnpAv2onN0GxLlfeoazieWkzzmfXcvs1xEzJ7PIFcMjhWy8ZhijvctbmCRsWPvHkF/KAiuCvQa6N0cNa64nJ6xFU8J65SwSaiWlmpuuDEb8vHjLWaTmINbfYKlYnCsoTH4PMdJS5tUhAns3SOJ+tC7DrLv8VdrdNZy33ifQVCSNTGBUbRzhy09j52scWfe58bdAQfHGelEk5xpZNmi8ppWNdS6ojcUrO9J4pFneM3hB2CuOrK05N6NuyTSMrICZYCZoqkUjx8POSsiPnJjg/N5THo3JFhIwrlFVZZaFLSior8hSEeC0a5nuOOwWxK7KxgNZmxvHNMThnVqZK1Ri5hFqblxGnK8TPitmzvUixB9O0IVgnTiEa2jcnO8MCSbniCDnXs82Qj8VYkfC8bbp/SGC7bUkrEqUWWKLlPOF3A6Ezw7GfGRgy3MJKC9ExMWkuxM4k1D08whcMg1R5zB5Xsh7IO6VyH7juzyGVFacU98Sk82hMUA2QbMzi1l4fnw4Q7Pz0bM7yQUJxHpVJHOFLYusXWBjy02MwzHgt3LgmAM0T0SMWzRl5f0ooLr/UMiICl70ErK85ZlqfjAwR5ny4Tbzw8oFyH9o4AwF5AX0LQ0cY0NDJu7gtGaJNnzxNvgNw1up2Urm3J1cExkFWkb4CtwM89ZEfPx43XO5jGPPb+GW2r6+yGq8uh5Dc5S6Ry0ZeOSJOnD6JonGnnMFY8fei6vH7Lem9F3kDkPS40oFHdnCbfOE26cDvjE3TX0VBEfhujSE00N1rdUYomOPYMdiHuwfZ9D9QXmmkRnhr3dfdKoYkuUxN6hFxnUmltnAeeF4sN3trh5PkAchIizgGQO8QIaW9C4nKDvCccwWIftKx4xFvirirhfMt49IQsqLgUzQisJlgPaWnJ85pmWAf/z9iUmi4TqZoJbaPqnkqCEplngbIMYWkTi2L4kWNsAuSdRW4JwY0m4tWQjXHApnhC0EWHZoy4di3PD4bLH+/f3WM4jTm/0kLlkcBQgG4spFngsptegYseleyTpQJBcE6ihR14ukcOGe3rHrMdzkjolbEOahaVZGp45H/Pk2TonJyl37/aJ54resUbULeQlVhqaqCLOYOfqqk2J9xp+8v/6093Ms1dYl/O6nNflvC7ndTmvy3ldzuty3hdDzntVzzwD+IZv+AZOTk74B//gH3B4eMib3vQmfumXfulzClSfKpKGLxvtM5It1YaldJL5lXsx9Qi7eYnl2RrqCYk4gGBxjj4vEPMF0XKJ6PfJB33OLvV54mxEeckzl5br66dkgwX3RjPe0btLJBzBhmLiQuprG4hygNm+RjnLkFEIE9DzKUFVoQ+n+KpE3unj4ogbZ1u0W32mtWBZCx5SEfelit30Jm9O75JlmsFmyO024BNNSDnfo927l+Ykxoc9/KkhO8tReUNyewrWIqIIG4c8N7mEGcWcWo1bd1zLEtblnIeSQ14bH7GXnTOQiueN59DC4SThqekW9mZKc2NAeKdFlg3ytCG4fY73Hu+gHfa4cbJLvRYwMTFqrWXUO2OnN+WR7Aa7Qc6D6+dEwnN3q2ZiFE/vjTgtejTDPsVpRvy4JXAWdTpDHc8J2hZvDPXGmP2tdfIrEYs8RVyqEWnOG/xd3jC6yV7Y8KbhER7L7FLN3bbHzWsp9VJRrw2oziNiQEwsweEUOa9Q8wXeGKq7m9S9lBuTIfV2Qtk2NG3L/6Ya1uIZV6MZD/dm2IHH7Dgeb9aYlp7zSUy+NUIdK7RUBGc1ycFq+rM6m4CUmDsj2l7MUb5FOw441xa53vJVg5KroeMd2RHXghnBSCNRfLAa8kzb4+mDMfnJgPD5kPC5AH27QB4vCWYF6niCCAJ
8ljJb63E82abakEy8pD8ueGTguZROeVf6PBu6ZmMYY73io5c1B23E+/f2qBcpdtCDk4D0iQJVNSSnS5jMQGuEVswubXB+aZ3FFc201YybGbv9lDdkd3ln/xnGMVwZn7F0nmcvW25WIz5w6TJuHlElI5gosrxGtg3hwQLyCrwHJTk73OV01ON8EVPuaFIxJ5IFfzJY8PpswT3DBVc1TF3DmWuQ+RWema+xPNbUa2M49mAcamIIbs6hbrFlBWHA5HgXO4iZVAPaNYmLcpJxyXbvhJ1wxlv6M7ZVy3KtpvKO5aX7mFUKf3OD5cEa8nkIgPDuDH2ao8qKYLlE9jJmt9Y53kxY5COaTU8eNeyMZmyMzxjpOV/RO6QnW/wm5E5TXruMqvqwfYnqfIhONOJYEC2n6LxCTGaEywL6fao05cbVEfWVIcurnoX1vGb7iLf1ah6KTnh7doc4FSTrmmMbwdUBPl+j2bpOOU0IghR97kjP58iyJd6fQdMgbyW4KOTGZBu7nnLeSuodeG0cshdPuN6f8Eh6l1E/ZChCbhjJ061iPpOY46s0hwlVNsAfG9JZiVwaopszhHMIrXFpzI2TPdpxwGkbwppjN01ZtwteH93hejDhnvUZPSm5td1yZj3PX+nz/HKD9rk++d0++jlD3LboeUN45xycQziLHw/YP9qh2go4NwlyoyHpz7jaP+Ph4Q12goo39M/ROI52Ks5MyNNXRsyLmHo4IJ+kxDjUsSU8mqJOF+iqxDct9dYGx+Mhi+sZ+T0ptmgwuuZt7ibX+odcigrePFhisZR7LbfaIfvLgHKuqNcH2ElAKgTqvCU8miGXFeF0ineO9mCDNou5s1in3ohYYGlry7uThiipeX18zOvCc+RQILzgY/WYulrj8CQl3xkSHiikDBDHJepugcoL1MkEoRRmOGA+SDhcbNOMNRMJ4bjmncOWPTXlKwb7XNZz+qMQJRQfuppxu414/PI67TRDPh0j7kTwXI6alUSTJcHJBBGEEEVMN4ecnG5RbikmTjJcz3lgoLg3OeNd6TOsRYbt/hmNF3xiT3DQpHxgZw+ziKjTIfYsJK1KdNUSHhcwW4IUICVnV7Y43Rwxn2mWRcDQzRhnU75MPsc7ozM2E8G1NcXcW26blqeqLT52ukMx1dSDEeJMktUtnNeExzmUNWHbrrZ9uMfpMOV8mVJvKgK1ILQla8E516OCh3oLrmjLbL1h7lpMcY3bywHiICDfWMfveyLjCE4t4emSoG6IitXsh9nhNmYcMVlEr0Cq6fxOXc7rcl6X87qc1+W8Lud1Oa/LeV8MOe9VP/Ps9+qFEckf+eCXI6MRrQk4nqcUTcDB6ZhlEXF2Z0g5jchuGaKJRR/MUOc5bpnjiwIRhhAGuFGG3exj1gKaSxHpsGHzck6WNmyu54jIwdDiA4dNDR6PbyEwjsuzkqyxrE0dYeOxRY41hqVSVEpyN93mPBpwIxpxFGZoYVHSsVfnXK6WxKUgyyVV7pjPLJNlwI2TDLsEd+RRlSc6NwjrEdaBUvhRD5cEVPcMMf2A2XWJHXj612akg5J3rN/gSnLOa+MZ20FJKEoC0XJSbzJpx9QLTT0PaBaWamqZ5ZrDacC0SbhTjKh9ysKv44XGKo2WjoEs6PmKB9vb9GzFqCyQxrBsaioHzzdbzF3GslqjsQlBG6GcxtBgRUubONrYITcUakPR32gY7ZSsDedc2jzhSjzn7f19BgouKXA4Ct9ybgM+XK1xUmd86OQ6szzl+PbqNTQ3HX7pCU4MsvLEbYiyCqc8XkIb5bigZjc6YyOcsREY1gMDYYBPIs6CHnfjNc5Exg2xhncCrED7lszNSETJZX1IT9dc6y9JIsdwPUDFEr+hkJFjc3BGEpakskAJy0GzztRmfHyxwc1ywM3jTfbP1ogOFdGBJDluSPdrxCxHns7AWGgNLlTYfoRNJPVWQJh5Ni5Z0sRwdTMnjDxiHGIjwX5PkAeSfdWnEprLdc7A1ty7PGW
9XjIsC9KmQUmFkoo6DaizgLM05KgXsYxCZkmMdpbINASNJllEuAqqc09Rhtw+WKPJFfVNj1g6wjtLZNHCdAGtQQQBQivY2sD3E/KrKc1agL9s8BuG3Y0zNsYztno1W72aQThnFJ1jvaR1EqoIv+gRlIL0HFzVUE9nlEZwVIcsSbilr7KQCTeDDSoZ4BUEOB60J6y5kkvLil5jsIscWzfcXkaclgGTecasSAjLgLDUqIvRRRNAG4Pth7RbCe1IUtwToIct2bUFW9mC/237KbaDJW+PD+hLw0BKPIq7prdav2HaZ1qHTE4GFHnE0WHGfBFyeqZYLBWqjlFNhBAhUoR4afDKMBJTttQJI7dgz52iAC0klQg41n3mZDzHVRofYm2K94BtVyM14QKpWjbWKtLYcmm7pN8zjDeWpL2GvVHFOG25HMzYDBZYDxbPTZPxbNPj8XyX3zy/zuI85fT2CH0O2U2PWrREd3JkbQkXBuHBS4FTnrJn8IElW6+Io5bXjOYMIkNv0ENHAZO+JI8kTwcbHOoehdPUXrNjFmzZnK32hN3miIGs2BAFYRyT9geYRFKNJU1syYclUjvisME5zaJeo24DDmYRRRVw63iLIo8pbqa4maJ3oyaYGeTBGXJR4MoK3zSILEXEEe1GhllPcVsKuxMwHhds78xJU8tg1OJjgxvUWA1tJJDOE5WGuLVcmVbEjWMwNUhjMXWFdY7zMKbUATd6O0zDjOejIecqYkxFSsvlomKjbghzT5B7pgvJZC45mwXsn0TIhUefOFRlCeeG1fVIQBTi1gfYXkB+b5+2L5lfB9k3XL7nhFGW8861m+xEC66HU4aqxgLOw2GxxazpY6cCmwuqqaWaO06WIYeLkEnb57AZ05BRihFeCJySpKJhS8wZuZz767tEpiEpG6x1nDWWwmmerXcpbIKpR3gbok2AcIJGVlhhaYZgEwh3FHpNMtrJGW4UXBqfcWV8wvVoxhvTU3pCMpaa0lsmruGwzfhAscdp2ePxwyvkecTpzQFmKbC3DBSOcOJRLUQ2RiJptcFJi4jnSF1xT3zCerBgLZD0tKSNA5pIsx+ucyfc4JyEE99HOI+0jtCX9P05fZVzOTihH7ZcHVWEsUCmgr/xl//3bubZq1yX87qc1+W8Lud1Oa/LeV3O63LeH0TOe9XPPPv9UjnNo8sdzuoeN47XKYqI4CBALSXZoSdbeOJTS7BoELUBawEPUuBNC6ZFOo+uPeE0IZlq2n7ErZMBTU9Q7Ahcz2F2G7Kk5v7tY9ajnLePnmddFzx49ZS+NGzImFDI1WJ23nPsWhbO80RTc2AGVHNDU3qOJwPOznocnG3x8VNJNPUkp45g1hAdl5CXxJMZvm1xRbl6kVKuRpeGA4hD3LiPSwJML6BNJQjwDuZlTC4VT8XbnPsMIRwlhnuDinVp2EvPSMUCM7JY75g4x5H17Jsxqt5FlBG3ZwntMmJxpJC5JD4AuRT4/ZiyCHjs4CqiavGn59A0uKpajTooBaolHhTEscVuSlxP0W6l1ENFtQH1GPRajVqvGfdLdsenXI1OeUNyhw3VsKMrIhSCEIEkQNGTlmvhGSO1ZLGVcNr2mKYBZZmyXOtj8oD0QKJzUIcGFo7wJEctauLZAp+XLFrHvE14XmuEUogsRQ762EFMu96jGSjCHUXbg3LH02YCsasJY81oQ7ARGF6fHTBWFdd0QyoglSESQeEbGu+4ZULObMATzZC7zZgnltvcWY6oFgkUGloQAnyocL0Q5RyytviywlU1YtmgpnOUlMS3IohCivGIPOlzd+cKJtUs9xQmExS7Dp85hjsL0rRm59IZu/Gc18d32A1m7CnPWHlCodEoCt9Q+JYjG3HHpNys13m0vMxR3ufJyTXMMsQcJqglZAegc0+236CKlt7xDOoWN51Ba/B29cEgpASlVqNZjSVcOCSeQgfUZcSTZczHl3uM1paMmpz7eic8JASXg3PemNxlkMHOlkIh0ELReMvUVcx
9wNPNiDPTMFsuMbXHnzTYQsB+TJtLnj24zK3c89RBg14a1NkcsaxwyyWurMAvGbgFMo6RcQz9DD/s44YhZjOmGUiKbYHpe+odQ5oZBllFL6qQeJyH3Hkknr6UhMB9QYELPJc3jyi854nNDU5tSru8SlWNaM/6LBZ99IlGTjXJqSeaeOKJIzprEQvB6SThpJY8udSrNggQOkD2Mogj4nVBkHqKPY9JJfmlCJtCs6dRmWFj65QwKbk8OGE3mvFAeMKWylmTIZl8ockPmPqWmTMsnGDpYiySNGipEoMdW7xWFC5ALwNkEKNzCwcFlA3ydIpoGtLnCnCrtVcqIXm8v7kKAhsjfBpR7ES0A025KWiG4DZb/LglGJekwzk7yYwH0ik7esm9ekYmNSMZ4QGLpXCOA2uYuIRHq0uctj0+OL/MrEzYPxzTFgHJgULlgt6+Q+eOcNai8haMu3itfnUUqwrqhsA4ooXBnme445DFaMjxwSbNAMotoG+QOxXjtOChjSO2wjlv23mesap4TTAnlTCUMQpB6y0tjtsGZk4SVS3HZsB8KmjKIUfHQ4pFzDPHkvBckJw54nOHPi8Jz0p8XjCczvGmxdU1Qki80ogoRI6GuCTEjTNsqrGRxAUCYT2uURzmfaY+YhBVHLkMKWq2fMu1oGKkDK8d7hMJTb3Z0mI5spITK3mu2cLXO1RLRTNNqOYh+bFALwTpIYilpjgYUBcJk6MYqho/meKNwTfNqq5DQ6BLwnEIsaXd1tgsoN4e0vYl5ZanGXrSzYJ4WDEczri3f8Z90TH3R4dsKMtIeAIBIFBAKmBDVzyUHnAS9iiDkNO6x1E/piwj6s0hFJJsX6Bzjz5oEbklPq6QeY2bLfFVzb4JuevGyDBEaI0Y9KGfYcYJ7XpKPZIEm9AModiFpC+JtkJUUrM9tmwFOY/EtxhKw6B84RzqfDHocl6X87qc1+W8Lud1Oa/LeV3OeyVz3uc88+xTbwv+2fzwD//w73pHXikvjEj+6AffQZyEOKcwRR9vAuRSIlpBs2gxtWc6j8irgKNZzHkeMp8rlkuFKiSykAStJmg1ygmUUThhMaLFKkcTtvjA43uWIPKsjw1J5Nhea4gjy2DcokOPGgaIUGD7LYSWOF2gg4ZItSjpcEbjnMRWKa5OkKVAlNCWjia3LMuAyTxhXmr2z2PqSjI/19BIgqVCGkncrHqHlZWApxUtTlqaqMUpB32HjDzrI0uaODZGDf3UkPUtUWqRmUKkCpcafNoShiVxnKOEI1AWZxW2DfFtgCsyaAVy6bGNp5g76kZyPM0oas3dSURZK/Jzjakl4VIhW0nUaJRVSAfCQytbjLSY0GACi0w9MnUMepa1oaWfGtaHNWHiSQcOEQkYKAgd9CuUbkmTBUo6tLDgBW2T4KzC5ym0CpYO33iKBbSV4HiesCxDjmYhszygXiiaXBKUCl1JtFGEJgDvEQ6stDSywWlHGxtE5AkGjjj2bK+1JJFlc1wRRo5kBCoEP5AQguiXyKAliQoC3aCER+AxTYI1IZQRvgqgdFA4ylJQ5IJ5EXIyT5gXmqPzEFNJzGx1DMNCI+3qWAov8Dg8nkbXWOloU4vXjnjoCCLH9pqhl1jWhjVpYgj7giAVuEzgE4FMSlRSEQYNcVghvEcA1gS0dYZoFSKP8I3HLQymEcxnirLR3D3PKGrF4VlIU0vcTEEjVyN9RhK2CuUk7oV9VA1WGkzisJEj7jmSnmPUM2wMDGlqGA5rdCwIBwofgeuDCFqC3hKlDGlUIvF4J/BO0lY9vNHIZYhoBWbRYhvPbBZS1YrDWcK8DDidBixyhVhKKCVBowlqhbarL49d/U9amqDFBQ6TWsLYMxg5ktiyu9YSRZbRuEaHnnCoESHYgceHjihboHRDEtYoafFW4p3AVBm2jZGlQlYSUxhsYVkUIfNlyDQPOJrH5IXifBpALZALhTKSqAqQThAYtQodosUJRx00OOVwfYcIPeOxJY4dW6OGLLY
MRg1x7FADhYwlLnO4xBHGOWFcEChDqAzeSZxVuCairTJEI5BLiWsc9cJSN4rJNCavNXcmCWWtmJ4H2FqgFgrZCqI6QFmJthLhBEa2WCxtaDDKQubwsWc4sAx6llHPsDZoiVNLNjCIWKL6Gh85XK9FBQ1JOkdJR6Rb8ALTBnirsUUf30rkEnwD9cLS1nA6SyjqgP3z1Roii5mmLiS6UKhKErSKoNVIJ5BOYIWhlQarV/tJ5BA9R5J4NsaGNLZsj2vCyJGNLDIEOdQQgB/UiMCSJAu0agmUQeIxbbRqe8oU14SI3CMqT1V4msIzzWPOlxHny4DjWURTSoqZRjWCYKlQVhK1IcLDqjnzNLLGSUeTGLz26KFDhxdtT+zYHNckkSEZeoLYQ19BLCBrIG5IwpI4KpE4lHBYE9K2MTQBvoihBnKHqSGfQ1FrjmYpeaXYn8TUtaQ+19AKwnz1WRM1AcpJ8B6Pp5UNVlra2GIDR9B36MSxPrCM+oZBr2XYbwhTiPpALHA9iYhaZFaidUsa50g8Eod3iqbOwChEHkMrcQuDazzLuaJuJAfnKcsq4Og8ZFkq7Fzhqot2vJEERqOtenEfjWxpVYsNHW1sCWJPMnSksbtoxw3r4xodeXxg+b7/y8/9oZl51uW8Lud1Oa/LeV3O63Jel/O6nNflvJf3Oc88+8hHPvI5/ZwQ4nPd5B+oWBre3jtgKFs21yJCIYFVf/+JbVh6y5PNFoemT7u8xLzYoJoMOJ/10WeK4EwTnXuSE0c4b9HHJWpZo89m+KYhLorVdf9CILSm7fdp45jp5g4uDch3Y0wmyXcFpgd2p0H2DK/ZPGE9W/Km3m2u6AnXsyN2VM5AhvRliPUeh2PqLGfWcNeMeLzZ4W45Yn/Wp8kTzo/GiFwSHyh0DuLAoQuLPshXi2OenuPrhqgoViOtSiGkxA76LKOI2cYmrhdTboXUI/3iqCBrDWK94fJgyn1rZ1yNznhDeIeNsOK+/pxYKoYiXhUTltJ77hjH1EZ8pLqH07bHwfkV2jJldjikyUOSA0mwFHBoCRaO6DhHLmui6ZwwL/BNg7+41lkoDVnCtN/nbBDz7HqfZqAotgPaHhQ7IHqGcK9gmBS8ceOAjWDJ23vPsaYqrvcKUunpiQiFpPINDZ47RjF1kkfLK9xtx0xmO5R5j/w0ozhPiM8E0ZkgPrfIU4uaV+iTJaqoCKYzvLWrnnkpEUGICAMmayN8HPL8Th+TavI9TZsKij2Pyzy9nQVx1vDGtX22kwWPJLe4FJxzKbOsS0soAkKhqLyh9i3HNuCuCbnVrPNoeYm2GFJMhjTLkOowQy0F6QEEuUftt6iiJTiaQ9Wgp6uRat+2gEeEEUJrpmsjpknMc1sZthdRbCvqoaTadDRjT38tp79WcL13yoODI66EEx6K9hlJz65aTSsP0RgsC1ezcIpnTI8zk1Ivr3Na9yhPNliWEe4ggVyt9nHpkQctYmlQp3PkskIvlvjy4i5i3iHCEBFFLPsZy+EG7TCi2UxpBoJ8W2F6jmrXEWc129tT1uOct41vshEseWNyh6Fs2RsrIiEIUACcu4rSO55qx5zaBL+8iqnHtGcjZos+4iRETDXxGcRnnui8RZ00yEWNmszxVU24XK7mRnu3Gu3PUqoo4rnNDVwSUO6mq1HBPYnJoNozyMxwZXvCMCl56/Amu9GMB+MjtvSS9VFAXyoEAgHMXcvCr9ZdeL4dc6Pa4Hx5icUy5vx0HbFQ6MOAYAn+wKNziz6okGWDOplD3RAuFnhrVw2dlNDrUUcht9a38WlEuR3TDgKKbUEzFJjNFjsy7I1n7I5m3Jec8FB2wKXwnNeEZ2RCM5YhAA5P6R3HtmHqYh6tdzltexxP76WoImZHazRFQLQfoHOBP3AEuUMelOi8ITxbQF4SleWqZoQAIZFZSpEm5MMht0c9mlFAuRXSDATFNvi+xe/UjNKSB7aO2AwXvK1/gzVd8mA6IZOesYxRSFoMrfccWsv
SCT5eX+LIDJjOLjMtM/KTIfN5QnSsCKeS+NTBxBGcV+izErks0NP5qmaqatWGS4WIQpbDAcskYn9riE01y0shJhXke+BSj9orCdOG128cspEseUt2k+1gzn3pgnXZkMmIWGgabzDecezgzMJzzSZP19vYfJ078zH5PGZ63EfPJcmRJFg6xIFF5y3h4RJRNaizc2hbkrpe1bUOEVqxHA1ZxiEHmwNsFlLsBjR9SbkNzcATbZboUcP9wxPuHZxxX3TEA9EJW0nLZd0QoollgPWOyrcsnOCm0ZzajI8U1zipe9w4W6MoY5YHPXyuSA8EOge5b/C5JTheoPMaNZ1DVeHadrUeUxAglKYa9DjqjbkzTjEXo4LFpqYZeKptT9iv6W3nbCULHlm7y1Yw583JTYay5erQEgtBLAIAlr6m8p7n2oSpC/lQfo2jZsj+pM+y6NMcJdh5SHIM4RSSU4M8N6hpgZ7k6KIgnC/xzq4+D7VCRjEmjthfW8elIY/u9DCZZLbRAj/3B5hUvrC6nNflvC7ndTmvy3ldzutyXpfzupz38j7nzrNf//Vf/11t+NWm9ZInq20k4GwP7zRFE9I6ybyCysBZ2WPZxBwvh8zKDD+LyBYSPYNgagkWlmhqUXmDKBto2lVjC6vGAla9ntbiqxJMC0oglprIxASRQs01LpbYIyCBdtxjnoY82VMcRNs8lVRkQUMYKYJA4QOL0w6jDI1uqEXAUkS0XnE1PccFUx4KDwlax3DLELae/sITtJ5kYZGtQxQl3lhcU+O9owWcgCaKMUqzTAfUQcw0TVchK4pYRAFVKKm0IG8ibpyuMyPhlCE9Z9g0NaETZK1GWA+1xRjPojJURnGQ98hNgJs5wqZieG6xtURPPbL2BHOHajzkBt+s1nlASkQSQxJDGEAQ4NMI00+wWUDbV7SpxIbgLmY6eyNoS03uY+4uRiyCiNC3DHTNJJqTScOGbomEIxIWKQyBsIyk46FoypWg5T5RMc9S6iyg3dSIwiNyjy8dfrm6c0+zdNQ1LHJPbRWLWtN4Te5SjAipVYyTGhsqnJKYWOCVIFgABQTzEKUkU7WGkynSK275LYauJXUGbT3SgXEtrTUsnWTmFFObcWIEhTGk9YKoDUhKg7SKoNRIK5AWvJY0myneR5jdEI/DBBYvPT5V+EAiRyEiVgTjFpUaxuMWlVmiQU2Y1Yx6OaNsyW604Go0ZV2XrElPJiEQColYfSZ6gRaSWMK6rAi058H4kJmOSWzDsomYB32aKqAexthK0exIqlLgz2MogtVdqEqDbDyi9SinUF4hVYCSIQiBnjeIwiGnDhdY0qdbtG5R6ZJG1dyKA85UylzvEEvHQCsCKVE6BCkoFbRScKz65CJinzWmpLg2JbIBrpH4VmASv/oAWhOUlxWB1UQmInSe1IVEsqUva0JdM4gXBAFk/X1kIJHDABFI3FDjL0YkRegY9HKioGE7PqOnS1LZYrzgyAYc2ZDCRdQuYGI050Zx2PS4Ww85qXocLofkRQRLhSglshUI53EKXCAwqUYqUE2KqEOEFAhjVyHae3AOX19MwZ4rVB4gYom4K4kSiR+CzyDstbS9gJNkDFnEnbDh6bgiCBRhFKxuFR9CoxyzwFIKzb7qURBy5jIapxj1CmQC46QlNpb1ewqS1jDMl8RNQ1zm6LZFtxZpHVJIpJAX9a2xcYCJA6pEkmeKMlLMM00dKhapRmlPIC2FDfnt+WVCb3nUXkE7QdJGSCvwtcdbT1F5GgMnxYC8jVjM+ogqJjt3BHmFnjrU0qHnBr20yGWDX9b4uoGmAedW7biUq9t9S4kAvHPIxuAlRDOJqiVeS1wMzmpELDhZrLGMetT9kF5Q8dGkpBe0DEJPoiFSFaGq8aLBiwaAvWDKKCu5pie4LMAOYnQtia8qVAPhYnUJjJiXuNbQFDnWOorW03pBTkYrAhahoJUx01hSqwgXxxitsZHCa4EtNKIRTCZjpIoofJ9jv8XAWcamJbA
QtgJnDbapqaznvPHkRnO3Cli2Hp3PSJoctVhCezGzxAiCQqIMiNbilYRRD+9SnPI46XGxXl2a1I/wWUg7CDBDielDMzaIzJKMDWlaM45zhmFFJA1SeGqvKb1n6Q2rlaXMam0YJIkQ7GoYO4tOJszDnPvFjPkgJE9DmkrR7oWYQtMuNG2uqZasvkpNuRxiWkFbCYRXCBcihUapZLWOVCTxShDOvqSXhP00Xc7rcl6X87qc1+W8Lud1Oa/LeV3Oe3m/pzXPnnnmGZ599lne9a53kSQJ3vtX7Yhk5TW/XW2ztBG3izHLNuJk0aNoAtpFiK0VeqFQlUAvQZUQ5NAvV6MAwdKiqha1bBF1g8grfGsuRiv86hpm3KowrV2tTyEEomqQShKfJ6AVaZasGpN+jA817WDAJJIc9XYwsaDtg43B9DwmBZ9afGpIk4YsqxlGJdvpkpEuuJ6dMlAll3YmZKJhWy2JhGMkBYEQpCLkk2MfYHGrhVddS4tn6gSFlxybPnMXc2hGnJuMo7rPadNjUqWclSmLMuYoHyIqiVooVC0I5qBqCBce1XjCuUM1Dj1rkK1FLqrVmgh5SWRKwmq6Cptti3eOF84SoTReSkQYIAINcYwMQ3wa4ZIIl2hMFmASSdOX2FhgI4HTILzHGUldhBijeF4KosAwtSlZ0HAnndBTNVfDM3qqYlfXpMIyloa+dFyJJ4RM0dkRajV5dDXV3Bsabym8Z+E8Sxdy5hJmNmW/HTE3CXfq1Tk0y/tUbcBkkWAbhZ+vGrBgDrKBcMZqiu48QtYhk3nIrLbsLzeQdQvl6vp86gbftHjTrs4rPN57EHJVU6qlF17cMSmpIdSQpS8uLOsCSbPew4aCZrAKns1w9aFoBg4XevSgQUaWca8gC1vW0znjsGQnnLEeLNnQCzbUgoFsWJMVsZD0pCQQikCoT75nQIAE4dhSFUNZk6QVhQvYjWfkLuL2+joLE3GnGLMwq1orm4BmkWArjV5KVCkIclAFBIUnKEAXlmDpUFVLeF4jmhaRl/jWrEaLjMXXNY13PGtDIOTjYrCqtSBAKIlIVrVG+jtqrR9gI4XrS+JY0GZg0tVXlYFPBGSSONb0ehE69PTShmFguRI39FXJpeCcTDZsqwXx76w18TtrzVO61ToEUycpveTYhMxcwpEZMDE9jus+J02f8yrhtMioqpBiGSEqiVxoZA2yBWEFXoMLJaankaFCegGNRQbBatHeogRj8GW9qrXFEpwjuHjfIq0RL9ZaAFFIE0XsJxl30wibrNYzMLGk6StsDM1AYENoB+BCjxlaZGBJBjVhYNgeLMiChqsXtXYlPKOvKnb1jEzUjKUlEY5UaiIkWigUEs9qtLP1hvpiNsPCeZY+4NSmzF3C3XbM4sVai3mq2FrV2jzDtgo/Cy5qTa5qbc4qjMwdqvYEc4OqLIO8QVYlFJ9aaw3emNXIGbw4m2Q1GilXaygotZqy4jyyald/PEpJEEpUq7GhpJmH2Aj2Bwkugsf6e/jIowYNKrKMsoIsathOFqyFBVvhnM1gwbpeck94Sj9uWBvWxELQE2r1x4oIPnkeebcazcSxdIbaC05dSOk0d80aSxdxpxYsbE1dCkzbYpeetg6xyxBXaewygDLkOE84L+H5YrWYbVA6goVFVga9bC5qbfW55ssKrMHVq7AZ2gkhfPIY6WC1XlASg9aQxqs/gocZLtLYnsbEirYnMIm8qDWByTwm8/jE4TNDHLf0eiX9sGYrXTLQFZFsUcJR+hDpYe4qauEQ0hFe1FqMpKcAZbkvOMbhKXotDY6ZExROcmJ7TF3CcTvgzPQ4afoc133KKiEvMqoqoFjGq1pbvvC5JlC1J1yAbDzRWbfmWZfzupzX5bwu53U5r8t5Xc7rcl6X8y7ax8/nbptnZ2d8/dd/Pb/+67+OEIKnn36a69ev89f+2l9jPB7zQz/0Q7/rHXmlfPIuTO9ApqtbOxdNjHGKsk4wRkMd4M3qOmnRApWHxq1
6uhuPrQS2FrS1oK4kTSMoSknTQlEK2hbqEpwVuAqEExejCALl1GoEhwCBRAq9KhilXryTiZMeE7jVY+hw2mPD1Rehg8gTRo4otqSRZZA0pIFlnNTE2jCMK0Jt6UUNSkEUCaQS6EgjJPhQrEamAotXDqFrkBZUixcOKyQOQeMV1ktqG9DYgLoNqdoI1waYJkQ2AlErROuRJXjj8aXFG7AFOCOoC4UxgiKXGCNYFqvjU5RgDZgKnAHRCoQVKKtWx8krpF8dH8nqcgOEwitwEpy6OEbq4hgpMJHDa49LHEJ7wtQSaM8wawm1ZT2ribRlLa6ItKUf14TKEkUOrSAIFVILRCBBC7xebc+rFnQL0lwcI/ACLJLGa4yTVDbAOE3ZxFirqasYbyWi1AgrVmuYmNWoJsbjytUdudpCYltBWUraRlBUgroWlLWgrqGtwTSAEfDCMTIC4SXKKSQSddHoCqlBrEY+vQSj/SfPIekx8ep42djjtUclDhE4stQSBY5h0pCGhkHU0ItasrAhC1uiwJKEBqUlQSCQWiIDBWr1weqlwwUtQlqErkB4vLR4AS0Kh6SyelVrbYSxelVrVkP1MrXWeHztsbXAVoK2EdTlJ2utbSEvBaaF6oVaq1frBMhWftZaQwrsp9Wax2n3KbXmIXIEoSNOLEloGaYtSWBerLVRXBEoRy9u0Mp/stZChVACHwi8+gy1JluQDivERa1pzMvVWr1ay0PWCtGCLP1La628qLVcYY0gLySmhWUhaQ2UBZjPWGsS4eSn15qQIBVeshr1/NRaCzxO+1WtqU+ptWxVa4OsJdKOtbQm1pZxUr6k1uKLWtOhQn0+teYllQlW7XWTYK2irhO8WdUaVqBKsVovorAvrbVSYhtBVUqaVlCWgqp+4WtVZ2392WpNI5Cfsda89LQv1Fp00W7/zlpLLFHoGCQNWWgYRC29qCELWrKoIdKWODJoJQhC+RlrzQctCIsIahAOpMV9Sq3VdnWcyjamtZqqjmlt8GKtyWr1eUT9KZ9rtcfVAvMptda2q3PpxVozUBWrWvM18BlqTREgkUihEEhQEqR8sdas9ljlVudR8PK1FoeWUdoSa8NaWhMpyyipCJX93GpNutXx+Yy1pjBererMBjQmpGoibKsxdYQwAlkphPn0WisnDT/y/f/vPzRrnn2qLud1Oa/LeV3O63Jel/O6nNflvC7nvdTnNfPsb/2tv0UQBNy6dYuHHnroxe9/wzd8A9/5nd/5qgpVLwil4TXxIYloWO+3RDgyqQmQhEKjWK2NgYfaG1pvKbyldI6Zi5i6iHObcmwGTNuM/WbEvIlZlAOqJmCyTLGNws7Cl+kl96jWE84NsnGoZYMqW1ReQmMIqwou1i/w1uK9u+glB1itryECjQgD8igiDxNOsk0IFbYX4SJJOwhWI1JDiQtWI1IugHbg8aFHDFpkaBn1C+KwZTed0w8qdqMZQ12wo2eMVMGaKhmKmkRK0ou1BSIRIFZ7gvN+dfcRb8n9qpf83GoKH3BghuQ24nazztKsRn+tCcmXfapWU85jbKNQc4WsBcESVCUIlx5deYKlQ5cOVRhUYZB5gy5rfNMQVvVqxKVp8M6t7vTzAimQOliF1SSiVoqDbAyB5vl+jA8UZhDgQrkasYsEbU9gY2h7YBOPyywudSRJQ5zWjOKSjSRnHBTsxHOGqmAnmNELarbVnFh4RtIRIElFgBSC1f2CwHl/MSLV0HrHzEHlBSe2x9KFHLQjZjbloBoyaTPKMqOoUooiJC8iRKkgV+hqdXcUXXuCBRe95avR32DeIhuLWqyOi84LMPbFO175tv3keQSrERYhkFEIWlPEEUUYc5qu7uZk0wCXBphU0vZWo79NT+BeOEaxx/QdMrSEg4YoaNnsL8l0w+X0nJ6uuRyek6mae+JTUtGwJlti8fK19snRX0fpLHMXce4ipi/Umkm5W49YtDHzYkDVhC/Wmplf1NpMIttVrckGovlqlDyYG9RFrcm
yReYVNO3nXmtBQB6H5GHMaboBoV6N/oaSdhjgQkE9ELhQ0AzAhS+tNXFRa0nYspPM6Yc1e9GUNZ2zoeeMVcFYVoxkRSIEqVzdUSwUenXpxKfUmvGOpW9frLXSaw7M6KLW1l6caWHbiHzZo2wDqkWEqTVqIZGVfLHWVqNRHp07gsKhyotaKxtUUX/yGL1crQmxGgVVEuKIWmsOsxEEiud7q9Ff0w9w0SdnEbQ9gY1eqDVWtZZZ4qQhSWuGUcVmmjMMCvbiGX1VshdMyWTNTvLZa83jKS5qbf5irWUsXcRBO2JqUw7rAWdNj6pMKaqMogzJ89XoL0u9OjZLgapWI1Kquai12q9qrbWo5WqmxQu1FlU1OPtZa62MQ8ow5jSJVnfISwPs76i1F49RH1zkaQerWgv6DVHYstVfkuqGy+n0otYmZLLmanx2UWvNavRXaELx8rVWekfhLAsfMrExM5twZIbMTMLdesyijVgUQ+o2YLLIXlprn/K59kKtycYTLiyqdqhlvaq1ooS6xdc1XIz+emNecoxWI5tqNZMg0ORxRB7EnGWfrDUfSpqBXrXbQ3Ex4+Ki1vqrWmPQIiPLqFeSXIz+DsKK3RdrbcGayhnJirGsiIUg+5RaE6vYjPMec3FXrdytau32VPIj3/9KJJJXvy7ndTmvy3ldzutyXpfzupzX5bwu573U59V59iu/8iv88i//MpcvX37J9++//35u3rz5+WzyFee8ZGYTchGxtB4FKB+ubp7qA/AS4yTOC2rraZ2nsp7aegobUNiA3MTM2pTcRJzXq4arLhNsq9F5gGokfikQrUAvVh2jwdKtHnOLbD2yNMjWIuoWWgtmtZidcA7vPDgQzr84jRRYNfbevtBVj7NAs+p9R2t8bfGhRJQBMhCopUIGAqarW97K3uol0gMRSshCXKDIU48NYkQUMtc1i2BApit6uiFVLaEShFKglERLCVIgFDjhsHI1YbmRLQaohKBFsvQxDZpaaLwS9OOayBli1WKNxMcCjCAcgjKeqPQExhOWELSeoPIEtV9dGlA7RGOQdYs39sXr1X27Kk53cZt5h8cLgZOr3nAXBjipsFGMU5omTrBKUSYRRivKOMBoSRlpmkBSR5ImEJgQzOpUoLGaZR2DE1QypMpDUtFwJvoktKxREnpP33s0gtitplcrJxAesKvp+I1pcd5TGE/rYGYiaquYtCmFDVnWEU2roHYEdUNcWUTVQi2gXDVgqlw18KoE1XpU6ZHGQWHx1uErg7ButZ6IdatROCEhCFb7IldTcF+4vt9HIWiFjUIINC4O8FGAjTQ21FgtaJXESkErBNav7gbtjMe0AiFW6w84J5jrmEYrAmFYqBhvIZENdahJZMNU1cTC0lcQCU8sIBAQCIsSF/dkEg4rVneGSWSL9xAqQaIlG7ZlPbBUJuR6VFAbzTKNcEZiBxqMQBasRhMKf/HoEK1HFKvRYF9afOswlcW1jrZxmNbRGjDGY5zAOInxEuMVTmqMCPFSY1WEVxoXhHilcKHGK4mNFU6tGnmnwBqx+qyYe1AeUXiklrhwtd5MoT2oiFMpqWVCIRImVPSo6Yma0EPsQSEIvFjdJtqvziHj7WqRT2doPSy9ovaKc5tQ+YCJkVRO0DYGbwVBs5rOryqHtQpVaYQRqFoijUTXIFuBald3F/NS4CIFKoAQvNV4o1c15lo8HocFAV6tfp5AgZKIWIOWiDRAaInIJCLw0DOIAILME0SQJBYReURiEZFFJQYVt8RhQxrXDMKataigr2s2gyWZbNhUFYloGUhPKDypEGgBofBIQOJW+3TxB0wkVu166xzKQyMbAu+wUpG4Bi0tvaBioFPWopwijlgmEbbRmH4ItcQVChqBzxU04HKBa8AsPbQCcg1GQs6qKKpwNc2/WV3aJeyqDZdIhF/NGpBSIlSAkGpVk0rhrLwY5XPIxoP2+JlHahAXMy1UbBDKoeMGrSw2bmiUYR55GimQOiKWklxZYmHoS0M
oPZHQBEKgpV6NFF7UfyscFk8tVs1LQcBShOQ+5txn5D5i7vqULsCYBGc1qtEIq6CWYFfDxV6u1o+R4epzRVowQ4EwAt1KpPGoViGtQxmFdBrtPMo5lPBoHFo4tLyYGaIrlBaEFyOyKtYILZFJgNcCn2m8FrhU4QOw6epyLpeA1x5Suxr9jVeXmAzDgkQ3jPSSnq7oyZJM1oRiNZLbIHBOIlAINA6J8RrjBZUXNB4WFmov2W8EcOsPOK28OnQ5r8t5Xc7rcl6X87qc1+W8Lud1Oe+lPq/OszzPSdP0074/mUyIoujz2eQrrvWK55sRjddM25TaaWZtQu00yzamtoqiCWmMpq41basuFpqU0EhEu7rduajFxVRbEEYgqtVM1KS6mCFf+tVJVVmkXT0Kc3EttXGI2qxCVNOCXY0aYe0nA5QUq0/1FxY19Ku1NrB2NXoiBLx4p5BViJDBqnD1xShBGgZ4KSFaPfdRgFcSH6+CjotjvBacxwO8hjvxxXX20apIXHgxlfSF/3/x6AOPi1bTeUVkkdoShgatLGnUECjLIKwJpGUYVATSsBvPCKRhNCyIhGEgV489URNiyURLJCwxEInVGgsBAoVEr5qmVa/6RS+2v/iPv7jRtPOrO0BZ76mwWA8lAuMlSx/Qopi7ZPW+u5TaB8xdQuU0M5tSuIBFG1O6gLwNqaxejea0AZM64LjpgxGIWq4ajRqkXV0vLS3oF973lzx6VOUvHh3CemRlV4GntqvGuDF4swqIGIcwhp5pyIxdBUh78ej8xbnhP3mOvJi1V42llxIvJEKvpq2LIHjxUUiJDz95HqAULtKrhTAjhQskNlqFbxuuRtdsuFr/wIUCF6w+SOFiJnFzcbtwAlqtqVuN0o6zMkMry3PROqGyjKKSSBpGYUEsDUNdEkvDQJXEsmUgSxLRkEhHLBzJReDKpGWMRekGTX4xpXo1VViiLt771at/4TxoncXhMc5i8VTOrc4HLzBekLuAxisWNlq9/zamdCELG5PbkNxGq0cTsmhjKqup2pDaaMo6xFpJ22iwEl+p1eUatURYULVAuNX7LhpQ09W6FbrWq/Oj1mA98zpjaRyTxiBbh2wNsjUIYxFm9bgKxXY1F9/a1bnxwsLU3r14pyX/4tdqjoAXq9EvLyq8aOgFqzuEoYKLO4WtHgkC0Aqv9eoxXL3/TkvaRON0tDoHNNhw1SaYiJc82nh1PthoNW3dxw4UqMQglSeOW7SyZFFDqA2DsCbWLX1dE6uWQVCSqZpMVWSyJpUNmapJRLNarFc4MmkIgESuQmYsVu99eLEei0R+ct1uXqgF0EIhvacvPcZ7YlFigE2ZUyNZhiGFC8hdTO5CcheztDGFDZmbhMoGLMzq/Z/VCa1VLJuQ1ijKKsRagalSvBWIUr7kUgJVrYK9euFKqXr1XNd+daVC41d/VLf+4v13qxkprUO88L437ardN+bFUU6cw5v2oh0wNHgOnAUEN+gjhEcwWp0JcrVujpAX7YDSCCVX6+e88J4rBYHGBxqnFT5cPdpQ47SgjTVOCdpI4pQgjlaXMJgInBKrzwi1WhvFq0+eD4QClERHGqkFcRARaE0WKGLdEuuGRCt6qqanLalqGahq1RaoklisHkNh6cmGQHhS6VACYiFRQhDKFy6xWL3/4lPOA8RqVBo85sVVnywWR+sdrfdUSCqvqJ2m8CG1D1i6mMq98JkQMDOrx2mbUFvN8bkCPvT7F0S+iHQ5r8t5Xc7rcl6X87qc1+W8Lud1Oe+lPq/Os6/8yq/kZ37mZ/iH//Afrl6PEDjn+Kf/9J/yVV/1VZ/PJl9xQnhi2aK9Be0xTpGKGuMUtQxXjzrAWIUJNdaoVc9zu/owfeFLtqtiES0XPdGrXmjReLDAxaNvxGoQsQFvPbYG7zy28TgLpvE45zGNxzqBbTXWKqzxOOPxFpx9YWSSi5FKgNUaGwIQ/qKIUBfFpF8sKoF8cUqp8KuG39cOLy/WaJACilV+8xp
Wt7fwqxEH7Vch61OfBxe9v4HHq4s1OpTDhw6nVtc4CwVNJPESyjDASIUKBIG0ECgCaWlUTCAtc93y/2/v3KMtKar7/63qPufOHWBmQGBm0JGHIoqMkoDi4PohS1iAMYYoy6AS8YlBBxVBNJr4XIkoWUs0PvNSTJbvLBVFY4Kj4AJ5CGoAQQQkDhoGFDLMDDP3nNNd+/fHrqqu6se555z7Ovfe/YE7dbq6Xt1dj917V1WnymBFkiHVBu1EIdVAohUSraG1glYaSitorUDcm1qXQG7/BRBI8zc6cpXBKIJRBgZAD7wWukMpcmiklEMRCycrSWPSdNEzCTrtFnp5gk7eQi9P0ctSdLMWW72y1K6VT6ByQHW13X8BvDa8S9af01UdtkigC7ZKdvjZmQ4blXNbHzJbD/IeIc8JWY/3N8iyBHmmYfKU64HhcCDOD2TrAdnnCljNuoJS1tXV+gBlvyyj7BaVhqC6OXSeAxmfVinvhaATQpLyngimZd0UvBdCm11qs+VNtw10YpC2DZLEgNo58sSg09YwSYqktQKdxCBLU7STHI+mK9DWOf4v7WJCZ5hIMrR1zn+JQaqB1D7zVGtopaATN1gooKYeGJ2zXTqoB6QIuSLenyOxa+KTFBk0EiKsoAzKTGEFZdjHdNE1u9HJW37fhU7GfUG314LJNUyP911AN2HhsqPZ8tTl56E6bN3jfiDsD2w96ALIANPlGQWmyxbePCPkGdjtKe4Degp53kLeM7wvdcZ1y2SuP1BRPdBkF9oQfzVI230btOLnzvXB9Qd2Y2JiM7MmYmE/A1QCoMPyl0oJpAHVAkhb65Mmuz8DvEsTBtCEZMJAaUJrIkeqCclEBm37hzwBuqmGSlLsTidg0gRZ0kInmcSjSYadSQ9tnWNF0kOqCW1tkGiglShopZAmth7ohAfRxI6kdp9X0qboDxTBqAykDEjltj4QcgA9aGTQ6FECUtwv7aU6aOsMk4ntCwz3BXvabV8PcpOg223BGA3TtZvzdDUL110WqnTX9tFhfxDWgx7YOt5ToB4/e3YB07V9gBsb7HPPuhomV8h6mseOrA0y3H9wf8D3QeX8gqVsPdDQ7KqEXZ34PkApDUUJkGuANMjwTBPT4b5e7e7xR6ha3MZ0i59z0iLek8ge520Ob9rEz6FlgISQtA10Smi1cqSpgWrlQGpgWkCW8hibpAqUpKC0hT06RyedRFtn2JV2eUxIMySKMJGwUNVKNLQC0iSBVoo3i1awMy2K/sDVA6NykDKAdXk8MMjsLcugkCFBRho9pMigkCDHhCLskxAmVYIV6CJLNFZO6NkVRBYRIueJnCdyHkTOEzlP5DyR80TOEzkvYiTl2SWXXIKTTjoJN910E7rdLt72trfh5z//OR5++GFce+21oyQ556QwWNvajhYyrNRdtGCwUmVIFWECdhqt4tXVCfhLIRrad1he9U3gya1EyMmgZ6fadomQEbDHsBVkl2mjRwl25ivQpQSP5CvRNQl2ZNY6ZjWeO3or0MlTTHWtBaSbopslyLopsh4LddTjzxjrjrLTcmGtIYH2OwdS504RlCEkHRb4kg4vF1DOtZrvVi9jK0eW8Xp3pwk3BiDDXwGiQtML8K3gP57WrRJnFeVpo712C71EYaq1l/3iS8qd4ETKa+cnEiBVMPYTsWaCNwTM28paQNkKZloqsoaalFgbnhrQBEElBnoiR5LkWDFRWEFaOsc+rSm0khyr0z1o6xyr0j1oB9awx6Q70VY5VqouWsgxqXK0FaEF/qJO6q2hCfyuBEE9IPuFJALXASJCjwwMWWsYgN3GWcPa6FKCXfkKdCnFjpytoTus5ntHbwX25C3sytrYk7XQ67Wwp9dCt5dgqpfC9BJknYR7A2sV1R276eVU/PzD+sCutYp2rFW0y9Zx1cnsVFue/o8ss1ZQWx+MtX4bAzI51wHiqdMeVw+cFSS1bos3Ae62U/R0gj2tfaASxfUg1UA78fWAUs2bHLeUfe7
WtVYxtn7w/gCUwroEM8GCPa0wUEkxgE9OdJEmBnu1eRPTvVtT/PxbbB3dx1pA9k46WKk72D/dgRWqh0mVYVL10LJTwlMotJWzgXI/kFrBNe4PCMZaynP7/LuU2XpAyEDYY5LAKppiZ84WkF35Cuw27cIimrWxK5vAVNbCrt4K9PIEj3ba6OUJpjopTK6RTyVsDZtiAV93lLWOo7CKBf1A0kVhDct4iYzKXD2wS2V6OZIssILmzhreAxniPTCIinqA0ldponoA/jKPVryXSKLRaSXoJi082lrBg2Er4aUArQRoaaCtQS1t64C2MyMUTMpfW/PHCWC8NdQK+CtsPbCzJJJ2hiQxWDHRRSvNsVebNyTdO+1gIulh77SDlUkXK3UXk0kXk6qL1Zqnfu+leLr3pOqhpYAJkB0TeEZEC7xnQuKm6dv/XLdIZFhOJbaOd611vGPYOjpFCbpGYze1sMe0sNvwZ+z35FwX9piWnyGz086Q2dGdRDdPsKvTRpZrdDotmFwhn0pB1iqOzFrF89K4kIXPH9AZ71mR9ACdsUVU9Qx0Nwd6OVK7FAjeCprB76fj9kJx9QBuTAjqgHVVoll4S+wLcSvhepCm6KYt7Gqt4L1mXF+Qaq4LqeLPhqcKZoW2z9m6K1TkmhX88mdsPcgneDkEJgyQGqTtDDo1mGz30E4zTKY9rEh7WJl2sXerw31Ayu4+yRQmlBsTMuytO0hhsFL1kCreKPgfZix9LE5EzhM5T+Q8kfNEzhM5T+Q8OyyInCdynpc1RuCoo47CL3/5S3z84x/HPvvsg127duFFL3oRNm/ejPXr14+S5LyQE1ttupTCgPcMSInQU9rujeG6UGvRI15xTaRYrLCuIe5EczuY5kToGkJOwJThqeS7TQsZJdhlJtAzCXblk+i5wdUk2J1NomtSdPI2unmKLG8jNwnIsFlImQTaaNZ+G7ZCalLQbAjiKcTW1Yag7R4a2rBApXKCynkwRZ6DNzTI+LiXQ+U8lVzZQVXltiHZwRTGClVuKnn5o6zB5355Q0ACFA+MSivkLQVowxsfaA2aMNaCoa0mG6DUClOp5mnELfcH77LABb+0gFKyQhVBTbAVLJtoIU0MzEQLqTbI2220dI681UZb5+i2JtDWGTrJCrR1hr2SDloqw6TuoaVyTOocLWXQ1kCqFFKlkCieNppobS9X+QGEYC2iIJ42quzSAhC6br15opBDYYrYCrabWMjuEO+5QDmgiNDKc+RGYWWmkBhCK8sxmWXo5Rq9Hg+ixlk/emz9SLuwU8X5+addu7TAWsX4PPFU4hw8mBogcUKVfe4qy61Qldtp5HZQNcbXA8oDocriFlYQFFsFwFN5SSnwF480TxnXGpTy0hLT4iUlJm3BJBp5K4VJNfJWgjzVyFKNvKWRpzzAIuG6kCcKWVvBJApZi6cUZ/ZLOMYu81BGQ4Mw1UuQGIMpStHSOfZkLbRUjj1pGxMqw6N6BSZ0D3upDlaoDCtVFxPoYVL1MIkMbRhMgJACmOAewC8pSclOJSdrBfLNwtYDY0B2MOVB1QT9gcIeu6Tg0byNKWphd97GlLFfhclTZHkCkym2PvfICsEGSQ6kXQXKDVu7c8UvR7ldUmTAbiBcOyu57iKYLUFAl/cGQc/YAdQuLbHPHrmBcs8/s9b7nIUF915FbgRVscsWL8VTHBTfOeQaBimQJywNJQmIUsBoGEpBpEGUwJCGMQo5aVCukJNdUhC4lLBhzyTgjWMTICc7O4KsNcywcL2CUrRSgz15CxNphj2tFlYkPUzlLUwmXeyVdDGpu1iZdDGlO5jULfR0grbK0NMpTyVXOVIFfuFWPCMisWZgjcIoqt3CCjYPQxNbxRPkMMSfj8+J0Aa3+xXUwhS1sLfpYQ/10KEWducZpkwLa/IMXZPi0bzLe/H0uugZjd3dNnKj0emycG26VrjuKMAo//z5eVs3Izt7pnBVz/3ZvWIyw3UhM6Ce4X11etzuTc+AjEHey3l
WRGbsqjLDY6BxYyHfCEMKBN4ng/sFXmKUJQn4C18pP/8kBRLeT6boIxLbNyT8og3+KiDnoZD3eDaHAbf7POdqZjpsDTUtOyi2DO8d0sqhtUGvlaGdZOglGTpJhl7SRZZMYIXuoqcnsEL10NNtTKgeurqNNjJ01BRaMJhS/I2r7NFgA+Vlhsh5IueJnCdynsh5IueJnCdyHiByXjQ0EpVHy6WF+4T5x24+DsnKFcih0DEt5MSfyM1Jo2NS5CbBlOGpvF2Tspsn6JkEmUl4o8lcI8sT5EYhy3mqZZ4rkFEwGU/vpZ7mzidjYYgyrvgIzxsAGcdDpri3yBR3mBmAHN7VGf/WOW+SqXJeT61M4SrnZsYe5zxdO8t5U9os6CgNsfBk99fgAZQbkDsmYzsqliSLL2c0VRXl/7HCFuCnDVuhC4lzeao5Uu58ybpINPdUCf8m60K738o2PgVKeTCnlgY0gFSx23IuIle1iMvUIkATq4xDNwFPSa1xKeU0KLHTl1OXBgGKoFKexqzTHEoRUuu20hxaEVp2amqa8EapbZ0j0dZVBi2V86aK1m2pDG2VI0WOlsrQQo42cqQqRws5UhisQA8JCBPIoYnQZvECLaLgsnlDUrddIl+mhiI+p8jJiHZqt7WweRfwG3TCuoVt2lpiAdiJ9MisrYqXUACZ7Rg7SKzLnzTvoIUMGl2kyJCgY/cr6VALHaToUoIuUvQoxRSl6FGCKcOCacekyCjBHpMiJ4Wuc/MEhhSyPLHVmuf3Uq54yn3OQpDO2E0ylveTnIVObaybs58yyvrbOGWX2MKjjB2sCDxQGbBgYmB33qXAJTsV2/Ayn5yXavByDQLl1jUEkxuQAUzOcU3OLzdk90Xh6dzEbZdQbDxNhavcYyQrAAKAXf7hGq3/rax1TRdCsrIvTXH7dW3SCs9Buya7D0vhah5gfZvVvu1SwhZH5/JvFpgp4VkIpPnFil2UXAL8MS8tIQ2gxUsLVIvbZJLyUpNWattcmiNNcrSTHO0kQ0vnmEhytDVvdN3SOVYkmX3RypDqHJOalx2ttO4KnSFRBitUz7o8BX1C9ZCC228CbtcJeONUDd7gVivYl3ZlRXZ2uUVq7nRgBVMokJ2nboKXegTP1VnDyZAVcNgqbuwGxHng5lauzg0LqRkBOWn07Etez/AmylOGP4fese6UaSEnZV2NPXnL+2ekMZWz/1SeWpf9e3mCnBL0cp42z+Nmgtxo3kSX2IXhcY83Sef+SWV8rGy7VTlbflXObVDZZU3aLmvQzt9wm+Q2TEiskkEbw79zg8QYdnMDbQy0yW37z7yriKDyHIoMss4UvnrzZUN9wlyYf0TOEzlP5DyR80TOg8h5IueJnDcPct5IM88AYPv27bjxxhvx4IMPwph4iufZZ589arJzitUduzrp/wCAFIGlHQUFHhiV72cIbkNLUsTriRWxpYVYJw3NcZEYtiAmhuULUrzPAOyDh60omrNT2nbOqa0UtvNGbtdU56XfhnjvAuta8ygLbBl3tmTXzlPGna6JXAo6a3CjzXlzUMqV7+iJyFoibGceHPMFK3/zik67uNdulwalnGstl/bYTQOHdh04l0UZG55HbBZgtJUAEsMdtmZhirQ9TrhNmhSA4qm/pJU9tp215vOkrLCkYcMVx+wS+/s/8q4TrliQMyywpfyck9RAaxaqtCK00xxaEybSFIkymEhzJIqwwu4BYhKNVOe2guXQtn+B1oA2UEpBa811Txsoxb+VC6OsVVQRcsV1WmkWqnh8VCB7vxM7aCbK7Z3C9zv87R4kFQ/QW14BwLgv3sBaJJTxFlkCWXssC1lF21LcQYGX0yjic5o0NHLkpJFShgwabZNiBXFH3qMEmd3wsXjp0ega7qC7eQu5UejlTqhKYQx33MYo5FkCIn7RIbIvLsYKCcZOxM65jiplO2sEf8oKJcoKTAp8HcrWddsGSAXtwRAouI+K4Os0lGLrvrJWXLtHBxlub4EhDzAErbn
NqcS69oteZF0YbovERrDifYesgAzd0DadIOXaZPhb8YuQDtqpcu1W+2nalZekykuTc3lmAtn2TSjuk2vfZPs0lXGRtW2HsK4KXGjw3jyKX3TYLV50SMO+6BRtU6csZJmE3V5KMJqFuTxJ0NP8tbGWTtBJErS0wZ6EX24m7B49K5wwlbA74YQq605oK1xptmC1tW3/itt7qghakX/vS5RCYtunVixaad9Oi9kPrm7yPxR2t769ulkRlLjZEdZKaNug3TIFxr7wOH9XExQ5gc+AKIEC16sUCprItk9229adMAY5NDq5QU4KHZOxgsK20U7eY2EtT5BbYSozmvd4yTXvJ5IrUK7tSw8LUzBWmDIKOuPC6xzW346ZdmxUGY93XshyLzh2fGTlA3jcNDyO8Mwc+LHUSZlkx04oxQInEjv2af6Knm9AyxOR80TOEzlP5DyR80TOEzlP5DyR8wpGUp5961vfwllnnYVdu3Zh1apVdqC0D0upsRSqNAxWJ3uglUEbGVuLkEODkCrDLriSsY4YbntOa8FRVpvMmmSQO1esjfY137btWGqzjtUi53b6b6Q9tscsJ5E1ZDjtMU93zUihZ7jCdqxF1R17rbF1O3bQmTIt5MZpjZXXHndzZ2llt5tpb3nNc80b3NrKb/LAguq0yAZQubIV21q5eixI6oyvmV3ltcauofDyA6782jUQKhqKNlaQtIMKyP42vEwBPWMtMWQtM1XXr983PPqQ86fQZUGh2PODH5aTG92oVAyYgavgPw/sZcVEW5mR/XXKHaZJFIzizlwp/lIJlN0kVcOuHQfvG5FYYTOxg5MzZ7ie2VtJ+ZisMOmtp6ndeDeFtQoV4cmFrwiThQAJzR01EnaVtpZXxVYeZ3nVyiBNDLeplNvURNKzbsb+1vLa1hlS6xaW2NxbblYle9gya62vCQx/XFiR/cgwfNtM4ew32j4i7dsioKDJWXu4nWrrunZYbpvcJIsXCfK/y+0TyAxvUmzfW9AjbQ2PbIHtmoQHnVAgpMKa2rUzHDqGra09w+23axJ08hQ9Y9sl8YwIYzQ6WYKcFHqZnQ2RJZxnplkwyzRfg531ADtY8WCkisHItlWdF+2MXRW0u9AfcbslZ921AxfZZUMGUMZwf2D4hU11uU1p1yZzw1Kgse3MLxkK2qLtE31btPce1vpdCBexxFhpm/bNQmkULw9BW9VW4FNaQSXa7gur0dMaezTv5aETbQU93tNB2XasbJtTtv2iZdu+bYMI2qBvm/bFz7dNJzi69mjbN6WI2qdxcfxsCPvyHwmPxayIJDFQ2iDRBlobtBK2xLZ04aY6R0uzVTZV/Nu3S9iwymBCcfud1F1oEFqKx8q2yuxYmUdtM+HXBTtmwi2Isy9QQfsMxkq/p0hoaXWPNRg3ybY9HjsJuXFjpV1iYOtLZlh4zIO2ydZX5cdJfmHTyEyCnhUGuV06SyxbUqdyPt6x0wBnYVkicp7IeSLnQeQ8kfNEzhM5T+Q8iJwXMpLy7MILL8SrX/1qfOADH6j9lPm4opXhztpPnebOu61yJCC0lEECXgvPFUbZCZZ2WrTStiMv/tPBMVDUBVjtsO+4ra+xWt8cvNdEbgUp+z0hFqYAK0gRMij0iNeH8xdkNDRx5TBkoEmDiOe5Joars84TEMAdHhSQA0RWC0waZDvr3LAlJ894OUOe83TLPONKl+ealyvkmq07BrbjZuHKC1OBq3uw1lHYac18DHfc407YWIFJ9wBjyGqhyWuPKXcti6DIcEtxppecO1rlpkrb/RpUZjtu+0l4cp+GN/bYduSwHb8TssJNc+0IW7jTViruub31wFlebQfupj2TtdTkOrESe2DB0c7VvJTBufZcMTVaW0srC12mxcKTaXFexgpRPM3ZWmQ1eLq0tcjCWWZdJ+3cyPoKL1QhLYQq7aZIO8urJttxEyZSa6kpu0ls0Vmhedp0W/XQ0oZdlaOtMrR1BuOspiq3g6Xh26OAVPG06Layyxe
UtQZDFy7Alldr6QHAvsGLX9BEbdus/8/U/GXErq3m6BILUz3iLz0llMJAQRO/siniPXEU5dZNkVACGANtj5Wbpm/Afrnd5ybnFypkPCiojGAMoHMCGYXUrsoxvo0RD1p2xoITprxgZC032lpsQuFK2zi6dJwEViCfBtmlRATonvFtj61GVqiybU/lQRsN2p5bRgQycbsMZkAgegmCFbZc26R44A2fq7EPVoWuKmY/aF36s37Ktj2/XEHbGQi8XwOUnd2g7DIFXbg860HBuDbqj+0muP4YMNrOmkjsOV38pqB9ete99HiLbGiBJWgnVLk2agUr1zZ56QILTC37xbMJbZczaF6+4CyvEypDqnJM6i4vHbLLFiYULyFSipCAkNgZPFqxQNWCtbyCFyWkYMtrCl0IWKGSgnvNaOyE4mfpHpdvm4FQRSDkSTF2hm4PThHBxseeVUJkYOGqS7xcKLPjZ0IJFBkklIAMQdsXJEWEJFcAkR1Tlyci54mcJ3Keq1Qi54mcJ3KeyHki54mcx4ykPPvtb3+LN73pTYtKoDJQ2JVPwFYFbrORy50ikUJObAFgf43c/hHx9P0cCrlh+4ixlojcaF6PTzYt4rQyo61Cnqf9G2PzMMr2GyroP5Sdossuj/eq5K8CP1X42bX/7K9tXOvmzi3C+TxsejDFeZ3zF8phOIyyv/0USgILP7YTdZZEEPz6ZL9HQF64CI6Rm8LfWhq9ay0dhb+JziGyclgrCAVCUo1bbIprcUIQAB52uYOCtUq53xF1gpZdysICgOIbA/CmqgjysOf9HgMuvCr96epv//luN3XafdI90VE4SlTjMXl/FP52gIDt8DksC2S8bAIszWjYgQd2EAJvHMrjEKdpv/S+OyFAAbu0nf5ulz9obQcATTaeFdaUPVZU+NnlG41uYu+9d5V7jHxfa1wKjsktD1Fg66t1uUK6JRPG5mmsHxVlVARlBToXXttjrXnBkNLkq5iyU7r52Ia3AxS7wITOMJn0bLGMLbZNE9xWEjthO3aBhNhN3QwL4hkWzj8Bf6o8tXF4hQwPjprYypu4W0LEL5AE7w8AmpTdRwW2LNaiRP422jaCYp8V79rR0r+wwB8XTYoC/8KPLcQmiAdeWgLA2yndoIuyy8RLTACjlJ3eruyUd970mV3eEJnvpEKu2LUiBXLFdz2zdz9XiT9voJAF53PwJ9ONYjeHHTv8b/bPrGDuwoTHBsof57ZmZHYJGbv8wk2AH3MI8Ps2sfw5wV0QBc+HXJW3wjFx/dSGfP3gPytkGGtxNDa8IesPH0+RFcCpaE6u3jh/ZZTPv/CLy6VyfnIqt12nsWOHdeHHgfiYgmP7rmz9bbWy8jyPe8T3yiWV26gmOCag0+1iuSJynsh5Iue5iiVynsh5IueJnCdynsh5zEjKs1NPPRU33XQTDjvssFGiLxBcmVnwSawVIYmOMytM9SjxghRPobcWOjstMCee9m7ssSHeiM+QQmbsZpZkz7twuW0AVnAxoeBjhRsEghK8IKNshULw2wo4tlLCVVQr2PhK6gSgUuXVQTj+Kyot5+PSpigcorDluLDCkMvbCkfWH+TSKzpPlisM99QUtEQn1HjrheuAS43ItwiXbnFMQaPjDwhZwREAq/cDy0do6SAUaTs/B6Hw74cdTGpP2KL4e+CFMeV6fBakYH9DeQFQae2P2WoXCFVQhcXFb75Scu0XpSLXbR6qrIAVCmXhOS+QqMC105G9AMfL2gt/Dseui2/9AkGHvH8YBkEYxev+g302ozTL/gnXnyg9lj4CAcoKV7o4VnbfFaUQCXjKCoNaEXRirTDaLm+w51v2uKXtEgS7J0Jqj1uKLbipKo5TzZsLJ8rtfeJmTARxFPHyBmXsMofCv6UyKHAeyvq7pRIahMQuU0qUO88zLrR3Wbjj2eRUyJmK5VWehcH10z5WOAuS219F25rurL+ugkfWYNca6htFjA9D/l8/fR9OYHJCkqkRpghl4cqQ2wj
ZNjErptkJECxcOZf4txeq7Mt2j1h4yqx/z44ZOcXHPftVqQyJnUoeji3FmOKEJzfN3L2Q8+auiR9bcjumhC/sPZNYV/sXdz/G2Nkrfoyx11R+GSejkLv+xo4nbtxRpPjGBEKQH0PCTV2dvxsfqN71Y4cTjGriKKrxzykWuoJlXdUxJ3BDYcunTdFYUhbIyi7ZTZp7vXyASrs0ETlP5DyR8/ogcp7IeSLnsb/IeSLnLTM5byTl2fOf/3xcdNFFuP3227Fx40a0Wq3o/J/8yZ+MkuycokBYobr8IRGr0bZbwFq9r+tUyLdrtjmSjY+we4Bfvws7NdEec/fjjovB0k9itP4uPPl4fI7XBsPH40Bl1w66Jf/CiMYaWRfGTYPk/60VwFsDnNbfnrfh3bIDoKbewWm/reuP3RdDrCUWsJbcQsh0G+9mtsFnJYtu7rTvhsPmNs3c/hkv0BZpeqsvFfsSsMGyxWXIlU/TWYMN8SdyvSXYXUtgKWaBTNn7ZV2/O6Nz7bPzHUQgBFsZMfL3x+584R9aDGA1+fG5QrvvnjegCs1+GB+2g0KYlq3bJgjr6g8VQrStKOyXGZuXTTR0I2E0qIyuvtlzhOK8W8/O9c7FgQ9TDKDuF0VH/pcK4qpSuKJpBudR8fcmM+fnG7jdRwFgiynAFmHArdbg/RHsMaAKOdVZSa0wmVk5NdMsBCuVwM0o9+koZdMlFPs2oDBUR2FR7PFgZWVolPzJnwfgP+7j0mu03EZ+qjhG6didt3mTtWQ7Adb5h0K47/o0eD+HUChW9lmEFmOfBhXmTn9dtk5aq7G2x9oKtlo5Qdj26/aYhVcrHFvBtXzsBFAWMt05ex7W8hy4PjwKC3Nql6eEYwr3dhT7KTs7A4VAG1dHCsYVwO8iURpb/FlyIm44xrhxBYEf93lRWg1jTXmfiqKvsIeRlRn+xddNw3d+4VgDYqHW7z1DTvi1Xaop5B1Oi/veLBpjirHFjTm5HUvcWOSEUq/g8G6Nn1F+HOJxidPZ/WgGnI9lich5IueJnCdynsh5IueJnCdyXnhe5LwRlWfnnHMOAOD9739/5ZxSCnk+ftZaBfgNY1uKN71M4I7tpphumqwiaPCxi8N+vO5XA0hCDT54RrFCuHcG56q9v7ZhuWdz1d/hNPduvTBQ1ONh4DrrBicFwOntAf8ZaleJQ3/XEAC/P4eVGVhDbys8f+hCeU2+09iztZc1+QYKXUpApP2UVKepd1bhHuxXOiiJtPOZ3egv91r98By7XZN6S3JOLFQYaL/kQVlNPbmlFVajn+eF1Ti0FptIyHLa/FiL75Y2wFmM80K4UvbGKKfRNyg09xUtuysjKlbkyNrrhCcTp8FxSuGCT/myf7yUwmnllTXDOMuzspZbZyVwUrPX9IMCv6CnY6kTIP5EcHhctQ4b25GWzvt9Dmwt9/uSUCTw2wqNQuIPBLNAKOPKP0qLKaFsq1axn3J+3mprW3C4FANA8XUxzdZRl56O46tgaUYhYYXLLgJ/bwXm48oyjcQO1YkKLLjWiqxVYQEOrMmkUGxg6q2/heXZJLB5oWpRVvAW6Mr5mmMfT8MvpeCy1flRIXhZy7ITEpGESyoAnbglFNY6bM+n9jjVdm8I5azEhcu/jbX0sgWZNzwurMCFNZjQ1myLbOk8GkOc9belMiT+2CCx8RMU1mX+pLnxgpfbKDmJxhi7zEJRZaxxe9U6a7GvTvZ3AvtFJ3vsd2lS8d4Tvlrbc6NTWH8dzkZsgnGmbCXmrob8eGS7Rr8PVDG22C6HeyLetNmPNcXyBvcizlbh/mNM8buwGPslFIE12J3HzlnoUxYpIueJnCdynsh5IucFxyLniZwncp7IeRhReVb+ZPligAB0iC93ilrej1szvAji/Mn6w/0GeKAOqiVrSgMds9XuFv7ggR0AqNBJg9xq90K76uNSOX5s8Sv7Oxek4LZ6MFaLbKy1q/C35Q7HPajymBUZmPi3Co5VcJ4FDVdeUFheXRx
TcR84XGG59FNMXTh37ZF/cK1O4HFxgSAsirjWH3DWxcLa6MqqjPJfSPcWRysohb/hfjtBx/kjEJacIOKshaX128XSC1vPrOADQiT41KXlw4bCka20isLzzi/MOw4HosLySBT5F2FDoSg+F1YeVTou9huhyB9lASkM7/xLuGUOxctBeI5fFqKlFQQEJnnEJ6b3Ks65h60iPx/FDUJKxcm4JRjK9hfuwDleWAv87DEvwXDHYVhEAhu5cN786fJUNkogmFnhyoVjfxcGRXp2nr4/5/diCfKLfheCWZF2eMzX4S2R2t5uez7yD9L3afQ5LltQ3X1T1gwcWWIVL1Hwt1wRb9iqAKMIPUWB1ddZLcF7tdi0FMhbirU1+bs0Ixd2jxfE55WNw7etyAPBb2XPwx2DgqoSlD+sCqXq5e5tUVVK902xAOXDqzCuqwvBOR+Gavwpjq9sC3VWZpCP68M7v+hccV5FYcECtTtXLpPLK/Sv+a1cDqoUIAwX0nA6VVlDhKWPyHmFv8h5IueJnEci54mcZ4/5OkTOEznPh19mct5IyrPFCIE3iHUT94nA65rJaTi1neQfTCl3U9ChC8ubE47Ipem0o8E6ZBeO4NMyNr6xU84NBZsAWiHDhYmObXrGxGF5LCyOWXgKBDJCsd8GxUKGF5IoFDKUF4IKISL4bXxPXBIygmPYY4TnrRvEUwDghQ93PohTcVXlXDGoV8PX/g7zsnnrKCw15B2Hqc+fqnlaCdYJQMV1UlCuQqDwZTBhGCpmrbtzoZTswtRKzS4dlwcV82VDAcfFCwWcUKJ2L1B1whFiV0WCUikcUHSsIO44g3rje8FAgPcQFfUuTCwSqlz5VOxX04sWSwr6QfWH/nqnjRGjYmtQ4W+Fi9CjZCWKrEaRAFYKZ/dEUdEojGgkVpW8gvPhJsY1eamKHwoBrxSH/D4sNWUp5eEFsuA8KVhhoRAGqE7gA4cjFGXxltcgTQpcE/pZQasIg8AthJOmc5E/4Jcz+GUN5XR1XVoo8gEKQUfXhLPnBwnn/L1wVJdfzXn2tx21vw6KwsMvJbG13gmj9rwXcJXzDwXK4tjtM6NqBFJt44bHSvHMonDZhrPYus2cE+tqa+n1SzLC3/7YWYW5DG55RurSqHRGwjgjch5EzhM5r/jt8nPxRM4rh6o/FDkv8IPIeXWyVeCKnOf8l4+cN7LybMuWLdiyZQsefPDBioXyM5/5zKjJzhl8o3L/2zVIhbBfd7ZH9qt0gVT4q2g0KOL4QTLIOextfQ4Uph72APXHbOxR9kiVwrC/sxK6tCk4r8I83Xn7jwriFNcZ7/FRDCrl4/j++FRqwqma8HEcV6Z44FIN/m7/Dn9sL4iCZxD5leN7l6L7EbtUkh9UdFyJE917eAEY3uUwxj4/LlYRJzpnn4Px5wth2j2jsF64/UmcNT0SvmEt1j6MywuFEO6E70Bwj+UyZeWyIi0TpEVwMpjd84TiMlOUN2qOy37hfUFg/Q6eR3C9LhyC4/B5+2dgQr/QVVF9UkE8F0bVxkMgUKtS3DAtVZO2PQ7KUB+35jxc2y2FwwDhwnztNao4dNRWi7RU3GZr0q/GCSi190YI9sWrKKwKVolx+fonEJ2luNC+xbu2H10QVf6tO45zqYaOfMLu0FneCNHNiHJSqBxXQtYOJUGuwXDi3PAx1qYRV5r4QYeVIhTCoCrxVBBXBek6y7kKw4HKcnYQFoU87g3xRVphPFceY8N7Adqn75Y3BGmq4hxbnF0eHC/vTWA5I3JeESs4Xe0/Rc6Lz5XCl/t9KgoejUt1/iLniZyH4HpFzgOiM/3ChfnaaxQ5r3I2KmT5OM6lGjryETmPfy8DOW8k5dn73vc+vP/978exxx6L9evXY2braeePVOVQgN2dAX4Nst8UEMb+Jn+cKDfxPt4s0H0+2G84aCubC5PaNDSC6ZxwM1HJ18emGascR/ljh99MEMU+G+XztftsqGJtdJnyGunCH7X+c0l5bXXsH3d
ZptSpFl9aicNy3+zWXMedZLgWu9gjpOjK3YDvZ/Hb++GPnUADFZ+3A7j7aosXcFBYrL11HIXgE1rHCc4arn1ctn4nUZzMfSEmWDJCQOxPylvgXVlzCi3yhRv/1nH5SMdxA/86S3tuTR1kirTd/Yg2HyZE58gJbEEYLzR6Ac3e6/KyDSvceCHYvfM5P29FD4So4LeK/GO/YokFp12xXAeVRwHVZR9RmoGLOH5fC324ZAT9w9bHb7Lm1/lTnJ6heGxttLgHS1BAvixFHlSEj66DvKCjShZ2d47vVxGvRjK36dX714aP35xAYRg0xEXpPAEUhQviOayfe9mL/RHn5Q/DNGrSDNOYb5rG/lCqqngH0pI/Yf8JvUJLeV1eVjoqW+BJ2RG2bD0PLPOxhV0F1u84rczEm+QvJ5aWnGc3jkYs52kbrr+cR1bWiuU8t1+NS8fJg3we3h/oI+cpJ7M1yXmw+9vEslitnBeEa5bzqrAsOb/Plvdaq/ZX3JtScFyz347tc6vynNuQOvRrlvPs8BQN9V5JBCd/uTih/Da9nOfksKws50EVMpf9bWrkvEKuC2XDBLzfXGJlUvtRBitzOXnPzZJ0eTbOsLS/3Yba/pqpPLOyfqalS7ss54XhQxmvUMjVy31kl+nmVj4zNs1CUWf93UcenNItmGnJF2HPl+U8VyGMCuSjUO4rjqvyU+wfyoFeHnL+pOplu1KakazozqEk34V5lsPW+ZXiVT5oQU1plv3Jp8fXRQ1lcw2GKmWgyJ+8jFOR80oylSp2kAcC+TDQVBezLUv+dbIc+5tAvkIpTOFX+fruNHJgZQamI0g3XnZMQTnCCDZ/l0hY1jIu7XmjJKtFp2KZrYjh5K96mS1IufCrzIQs4hTpwYcjpYrZnKX4lVmZLn4o5wVpZTS8nDeS8uzTn/40LrvsMrz85S8fJfqCkfs5lnabV1dBLVVxn+JnCCpk77DyRvW4SQhRfcOWVWAcVvljqsRxA0JRahP5haUP03PnUISh+IooSMOHJ5TCqFJfEafrrD8U/Ebf8FwWJ9gUAYt0Su+UQafkrFhU9INRuoyhIHJ4D8r9aRAv6sOia3LnVeV8eH1liyT5+2kFBndffRmc0OPSVf5eh2kiiFtYD3VcpkCAKcpSlLfYZ6SwZJb3HQmfU7zvSnAdhDhe+HxNOT1UrrtYVhL4AYXCylrLdLBUxMVzz48vsLD6wp0PK0MQDlG4OL9IGPHnqHQcVBygVrAJ/W3XEaRfVLbmOCWBpJxWqHQCiiUyQVmLPCkKFyq0/LWE5UEpfwryiq672lDi9MJrCBpjSYlWbnz156j47a+f4rT97yC9voJMKT4ojhOlUT5fuBWhaxqUcm3EhVf2xrk+2uZTlj9Iocm8MBCzrWDrm171HBHqBbF+frXn/D/xcpjAv/xbhTdTwffFoRDlB3jrGurVXtlyYLHKeYY0AIJRCRTAm80jgaLAkEmFdBSZnAL5rnBDSSoOYxcQRdJSKGWpsCpG58LfVaVWWCWVDxWHqEqMKki72mbC5tCc0vxAwb+RP5XDUem48Ks7V+sfDjtR/NCvlAYVTzU+X/YrjJ/ufHjOGUEj2RGFAdPF9zPRgjS8nGf9nBIXlANKgbfiVkjt9t1GZYhkuOC9IPQLf5tS2DicK0s5fnCdJbnPK7wovrZiaFXxvXW/y+f8cFx9LymG5kDes2GLcPY4rAp1/hTISN4tzvedgdbnuD7dev+6cKHcV8ljuniN6dC0eRfHQaOppEuV8qlS+KKyUyRPxudL5XFphA+1tizuXCm9otKUykJxmk1xvTdV/Kf9HeaHsl9xkqjufE2Z4sJN6zXnNA4RDScimaw8coXRasLVDVR1cl05DS8uNuRdJ0/a351OD9+7pP5SmhhJedbtdnH88cePEnXBIIQWm0Ikqh4XA55XPkCV2mJgSQqUHeGx2zMjnplUDEJO0VX2r/ttyv4NA5yzCjUNbLVKEoR7bsAPVvVxCrd+kAwHRXu
dxgoAvl8sBkrfVwZKFQR5IAxPKBQy5UHSD6gKUZ/olI8urXA/j8DfpxUNqqUwVO+n+pwDEO8NEvr7eC6d+oG+VrGCMF5N2mFZGsKA4rQqgyhV49TmZUrHlbKEcWoG76Zrs64e5F74a6CB0y4P6BVhwN87ql7zdNfkj6k+/5o4ANU821LeYVmDshd5UFSuyL9cpkr6VJx3hNdYyZOq8aLnEecZKdQqYRryjvIqpV90HrXpho8oLmP5+uryLJWnpuxUdy1DKqYiBVpZOCgr1nwkiuu9z18V+8s0Mlz5OMoIcaaLN2CaI+bcP71BZ9goN74tX+XZYpTzADcrKBxPdRxgtiuWIMwDTomrywPA/Os+BUEQlgx7dmXAfCjPXvva1+ILX/gC3vWud40SfUGwojDP3IteRtjhdyVVWhMdy1l1YxQhVo5Q2T9Muyl+zftqmGvdu0Y1XqGoK5c4VDIV5QmPA0tPWO4ovTAtVbkmVbIChUo0Hz8oR/FiG+RLYd5hWcLywYdT5ftQuoayQipKs3xP7TlVuldlxUkcvnxdqPxWDf5h+rVhguPKy3KpTE150ABh2C0rAurC1oehMP3goVJDGStfTrJpVMtHcZY1ZYqsRJXylvQkFJah5jpq4sd1zjn1z9vrZvxzdSdqLKconmkRLw7r0woKW1Yah2UJ0/FpR221OS+qCx8eR/ee4vilekBBuWrzLZ0PdU71+de4tdcT3vu4uhSKd4r6gP73p4gb5l2EL+4DH5J1y4aWoiDV6wrL5C6sdP3l6l0pD0rxy2HDe0JRGATlDyNS6bgSp5RXfL7aPirjZmNcm0JYpgHyHjR9Ffxbn290BADIsg5wc00ZlgGLUc4TBEEQBEGYS0ZSnk1NTeEf//Ef8b3vfQ9Pe9rT0GrF60U//OEPz0rhZpvEvgm4vS/8dH1FCF953HT88Fy8d1mx90URno9r90ALNAi1/oBP2+dTycPljap/6ViX8irCxS8a8b4bqKZRilscq8p2NeX9N4o8VBSucd8N1bRPR1Pa1T0/KnkPuY9H055wixF+8W/aQa4mbJ9w1V1H4F+yTUM80/DWbVAoGvrlXZe2ofh8ZS+U2ryq7/8G5ffr0oxFVPc2KcrQ4O/ClxTOzr88MzXM24cpzXT1Myhsodws1Xi/FeXDmZLipuxfnWFbnu3aVBZVUQoZq+yOykr18cOyxud4iVV0HRTOxkVQ3nifGVTCF4orKuVl/D0IZsASKuUppwEAVDObl8vAz7d8zlTiF7NyEeZB8bIcuHQCxVqxNLrIC+W0nR8A1IUvzcaNlJ5UpFU83JIxomFGbTTTlmoUXlQNW54p6uOW8px+qYwqlGY1YfstqZk2bRMf552pZas8W6xyniAIgiAIwlwxkvLslltuwdFHHw0AuO2226Jz47yprLHKEbehqLESuIpe5hXc661b1lRVULFSzb2gOMVWdN6nFhO/NlcJX60b5pE1pFf1UzW+TeH9LK7aMGWVUlnVFs7qKYcMr7hQhMV3q1CshefI7nWlVDCTpSbt+jsZXllQAFTTKcpQnYURP/lKUijP3Ci/H6ImXL26qXrvGsP762i4bq8worI3KjNDUFJCUTk8Y8oe5biVNNHgH5eqaQUXUfVelmfOhO/ndWnFijNV8a8vq4r8ys+E/auzQct7eRR5FQoq1MSt3WsEsEqOuM5Ey5qDspWP68sUK4cqip9h0244X74PxfU2p2ua4tbGi9OIy1yER5BOqDhDKa3avRlLcaM4Ydvz9ahIe7rw4XUpm091+XQ4O7HU94V1jIq0or7Np1eOW1WEUYO/h/i8qvOvdUuKsgY3WvocFLLwL8pUVZJRNf1y2MDf723ql+X3iV86Nt2ySn75sFjlPEEQBEEQhLliJOXZD37wg9kux5xDKDbOdJjgpQPheSe3l5fgoPRCS+UZKNWlk+F5fsErNnWPX/zrZqRUz5dfZMMXc1/G2nIXMx182RrKWp0dU315LmaaoDbNunsbxfe
zR4CyYqlOEVGndAhf/osTRZkqaYThB8kzenmv5l1WrMUv/tXnWw5TiVfOwxYuCk9xecO068pdTjN6mS6VrXIuWtpXesEu51VOtFz2SqbV8hXhYsVDXdxa/zB+bbrVPKJzTfn1y7OsFAjDNqVfc05NV6bGOHVlistaUXxMk/606dpzKgjSqCwphe93PeVz06bZL92SAqZvuqVyTLdMejbT8oaOoqENnY4q+w2Rvy9DX/9qp9GUbn9/iv37Phcq+Q9ThrpOri7uNGkSkGXLd8+zxSjnCYIgCIIgzCUjKc9mi4svvhhf+9rX8Itf/AKTk5M4/vjj8aEPfQhHHHGEDzM1NYULL7wQX/rSl9DpdHDqqafik5/8JNauXTtUXk5tVJ4hFobQ0eZL7l2xXlGlQPypVCewK6sEUe58NTxQKJHqZrTEx6zAcvHIFoh86vXx6tMOlSDhmfpZNdV3iursq8rXMsO8yoqAunDh7JHIvyE/FLP8Ij+fePWe1N2HIo+qf+1XQZvuBxW/o3wpyCc4G9+vqpKkqjyz+QRhFOK0QXEYpUr5VypHWWFVVcpV5h2G1143M66hHkR51FXS8myZSrym/EozBCv3rDnvWiXDkHn38+urpKq5r/7MNOnWKm1C/2mup1qu/srHqI3VKHNq55zUKW4azpeTnFap1hSx4brjPd+mycvXQYriNpZlkLpT7jAGubYB/IrvAdQ1nJo0hs1jlPSic1TTbop7EfbHxbhWUxeb2lDd46+pS5Xn3ud4urDdqS6uuQrCDJhPOU8QBEEQBGEuGVl5dtNNN+ErX/kKtm7dim63G5372te+NlAaV199NTZv3oxnPOMZyLIM73znO3HKKafg9ttvx1577QUAeMtb3oJvf/vb+OpXv4rVq1fjvPPOw4te9CJce+21Q5c5Ke2KpOO1I34vNEexPDNGIVbvKMT7k8Vps0ajbp8yn4Zyx1Vc2k6xUclLlfc2K8+PK0pR9leqOc/w+tz+b3WUr7soUzm9+pfu5utuCK9cWVRN+DCtauzmMvTxV03KH3e+/pxDW1VP03unO19HGEdhgD3epjlfRmH4Pd6GzSPKK7xZ/V7Ep0tnxDIsKvrcn/m6eu7BgoW2s1Amg0JRNWxpTO30xn4xBsuJaMBwIH9PmtOaTsfUPy8CVdIo9+XxqFVQu/9gSbnctAixzr9uefJ0ZegXpzz7uEgrMEAQSqNPWMY4fvUDOUV6082cdmWtzAwv5bF7Z47LPllTmGXCYpTzBEEQBEEQ5oqRlGdf+tKXcPbZZ+PUU0/Ff/3Xf+GUU07BL3/5SzzwwAN44QtfOHA63/3ud6Pjyy67DAceeCBuvvlmnHDCCXjkkUfwL//yL/jCF76A5z73uQCAz372s3jKU56C66+/Hs961rOGKnehymIMIXqhNwhmCsEqgGqE7rLyDCDowICugGgfNSeSVxRzILgZR+WPFiDyj68iUqZR9VWjn6IqPMFxi3I7dMmner1xXnV+tWVSVUu/Lr8ZuvBWSVa++iLt+ufSj6bz/dQx0ylqwntN3qdAByHr7qCqyWPQcHF4ZfOaXo3h0g+vu9/+ab4MqihD3ayPuLRln8FVPs172DHVffOmz99FGFFnFyQx4HU0ZDRM/jPNayZ58+zK6dVPdXW+b/gBFVV1+VRmwUVlqPMv5dXnmUxbKnu66QMZcVrN8adVvqFmNlSJJgXY9Iq7BoVXn3jNeTXvBVnp+anPfUGwPYG/x039XJBnHyXbdFsilMPGzyYOt2d3VhtvObBY5TxBEARBEIS5YiTl2Qc+8AFceuml2Lx5M/bZZx989KMfxaGHHoq/+Iu/wPr160cuzCOPPAIA2G+//QAAN998M3q9Hk4++WQf5slPfjIe//jH47rrrqsVqjqdDjqdjj/esWOH/+33/vIqj/q3hiZhu064LlNnDQemeSHoY0Hvm1fd+WlfKqrLauqutzrboaaM1ODvwrvrcmulavZIi+I0zA5ofOmi+rQa73X
jC1/Dnl2196u5rMPkHefZf/aDOzFsWRrrQU1ZattBbXwO23ydpZkkQK0Cul+daq4f8bnmOlPPUNfdqGgpKTqbnku/+BQfNylb6va6qwmGuuXGRTmq5a1S3jcwzJsC7+r96vuRi9p7y/n1L0+5DNOHdWlS5DddmUr1tE/cpvtft79dowKrnPcgCta+2rBq3rXBGitOfT8zkN90ZWmK2/BcVJCGC9K4X1vpUFXO1Rsjpkuv8Co+4pDvmQLwo2rAZcBilfMEQRAEQRDmipGUZ/fccw+e//znAwDa7TYeffRRKKXwlre8Bc997nPxvve9b+g0jTE4//zz8exnPxtHHXUUAGDbtm1ot9tYs2ZNFHbt2rXYtm1bbToXX3xx3/yp5td0IR39F+Dxy17dMs9QGK9NW1Ve/Rpe4EOPutk9dh+2hvLxsqeaF30FFHPZqjQqKVSzgi/88EIYvml2VKhMGURxB0w/Q6F81ijUvOTZ2W016y/j647vvSkFd4dNylP3XHxqpBq21VLxS6EPP829rit7HwXXzBV2sZ8qlb1f2eDCDqigakqrVvHUqLxQlXPNyjMbvsa/dq9C+2D9rDwK0yknUlbgoVHxEMfvc7+aFBdRGtXrr0vD93ANSsVqPBerzt/n3JhGv1lWleuqUTbXx2s6UV9f6lEDhGm49zNRNtXF7xd2kHzrntFAZRz0XLMicJi8KldZiTdNPgPkN4gCLaSHDm4dIMulyGKW8wRBEARBEOaCkZRn++67L3bu3AkAeOxjH4vbbrsNGzduxPbt27F79+6RCrJ582bcdtttuOaaa0aK73jHO96BCy64wB/v2LEDGzZs8McKgG5Uu9gwqk6SL5ZaNsZD8/JGh27Ykbp5YUm/vOPX0tr9x8LQDXucuVi6TxnIlkHX+MdlrF9u6ctYnSYQpTAb97hcnlHTCj/2UNGthcqBhvs6Spnisg2eZnMqBdPVj5mUoVwvHE2KqGHSHja8U1KWl+MOevUu7jAMuhNbv5o/XI6jpNFfSTfKdQ9fphkqggAoVUqzYUbSzFBzcj8GIlI8lvYKbAg3tixYGQd4bgOUbeeuHE+feWEWJYtZzhMEQRAEQZgLRlKenXDCCbjyyiuxceNGvPjFL8ab3/xmfP/738eVV16Jk046aej0zjvvPFxxxRX44Q9/iMc97nHef926deh2u9i+fXtklXzggQewbt262rQmJiYwMTExdBkiVL1CYpAX+MHOV9OeVvHVOAfInq/smVbNt37RZlFmPZ3iquG+hBTXMYqS0C4sbfzk3XDKs+nu6aDKJAVUy6RK5wcu02CEisp+KYfPtXnuIKeiS5MBpysxK5oHCzesimHQ+xDnMaxCq3ZCYUzNxVWUMwPlNZqapVBczVxNE3/MYfjUZkNZVL6dTR/EGFzdXGXUj1YMAy2k8ixi+g+FNLFwurXByjsn5ZvlRFutfHYTXEQseTlPEARBEARhSEZSnn384x/H1NQUAOCv/uqv0Gq18KMf/QhnnHEG/vqv/3rgdIgIb3zjG/H1r38dV111FQ499NDo/DHHHINWq4UtW7bgjDPOAADceeed2Lp1KzZt2jRK0TlflJdOVff86Sf+Ny1jHCTPQRZYFeGL0NMpZ/qrvVwaMU1LDKfLZxAM6pfYNCkzmvYvaypBcb395sqVfGiwezrIvawv1bSBBk7bpTZMWZr2dCunWRRnsDrTb4llOXS/5aJ1Zem3HDWk/7OOww3aNpuuv/Y+DXD9xTMbvJ0Uz2yAa5vhdc00zenDzG56QHOdHvUZ+bB9AwwztxUDKWzm7Jn0W1I6J3li2ut19bn/PR6mDnB6g5RtoOsd4Pm6+9F7tAvgtkFSXXIsdjlPEARBEARhthlJeeY2egUArTX+8i//cqTMN2/ejC984Qu4/PLLsc8++/j9LVavXo3JyUmsXr0ar3nNa3DBBRdgv/32w6pVq/DGN74RmzZtmtUvMNW9vPbdwoWa99tqzmM6gb2Uf58vhDUxbJl
YeTb4XKT6Dw7UhevzMtaQhRlFqTTwS24QZ5BwQ6YLNH8IoZr2cLNuBlbEjFonp31pZpXvoGkPq5Ad+AV/iOurXlc/JengSpxB8c9sIKXK3CimBlEkzYVypUlpPnreatqvSPo0Z0u5SKrxK5MzynMIBfT06U2fd10/Vj8LcPbu3TBK8YHr6BBK0YHCTaOMC+9H1lvomYcLx1KT8wRBEARBEGbKSMozgDd+vfvuu/Hggw/CmPhV44QTThgojU996lMAgBNPPDHy/+xnP4tXvvKVAIBLL70UWmucccYZ6HQ6OPXUU/HJT35y1GLXkoCAgV+X0Liss28UNdi8KkexnGtwVYub2Tb8vKnB8qp7Eeu/Z1hTPjXLIPtFCOMG935Y5eKg93LY65xpvOkYTq0YxxvmedalOEz5q0poGiCNpplFoy/sqy4drPPlXAZltOcYz3JsUlyMnurMmE21gFPDz+aquUGWLVfoE9j1wdPO9Bsiw9lXrczeQtFqfzTztKe7NU0fhKlNa8TKMtNlu4MqbQFgz64cV88ot8XNUpLzBEEQBEEQZooiGl6Evf766/Gyl70Mv/71r1GOrpRCno/PPiE7duzA6tWr8fGbn4nJvet1hcO+qLv9xYaJowdfVBLnhQZFU5/wehhFYBS3ZmP8acIPuwl9lNeI96PpowtzmefI8abZi25O8x7xen0aIyiJy/T7CEXfvEcue3VBn57BdTiF9Kj13NGswBsOrv8urZkpEWaioCxT95xncrV8z2eXYQ0Yg6AxN2nOPm4Pt7mF6+bc5aL839zkEdZZBeDRnYQnHXkfHnnkEaxatWpO8hxXlpqcJwiCIAiCELJnV4bzjrlxKDlvJCnj3HPPxbHHHotvf/vbWL9+ffPXwBYJ1T3QgOlnJgyn0GrKa6C4A+k3g+UtIz4OjlbNq2l5nwKNlhf52EPTVMY4+cH2SRqGURUBTXkOt/fZcDXGLZmaqeLLvQcPu9S0VJpa/0GXfw1D7VLJGT708FMMM2P2+8jZUH2FT2GYJXLVdGJmpratT3PYZclN6Q2/LLt/esDsPAtOB8AIe1H2T3O6/TaHT6uOodKf4TMolu/P3r0vLxFWAKZ29QDcN0vpLy6WmpwnCIIgCIIwU0ZSnt11113493//dzzxiU+c7fKMBYPtBTWaIDnTF8AmCKrYb2g2105hmr2hRszLzOBFfZC0Z1NZ4fbAGW3xZEN6szrvJ2aUD0H0w9et2UxzlhQYIbP+3EfYR64xrYH6lOGYi2cy2pzVmrTG+Hpnu+45hdJsXe/4tjVbP2bhOmdT6TVb/V3dM+xOdWcl7cXIUpfzBEEQBEEQhmUk5dlxxx2Hu+++e8kKVRoEjLg0cDrSWVcZFMzVipliJt5szCdxxCnNpiqpKaVR8xhq3teg9WbE2VyDLK2dqVKyvFR4bmbDBHnNQp3qN1t0JukPO0u03+zN2Wb4WYn9GewrqcPsGzizaX+ztUdcPfWpz/fs1Gr9n32lcrmvGXZ5fpzOzHDXO5vtYdAyTadar0tnd5Lj88MXaUmw1OU8QRAEQRCEYRlYeXbLLbf432984xtx4YUXYtu2bdi4cSNarVYU9mlPe9rslXABmOleUf3Qam7mG81lmX0eavQ9zqZj1L2xhspDzda8mnpmst/cMHmoOapDRR5zX5cceh7ymqs2V2Xu2kcZVgSY0ZQgM2D+rm/+7mWR5+j7KY6e5/y1NYfGnKjoBmIh7nGY97DXvSub2/583FhOcp4gCIIgCMKwDKw8O/roo6GUijaOffWrX+1/u3PjtpHsKEz3qj2T1x0z4jZh0xOXenb3gmHULK6zLJdtPl7mBp+pM3pplsauMDP50uWwMedLsTU/DDsbbKafRZiLe9fcd8zfsyorOuaiP6vLc9i5VTMt02xe0TDd83wpXcnubxnlvUCKO5f7MOyZygDcPzdFGUOWk5wnCIIgCIIwLAMrz+699965LMfY0e+liKBmfV+pucDM4b5aM2Umm5P
PNXOxx9dsUezLM37lK/ZyGy/MmN4vwLXR8YL3yBqv+0Wwe4GNWb87Lv1EsS/jeJTHMR9Kz0HhPf2GK0un05uj0owny03OEwRBEARBGIaBlWcHH3yw/33xxRdj7dq1kUUSAD7zmc/gd7/7Hd7+9rfPXgnHEAVCskBLT4ZBL3QB+qJAavb3bZodyrsQAeNTxumVLQtZ3mE3TecZMHNb1tncuH7gPe0GZQ4+nFBm+Pqw0HOD6r+VOy4tMMQphsahdDNRUs1V+Wu/gDubqMHLPkpZ9rSzEQq1eBE5TxAEQRAEoZmRPhjwD//wD/jCF75Q8X/qU5+Kl7zkJUteqHL7DY0zSs3uJvxzxXzseTUs5dLwnlnjVcZ+zMf+cbOFWkT3diH24ZoNNGj2lX5zxGx9QGI+8fvPLXRBRmQu97KcD+ay7I92l+/SxOUu5wmCIAiCIJQZSXm2bds2rF+/vuJ/wAEH4P77l/7+IDP/ktw8LGWhxaE8M2jYBG7Er1HOBYpG/DrdHED+n2YWi6IE4I/azpayZD7qy2JS7Lh+ajH0AwXzU9bZfoqL6x4z5S9wLlbm8kvNu5fxvl7LXc4TBEEQBEEoM5LybMOGDbj22mtx6KGHRv7XXnstDjrooFkp2LgzU3HdQM3qcrJFTc1729jNRxqTwiyW/fYWAoPx3dtsITBjvK/gQkKLUtU1+xAgfUlEdf7gVG95LdsMETlPEARBEAQhZiTl2TnnnIPzzz8fvV4Pz33ucwEAW7Zswdve9jZceOGFs1rApYrC4pohNN/QuGirxo54wbC8+hZMrxBZXnWKoGZcQZZi/ZrLTezHTOXfl3H4aMu4zOgF4Gc7h7ck0ctXeSZyniAIgiAIQsxIyrOLLroIDz30EN7whjeg2+0CAFasWIG3v/3teMc73jGrBVyq6EW8R858sNj34ZlLwskiCm7PH7lX06FFWT0Ui3WPt4VENy1DF2oZ9zq2O12+yzZFzhMEQRAEQYhRRKPbnnft2oU77rgDk5OTOPzwwzExMTGbZZsVduzYgdWrV+PjNz8Tk3uPpCucE+Z74dD4vp7UIzPzBmfQurTc7+Zi3Ix+Jsx0b0Y1founxx6Fhd8fkUf0xaHBG/cPhuzemeM1f/gzPPLII1i1atVCF2dBEDlPEARBEISlyJ5dGc475sah5LwZSRl77703nvGMZ8wkiWXLQrwymEW214/ozgZh8JtkaHE9/7lguVy9W5ooTWj5QdLOZ409+fJdtukQOU8QBEEQBIERE90ygl+n5HV6ubLcZl4tZxQAg+WjLBQKSI2+Z6TUl5hEmekDCYIgCIIgCMsCUZ4tA4o9ZURxspxRWuYiLS/qVSGyHFNoYpz3H1sIUr189zwTBEEQBEEQYpa88sxt6bZn1/IVgmVDeQHgDfNlZokgMxCFesZ78/6FoGPlhhlsDSvMAyLnCYIgCIIwLHtGkPNm9MGAxcBvfvMbbNiwYaGLIQiCIAjCIuS+++7D4x73uIUuhtCAyHmCIAiCIIzKMHLekleeGWNw55134sgjj8R99923bL+YNa7s2LEDGzZskGczhsizGV/k2Yw38nzGl2GeDRFh586dOOigg6C1nqcSCsMict54I/3h+CLPZryR5zO+yLMZX+Zazlvyyza11njsYx8LAFi1apVU8DFFns34Is9mfJFnM97I8xlfBn02q1evnofSCDNB5LzFgTyb8UWezXgjz2d8kWczvsyVnCemVEEQBEEQBEEQBEEQBEFoQJRngiAIgiAIgiAIgiAIgtDAslCeTUxM4D3veQ8mJiYWuihCCXk244s8m/FFns14I89nfJFnszSR5zq+yLMZX+TZjDfyfMYXeTbjy1w/myX/wQBBEARBEARBEARBEARBGJVlMfNMEARBEARBEARBEARBEEZBlGeCIAiCIAiCIAiCIAiC0IAozwRBEARBEARBEARBEAShAVGeCYIgCIIgCIIgCIIgCEIDS1559olPfAKHHHIIVqxYgeOOOw4
33njjQhdp2fHe974XSqno78lPfrI/PzU1hc2bN+Mxj3kM9t57b5xxxhl44IEHFrDES5sf/vCHeMELXoCDDjoISil84xvfiM4TEd797ndj/fr1mJycxMknn4y77rorCvPwww/jrLPOwqpVq7BmzRq85jWvwa5du+bxKpYm0z2bV77ylZW2dNppp0Vh5NnMDRdffDGe8YxnYJ999sGBBx6IP/3TP8Wdd94ZhRmkL9u6dSue//znY+XKlTjwwANx0UUXIcuy+byUJccgz+bEE0+stJ1zzz03CiPPZnEict7CI3LeeCFy3vgict74InLe+DJOct6SVp59+ctfxgUXXID3vOc9+MlPfoKnP/3pOPXUU/Hggw8udNGWHU996lNx//33+79rrrnGn3vLW96Cb33rW/jqV7+Kq6++Gv/7v/+LF73oRQtY2qXNo48+iqc//en4xCc+UXv+kksuwd///d/j05/+NG644QbstddeOPXUUzE1NeXDnHXWWfj5z3+OK6+8EldccQV++MMf4nWve918XcKSZbpnAwCnnXZa1Ja++MUvRufl2cwNV199NTZv3ozrr78eV155JXq9Hk455RQ8+uijPsx0fVme53j+85+PbreLH/3oR/jc5z6Hyy67DO9+97sX4pKWDIM8GwA455xzorZzySWX+HPybBYnIueNDyLnjQ8i540vIueNLyLnjS9jJefREuaZz3wmbd682R/neU4HHXQQXXzxxQtYquXHe97zHnr6059ee2779u3UarXoq1/9qve74447CABdd91181TC5QsA+vrXv+6PjTG0bt06+ru/+zvvt337dpqYmKAvfvGLRER0++23EwD68Y9/7MP8x3/8Byml6Le//e28lX2pU342RESveMUr6PTTT2+MI89m/njwwQcJAF199dVENFhf9p3vfIe01rRt2zYf5lOf+hStWrWKOp3O/F7AEqb8bIiInvOc59Cb3/zmxjjybBYnIueNByLnjS8i540vIueNNyLnjS8LKect2Zln3W4XN998M04++WTvp7XGySefjOuuu24BS7Y8ueuuu3DQQQfhsMMOw1lnnYWtW7cCAG6++Wb0er3oOT35yU/G4x//eHlOC8C9996Lbdu2Rc9j9erVOO644/zzuO6667BmzRoce+yxPszJJ58MrTVuuOGGeS/zcuOqq67CgQceiCOOOAKvf/3r8dBDD/lz8mzmj0ceeQQAsN9++wEYrC+77rrrsHHjRqxdu9aHOfXUU7Fjxw78/Oc/n8fSL23Kz8bx+c9/Hvvvvz+OOuoovOMd78Du3bv9OXk2iw+R88YLkfMWByLnjT8i540HIueNLwsp56UzLPvY8vvf/x55nkc3CADWrl2LX/ziFwtUquXJcccdh8suuwxHHHEE7r//frzvfe/D//t//w+33XYbtm3bhna7jTVr1kRx1q5di23bti1MgZcx7p7XtRt3btu2bTjwwAOj82maYr/99pNnNsecdtppeNGLXoRDDz0U99xzD975znfiec97Hq677jokSSLPZp4wxuD888/Hs5/9bBx11FEAMFBftm3bttq25c4JM6fu2QDAy172Mhx88ME46KCDcMstt+Dtb3877rzzTnzta18DIM9mMSJy3vggct7iQeS88UbkvPFA5LzxZaHlvCWrPBPGh+c973n+99Oe9jQcd9xxOPjgg/GVr3wFk5OTC1gyQVhcvOQlL/G/N27ciKc97Wl4whOegKuuugonnXTSApZsebF582bcdttt0Z4+wnjQ9GzC/WA2btyI9evX46STTsI999yDJzzhCfNdTEFYUoicJwizg8h544HIeePLQst5S3bZ5v77748kSSpfwHjggQewbt26BSqVAABr1qzBk570JNx9991Yt24dut0utm/fHoWR57QwuHver92sW7eushlzlmV4+OGH5ZnNM4cddhj2339/3H333QDk2cwH5513Hq644gr84Ac/wOMe9zjvP0hftm7dutq25c4JM6P
p2dRx3HHHAUDUduTZLC5EzhtfRM4bX0TOW1yInDf/iJw3voyDnLdklWftdhvHHHMMtmzZ4v2MMdiyZQs2bdq0gCUTdu3ahXvuuQfr16/HMcccg1arFT2nO++8E1u3bpXntAAceuihWLduXfQ8duzYgRtuuME/j02bNmH79u24+eabfZjvf//7MMb4jkqYH37zm9/goYcewvr16wHIs5lLiAjnnXcevv71r+P73/8+Dj300Oj8IH3Zpk2bcOutt0aC75VXXolVq1bhyCOPnJ8LWYJM92zq+NnPfgYAUduRZ7O4EDlvfBE5b3wROW9xIXLe/CFy3vgyVnLe0J83WER86UtfoomJCbrsssvo9ttvp9e97nW0Zs2a6CsLwtxz4YUX0lVXXUX33nsvXXvttXTyySfT/vvvTw8++CAREZ177rn0+Mc/nr7//e/TTTfdRJs2baJNmzYtcKmXLjt37qSf/vSn9NOf/pQA0Ic//GH66U9/Sr/+9a+JiOiDH/wgrVmzhi6//HK65ZZb6PTTT6dDDz2U9uzZ49M47bTT6A/+4A/ohhtuoGuuuYYOP/xweulLX7pQl7Rk6Pdsdu7cSW9961vpuuuuo3vvvZe+973v0R/+4R/S4YcfTlNTUz4NeTZzw+tf/3pavXo1XXXVVXT//ff7v927d/sw0/VlWZbRUUcdRaeccgr97Gc/o+9+97t0wAEH0Dve8Y6FuKQlw3TP5u6776b3v//9dNNNN9G9995Ll19+OR122GF0wgkn+DTk2SxORM4bD0TOGy9EzhtfRM4bX0TOG1/GSc5b0sozIqKPfexj9PjHP57a7TY985nPpOuvv36hi7TsOPPMM2n9+vXUbrfpsY99LJ155pl09913+/N79uyhN7zhDbTvvvvSypUr6YUvfCHdf//9C1jipc0PfvADAlD5e8UrXkFE/Bnzd73rXbR27VqamJigk046ie68884ojYceeohe+tKX0t57702rVq2iV73qVbRz584FuJqlRb9ns3v3bjrllFPogAMOoFarRQcffDCdc845lZdEeTZzQ91zAUCf/exnfZhB+rL/+Z//oec973k0OTlJ+++/P1144YXU6/Xm+WqWFtM9m61bt9IJJ5xA++23H01MTNATn/hEuuiii+iRRx6J0pFnszgROW/hETlvvBA5b3wROW98ETlvfBknOU/ZAgmCIAiCIAiCIAiCIAiCUGLJ7nkmCIIgCIIgCIIgCIIgCDNFlGeCIAiCIAiCIAiCIAiC0IAozwRBEARBEARBEARBEAShAVGeCYIgCIIgCIIgCIIgCEIDojwTBEEQBEEQBEEQBEEQhAZEeSYIgiAIgiAIgiAIgiAIDYjyTBAEQRAEQRAEQRAEQRAaEOWZIAiCIAiCIAiCIAiCIDQgyjNBEJYdV111FZRS2L59e99whxxyCD7ykY/MS5kEQRAEQRCEmSNyniAIc4EiIlroQgiCIMwn3W4XDz/8MNauXQulFC677DKcf/75FSHrd7/7Hfbaay+sXLlyYQoqCIIgCIIgDIXIeYIgzAXpQhdAEARhvmm321i3bt204Q444IB5KI0gCIIgCIIwW4icJwjCXCDLNgVBGEtOPPFEnHfeeTjvvPOwevVq7L///njXu94FN1n2//7v/3D22Wdj3333xcqVK/G85z0Pd911l4//61//Gi94wQuw7777Yq+99sJTn/pUfOc73wEQT+e/6qqr8KpXvQqPPPIIlFJQSuG9730vgOp0/q1bt+L000/H3nvvjVWrVuHP/uzP8MADD/jz733ve3H00Ufj3/7t33DIIYdg9erVeMlLXoKdO3fO/Q0TBEEQBEFYJIicJwjCYkOUZ4IgjC2f+9znkKYpbrzxRnz0ox/Fhz/8YfzzP/8zAOCVr3wlbrrpJnzzm9/EddddByLCH/3RH6HX6wEANm/ejE6ngx/+8Ie49dZb8aEPfQh77713JY/jjz8eH/nIR7Bq1Srcf//9uP/++/HWt761Es4
Yg9NPPx0PP/wwrr76alx55ZX41a9+hTPPPDMKd8899+Ab3/gGrrjiClxxxRW4+uqr8cEPfnAO7o4gCIIgCMLiReQ8QRAWE7JsUxCEsWXDhg249NJLoZTCEUccgVtvvRWXXnopTjzxRHzzm9/Etddei+OPPx4A8PnPfx4bNmzAN77xDbz4xS/G1q1bccYZZ2Djxo0AgMMOO6w2j3a7jdWrV0Mp1XeK/5YtW3Drrbfi3nvvxYYNGwAA//qv/4qnPvWp+PGPf4xnPOMZAFj4uuyyy7DPPvsAAF7+8pdjy5Yt+Nu//dtZuy+CIAiCIAiLHZHzBEFYTMjMM0EQxpZnPetZUEr5402bNuGuu+7C7bffjjRNcdxxx/lzj3nMY3DEEUfgjjvuAAC86U1vwt/8zd/g2c9+Nt7znvfglltumVFZ7rjjDmzYsMELVABw5JFHYs2aNT5PgJcAOIEKANavX48HH3xwRnkLgiAIgiAsNUTOEwRhMSHKM0EQliSvfe1r8atf/Qovf/nLceutt+LYY4/Fxz72sTnPt9VqRcdKKRhj5jxfQRAEQRCE5YLIeYIgzDeiPBMEYWy54YYbouPrr78ehx9+OI488khkWRadf+ihh3DnnXfiyCOP9H4bNmzAueeei6997Wu48MIL8U//9E+1+bTbbeR53rcsT3nKU3Dffffhvvvu83633347tm/fHuUpCIIgCIIgTI/IeYIgLCZEeSYIwtiydetWXHDBBbjzzjvxxS9+ER/72Mfw5je/GYcffjhOP/10nHPOObjmmmvw3//93/jzP/9zPPaxj8Xpp58OADj//PPxn//5n7j33nvxk5/8BD/4wQ/wlKc8pTafQw45BLt27cKWLVvw+9//Hrt3766EOfnkk7Fx40acddZZ+MlPfoIbb7wRZ599Np7znOfg2GOPndP7IAiCIAiCsNQQOU8QhMWEKM8EQRhbzj77bOzZswfPfOYzsXnzZrz5zW/G6173OgDAZz/7WRxzzDH44z/+Y2zatAlEhO985zt+On2e59i8eTOe8pSn4LTTTsOTnvQkfPKTn6zN5/jjj8e5556LM888EwcccAAuueSSShilFC6//HLsu+++OOGEE3DyySfjsMMOw5e//OW5uwGCIAiCIAhLFJHzBEFYTCgiooUuhCAIQpkTTzwRRx99ND7ykY8sdFEEQRAEQRCEWUTkPEEQFhsy80wQBEEQBEEQBEEQBEEQGhDlmSAIgiAIgiAIgiAIgiA0IMs2BUEQBEEQBEEQBEEQBKEBmXkmCIIgCIIgCIIgCIIgCA2I8kwQBEEQBEEQBEEQBEEQGhDlmSAIgiAIgiAIgiAIgiA0IMozQRAEQRAEQRAEQRAEQWhAlGeCIAiCIAiCIAiCIAiC0IAozwRBEARBEARBEARBEAShAVGeCYIgCIIgCIIgCIIgCEIDojwTBEEQBEEQBEEQBEEQhAb+P+qTN5R8BoMZAAAAAElFTkSuQmCC", + "text/plain": [ + "
" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "b = 1\n", + "s = 256\n", + "n_heads = 1\n", + "head_dim = 32\n", + "q = torch.ones((b, s, n_heads, head_dim))\n", + "\n", + "p1 = 1\n", + "p2 = 0.5\n", + "\n", + "pe = RotaryPositionalEmbedding(head_dim, p1)\n", + "q_pe1 = pe(q).squeeze() # [s, head_dim]\n", + "\n", + "pe = RotaryPositionalEmbedding(head_dim, p2)\n", + "q_pe2 = pe(q).squeeze() # [s, head_dim]\n", + "\n", + "f, (ax1, ax2) = plt.subplots(1, 2, figsize=(15, 5))\n", + "ax1.imshow(q_pe1.T) \n", + "ax1.set_title(f\"p={p1}\")\n", + "ax1.set_xlabel(\"position\")\n", + "ax1.set_ylabel(\"channel\")\n", + "ax2.imshow(q_pe2.T) \n", + "ax2.set_title(f\"p={p2}\")\n", + "ax2.set_xlabel(\"position\")\n", + "ax2.set_ylabel(\"channel\")\n", + "plt.show()" + ] + }, + { + "cell_type": "markdown", + "id": "a68f50c0-542a-4659-9258-5c067e0ebc26", + "metadata": {}, + "source": [ + "## 2d p-RoPE" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "6a77250e-833b-48f4-b769-5b36f93f4990", + "metadata": {}, + "outputs": [], + "source": [ + "#| export\n", + "class RotaryPositionalEmbedding2D(nn.Module):\n", + " \n", + " def __init__(self, head_dim: int, p: float = 1.0, max_seq_len: int = 4096, base: float = 10_000) -> None:\n", + " super().__init__()\n", + " self.rope = RotaryPositionalEmbedding(head_dim=head_dim//2, p=p, max_seq_len=max_seq_len, base=base)\n", + "\n", + " def forward(self, x: torch.Tensor, pos_idx: torch.Tensor) -> torch.Tensor:\n", + " \"\"\"\n", + " The tensor `pos_idx` specifies the x and y coordinates of sequence elements of x.\n", + " \n", + " Shape:\n", + " x ... [b, seq, n_heads, head_dim]\n", + " pos_idx ... 
[b, seq, 2] or [seq, 2]\n", + " \"\"\"\n", + "\n", + " xshaped = x.reshape(*x.shape[:-1], -1, 2) #split head_dim [b, seq, n_h, head_dim/2, 2]\n", + "\n", + " x_out = torch.cat(\n", + " [\n", + " self.rope(xshaped[..., 0], pos_idx=pos_idx[..., 0]), # coord 1\n", + " self.rope(xshaped[..., 1], pos_idx=pos_idx[..., 1]), # coord 2\n", + " ], dim=-1) \n", + " \n", + " return x_out" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "bebd3a56-2de5-47c7-8d51-8f034e577833", + "metadata": {}, + "outputs": [ + { + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAABM8AAADZCAYAAAA31xdbAAAAOnRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjEwLjMsIGh0dHBzOi8vbWF0cGxvdGxpYi5vcmcvZiW1igAAAAlwSFlzAAAPYQAAD2EBqD+naQABAABJREFUeJzs/Xe8pNdB2P9/znna9Jk7t9ftTWUla9VlyxKSJWwHgw1fQwg1JOH7RXYMJhAgXwdMiJ2QFyYEF+D3pTmJY1MMrtjCsqxmyfKqbm93by9z7/T61PP7Y+6uvFiSpbXkK2nOWy/9sXfvnT17dspnzjzPeYRSSqFpmqZpmqZpmqZpmqZp2reRmz0ATdM0TdM0TdM0TdM0TXul0otnmqZpmqZpmqZpmqZpmvYc9OKZpmmapmmapmmapmmapj0HvXimaZqmaZqmaZqmaZqmac9BL55pmqZpmqZpmqZpmqZp2nPQi2eapmmapmmapmmapmma9hz04pmmaZqmaZqmaZqmaZqmPQe9eKZpmqZpmqZpmqZpmqZpz0Evnmmapmmapmmapmmapmnac9CLZ5qmaZqmaZqmaZqmaZr2HPTimaZp2vP41Kc+xU/8xE+wa9cuhBDccsstmz0kTdM0TdM07QX67Gc/y1VXXUUsFmNqaorf/M3fJAiC7/hzMzMzCCGe9f9PfvKT34ORa5r2SmJu9gA0TdNeyT72sY/x2GOPcc0111AsFjd7OJqmaZqmadoL9A//8A/80A/9ELfccgt/+Id/yKFDh/id3/kdCoUCH/vYx17Qbfzzf/7Pectb3nLB12644YaXY7iapr2C6cUzTdO05/E//+f/ZHx8HCkll1122WYPR9M0TdM0TXuB/t2/+3fs37+fu+++G9PsvvXNZDJ84AMf4D3veQ979+79jrdx1VVX8RM/8RMv91A1TXuF06dtapr2qvZbv/VbCCE4fvw473znO8lkMvT39/Oe97yHTqfzXd/+5OQkUuqnSk3TNE3TtJfay9lxR48e5ejRo/ybf/Nvzi+cAfzCL/wCSin+5m/+5gXfVrPZxPO872o8mqa9uukjzzRNe0145zvfydatW/ngBz/II488wv/4H/+DcrnMxz/+cQCq1Sq+73/H24nFYqRSqZd7uJqmaZqmadqGl6PjnnjiCQCuvvrqC75nbGyMiYmJ87//nbz//e/nV37lVxBCcODAAf7zf/7P3HHHHS/mr6dp2muAXjzTNO01Ydu2bXzmM58B4K677iKTyfDRj370/OH6P/iDP8h99933HW/np3/6p/mLv/iLl3m0mqZpmqZp2jkvR8ctLy8DMDo6+m3fNzo6ytLS0vPelpSSO+64g7e//e2Mj48zPT3Nhz70Id785jfz2c9+lre+9a0v8m+padqrmV480zTtNeGuu+664Nfvfve7+ehHP8oXv/hF9u/fz+/9
3u9RLpe/4+2MjY29XEPUNE3TNE3TnsXL0XHtdhsAx3G+7ftisRi1Wu15b2tqaoovf/nLF3ztJ3/yJ7nkkkv45V/+Zb14pmk9Ri+eaZr2mrBr164Lfr1jxw6klMzMzABw4MCBTRiVpmmapmma9p28HB0Xj8cBcF33236v0+mc//0XI5/P87M/+7P8l//yX1hYWGBiYuJF34amaa9OevFM07TXJCHEBb8ulUovaKPXeDxONpt9uYalaZqmaZqmfQcvRcedO11zeXmZycnJC75veXmZa6+99qLGdu62SqWSXjzTtB6iLyGnadprwqlTpy749enTp4miiK1btwLwjne8g9HR0e/4/3ve855NGL2maZqmaVrvejk67sorrwTg4MGDF9z20tISCwsL53//xZqengZgcHDwon5e07RXJ33kmaZprwkf+chHLrjy0R/+4R8C8OY3vxlA73mmaZqmaZr2CvVydNyll17K3r17+ZM/+RN+/ud/HsMwAPjYxz6GEIIf+ZEfOf+91WqV5eVlRkdHzx+5tra29m0LZIuLi/zZn/0Z+/fvf9YLEWia9tqlF880TXtNOHv2LG9729v4/u//fh5++GH+1//6X/z4j/84V1xxBXDxe57df//93H///UA3oprNJr/zO78DwM0338zNN9/80vwFNE3TNE3TetTL1XH/7b/9N972trdxxx138GM/9mMcPnyYD3/4w/yrf/Wv2Ldv3/nv+7u/+zt+9md/lj//8z/nZ37mZwD41V/9Vc6cOcNtt93G2NgYMzMz/PEf/zHNZpM/+IM/+K7/zpqmvbro0zY1TXtN+NSnPoXjOPzar/0aX/jCF3jXu97Fn/7pn37Xt/vVr36V973vfbzvfe+jUCgwMzNz/tdf/epXX4KRa5qmaZqm9baXq+P+2T/7Z3z605+mVCrx7ne/m09/+tP8xm/8Bh/5yEe+48/ecccdCCH4yEc+wi/8wi/wJ3/yJ9x88808/PDD3HLLLd/12DRNe3URSim12YPQNE27WL/1W7/F+9//ftbW1hgYGNjs4WiapmmapmkvkO44TdNeLfSRZ5qmaZqmaZqmaZqmaZr2HPTimaZpmqZpmqZpmqZpmqY9B714pmmapmmapmmapmmapmnPQe95pmmapmmapmmapmmapmnP4VVx5NlHPvIRtm7dSiwW47rrruPRRx/d7CFpmqZpmqZpLwHdeZqmaZqmvdK94hfPPvWpT/He976X3/zN3+Txxx/niiuu4M4776RQKGz20DRN0zRN07Tvgu48TdM0TdNeDV7xp21ed911XHPNNXz4wx8GIIoiJicnefe7382v/dqvfcefj6KIpaUl0uk0QoiXe7iapmmapr0GKKWo1+uMjY0h5Sv+s8ZXLd15mqZpmqZ9r11M55kv85i+K57n8dhjj/Hrv/7r578mpeT222/n4YcfftafcV0X13XP/3pxcZFLLrnkZR+rpmmapmmvPfPz80xMTGz2MF6TdOdpmqZpmraZXkznvaIXz9bX1wnDkOHh4Qu+Pjw8zPHjx5/1Zz74wQ/y/ve//9u+/t/uO0A8Zbws43w1sBRs9UIS0WaPZHN5oeR/n9nJ4Up+s4eyqUQE5uMJjLP2Zg9l04W1EmGtvNnD2HSpLXGyl6Wgx4/ccCbaZK4vIoxX9EHZL7sh6XJzrEhMhJs9lE3TbITccesTpNPpzR7Ka5buvJeO7rwu3XlduvOeoTuvS3del+68Lt15F9d5r+jFs4vx67/+67z3ve89/+tarcbk5CTxlEE89Zr7675gjlBMmG36evgBAtCOTFKBjZFLbfZQNlckkNM2htW7jwkAFJhNA7nc4+82AD+eZLE0Bb3dVCTyDUJLIK3evk8MO1VeN3SWjPQ3eyibplbv3gf0qYCvLLrznp3uvC7deRt053XpzjtPd16X7rwu3XkX13mv6GfUgYEBDMNgdXX1gq+vrq4yMjLyrD/jOA6O43wvhveqUvMc/vuJy6g0ezsmVAD1
xxM4K9ZmD2VzKZBHioj5+maPZNNJy0YOD232MDZdZyRDc0yiejyq7H5F0nKRZm9HlWN4BCrEJ9jsoWwan96+D3wv6M576ejO69Kdt0F33nm687p053XpzuvSnXdxnfeKXjyzbZsDBw5wzz338EM/9ENAd2PYe+65h3e9612bO7hXmUBJZho5lqq5zR7KppIB9K8HxAu9/YSJUvh1l7DT2uyRbC4hEI4Nifhmj2TTqZhJaIQ9fzi/UhGyozB6/HB+FUHTM0H27jw03N4+gud7QXfeS0d3XpfuvA2687p0552nO69Ld16X7ryL67xX9OIZwHvf+15++qd/mquvvpprr72W//7f/zvNZpOf/dmf3eyhvapkLJf37DuIE/X2FcOiCOa2m9RavT0PSsFDpyc5vrxjs4ey6dJLJqmlV/xT4cvOWm+S//zqd/7G1zg7LSjen0T09lMElVyGmcsnkXbvRrbX9oA/3uxhvObpzntp6M7r0p3XpTvvGbrzunTndenO69Kdd3Gd94p/JvnRH/1R1tbW+I//8T+ysrLClVdeyZe+9KVv21xWe36WDNmfLzBg9O6hmQCBEsRyDivBK/6u/7IKlcByJmnls5s9lM2lwIkUUbHHP6EGZCkgdqYK9O4nUAAiFqO1mgPZuzEB4A04HEsOEDq9uwF76HY2ewg9QXfeS0N3XpfuvC7deRt0552nO69Ld16X7ryL6zyhlHpNP4JqtRrZbJYPP3ZtT28k64cm8+UxWn5vH7asQqjPxXBrvftEAYCCtZMJakt63xhroYG50NzsYWw6GURIT5+mFmUTBJMDPR9Vbt6geFWMqIc/kYw6HaZ/+/+lWq2SyWQ2ezjac9Cd16U7r0t33gbdeefpzuvSndelO69Ld97FdV7vVkaPcSODr1dGWWrlNnsomyuE2LSDud7jd32lSJ+sk13SR1ZExQZhSV/CXDgxRCKh98JIOIS5OMro7XkIM4IwDqH9mv587XlFonf/7tqrj+68DbrzunTnnac7r0t3XpfuvC7deRfXeT3+ytI7UqbPT44fRoT6kzjVJ6DzPE+Yis27jPP36M9WCmavSrFWjb38f9gr3NxKgpXVLZs9jE1n103iJavXj+ZHBgp7udrz82AuhXC2herhjWTDwGNuswehaS+Q7rwNuvO6f4zuvPN053XpzuvSndelO+/iOk8vnvWImAx4ff8MA9Lb7KFsKgV4IyHhRVya9rUkQnB/c4ojbv9mD2XTLS2mqSzqS5gnFgTypKTXD7ax1lvEloqIqLefI0zXwyyVu7tv96hA+Zs9BE17wXTndenO69Kd9wzdeV2687p053Xpzru4ztOLZz2iHZl8rrSTKOztf3KloFl28Du9/cmsUlBYy1Cp9/beKACdpRhDyz1+KXfAXPOwFzrdO0cPM9o+UbXV83GJAJlJb/YoNpVUHlQ2exSa9sLozuvSndelO+8ZuvO6dOd16c7boDvvojqvt19he0g7MvlKeQsFt7c3PVaRoDabxa32+AaqCtJzith6r79yQGylRf+K3khW1RqoYmmzh7HplFKoKOr1o/kRjoMc6EcYvXstdxm5evFMe9XQndelO2+D7rzzdOd16c7r0p3XpTvv4jpPL571iJgMuDV3lqDn98IQRFYS5dqbPZLNpSAcD4ia+qo75bJDudLj9wegVDVZXtend9ARUDFA9fZGskZkEPNjiB6ehyA00Zueaa8WuvM26M7r0p13nu68Lt15G3TnAbrz4OI6Ty+e9Yi49HlD30lyhrvZQ9lUAkiPOtiit+/6CsV66FJTwWYPZdMd6oxxuDO62cPYdG5liOraKKqHX0QBqJiImThEvT0PViNCng2RQe9+Nhv4vf06ob266M7r0p3XpTvvGbrzunTnbdCdB+jOg4vrvN5+ZekhnjJ4vDWK0buPjw0Cr5MiCvQnUK1mhOv27iaR56zWsxQavX2aC0CjmiRVgJ6//FDNRyy59Phe0xjtCGPFRfRwVMmwtxch
tFcX3Xnn6M47R3del+68Lt15G3TnAbrz4OI6Ty+e9Yh2ZPFgdQvlILHZQ9lUUSSZW8tTbfX4BqoKrIKJUevx0zuA2LrC0VtAYFU8+tb0YoFouYhitec31FVBQFRv9PY86Kttaq8iuvO6dOdt0J13nu68Lt15XbrzunTncVGdpxfPeoQlQnbFC3ihtdlD2VRKCXZGNbx0j28kCxgJA9nq3U0izxsOUfUe//gJ8BuCTlXSPemld7XbUC6Lnj+tIfBMGpUcqocfGkHgwaObPQpNe2F053XpznuG7rwNuvMA3Xnn6M7r0p13cZ2nF896REz63J49SVr09icOQkB+yCHe43thAD1/1PY5ldCnGukjTJb8DGe8gZ6/W6y6GZ6sTRL2eFS5HYf1Qp4g7N03XlG7oxfPtFcN3XlduvO+Ra+/oG/QndelO69Ld16X7ryL6zz9ytIjQiVZ9jMU6fWr7gjmowQy6u1PZgFCXxL18BPmOS0vpOX3+uMCyl6ClXa256Oq0kngV+NEPR5VdEziaxFR2Lv3iNDVzwvaq4fuvHN0552jO69Ld16X7rwu3XkbdOddVOfpxbMe4SqT++s7aYa9fRh7qATz9T5qnt4Lo1WN47Z0XBo1iVnXcWk0waps9ig2n2wr7GLU85/YCy9kpNiBqHcnIghdjm/2IDTtBdKd16U7b4PuvPN053XpzuvSndelO+/iOk8vnvUIiSJntohLb7OHsqmUEhgxaBmtzR7KpvOiGIGlr0ZlmgIj1uOfPgEipZCJHt744JyOgjQ9H1WRp/D7op7eC8P3PTi82aPQtBdGd16X7rxn6M7r0p3XpTtvg+48QHceXFzn6cWzHuHIgBtSZ4iL3o4qAeT6JI5+DcVRNpZ+CkAp1dOfupzTiQIaod4TpBmZFHyHqMc31G1GDjOdAQLVu5/Wd5o+fHazR6FpL4zuvC7dec/QndelO69Ld16X7rwu3XkX13n6GbVHKAVuZPb6BVYQgMDA6vWJQGAoG0Ppp4AoEkQ9/KnLOW4Y0g78Xv8gjnZkUvIcev0u0Q4dmp1ET0eV29JvMrRXD915XbrzztGdd47uvC7deV2687p0511c5+ln1B7hKZPH21u6YdXDFIJCO0Orx/cEUQoq7QQtt7fnASBomwTt3n5cAIiOQDZ7/c0GCE8gGgLR63XpC0RdIKLevU8EXgf4+80ehqa9ILrzunTndenOe4buvC7deV268zbozruoztPPJD1EoJA9/nlDBEgRYfT41aiUAFOEWCLY7KFsOikFhuzdT13OkYbAMPQ8SCPCMOn5qBIoDEshVO9OhK/czR6Cpr0ouvN0552jO+8ZuvO6dOd16c7r0p13cZ2nF896hCUCLosvYPf4i6gAkqkIu8fjEsDGxFDGZg9j05lKzwOAUIKeP4YdCKMIPwxQPf4cESpFM4i6e8X0qGYj5At/vNmj0LQXRndel+68Z+jO69Kd16U7r0t3XpfuvIvrPL141iMEYIoIS/T2J3ECiIsAW+hXjxiq5+8PAIaKkPqpsHvVIdW7h26fE6gQGfX2m0+AQCn8MOrpuPSVfn7UXj1053XpznuG7rwu3XkbdOcBuvPO0Z13cZ2nn0l6hK8MTnZGCHr8kxcF1IM4Xo/vCYKChh/HDazNHsmma/s2HU9fyj30DQK3t58fAAgEdETPX8KcEGjT0/Pgt13gf2z2MDTtBdGd16U7b4PuvPN053XpztugO69Ld95FdV6Pv7L0DoWgoyy8Hr/qTqQElTBOJ+ztmFBA1Y/TCnRMtFyblqvnIfRN/E5vPz8ACF8g20ZPxwSACMFs9fYpHkGns9lD0LQXTHdel+68Lt15z9Cd16U7r0t3XpfuvIvrPP0I6hGmCNlqr2P0/GHsilg8xOj1Z0zAVBKpD99GKhPR45/UA0hlYPT6J/UAET195aHzFBCqno7LZiPgzt/e7FFo2gujO+8c3Xnn6M7r0p3XpTtvg+68Lt15F9V5+hHUIwSKhHT1RrIo
ctLF6fF5AEgIsPVrB7aQWOioMjCwhH5JAHo6JM5RQNTDm8gC1BJ6ryDt1UOgSBqdnu88CeRkm1iPzwN0O8/p9UsKArYwsIW+yqSJgSV072pdCnp6vzOAWv3Ff9ik3yn1iBCDJb8PRW+vlijAC03CHv8EqjsPFoH+BAovtPD1PBCEJr7eG4UwlPi+vj+oCCJP9vTmwl7LA05t9jA07QVRSGphAtnL5+AACKiGcQy9aIRBdzGx10kh9DwAEoHo8feBmvatmm19wQDtOSglaEYOfs8vGgnKQQJX74VBPYjR7vF5AGj5Ni29aIQbmLT0hroEgYHrmj29aASgIkHUMXp6HqK23vNMe/VQgNvj+50BoLp7YGuapmna82mHL/4IZf0q2yOkiBiyaogePzwTYIu1vtlD2HQCEKr7f68TSkKPLyoDoCToI/C6i0WhngehBCLq7Q11Ww2fn97sQWiapmmapmmvCPodQo+QKNKyjSV6ew8XgSIlXT0PQEIEOD0+DwCOAH28FZhC6j1BAInEQkKPn9ogRHcfvF5Wq+vnR03TNE3TNK1LL571iAhBMUz19FEEXYJIiZ7f+w26p/IqpRdLwsjo+T3wACJlEOojz1CR1HsBAlEkCUOzp18yOk0fmN7sYWiapmmapmmvAPodQo+IEDTCGEGPLxIooB7G8PSbY1qhjaf3R6EVWLRCfeyZF5p6DzzADyVt3+7pRSOAMJK0PYtevuBm2HI3ewiapmmapmnaK8SmvnP+4Ac/yKc//WmOHz9OPB7nxhtv5L/+1//Knj17zn9Pp9Phl3/5l/nkJz+J67rceeedfPSjH2V4eHgTR/7qI1FkjLY+3groNxo9/8YYACV6+o3xOUpJVA9vin5Odx70kYhKSaLI0I+NjXno5dNXOw2ff7/Zg3iV052naZqmadprxaYunt13333cddddXHPNNQRBwG/8xm9wxx13cPToUZLJJAC/9Eu/xBe+8AX++q//mmw2y7ve9S7e8Y538NBDD23m0F91BIq07GD2+B5XAkjoPc8AiIkAGz0Ptoj03m+Aqfd+A7qXtLf0xdwRQmBtXNi+V9XqkV48+y7pztM0TdM07bVCKPXK+Xx9bW2NoaEh7rvvPm6++Waq1SqDg4N84hOf4Ed+5EcAOH78OPv27ePhhx/m+uuv/463WavVyGazfPixa4mnevsUNYMI0euXV1QbV5rUx5718FviCwl91BnQPeIKfeQZCoFSvX2VyS6JUkZPH5XZbgT8PzfeS7VaJZPJbPZwXhN052mapmma9krQbgS868CjL6rzXlGVUa1WAcjn8wA89thj+L7P7bfffv579u7dy9TU1HNGleu6uO4z+5TUarWXedSvDhGCVhQn7OE3QtB9Y9yObPwe3/sNwI1MPQ9058GN9F5fXmToPc+AIDLohK+ol8ZNESpJO+jtPc+Cpgvcu9nDeE3RnadpmqZp2qvVC36H8N73vvcF3+iHPvShFz2QKIr4xV/8RW666SYuu+wyAFZWVrBtm1wud8H3Dg8Ps7Ky8qy388EPfpD3v//9L/rPf60TKGLCQ4neXjwDiAtPX20T9FVHN0RKEOkjroiUIIz0PESIjb2+eptSgqDH56Fj+vTSiYO68zRN0zRN057bC148e+KJJ17Q94mLXJy56667OHz4MA8++OBF/fw5v/7rv35BANZqNSYnJ7+r23wtkCjihock2uyhbCqBIiZ9zB6fB1A4Iuz5PfAALCK9Bx5gigi75x8XIAVY+pxNJGCJXt7xDOqJiA9s9iC+h3TnaZqmaZqmPbcXvHh2770v36kL73rXu/j85z/P/fffz8TExPmvj4yM4HkelUrlgk8lV1dXGRkZedbbchwHx3FetrG+WikEnjL1Xl+AHxpIPQ9IofQ80F1Y7uUFgnMkIHr5HL0NYuM/DWSPH5HZaofAsx/99FqkO0/TNE3TNO25fVcbu5w+fZozZ85w8803E4/HUUq9qE8klVK8+93v5u/+7u/4
2te+xrZt2y74/QMHDmBZFvfccw8//MM/DMCJEyeYm5vjhhtu+G6G3nMUgkboEPX8m0KBpwzCHn9TCOApU88D4EcGgd77DV9JvEjv9RVszEOvL6lGSuBHRk8vr/tND3hys4exqXTnaZqmaZqmdV3UO6Viscg73/lO7r33XoQQnDp1iu3bt/NzP/dz9PX18Xu/93sv6HbuuusuPvGJT/CZz3yGdDp9fn+LbDZLPB4nm83ycz/3c7z3ve8ln8+TyWR497vfzQ033PCCrsCkfSuFKcKef0MIIImI9N5vOCrQi6lAJPWeZwChEnoxle6ikV5M7e55FqrePjbV9f3NHsKm0Z2naZqmaZp2oYtaPPulX/olLMtibm6Offv2nf/6j/7oj/Le9773BUfVxz72MQBuueWWC77+53/+5/zMz/wMAL//+7+PlJIf/uEfxnVd7rzzTj760Y9ezLB7mgRi0kWKXn4r1GUR6nlAYYsQQ+9xhannAQADhSn0PEgUlp4HBIruEmLvPlc2rIgXVjOvPbrzNE3TNE3TLnRRi2d33303X/7yly/YtwJg165dzM7OvuDbUS9gf51YLMZHPvIRPvKRj7zocWrPUECIfEFz3gv03m8QKYnUiwQYytR7v9FdNOr1C4oACLoLib1ObPzfy5pB715IRHeepmmapmnahS5q8azZbJJIJL7t66VSSW/i+gqlELQju+dP21RAoAwi1dvzABAqSYg+TS9UUp+uCEQIAj0PREriK6OXD7gCzt0fevv0VbfhAyc3exibQneepmmapmnahS5q8ewNb3gDH//4x/lP/+k/Ad3LlkdRxO/+7u9y6623vqQD1F4q55bNevsdYfeokuhFbXj8WqZnQR9xdU6E1PPAt8xDjz9HKCBUYU+/ZEjRu3ue6c7TNE3TNE270EUtnv3u7/4ut912GwcPHsTzPH71V3+VI0eOUCqVeOihh17qMWovAQHEpKdPTwNMESF6fJFA0F00Ej2/91t3MVXveQZSKP38QPeUbj0P3eeHXt8DrynDnt3zTHeepmmapmnahS5q8eyyyy7j5MmTfPjDHyadTtNoNHjHO97BXXfdxejo6Es9Rk17yfX2MSWapn0n+jmiq5fnoZf/7rrzNE3TNE3TLnRRi2fQvcz4f/gP/+GlHIv2MlKAG1mbPYxXhAjR83u/AQRK6r3f6J7QHOn7A0rpeYDuPHT3fuvtuVB0Hxu9fI2ZTtMHpjd7GJtGd56maZqmadozLnrxrFKp8Oijj1IoFIiiC0/t+Kmf+qnvemDaS6/7Hki/IYyQRD38hrBLEClJ2OP3B+huEK8XjboLJaFeTO0+LpQ+cVMhen5x3e/llUN052mapmmapn2ri1o8+9znPse/+Bf/gkajQSaTuWDzdSGEjqpXIAHYIkT0+ltCsbHXV6/PA+jj7zZIoe8PoB8X5+g9z7oEIHt8z7NWFG72EDaN7jxN0zRN07QLXdTi2S//8i/zL//lv+QDH/jAs17KXHtlEnqRAABJhNQb5XevOqrvDxhCX20TuotnRo8vlkB38UxfQAKkoOfnQcjeXTzTnadpmqZpmnahi1o8W1xc5N/+23+rg+pVRAG+MlA9fhqO9gy9dNallD4GT3tGd58vfX84v+fZZg9kE3XcADiz2cPYFLrzNE3TNE3TLnRRi2d33nknBw8eZPv27S/1eLSXUaT38QHY2N+qt98cK/ReX+eEqrv/W687t1jS66KN/QB7/bmyuwdebz8uOqG/2UPYNLrzNE3TNE3TLnRRi2dvfetb+ZVf+RWOHj3K5ZdfjmVdeBXHt73tbS/J4LSXjgAsEei3xoBeIugSEuj5JYJeX0Z9huj544y69P3hGb2+hNjye/e0Td15mqZpmqZpFxJKvfjLSUn53J9GCyEIw1dOcNZqNbLZLB9+7FriqYu+uOhrgin0iXqgFwnOMUTU82+OYWOjfL0HXnfPsx7f4wr03pDnSMAQvX093mY95B1XnKJarZLJZDZ7
ON9TuvM0TdM0TXstazcC3nXg0RfVeRdVGf/0kuXaq0PQ46fgfCv91hhQeokA0HcG7QLPLJ318rIR6AcGtP0AOLXZw9gUuvM0TdM0TdMupD+i6xHnN3/WG2Hrfb44t5+RIEIvqHYfF5s9is2nEPqxQfcCEqE+JhMQRD3+etEJenfPM03TNE3TNO1CF714ds8993DPPfdQKBS+7RPKP/uzP/uuB6a9tLpb5CvQp6dhbPYAXiH0qYrP6O0lAu2f0o+Mrl5/XLStYLOHsKl052mapmmapj3johbP3v/+9/Pbv/3bXH311YyOjiLEqz+xFc/+RuG5vv699GLH8FzfLzeOP/vOt/Zsbx1f7M+80Nl8vp97vn+Vixub4NyeRppBhBT61BypLyABbOwF+E8XVNX53/wn3/vs3/ZCf+5ZF6ee4+H+nD9zMWNTz/6NF/6M0sed0Z0TKTZeM55vOp7vwfOsPyee/0Oc5/qt5/pzLmZsSjzHD144tpbxytnX63tNd9731kvVed/Nz+p5+O5v86XyfGN4qcf3vfyzXqyLGdvFjvmVPA/PR89Dl56HrpdjHniJb/Ol9GL/vi/FmC9q8eyP/uiP+Iu/+At+8id/8rv84793Op7FiWPbadcsEqcrGI0AYg7YBrVdDsGA5IbhM2xPr238hODx9SmOlMdJzHsk53yU74Pr4Q3G6WzN0J+vsWvLElnTY9Jss+Yn+Fplik7NwjxmQyNArJZBQDDZj8gYJPfUiWdcrkuu0W90OOJmKfgxCmfzNNaSWAsVzPUmpJOQjNOZEnSm4LL0Ktfn5mkrg2pksFTN8+TCVoxSSPJUHREqhBAEKZP6nhTJtMt1U6fJOG0yMkAgWAqSNAKbE9MT1CpJ4tNVzEoH4ThgmdS323RGTQ4MzHFp3+L5uTtaHuOxtS04qwHpMx74Acp1CXIOrR1ZMtk2e7YtkLFdtphtmqHNPZUt1JsO5lELUVeI1TIqCAgn+lEZm+SuOk6/y4F4kUm7yQk3zUKQoDiXo7KcxlypYy3XIBlHpJO4I4LWDsGOVIk35s/iA5XIZK2R4fH57UQVSJ2oI70IhCCMGTT2pDCzEddOnaY/3iAjA0yhWAmS1EOb07NjrBUzxGfrWGsthG2DbdGatGhO2VySW+LA4Oz5B9nZ+gBfX92BWVSkT3YQXggdlyBh0t6VI5bx2btjnozjssVqEUWSe6tTrLWTmMctREkg1srQdgnH80S5OIltTZyRNpfHyuxyqkz7Kaa9FJXlNMX5HOZ6E2u+gog5kE3j9wuaewSjiRq3D5xBiohyZFLpJDg4u4NOzSJ1oo7RCkAIlCWp705Bn+TAxBlG0hUyMsQWEWthnGpkM7swzFKhH2exgbPYBNtE2DbtEYvGdpttmXVuHD5zfqFtpZ3l/uXdhBVJ9ngH2QlRHZfIlrR25TAzsHf7PNlkiymrjUPEfdVJFtoZjFMWYk0iilVEvUU4nCXsTxOfbBGbbLHbqXJZvMyiH+eEl6G2nmBtuh9Z6WDPlMC0ELk0QU7S3CvIJdu8afA0CcOlHFnUPYeDczuo1eKkTtaxahvzIAXNnUn8AYv9YzNs7VsnKQISIqIYxSiGMZZW+5lZHMYutInN1hCmCY6N129S2+0wkqpy8+hJHKN7NErZTXLv0h7aNZvs8Q5Gs3t/iCS0d2ZROZPd2xboz9SZMDukZcAj9TFOtvowzlrIJQPKdUSlQdSfJhzOYo90SGxrMOU0OJAoUgxsDrtZGtU4K6cHoBrgnC0ilIC+DGHKoHmJIJ72eNPAafJ2i3Jo0QpNHp/fzlo1Q/J0A7vogRAgoLk1gTsaY+/QInsGl4iLkJQMqYQOa2GcQinHmdlRjJJHfLqKEBJiDkHGoLrXIZduc8vocVKWC0AzcPja0h7K9SSZEx2sancelIpob88S9DvsmFxiuL/CqNmh3/B4ojHMU80hjAUDOWMhak0oVolyCcLRPqwBn+SuOsNOi+sSazSVwaFOjnozxsrJ
AcIa2NNFpB9BXwaVMGnsFchcyG0D00zEqlQik1Zk8vTiFubL/STPNnFWOufnoTMWpzUVZ1t/gctHZ4mLiLQMaEQWK2GSci3FybMTqEpI4nQFEYKIOQRJg9reGLGMzy1jJ8g7DQD8yOT+5d0s17OkT7k46wHK9SAI6GzJ4A3HmRorMDGyzqDhMWK4HGvlebQ+CgUT47SFqLcRaxWipEM4kcfIhST31MnHO1yfLADwVCdHteOwcnIAr2rinC0iWz7k0hC3ae4WhIOKN/TNsju5TjUyaSqDE6vjnC6MEFtsk5hv0V0oAnfQobEjyVi2zFUT08RlQEaGuMpgKUhSayU4fmYSvyZInKogvRARixE5BrW9DqJPcfPIScYSVQAiJXi4sJ0z1SFSZ13iywF4HsrzcceSuBNpRgdLbJtYoc/wGDc7zLoZHqxOEJZMjBMWouEjCiWUbRJM9iMzkNpbI5V0uSFZIC4DDnVylDyH1dMDtMoO9kwZo9pGZNOQcGhtF7ijcG1ukSsyyzQig3pkcrY4xNHlSeyCS3K6eb6k/JxNfXeS/kyDaybPkDQ90ob3MlbJK5vuvFdn5y0GSRq+w4mzE1QrCeLTVayKCxud19jovKv6Z7m0b+n83B2rjHJwbSux1YDURufhugTZjc7LbXSe5bLFatMIbL5anaLejGEctRA1hSiUIQgIJvohY5PYVSeWdzmQWGfCbnLCzbDgxynO56gsZ7C+pfNIJ/FGBK0dsD1Z4o39MwQ803mPzW0nqkL6RB3hRd2LVsQM6ntSWNmIaydP0Z9okpEBBoqVMEktsDk9N8Z6MUtstoZdaINjIyyL5lS39fb1LXP1wCzn3iLO1Af4+upOjGJE+qR7vvPCpElrV454xmfv9nkysQ5bzDaRkny1MsV6O4Fx3EaUQaxVup031keUS5DY3sAZ7nB5fKPzvBRn/SSV5QzFuRzGehN7odL9N8ql8fOC5h4YS9a5beAMUoSUI+ufdF4NoxUihCCyJI3dKVROcGBymtF0hbQMcFCshgkqkc3MwgjLhTzOYhNnsQG2hbCtF9R5meMushNCp7PReX2YGcWe7fPkznee6nZeJ41x8sLOC4ZzRP0pYpMt4pMtdjlVLo+XWfQTnPDS1NaTFM7mkWUXe6bUba++DGFW0tgLfak2bxo4TcLwKEcmdd/h4OzO851n1nzERuc1NjrvirFZtvStkRIhcRlSDOMUwxiLqwPMLg5hFdrEZ2tgmgjHxu03qe9yGE5XeePoSWy50Xlekq9tdF7meAez2e1dJaG1Mwc5g91bF8ln60yY3cfhw7UxTrXzyLMmctFEVOqISp1wo/OckQ7xbQ2mnCYH4usUQ5sjbpZ6Nc7qRufZ5zsvS5SSNPYJ4hmPNw2cod9qUo4smqHFE/PbKFSzG53ndudBQGtrgs5IjL3D3c5LiJDkRucVwsSzdp5wHPxs93U9m2pz69gJkmYHgNZG55UaSbLHu52nXBcVdTsv7HfYPvUtnSd9nmgO8XRzCDlvIGe7nSeKVaJsgmBso/N2bnReco1WZPC0m6PeiLFy6p90Xi6DSpo0Nzrv+/qnmYhvdF5o8vTSFhZK/SRmNjoPgRDQHv+WzhuZIy5D0iKkoSyWgwTlWpoTZyfgXOdFQMwhTBjU9m103ugznedFJg98a+cVv6XzptJ4w4lv77z2RuetmhinTUSj0+28hEMwkcfs63ZeX6zDDS+k8xIbnTegeEN+hl3JIrXIpBl1O+/U2gjxxTbxudb5eegMOTS3f0vnGQEZEdJRBktBimorwfHpSYKqIHG6gnRDiDkox6S6z0HmFG8YOclYogJAhOSR1W/tPB/l+bDReZ2J1EbnrZL/ls57oDpBtNF5NH3Earfzwo3OS+6tkf6Wznv6WzqvXXawZ0oY1Q5sdF57o/OuyS1y5Ubn1SKTmeIQR853XgNU90JHXs6isTtFPtPgmsnTJE0f03RfdGtc1OKZ53nceOONF/Ojm8bzbA4d30Zx
IUn/l2axC21ENoNK2Sy+JUNnj8nVmVkm8xUAlIK7q5dx38weBh5uMvRQE9Vqo+oNmnvzlG+ZYPeORXZMLpOWAZfYDY4GDkdrY9QWk9j3JBDLHvLQGZQE98YdiDGbob4l8naNt6dX2O20ONYZYN1Lc/zEFpaPD5B8ZBbnRAExOgjD/VRulJQdwYTRYvdQlVJkQhDjVCPGwyf2Yp3uMPzFZaSvEFLSGXZY/oERBsfrvH78OANGkwmzg0BQCmOUA5Mjp7cyPztA/p454mdriEz3Trhye5rKlTF2OeuMD1bPz92jjR3cN7OXzNMuo1+pQ6uDqtbpbElTfNMU41Mltk2uEHM67LaarCrBqfoIS4UM9teSyMUIeWgaOh3c67cRjacZTC6TTdR4U6LE5XaLGTfHut99szr7+DjxJxeJP7GIGMrD2BD1KwXFpCQ2dJrtQ0/TAVQQZ6mT55GTuwlmYfRzSxjNECEkfs5i6W0jJCZ8rhk+Q1+qyYTp4qCohg6lKM6JmQmOnZqk74FFkkeLiFQKkYyzdmOStRuT9G1rMzZy6PzROCfbI9w/sxfnZMDYF2vIhoeq1PAGYxTfvJW+iRZT42vkYx22m22CyGSmMcSx0iD2gwmMaYk8ehbKNdyrtxJu6adfrJLJlLnGaXC506bgZ1jz08wujHPy61txjhdIPjKDyGZgapTWbslaSnLV4CI/O3wI0/AJ/ThlP8k3z+ykuhBn5HNL2OseQgrChMnyW4dR20wuyS+Qy7UZNVwyMqQVWaxHCU4tjPPY4V1kvrlK5uAKIpFApJKUr4yzIlLcOnGCHxx7Ckt2j8JY8XI8MLsHf9Zg4jNVzIqHqtYIUibrb95KbEIxNFwmk+wwYbikRMQnWgN8ozyO/Y0ExlETcXIeubSOd8UU/q5Rcjeuk+svstPyucRaoREkWfUzLK4McfSRnRjTVVL3n0HGYrB1HHfKZDUh2TZc5MeHjjJgNgmCOI3Q5omz21lYzDPy+WViCy2QEmVJVu8Yor03wUSyyGX9CwwbHnnp43omy8rk7OowDzx+OckjJXIPLCIcB5FJU9/lsCgzXDE6z9vGniZtdGOiEcR5eH4npaUUk39fxS507w+RqSjeuQW11SHb1ySTbjNs+IyZLp9v9/FIeTvW43HMgw7i7BJyZhl/zxje5VOkr6jQP7xKv1xlt7XMKeVQ8LMsr+d5+ht7YK5D+t5TSCVh2wT+iE0hJugba/L2/lNMmU1CFccNJIfnpjgyP8nQl1dInayDlCAF628coPK6LGmjzeuGzzIgfYYMD6UkC0GGheIA9z2xH/t0nfw9c0hpIrIZWhM2CyLLltEibxk5zKDR3HhylTy2tI3p5UEmPlclMde9P0RRQOm2SdxdGay4TzbfpE+22Gl1uN/N8HB5O+ahGNZ9McTiGvLUPMHUAO6B7ST2NBkcWcIURbZnl1kPDO71syxVsjx5cB/egiJ9z0mMVoDYNkE4EKcgBeZWnztyc0yZLaIghh/BiaVxHp7ezcDX1sg+WQYpEUJQuiZH8aZ+vm/XYa4dO02f4TNquKwGSWZ9i+VqH/c/eRlq1qf/y7Pd59hcFm/QYuGHsmTH23zf4AkGEi0AmpHD0yuTPL68hbF/qJE56aJqdVTHpfL6MRr7B3i9OEz/UI2U6bLd6nAoSPL18g7EcQf7ywnESgV5bIZwIE3n+l04Wz2GRhbZISpMZlZRKL4WZJirZ3jqiX005m3SXz2Fud5CbB1H9SdZe6vE2xtxbbLAZLaFCGP4oc10YZh/PHoFfY+WyD+0DkIgpKR2aZrCbYNcteUs142fIiM9JkyXSuhwVuUoNLI8eOhS2guS/i/NYNYCRC5DmLVZ+MEscmvIdX0z5NPdeQgigxNrY3xl/hKGv1on/0Qb1Wiimi1qVw9Tu26E111ymqGxMsNmwDbTY6EV4+HyDvzpGPY/JBCFJvLwNFHCxr1xD+YEDA0uMiZr/GB6nbyheChMM9PKcujQHorTGVL3
n8GeryAmR6A/S/F2SR3YaTeY7JtmBZu2EiyU+rn76BWkDtUZume1G1VS0tyaYOUtw+ycWOGasTMkbZ+s0X754+QVSnfeq7fzKqHB0dNbnrfzdjtrTA5Vzs/dwcZ2HpjZ87ydt31ymbgTscdqsKpSnH7Oztt6QefdkSxyhd1izs1RClKcuqDz+LbOiw+dZudwt/NmgjjLnT4ePbXreTvv2uHT5NON851Xj2wqKsbpmfHn7bx+0WJipHK+8850hnlwdvfzdt7W8VUG4i12Wk2CyOQvmwPdznvon3belm7nyW7nXR+rcaXTpOinKPopZhdGOfnwuc6LLuy8dLfzfm74KUzDZ9qPU/PjPH5m+wWdhxRE39J5l/fPk881z3dexzWohA6zi8PP3XkyhTEBPzL++PnOK9bSPDK383k7b3xknYFUk21mi5SI+Ot2jsfL49iP/tPOG3+m8waK7LM7XGG3cMMYDwdJFle/tfPCCzsv2e28nx46xIDV4EwQx+0YHJ6ZYmExz/Dnl4k/S+dtTxXIDzTPd94JJbofkhbyPPLEvmfvPKPbef/X+OOkrW7n+W2Lxxa2Pm/nDearDGRrTJltxkyXu900j1cmsZ741s5T+HtGn+m8kVXGjIjLnRanPIOHmkmWi3mefvRc50UXdl6823k/NnCMnXaVs34cFcY5OT/2vJ03YNW5fuTU+c6bIcNamGC9lOUbT+579s6T3c57x9gTDFs1ANaCNIdXJrqd94Vn77xUstVdPDNcdtsNHi1v4fHKJOaRb+286MLOG10iZRS5xJ5jPXD4ejPBci3Lk4/t3ug8dWHnGd3O+4G+M+y2qswEMYrKZnZ56Hk7z94VcvP4sQs6bzVIUKkn+ebTu+HZOo9u57116BCTVve5suk7nCoMdzvvy8/eeYYRbSyeuVzq1DjdGOaJyiTi5Ld2XnRh540uskNW2J2bR6H4RivGajPD00/tfN7Oe2N6gT1WhYUwxlpos7zex0PH9j1v590yeYS84Z7vvNUwQbMV44nDO5+3827tP3F+HoLIYL7Yz/3ze569864d4XWXnmbbxCo5w+dSu06pk+Xp6gT+TKw7D4Um8nC40Xm7up03vMiYWWNrdpG84fJEy2atk+HIkW3P3XkS9seK7OmvsBLaiNChVEnx0PG9z9t5t0wcZchskrVaL7o1hFIvfqvsf//v/z2pVIr3ve99L/oP/F47dwnzP3j4JtzCBM2yw1PfjFNZMzGWUtC2qA+7+OmA11+xys7JKrHJNtagS7WWp9bIUT5tUDppUlt0KE7HCW0I4oLBsTb7riiRGvAZ2tMmMqEpBEHNoPlUgvq6ydMH03SaJnaUwLQEqcuqxPs9rrqszsCAR5irETke9TN9dApJDj0dZ+asQ7yWxGkkaA91aA93uGx7lRsuKWH0tTHGG7TraSrLw5RXTM48buGWLTpnUwRC0MwHpHIeN127TDbvkt3dwkyGYPmoSFCdGaZVTnD48RiFJRNjJYWo2TQGPdoZn2suWefynWXs0TbOWIdGI0u52k9t3mDtsEmjYLN+Mk4oBX5CkB1wuexAkVTeZ3hfG+Go7jy0Jc2nErTWTQ49lqJWtnCiBIY0SO2rERtyuXxfg/FRlzBbJ0p0aM5maS+lOXk8xvHjMZxGnHgthZv3aI632THe4I1XFLCyLuZkHa8Tp7Q0Sn3d5ORBm07ZpDOdIggkzXyAnQ648ZoVBgbbZHa1sbM+WAEQUZsbol1McexQjIUZC1lIIMsxWn0+zbzPpTvKXHvJGvagS2yqRaudolQZpLFiUHjKorluUjiWIFASPylIZnwuu7pIpt9jeF8bMxnSFJLAFzSfTtBZMznyRIriqo0dxTGwSO6qExtrs3dXi+1TbcJMkyjdor2UojmTZXbG4dBTCcyWQ6Kcxk8HNKZajA61uP1AgXjWxZyqEUQWpYVRmhWHk9+0aBbN7v2hbdDMh4hEyHVXrTI21iS9vU1swAfTRxgh9cUBWoUsZ47b
nDnhIEsx5FqCTiagPuCxY7LG669Yxcm7xLe1cIMY66Uh2iWT5cds2iWD1aNJfK87D1Yi5LKri+QGXYb3tollA1pC4kWC1pEY3rLNicMJFuccbBXDjBziW5vEtzbZvqXNvp0tomSbMNvAXU/QONPH8qLN448lEU2bRClNGI+ob2nRl+9wx9WrZPo6mFM1IkNQXhyjVYtx6qBNrWDQmUkS1CxauZAgGXHV/jW2T9VIbumQGPHADBBmQHM1R2Opn/mzFscPOVB1MFZSeImQ2pDL6HCLWw8skejziO9oEGCxXhymXbNZecyiVTQoHE3SaRr4CYGMKy55XZGBkTaDuzukBvzuPChon4rhztpMn4xz9lQcSzlYUYzYWJvErgbjox32X9KAhEuYq+PXbOon+1kvWBx8NIVft0iWMmAI6lubxHIud1y9ymB/dx6IhZSXRmnVkkw/aVFcNHDnkvhFh3Y2xE2GXLa3xCW7y8THXFKTLpghwvRpl9LU5gZZXbQ4/ESMqGpjLKXwbUVtxCWX73D7NYtk+lziOxooW3bnoeGw8oRNqyApHE/QLlv4CYFyYPflZUYnm/Rv75Ad8+gg6AhBZ8ahc9phYSbGySMJjMjGjmLYgx6pvTUGhzxed1kdM+ER5mv4HYP68X6qRZtvfiNFs9KdB6kk9S0tZNbj+64qMDXcwpioIzMelZUhWpUss0dMVqZNvOU43kocNxXSzoTs3FbjdZetERvySW9rI8wQYfm4tQTVmWFKBYunvhnDr1oYi2lCIaiOdIhlPW67ZonBgTbx7U1EMqJYGqLdSrD6lEVjyWD9dJzGqk0QF4QObNtTZcvOOrkJl/xWFxdBWwi8JYvWsTiFJZujTyYh6D5XWtmA1KVVcv0+B/bXiKV8wv4qYQi14/00Sw4HH01SWrNJltOYnkVjqkWQ83jD/nV2T9YxR5vIfJv6+gD1Yp6lUwYLxy28goO7kMBLKNq5kPHRBtdeVSDe75HZ2UZa3Xnw2zaV6RFqJZunHo3TLJsYi2lUIKmNuIi0z60Hlhkf6T6GzZxPqTJIs5lm/ahJdcagNBOjOh8jiEEYE4xva7DzkgqZEZ+BHR0C2X3t9NdNWofjlAs2hx5LEbgmTpjATESkL6uR6vc4sL9OOtu9P0QyoH6yn3YxxuMHk6ws2cQrKex2jOZYG3fA5dq9Ja7YUcEYamMMNWmWc1QLg6zNmcw8beGVbNzZJL6laOZDBgba3HD1Cqm8hzVW4xdvve9FXcL8tUJ3nu483Xm683Tn6c7Tnac7T3fehS5q8ew973kPH//4x9m/fz/79+/HsqwLfv9DH/rQi73Jl825qPqzxw/w/YN1VGTwH2dv5EhxAPtzSeS0RB6bQRSreFdtI9w6QP7WAukrK/xobpq3Zue5rznIPc1hZp8a4/iD23BOrpN8eAaRSSGmRmntlBR+wOCKwWX+8567cUyPGT/GXDXPxx68k9JCgtHPL3ePBhKCMCZZfcsw4XaLn7vuHq6aOMuo4ZEi5EPTb+Dvly8lf19I3zci1FoJVVinc8kwrWsnmbx0hcveeJrL4lV+KLXI4XaePyxcQmUuTenzo7DUxn7kFMKQRPt3oEYtwrfXiY10j5KYiFV5faxCRoR8cP46HiyNY305gXHEQp6cRyyt4V8+SbB7lOz1JbI3rPP9mQV+PH+Gx9t9fK4+xuKpQQ5/ZTfGTK17NJDjILaO4U6YrL5dMjVc5r/s/TJDTp0ZP8ZqO8UfPXgHs4sDjHxxhfh8u3tosSkovGmQ1r4k//x1D/LGnUcYkj790ufP56/m/5u9huzBiP57QyhVUUsFvG19NN6wjaEdZa5803F2Jmu8M73Akp/k91cvY3U1Temzo4SLEc4jpxCuT7R/O2o4Qfi2BubWDjcPnGJrvMj1sSqTpssfLL6OzxW3Y36t+ymRPLuMmFkm2DWCf/kUqf1V8rcVuCG9yr/uP8G0l+RvapMsz/fx5Jf2ouZdUveeRkYCuW0cf9hm
9e2S7HiLD+65mz3pArN+jHUvxp89fBuH5qYY+soqqRONc5uwUXxDP5Wrsrzt0oP8wKUHyRsBw9LjMyuX8Htn3kD8KAz9QwjlJiws4w+nqN+6g+xUi6vefJSpTJ0fy8zjRQYfWr2cmVKW4udG8WcNnEdPI8sNosu2oUayRG9uwD6X6/PT7EmtcpVTY4/V5C9XL+UvVy/BPBjDvC+GXFxHnJwnnMjjHdhOfHeLgTcvc2mmxLsHj1IJTf5PdYqltRxPfGEf7gKkv3oKo+kjt453PyX6QYnc5vP+3fdwQ98c84FDMbD5Pwdfz4Nn9tH/wDq5JyrdM8iEoHIgR/H1eW7ecZR/ftWD9Bk+Y4bLg6WtvP/E7TBtMPzZEFnqwNwSQdqmfttOYhMRV7/lCGMDVX40s0BaBPz3wmUcreYpfXGEzuk49mPTGEsl1N4p1Hg/4a0toqs7vC47z5XZefbZTa6063yuuJ0/WHod4oiD9eUEYqWKPHaWqD+Fd91O7C0BAz+wxLa+Kr84dBgpIj5R3cJ8Jc0TX9xHfS5G+munMdeaiC1jkE+y9haDzj7Fr2x/gB8YPs5SYLMeWnz20DV88ehV5A6W6X+oeH4e6ntTFG4f5MqpWX7u+nvIWy7jhsvR+jD/4dgd1BcTjPxdiFlwYW6Z0FQ0vm8nctLiqu8/xuh4mR9OL7DFavFHa/t4pDZM+StDtA6nsQ7NYZ5ZQW0fQ20bIbyuQ3Rzi73pFa7Ln2WH2eLaWJWv18b5wNw1eGfj2J9LIFZbyMPTqLiFe8NuzAnBwNuWGBmq8Z6hIwyZbT5VneR0I8NTd+9l/UyW1IPT2LNlxMQwoj9L8TZJ7YDk30w9ys9MPEEhtFgObO47fSmffPwmUkcbDN5T6J6eJAWtqTirbxlm23iB//vGuxmKN5g0XZbbWX7t2PezuJJj5O9DnIUANb9E5Hs0btlBtDXF/ttOMLFzjX+WWuaKWIWPF3fx5eoEtYf6qX8jj3lyCevoPGpiELVnkugyj/DOBltSJW4eOMWE2eL1sQon2nl+c/ZGKosJ7L9PIVc2jjRREd4NuxHjDgM/sETfZJ13DRxjb6zC39YmONzOcvTenSweHSLx6ByxY6uI0UHEUJ7KDZLyGyQ/PHqYX9z2darKYMF3eGxhO3/xyK2Y0x7D/7CCdCOEFLiDDis/MEz/eIO7bvoSk5kyE4ZLK3D4f4/fyaHCCMOfDUmcCVGLq6hGk+ZNW/B29XPJG86w5fIlbksUeH1inc9UtvCp8nYaT+So3juAMbuO/cRZ1GAWdelWoh0h4Q80GM5WuXXgBGNWi5vjZda9JO+bvZGFQhb775PIBYU8PA2tNt51O4kmMvR//wqZXTV+rv8kN6VW+GJ9lG+0+zn58FbOPjZB/Okl4o8vIAb6EOND1C+XrN8puXVomvftuhcPxVwQ40RhlD9+6E1485KRzy9j1gOEFPgZk5V/NkJsMuT/vvFudg8uM2G4GErw2ydu4/7Cdga/HJI+HKGWC6hShfaBCdr7R9l5YI7d181wXbzEnakV7quP8qfre2icTFP+0jBioYbzzdOQindfMyYF7h3rPPyTf9CTi2e683Tn6c7Tnac7T3ee7jzdebrzLnRRp20+/fTTXHnllQAcPnz4gt97pW4q2wotvrK0jXY7RvVoGnPNRM7WkSshEolIpQgyNl5GQkxhyxApQxQhfWaDXTFBK5/g+FaX0DfxVgcwQolVa2HOC1IPCToDcF9lD2YqZHVQ0IlMrpo4CxnBaFBD1CPO+FmqIkYxp/BdyczjQxiHFTNuHcf3KLXjpD2FwqC6y2Bod8QwLVKTq+R2NnFGaiStGjXX4NO1nSxX0tRPZ/EXDYyZEpR9pGWjYhZu1oSMxLEjHCNECoUXGTywOkXQcVg9nsMsmBjTTeRygAwVIp0iTDt0MpJMTGEb5+YhImO02BUrEOVMjmztEAmBv9CP9MFqtJHLguTDkqg/5OHKTlIZj9VBaEuDfaOL7Eqt
MuLXcEoB036GchSnPBShPMHC4X6ePr2VlNck6bVZcpOkOwrRkdR2SvJCMUaL5FBI3z4XZ7BN0qniB/DZle2U63HKp3J4BQs5XUUUQ6Q0IW7iZSyijIHjRN2/j1BESvLo+jjf7NjMnh7AXLIwznSQSw2EFyBTKaJ0DDcjScTBkiGG6N4fEkaHHc4aMqM4NNXBM8Gf7cNoKay2iyx4JL8pMKcDDpa3MN83RGEQWrZkanCN4ViNobBOap/LrJ9mLUzQGAghEKyc7OOphW0k/RZpt8lZL02qDdQlte2SjILJA23ifR79l4XYeY9UvIKhIr5cmKLdciic6sNdjyHP1DFXu4d+y3gcN20RZg1sR2EZAcbGaQpPlYY53HE4MTuMOWthnHSRS01E00WmUgTpOG5GYifAMkLMjXlwZMQ2Zx0rFXB4sk3HtPG354gqAVbgI4oh8ccFYiHiUHGcxmCa9X5BIyHoz9W5fdch+mmS29JmKUiyFCTp9AUQCEqzGQ6tbyURtMm6DU75WeItCJuS+hZJcsJj6ooOiVSLgctPYfdFpFIlLOFz39o4oWexeKqfTjEBp5uYcy2MQCHjcTppiyBrYMUVjhFgyO4eHyer/ZxpjXN0aQDztA1nAsRSEdn0kMkkYTqBmzEQqRDLDLFkAISYImCLXcRMuBwbb1GXBv62DCoRw1IRotwk9rTAWINTq0P845hBqU9QzQhiSZfbdh6iz2rTP9RkLYgz56cJ0zFEJKguJzj0j1tIRW3OdGosBX3YLUmsLWiOmcQGQ7Zc1iHheAxcPo3TL0hl17Bkh0dLwzzmm0xPD9FeS6FOdTBnXYx2gIzH8dI2Xs7EjINjBJgyQgCzzSyLq6OcXc8hjjuYCxFisYSo+8h4nDBt42UMopTCNMON0zxCpPAZt8uohMvp0QnWwzjBYhIhDUwlMKpNnKOCVFMwt72Pu7deSjUjKPUJfEtw247DpJMug6k61cBh2ssgEiZCCFrrMY7eO8kiLc52alSDOKJlkHChPWgSpGHqEo+02SR/6QzxQYvkQAHbbHComufk+hDHZodpr6aJTgSYc2sYDQ8Zj+OnHNysgZEExzj391GsdlJ8dm2IlUqa8GgMc0UgF8qISoB0HCJL4mUMSAsMK8KWId2N0QJGrQquclkYHmaxlSVci+O5g5jKwKw2sU8KUggKE0m+snYpzRSs9wtaxHjj9qPEMwHDZo2WZ3ZfM2yBNMGt2px4YIJ1mWGmU8MNDLyWRdIDN2sS7pFM7PXJiSZ9e+dJjK6RGF4nZtY400izXM5zZKmfznya8JjCnFvHqLSR8ThBMkYnayJTCmfj31UIqHgxvljcSbWRoH0kibkmkXNVxHqIMC1IG/gZiyAtkHaELQPExnPEoFVnVxSyNtDH2a2DhHUbr/LMPFhnBakHBNURi6+W9uEnFYVBQTuwuWHrSaxcxIiqEbQlp70sNTPOWkIRtAxOf2OEtmEz69YRQUij5ZDyFL5jUNtlMLo7oJ8W2R1LpKcqxCfKxK0aK+0Yf1PdzfRaltbZNMFZiTFTRFZdpBMjTMZwMyYiHWFZvXvBAN15L77zxsIaoq447WV6uvNG/fr5zitFsR7uvAapfS5zfppCGO/ZzhugSW5Lh8UgyVKQ6NnOy1tt+odarAUxZnu884ZSDSqBw7SX7unOG7HqtFyT036mhztPMapqBG3BqVdJ513U4tm99957MT+2qapujE9MH2B9LcPQgwHOSgAnlhC1JnJgADGYwR+K0RoWyGRIwvCQIiAgZNQqE5PrVMbjPCimCFMx2sEW7MUa1pNL2GsRA0cVrf4Ef3ng9QQjgs5NLUbzZf7tJfcy4dQYvcXAjWw+Wd3JmVYfS4+FtFYMnvradk4vjhGtFlD1BtFUmv4hKF1uUrzeYvv2kCt21Lg8tsz18SpLoeRYYPD4+hZ+/9T1GAsm/V8XGIUG1tMzCCWQ2QxBzqE9bCOGIBvzSVgeUijagcUnTu9nrjDIwCMBifkQ
ThUQ6xVkPo8YGCAYStAalpCOSBgepggIVEi/WeXaRIlwWPDV/TsI8jHa7Ums1TbWYwtYawH9pxVR2uH/XH0d4bBJ5/UtMgMN3rXja+xOrDHyegNDGfx1bQdHW3kKTwVUFwTHvjHJ0nQ/0VoRVa4QjfUxMKao7DYpXG8zPAH799bY5bR4Y6JCWQkO+QbHq8N87OTV+Ksx+h8QmOsdjKfOIt0AmckQpRw6QzbhiEE6EZCyXEwRESjBZ2b2cnR5gvxjAenTEcwsIRbXkbksYrCfcDBJa1iSzSoSpoctfQJCMrLB1YkK8YEOX7l8F53hJJ3GOMaqj3lwHmPNo29WEcVMPnPgSsLhGJ2b2lijbX5+8gGuSi8wdIMkgeTz9a18szVE5Vie9TOC6adGqRxNE5UrqLUiajBPfgvUpyRr19ikhiWXXl5nW6zJbcnDuErxZGAy08zx4TPXUllL0f+AxC74GE/NYNTbGNkMKpfAHXLwRgwSqZCU5WHJEIXgy4s7eHRuB7nDIbnDISytIc4uI9IpZH+ecOP+EO9TxE0Px/AICYlLj6vidfqo85VLd1EbyeHWR5CrCvOxeYxCm9yiQpmSf3zdPoKxFO61bdQWl58c+wa37DlF/3WSHJKvNse5tzlO+0yG5WOwcHyAu5+8Emp1opUCKpshu13QGhWsXu9gDnXYt7/OZKLBHanjmCLkSd9ksZPgT2dfx8J6nv6HJLGlCOPwLHaxhkynEdkM/mCM1qhJPh2RsrzuiyHwUGGCe09fSupURP7x7ibQ4vQCIhZDDg6gBmO0hw2MvCJm+TiGT0iIJTrsjy0wJBPcv3cHhWELrzmIvyiRTyxirVTIrCiUFDx82VYe2HIl7pUd/N0e7xh+kh/b+Q36rpX0C8k320N8vr6FcCHOwlOwfjLHPd+8AlFrEi2vouIxYjsMxCCs3OiQHozYcWWTLekqb0qdJCsDngoMlj2bj0/fxLG1MfLfEMRnwTi2gL1UQqaSyGwWfyBOc8Qkm1OkbO/8hRCeKg3zxWNX4szBwCMB1nodTiwiDBM5PEjY79AeNrAHAhzHJ2Z4KAIEPvucJUZMi0d2bYWBJF4nTzBgkji0jDFfIlWApFAc2jPK4zsvx9vn4u3v8H39J/hX13yVrIChH5Qcc/P8bW0HsmAx95iiOp/ia49cjlFtEy2toAyJ2GmRyQtWb3QwhiVvuKLNtr4StyWnGbM6HPZNlkKDv5q7lm+sbif3uCB1UmCcWcY+W0Am4shslk5/guaISaoPkrZHzPQBOFPr4+8PXw3LJoMPBtjFFhxbQkQKOTyEysZwh23UIFixgLjhAQGRCtjprDJkwuFtYxzL9uGHacLUALETBczT6yTWIP4NxdmtfXzs0r3423zca9pc3TfLv77qfnJGyMhbJPN+mk9Wd7FUFswdVLRXYzz4mX1YpQ7R0gpEEeEOh748FK6z8a6Eay7rsHe4whsSs+yyW5wIDOZCgy+s7Ocf5y8lfVSQfVog59exTywiYg4ynSbKJ2mNmMT6ffKOR9z0ESiW22n+9si1tAoxhh4MsNdcOLaK6HgYQ4NE6Xj3uWVIYiYCEqaHEN3nyilrjZwRMTOV5/HYCL4VJzTzxM6WMI+sEltTOE8q1kYz/MmVtxBMhLg3tNmVW+Wu/V9j0HIZvV1SDuN8orKb+bpi/mBEqxDj0S/tJlbooFZWUe0O0fYE/f2wdsCidKnBZXt9rpyocU18gStjDaZDgzOB5L7iHv7u7FUkzkj6DgrkcgX7yBzCNJGZDEFfnNaIhTnok7f9zciVVwTdeS+28+qM32LgKptPVHZyppXr2c4bfb2JiclfVbdzpNXXs503fINBEoPP1bfwaGuwZztv8DqDnJDc0xjnq82xnu28/LWSAWnwaGuIz9WnerbzckIw8kMGRzt9/HVte892Xp8RMvpWk3kvzSeqO3q284Ysl7E3mZSDOP+rsvNV0XkXtXj2ahQzA64fXKLllFH7FcG4YjZp0ajk
iAcJTGVjrbWJe23CMKC5YnNsZAg1GCdMtwnTbSypuLlvlmjSQrk2tWHBYtwkbEqidQtlWohCB6MTYdltwmzIifIEaymXk8OgbHAMj62JdZITFu1kgoarcMdgfTZGoxjDMR0sL8Re9Eh6IW4VVsoDiHyGxlAez3FpJVukrYDvG5wjEhJ5BTTW4ayTIOqYOO0EyjCwl7qfgio7IOpXcK3CyoYc6F9ll9VEXaqIhmA+IygXcsTCJHZoY1Rc4idcVMejVbE5M9DPPaMGUcIlzDbxsbgpN0cUWUSXO7TGBPOWJGg4hOs2ShiIkot0XayDbcj5nCmN0spmSA6CjAMiZCpRIjlm0XQqNCNFpx9KCza11UFsM47tR9irbZJP1wnXAlYa/UTZDOFojtDxaaYa2AbcPLBAYJrI/dApwbTl0GkmcFpJECZWoYnZBCyfcEChDijkCFzet8aYCFAthcrCUj5ibTiHEyZxQgfZDIifLCLqbVptm9m+Pu6d2ImIuwS5JtUoznXZBXxhoy516IxJ5k2BW3MI1y1UaEAtQAYtrCfaGDMes2tDyLxDYgCsFLSEZDJRIjUs2U2TtoRWUlFdlZQWBrCMBI4fYRU7JA81ECsuBbePMJNCjmURjk8z3QBhcEN+iY7lIC8DvwRnLYNWLYPTTiIjC7PUQXguwnSJFhXqCgVbFXuyJXJjBpGvUA6sjUQs5XPYYZxY4CDdiPipEka5QzuwWc5muW9qO1bcI+hr0FQWr0uv0JFl1CUx/FHJvAGtit29P3gS1QqRiy3Mp9uw5LO0rY+nhnYQz4OThZKQTMZLZAcVW1yPjgMtS1EvKtZnBzBwcEKFWfFIHmlhLLdZ83OEmQT3j2ewYt15cDE4kFtlj1lHXgLRMMzEoFrM4LQTGIGNUfNwpstIOkTrEWqvgr2wNVXjTaMzKBQRUC4o5jNZzMAhFjiISBA7XcYs+rQxWcukeGjLNhIJlyDfoCMl+1LrTBhN1J4Y4YDBggWNNZuwZBG1DJQPYqmFabmIokthMs3T4ztwshDvg6qUjMfL5PIBo9sj3JSgoRStsqKQyyMii5gCo+6TON7GWvYohRlE1uah8TSJhE8z1cQ1Ii7LFBmTPnIP0AfzCSiOp7E7CUzPQbYDnLMlDL9N2IhQ2xTsh7FEg9tHZ4lMQeR1/w1mk2nwLRKBg5IGsbNVzGKIKw3K2QSPbNlCX7JDkG/gWxHb4hVyMkDtcohyJssxqIzbhBWTqG6ilEAsNTGFB02XynCcp7dtJ5aC+AA0hWQkViGT8xjYJvGykrqv6FQVq7kckWcQkxKjFRA/1UauBlTCBIt9gzwyliaX8mmlmriWz85UhbSaRe4CkYDlLKwMpLHcGJYbh0DhnC1jtlwiPyIaBw5Av9Pm1pE5AscgairaZcVMIknQNkgEMTAM7PkalCM8KajlYzw2NcFCup8w1ySI+Qw7TW7um0Ftd1Bxi0Ia1gdtoqpBWDVRloVYbmEGPirs0Oi3OFTaRjwZkRyCjhQMODWSGY/clu5RJnVX4VZg9UwGvy2JmSayExKbrmEWQ+pejKXBAQ6OpJnN+rSTLdxYh4lEk9uHZpEuCAlrg7CYTWN4DnY7AULgzFSw6j4REdGwgmshbbm8YXgeN2YTXaHwyoqZRJxOI0HCjyOFhb3cQDYUgVA0ZmM8PT5GLZ8lyLSIkh2ylsfN+RnUFhuFTSkPKxmLqGEQlkyUYyNX2hhugCXbuDnB0fIW0qmA5BAEpiBjN9me9khPWnQyDo2Owi1BYTpJu5YiZtkYbogz20DVQlotk+XVfp4cSLPe79GJt2kn2gzGOtw+NIsIBcKHyijMJlIIz+q+dtoG9nwVsxag/Ggzs0V7kTa7804NC7DBMdye7rzTgwIjDqrHOy85ILBT0BKipzsvkQcnKyj2eOfFshDPC6pCd15yQNCQoqc7L5GMSA4J3B7vvPlUwIkhQWAKsnYT61XQeRe15xnAwYMH+au/+ivm5ubwvAsv5/7p
T3/6Ym7yZfHMXhhX86b+DgkRcrAVY8mN8b+PXseZtSEGvwnJeYU4s4BYq2DmUshUnOZlWVp7UgzsKTKwb53Xp+b5gexJQqVwQ8U3axP82dK1tJcSdL7Rh1lwyTy+guh4KM9FJS3CqyYJhyxqN/okBpv86+0PsCu1xjZpEMPgi7VRTrTTfOPp3cwsDDHwZJv0tEe0tkZUKmMO9WGM9FPfbVO5Ms6OyWWuvuQUe5w6tycKNELFtAvHakP88ak34Bbi5B62MNc87KdnEB0fI+UQH4y48bfnGL20yY1Og34Z8GTbYdGz+JsTB3hiZYr+JyAzDWJmGbG0jpFJYGSStHemaOzPkttaZeTKFa5MrvLOvqMIFdIJFSeag3xs8Qaqayk6D/chCyGZx1aQDRfld1CWJHrdJOFwjNr1PnLU42e2PcSVuQWmpCQrJPfUh3mqnePJY9s4Nj1B7liHvqMuUalMtLaGkc9gTgzSmrIpXRtndKTEjVccY1uszvcnVglVyGlXMNvK8rGTN1Ncz5D7uo21FmA/PYustTGSDlZGcO1vLDB5U41rnSZThsfhjs2sZ/GF6ct5YH4XuaPQdxTE0jpiZhkjGcPoS+NOxqkd6CM13mTs6mV2JYv8ZP8hYsKjEyjm3Qwfmb+J1XKO9tf7oCDIPL6MWe6gvA5KKqIrxolG0tSv8fG3BPzo5Dd5/eApxqVkSEi+3uzn680Bjk+P88Tx7aSnPQaeaKOqdcKVFWQ6gTU1jDtqs35Dgr6hOm943REmknXemljBET6nXcFSJ84fnbyZudIAuUcs7GWwjsxhrNeRCQcjbnDVLy6x/a0lrrRb7DE7nHRtTroWDyzs5gtnLic9Df1PgChUkGcWkI6Fme9+cl+9Lk9sxGP8ukUm0xV+uv9p+ow2nSCi6Mf56OKNTFcGaD/SR7Rskn5yFXu1ifJdlAqJLhlBTfbRuCKgvTvgrWNP85bRQwwJybiUPNXOcU99iOnFIb7x9G5iiyFDj7ah2iRcWkLGbKytI/gDDus3JYgNudx81WHGM1Xeklglb3Q40xEUfJM/PX0Th4vjZL5pEZ+TWMcXMZbKyLiNjJlc+jMFLvmpAnusDldYLeZ8i8Ntm8dXp/ibkwdwFiSDj4IsNpAn5hBSYPZnCfMO1Rv6MYdCJq5fZChX46f6DzFh1eiEEY3A5E+Wrufp6hidb+YI52KkDq/hzNdQgYsKfaKdQ6gdA7T2BjQvD3jD0CneOflNBiRMSoMzbop/qI0yt5bnocf3IVZg+JEWRrVDuLgEAqxto0R9MdZvSiBGQl5/5VEm+kvcmSgwZTaZdgXrgeB/n72Whwo7SD9lkTxlYE6vYp4tIGMWIm6x8wdLvO7dy2yzXa6xmxQCgyfbDsdKw3zi6HVEqxZDD4NZbCOPzyLCCLM/g8p050ENSyauWyQ/WOfH84fZEyviBhGdSPC/Vg7wQHkb3lMZvNNJkidKxE+XUaGH8j3UVJ7okmE6WyPqVwdcmZ/jp7d+nQEzZKs0WPHjfLY6xkIlywOPXYJXsBn+ehOr5HbnIQywtoxAX4L16xP444Lr9x9n60iBWxPr7LNqzHmC1QA+M38F/7B0KcnjJunDJsZ8EevkEsIykQmbiZtrXPvrC0wkXW5wmjRCwWNth7O1Pj5+5AaaawmGvg72ut+dh46Lmc9AyqF2bR5/3GbswDK58So/nDvO1cllvFDhRYq/K1zGF4v78I6ncA+nic9USR5dh8AnCjzUcJpo/xjeGNSu99meL/Cvt9/PsOWyzZA0Qoe/r4wz30jzwGOXUCskGHqkRWzVI1xeRrXaWFPDyHyK0oE4rW0WB/adZteWRW6IlTjglFn2BYs+fHVlN381e4DYjEnmMQtjpYp9dB4hJTLpMLi/xQ2/Ncdov8vrnTqRgoOtGAvtJB8/fAOr6zmGHobYaog4OYeoNjHzaWQyRv2qHJ2tCYYvXyW/o8KbM6e5NT17fh7uKe3kk6tX4s0kcR/PYi82SD9V
AN8n8l1UX5zoygmCYUn1Jp+hfJWf33EfE7E626UkUiafrY5ztpXmoSf2srqSY+ibbRILHtHKKlGthjk+iDGQpXp5jNpeh0t3zLJ/91mucKq8IVZkPYBZDw6WpviLMzcgliyyj9iYa02sQ7PISBHFBfes/mVP7nkGuvMupvOSg03+le483Xm683Tn6c7Tnac77zXZeRd15NknP/lJfuqnfoo777yTu+++mzvuuIOTJ0+yurrK29/+9ou5yZedpwyONvPIUHKm5FBsOahFk8R6iLnmQjlAeiECSWTahE6Mtm3TME0yUm5s8BhhygAThWMIBhIuW3MlSm7A3FiKyFC4IzGMholZFKAk0WqHsO1DNiLKRyy3+jEyNrWsxHIEKzJFy7AZGyyTMzoM+h65voBWqYJbqVFKxSilFH5O0vFM6mtxikdzLCA5SotOFLEYwJoXxypJoqYgiEvUgMS8xMIJQ8YyNVJ9Pk4yIFCCk60cdiiZLjust2z8RYfESoi15kHJR3R8JBJlWPhOjM7GPMQNiRICISIMEWDKENsQ5COPLbkSa1HA7GiKyBS4iw5GzcAsCoSCcN0j9BRkQ1gLWW32caYvopKRJOKSRZmkKW0G8nWuCOfox6c/4dOp1GmXytTiJmsZhZ8TdDyLZjlG8VgORxgcp0mkQhYCWPHjyJKB3ZKEjoQ+A3OPheH5jKQbpNI+yZyHAqbbGQqRZKZqU2hadBbjJBZDrIIPZQ/Rcp+ZBzuGazs0TRPTMFAChFAYIsCWAZYtyIuAqWwFUyhmR5N4pom35BBZArMkEH5EVAoIgxakI0QlYr2S5uzQGOW0JJMUzMoETcMmm2txxdQcWStgSHr4jRbNQpmmE7GaHcRPQyc0adUciidymIbBCZrYymMhgGIQg7KB0xaEpsTLgbHTwhgzGUo3ycR90v3dS3HPd9LUVZbFmsVi3aa+mCSxGGKvBoiyi2x2upfMluYz82CZKCNECWBjHizpY9qC0AiYyFTxhWRuJE5TxPFXbBQKsyyRnQBVjQhECxWPEG1FpZjkbHGMYlKynBbMiRgNwyGR9tg/NU8yETESeISNNs1CmbYRZyU3hJ+AjjJRDUXpVBbLlpxSLTLKZDGASmQSlk2ctkSJ7idbcruJHLTIJ9v0xX1yo20AVt0Ej3VSFJoWsxWHymqG+GKEtRIgSi6i3kEqAdIktGN4tk3LNDFNQSQECDYeFz5JKTCMiPF0japwWBh2qIYmftEEP45ZlchGd9zBYgtlRgilqA85nK2OsZ6AQkZSkA51w8FKhFw6uYCTglHPhbpHc7JER5ms5Abx4gpXGqiWQXk6g7OsOK3aNBAsB1CJJG7FItYSiFB09/zZYmJkLDJxj8Fknf7JJqAo+jEecxNU2iYz5RjFUg5nUaHWfGSpg6h5yEiAMAjtGIHj0DItlCkIN+ZBihBT+BiWwFGC0VSDrarI8pBJsRUjqBm4jThG3cCoGURtCBbaRKFC2BGtfouZ5ijFeEQxK6kaFlUZgxjsHV9GpiRjLRez5tOYLOH6Eau5AZpxhWdJ3I5JdTbNejHgrHJRRKwGUI6gVbeJ1QXSlXhpiTVhYMQsso7PcKpB3/Y2Qiqqgc2TXpymazBTirFaS2EtQmw9QBY7iKrf3fAWSWg5RE6MlmXhmQaB/CfzYApiwHCqxdawSGEQVscSBJ7ELcUxmhZGRaI8SbjYJvJAJCPcPsFsa4Rqwqeck7iGQdmIE9iSnaOrRAmT0ZZLrBzQKBTx2h6FbB+NuMKNG3Rci+piivVGHzMqIIHPevcpmHrTIVaTGE2Jn5YoYWAaNknTYzRdIrvDw7AiWqHJ040BfF8yXYqx3ogjFyXxYoBRdKHiI/1o47XTIbRjtK3uY8M3JMDGa4ZPzAQHwVCyzdZckdJAyMJYklAJ3NU4smVhlgQEJuFyh7AjEJkIv08x3x6kk0xTzUmUJVk3EriWydbhdUadOiNtl+RIQGuthNtosZ5OU01k8FLd187a
SpJ1P8+8gqO4VCPFSgDlTgy7LFA1iZ/amIfLLCzhMRirwj9uRrFsvl7vPAtwDF5054Wvsc6zNjqvP/LYkivrztOdh2kLIt15uvMu6DypO88UxIGRHu+8+EbvvpY776IWzz7wgQ/w+7//+9x1112k02n+4A/+gG3btvHzP//zjI6OXsxNvuzKvsOnV19HsZbBPBJDFgWxIy3GinXU0hqq3kLG44hkEm84QzCcxR2VuAMKkQ5ImS6G9GmpiIQwyEqbnbEmbx48xMnYMEtGik4xRicxjFWK6DvSxKi5WCeWsYKA+FMWxCzu37kfcg6VSyyCARjctU6mv8Ed249yySXLjL7eog+DRT9g1Q95sG5yX7UfVkyMOageyXJsehenyjUemE+hwohQKcJEksyEwMtCea9EbLWQb8+SSgveOHyMAadByUpRC+N8bP5SFmp5zOMxjIKBc7zF+EoDtVpEletIJ4ZMJvAHM/gTeTpj3XlQmZCk6WJKn7YKSQpBTtpscVzu7D/GTCJHIUpRqySpxgaRJeg70sSqelhnV7E6LrFDNtgGB3fs5rF8guoek86wZGBbiexIjTdMnOKa3bMM3GQyhEnBD1nwA55ohnypMgjrBvIsNKeTnPjSDs5Um3xjNgFBSBQpQschNmlipBXlvYJozES+NYOdNbhh+BRT8RJlK0VTOfzvpT2crIxgnnKQyybO6Q7j8w3UWgW1XkHaNjKRIOhP423pozMq6QxCOhuStDxsw6OjQmygT9pIK+S2/hMsplL8bRCnUMtSi+URRUnuaBOn6GEurGG2WjhHLLAMjm7dwvGhS6lvN2hNGPRNVuibrHL14Cw3b3uAPAYjWJSDkFk/4ER7mM+URlAViXEa3MU4p766jZlahyfPJhCeRxgpItNCTDr0pxWV3YLOVom4PYXMS1439DSXpJapmAmaxPhsYTtPFicxZmyMWQt7zmX8TANVrqNWi0jT6m4g2pfEneqjM2jgDgrsvpC47REzPFxCXKXISxvHFLyx7wy7knH+7nKbxtQA9UQGChbZ4y3iBRezUMQ4s4xzxATHZGZ8iLmxXbQmDerbDTIjdQa2ldiXXeZfTD5MHzD+NptmGDHt+cy6ffxtcZRWXSJPKYKizfRDW1iouhyeSSBbbUKlUELijicYzERUdwoakwbcnIJhuCR/nGuyZ6mbcerEubc0wcOrO5BLFsZpB2vZZ+R4C2pN1NIaQhrIeLx77v9UH17epDMkcPIRMccjbnh4hHSUok9aJA2TG3NzbEmu8vlLJNVxQTOdJNraR+ZMm+SCi1ksY8yu4hw2yDgWhaEMf73lVjojBrXdJomBFkO71tkaX+f/ue4r9ImQyTdb+BGccX1W/AR/XRym2TQRpxRRxWT2mxMUykMcn0ti1BrdeUDgjqYZyilqWwWNCYPEDXHEeB/bc9Pcmj9B03SoygQHK0Pcu7QXtWZhHo9hrgUMHGlCrYNaLCCUQsQTqISDO5Ej6LPpDEtEf4gd80kYLgEBbRWRkza2NLk6s8RgvMg9UUBx0KKVj9EYy5Kac0mfbWNUasgnpnEOGaQfMGnkE3x6+814AyaVfSZO3mV4zxqj8Qo/c9V9DBodJm+3ERFMez5rgc3fFgdptCzE6SSiLFn4x1HK6wOcXEhjlipECiKl8IdyDA0ommNQ32IQG4kjt/Yxll7kLYNH8QyTspXkRKOfP1y4DL9kYx6NYRQjMoebZKsVWCyAHyDjCUglccdzBH0xOiMGfr/CivskDY9IBLRUREbaJIXJlak1EnaNh8UOVjNx3FGHVn6M+IpH9mQLWWtiPzWDLQXJRyzCdIzP77yRoM+kcqmF7PMZ2VugP1XnR/c/zKjZYPJWC0cJznoBxUDw2VKOY6044mwCuSZYeWCQ1kqWU8s5PlfIn5+HIJ9ncFjRHoLqDondH0PuzDGZKvCWoRMIE0pWioVOhv/f/OU0qgnMIw5GCWKHmoxWaqiFAnS6G/KKVBJ3NEswkKQzIukM
RJiJgKTpwsY8pIRFWlrsS1QIzUM8bU2wFEviTTq006M46wG5o01Eo419ZB5bRSQesxEJi7t3XUOUs6hcaqHyEcN71shlGrx17xNss4uM32yRVgazvs96oLi7kuCxRgbmHeSyoPhYnqNzSU6tl7h7MYtSqvvamcnQPwadPFR2SYw9NnJ3joFkmVtT0/xljy6e9XrnJYVBn3TYHWv1dOcZQpCXMbY6Pnf2H+3ZznOAvHQwLdXTnecpyBs2cWH0fOflpUPaNLipxzuvT8ZwpMk1meWe7rysdEgJkytT68R7uPPS0iIjbC5NVF+znXdRi2dnzpzhrW99KwC2bdNsNhFC8Eu/9Et83/d9H+9///sv5mZfVoaIGI/VyYQhVr+FMCRBzSdMRVQsg049jkUcAxukRHZ8zPUIR0QI38dtm6xlMpzIj+E4knjSpCkjAiMgaXnsyxbwhUO4JQk5IAjw6yHFmEngGhiejRAmUdNERRJ3NiAoR/ihwstLKkMpVtN5OimLQsygYbZpJl2SeOw01ugELdpuBykjDDekFfcp+wmUB6opkZaD4QqMRoC12oRWiEwFqDSUySASFsGAQlqKUaeBnZBYfRZSSYJGQBgLqcYE7UwcixiGclCWgez4WKUIZy5Ctj28wKSUSnFyYAzHhkTKwpUK1/BxjJDd2XXaRoNoKonKSQgCwlpIMW7gNR0M30Yok6htQVnizoV49QDPi/DKkupAktVcnlbCpJwwaRkujWQbR4bsEOu4tGi2XIQFRiPASwYU3TiR250HIS1MV2DIEKvQImxHyKQP9YiKSpNIQdCvEDEYcloEiQpmn4nhGwTtkFCGNJKKejKOiY2p4ijHRLoBZkURmwsx6h08YVBNJDg1OErCiUimLAJD0TYCDKnYkS4xaHaIJpOojAFBQFSJKCUFndrGPEQGkWdCWeItRLidEK8d4TcEtVyc1YE8jZhBLWnRMTwa8TaGIdim1ukYTZp1HxyBUQ8IUhHFjkPQtrvzgIHpS4xWhLneBldhxH1EO6QeJVnL5An7FKQUeavF9kQZI2ditkzCICQIQ1rZkGosjqEsLBVHxa3uPNQCnPkQs+biG5JGMsaZoWHWHJ9kqvvJVMPwERK2piqkZEg0nkQlTIhCVD6kklE0Sw7St5ChSRRs3B+MCDcK8WohXkfQyMQoDPXRdATNtIVvhNTiTZRlsCUqMWC3GasGRDEDsxYQJUOKroXXMKApIZIYoYnRVpjFDlYIhuMi/IimF2fN6yfMKMhC1nLZlqhgZEzMAZOQiLAT0K4GVKwYIjKxVQLlWAgvwKiHOAshVs0nMAWtlM3ZwSHqsTSJtIVhCipGgBIwkawhhSQaS6BsB2SASkfUViNqBQcjMJGBRaRsgoqBC7iGh1EO8cLubRdG+mjbEZ2MRWREVONNfMdiIqyQcXzGhgJCx8KoB5CIKPkGnUQSWhLlS8zI7s5DycUSLobhIlVIu+2wFuSJ0gr6IGV6bEtUIG1iDlgoIyJoBHj1kJLpQCCwSCBMCxFEyKaPs9SBRkBoQjtjMzc4QJCIkUiaWLZk3QiIRMRwvMEluVXUSJyIGNghyglpFgMqSQcRGhi+TSQcgoqBF4Jne7Ae4EXQTpmsjeYIHA83bSFNKDtNOo5gJKxjxRRjQwG+7WA0Q4QTUQmhbCehLf//7P15zK1ZXt+Lfdb4THt6pzOf6uquHqqhuwEzGLDUMYMuSI6iSMiWjf/CVhRPCMkdg4n9jyVL2FiRkEKwEsv2VXSF0ZUTJXGQLyHYudi4wdDtNg003V3dVdVVdaZ32PMzrDF/PPucqgJ8DXUv9nXXu6Sjc2rXqb1Pfc5vrfXZz1rru8iDQFGg2oxeDZjCodKA1JFhrjnPx1An8gnUKvCeakWcGsyJAQ1+4/HbxFJb4mCw1AipESkhO4992CPbRJaZ4VLzxskxxURQ1RpbKpYyklTkpGz52sVjkitIroJJIstIt/IsKwtBjmOELogbRYiC4WWHugg4
BMNUc3FrjqgsbmowRrAyLV0ROU173l+e0+48TlaoPiJlYi0yS9VAL8mdQMlqrIe1wzzyaOeQZSTMFBccYepAOgErI/erDUMYnnEIzwXCIrLUBt8qDDVSGABk5zFPEmVI5BRxW83jxZzfnt+hrDRlpdjIRFSBWTHwNfMnxFSQnq9hliEFhk3gsjQkJ9DegjLErSZGiXslkC8jPsMwV1zdnFI3GTcxlIVkbTo661mknhfsOX3f06c9MiZUjOxMYJlqGEYOUlfoHvTGYx4HVB+QdSROBFdH776jmk/btecpmsawl5Gg/LvW80oraA6e597lnjeZWILK9Mq/qz1vMrFw7XlMphZpBCvp39We1zQGaxUXypNEfNd6Xl0bilJzpQJJhnet51WVpqoMGxm+aj3vHWWe3bt3j3/+z/85H/3oR/nYxz7Gj/7oj/Jn/syf4ZOf/CTf+73fy3q9/gP/Qf6w2tMsjH/w6W/i2xY9ExHwPtFHyX+/vs9r3YxPvvw8r18eMfuSpHwiqV7bYC9aQrsj9nvMvMYuatLtkvjeCcOtzP4DiftHl3zL/S9x1+z45uIJFtBBs/KGX94d88Z+yj9/+UU2m5r6cwV6BdMv7dAbR1hekN2AWVTIyiDeN0eclexeEHS3BS/ce8hzt57w0fIRX1c+pMqGOhU8dJovdIbfXt/knz/4MP6yIH++wSwTiy/2iH1PfPwEckTVGlFrzAdOqG5m/uj3/zo3n1vxUbPjWAS8j/go+DfbO3ypW/BrX7nPS0/OmLyiqN+QFI92lA93xL4ltFv0pKQ4aUinJfEDE/wp7F5MnM43fPt7vsgtu+Nby8fURHTQtFHzye0xD/uG/+7lF3m8nlF93mKuJNMv77BLR1hdkbo9elahaot4boK409A+J9i/R3D/5jnvv/+QDxQXfHP1FaqsmaSSS6/4rdby8v6If/b6R+hWFelzDWoFR1/okbuB+GQ8Wy1rhSwV+v0n2FPNN/2p3+Tuh8/5Gr3nlnJ4n4gx82u7m/xWe8Jn37jDbzy4Tf1AMXlFYi86yq9sSENH2G9QlaE4m8LCEj80xR8Ldl+TmM5a/tjzX+BmueVby8fMhUdHTYiKX9ke8cZQ8/OvfpBXl8eUL1nMuWLyakv5ZCBsVsT9Bj0p0JMCcbtBvGdKf1uwfUFw82TFi+99jefskj/WvEKNYBpLtlHxG/uCB92E/8drH2W1aUifmyBXgsUXeszGE588IQ89stIIKzHvO0aflXzd/+rzPP/Nb/BB3fGc6gkhEULit9pTfm13k5fOz/i1V5+jeCKZvaQwy4HqlTV56Am7NcJKyrMZzAzxxRlxIdl+bcLOHX/s+S9wq9nwR4vHnKkeHRQ5KT61W/DqUPOLr73Ab13epHzFYB8Y6jc66gcdcbchblao2qJnJeK0QrxvznAm2H5IsDja8ZEXXuV2sebjzctMRGQWS4Y0cng0VPw/X/sIj7Zz0ucaxFIzf6mnuArEi3Pyfo+sFMIq9HNz1K0JH/6uL/Pid77Me1TPB3RHjAnvE1/uF3xyc4dXl0d88uX3Ii81888r9NpTf3kF/UDYrRAKirMpYmKJH5qRjhXbDyfEceBbn3uJe7Ml31icc0/vUFEjo+TX2xkvdRP+7aPn+LVH9yneMBSvGsonPZNXW1K7J6yukIXGLCrEokB8YIE/lmw+LKgXPR99/8vcqLb8L5ovcyQHZqkkJc1v7C1PnOVn3/havrw+JX2xhieG2SsD1ZNAurokbTaIQiFLib49Rd2b875vfZ2P/S+/wB098GGzJ8dM8IkHruG/X9/jwXbOv3rpBcLKMP9tjVlH6i8vka3D71aQI8XZFNVY4genpBPL7kOJcBb5xnsv877jcz5qL3m/WaOiQkXF57sJv9nO+OzFLf71G+9DPTFULxnslWf65T257QjLC4SWmKMKObWIDy6IC836awRm4fnI+1/hbLLh482Xuan2TFOJSprPdZZHg+HnH36I31jdJn+5Jj0o
mL7uaN5wpPWKtFwirESWCnXWoJ8/4s5HzvnGP/mb3Ch7Pqb3yDzWw6Uv+Zfr+zzcT/lXX3yB3bpi8dsKu8pUX16htgNhvyYHhz2doJuC+MKEdLOkfSHR30l89PbrfPjGG3zIrPgae4VKCh0Urw41n97PeWl1yi+8+kHylaH6vMWuAtOXdohuwF9dIETCLGpEY5AfPCItNJuvkeSjyNe+8Co35mu+vX6V58yKSSoosuGLneH1wfCvn7yPX7l4D/n1kvRqRf3IM3t1IG23pMtLhBbISiEXJeaFE05eWPNHv/+znE47vs7sKIg4l9gFw79c3+dBN+VfvfQ+LldT5p9XFFdQvbJGr3pCO46X9rjBTCvie2rSvZrufqJ7PvOB00d8/Z1Xed5s+IbiAp0EKmoeu4J/uzviK9sFP/fKiwyrgupzBWadmL20Q+6Gce6MYayHyiA+sIAjy/ZFiT/NfOj517h9uuSbq9f4oL2gzpY6WV7pDS/3mk9f3edfPv4A6ZElfammvIjMv9zDriWen4/Hk2qNnFh4fsLP/Xf/h3dl5tm152XaDyTuXXvetedde9615/0enrf7cIbjwLc+98VrzztLB8978i73vMxHb7/2rva8/n6i/Sr3vHe08+zjH/84P//zP89HP/pR/uSf/JP80A/9EP/iX/wLfv7nf57v+q7veidv+YfeMoI2G3KW9EHQBUnrCwZvwWm0F8g+IboAnYN2QHUB0QOFwveWfqjYDw2DT+xDYB4tISukyMx1RykzZaEx0XBmDK4SHHU91Jq4tCQrCZeSnBVipxE+QqvITjI8lvhBstWCroe1t6z7hvV0/DEYzVAY1ih2pSJmmMcBYSNl5ylmiSPfw64n2CUxZlrV4G3J2lq8giAkGWiTQQvBEC0uCPa+ZPAF2RmUk8ghIfqIOHCQQ0B3IJTEdwVDX7IbJjg3cjCxx+cxL2Sqe6bSUxaaLmluaEtwieO2Y6gLwsqQjMAvFUSFaBWiN9ArcpC4JxKfJLsM+yiYtYZVrFk3DZv5hEFLfGlZI9mVmiAksxsDdZUp9wEzzxy7DrVz+GJJdIFONXht2RSaTpf4A4cua7YpM0SBC7zJwes3OXQjB9ENyMGj+4wQktBZfFmyHSb4AXbBE2LEZUVGMNU9R3KgsJqI4kwbnI+c9B3rciDuNElowlbieolwCtlqxKDIUeKNxClJ52AjBMVGsxY168qxOZrgNYTSshOSXaEZlGJywyFrRbmLmLngyHXjalmxInY9na7x0rKtFDtdMkgFQJ8V22RwBw7bA4foDNpJpMuI3o/9ousRg0d3GeLIIZiC7dDgnWTnA2UUDFmRsqBRAwvdUhiFQHGmNZ2PnLiWIzsQO0UKgtBJ3E4hgkZsNXhFXksCkqEQDJ1gbQVipVipmqoKrI8bko7E0uKkZGs1nVLUp4GjuqfcJcxMsAg9xSwQ6hVpu6dTFV5adrVkq0t6OQ6DLiu2yeIjDAE2oaT3BcHbsR4ciN4j+rEeRO9GDmrkkFXBdqhxg2HnPSJ4+qyJSCrlWOgOaxRaSG4YxabOnMSWIzGQoiD2dlxxWSpAITYaERV5IwlJMrwh8HvBuhT4uWSlamyTWB83SKuJZUGSkq3RtFpTngQWVU/ZRUylmOeeuvaEyYq4WjOokkGWtI1gpUtaaciAR7JLlpigD7AOFb0v8W7kkJ1A9h7RBUQ7IFqH7hI5Q+wMUT7lULD3gRACbTKELCmlZ65brFFYobiygrNKcCpnLOJAlBC3ligl/kKCOHBIAraS4CXDA4nfCtaVQC8ka11ip5HN0YSqEITSopRiqxT7WmOOI4tioHARYzqmyjE1jrBcEZslThb0qqKf1qxNwVyNK2sxS7bJIFKmD4K1L+hdgXcF0iu0E4g+QB/HeugGVBfJEVJncKpgP9QMQ8XeR4YQ2B84GBlZ6A4jJEWpaIvMjUKy0hUL1+MthFVBNBJ/rhBSIdYaERJ5p8he0j0UhJ1kXQnySrKSJXYR
WB81bKpAKAxWazZSsa8U6iix0AM2RywDTeGZiYG02RKqJV5oOlXjGsvaFBTKkoQgIdgnw5AVfRDsgqbzJc4XSK/RTiCHgOjz2zhIB7nTeFPQDjXd0ND5SBcit5IhINEijfUgBIXQpCJywxq6QnPU9+xLQVwWpELgzyXyaT3EDDtFHiT9Q0nYS9alwG1hTUG9q1nPGzZ1h7OG3mo2QrGrFMwzC3q0ChTeU00Ci9STty2+WBKypNMNrlRcWfufQ1f+Z9GuPW/0m9m151173rXnXXvetef9PjxPX3vetee9KzzvHT08+8mf/En6fgyh/Bt/429gjOHf/Jt/w/d93/fxN//m33wnb/mH3lah4O8//Airrmbz+oywNcivKMRaoF913Ftu4fEasWkRLhBDRDYN6uyUcDbB35zS3lDs7wjEaaKaDpSVQ8lIJtHmhMwwEYq5jHxd8YT7ek24L3ncT/nl6n2sdw2XZzV5rZi8OsWuI/LhHrVz6FcvkF8cMJ+BpCGd1Dw4OuHR2W1+4ea3MJxI+lsSsRgQtzpuVhv++M0vcPNWy8e+5pw6CebBEGLkyu1ZR8Onhvs88VN+ZXXKVijiUaJNhv/r5fs572Zs35gwbCzqdY1YSnjNcf9iBxcbxNUO4SPRB2RVjRyOavydOf2Jor0t4ChTHjhomRAkuhSxItMIRSPhI8U597Um3FU8dDP+bflenuxmrG5UpKuG5rUpxVWkeNKiVj3q0RrxyiMWn4GZAbkoeXh6wqOjW/zSnW/ALwTtXQnTgLy756hs+fazlzm92fJ1HzhnkhNzbyBlrtyeXZR8qr/PkzDl365P2ASBv5mIWfJ/u/wgD9ojdg8bumWJfKiQ5woeee4/3MFyhzjfQEhEFxBFgTo9IU5LhntHuIWivSPJ80gxdVTNgFERKRJ9TnQ5UUmJQfCiveKuFqTbkteOF3y6eg+vbY7Z3C4J5zX1gynlecRedBSXPepyj33tCUZBY8FMLQ9vHPN4doNP3/sIYSZp70vyJCLv7ZmUPd9w9DrHZy3f8L5zZtmzCBaV4GrY08bMZ4a7PAgzPr054mIo8PcjGfi55fO8vLtBe16xP6+R5wr5UMFl4O5rO9h0iEerMW9kCAhjkCdHpKZguH+En2naO4o4y9i5ozxwUCIx5LFvlMJQCMn7zYqbaom8Ac/PL/iN5h4v3bvB7n7B6uGC6smU6sENzGpAPWmRux77qQuMgupfg64NT27OuZie8Jv3XyRNJO1zkjRJyHt7inrgY4sHnJzu+br3nHPCwNwbbJKsXMs+en5juM1XwhG/sZ3yqJ0w3IsA/NLmLv9oe5vuqmL3aMxTkK9pWEVuv9rCdkA8XIKPpMGP2RhHc3JVMDx3RJgY2rsGPwez8NjJgNEjB58TbY4YobEonjdbFmpFeRa5OdnwhflNfvPGXdpzy/r+guJqRvPaKXrrUQ93yN5hP/MKRmTKT4IsFZe3piybBS899wJ5KmnvK+I0I+/u0Y3jaxcP+djiDT5y95JbomUWNGVUbHzHLji+4M54yZ/xhX3Jg92C/mYAAf9+f4P/ZnWPflOyeWMCG4V6VcEmcfbyALst4sEVuEDuA0kI5HxGLi3u/hFhZmnvGtyRQB15zNRhTUCLhD+MlVpoDIp7uqWWG46OB6Zlz6vHJ/y7o+cYlpbd7Tl6NWPy6jF651EPdsjBYz/7FXROFL8GwkhWt6dsmymv3n8PTBXtfUWYgbjTomaOF2eP+K9mv8WLd5Y8J7ZMgqaJim0Y2PqOV/wxv+Vu8WqneLA5oT0KZA0v9Qv+6fJ52n3B5rUpeatQr2rYZhZfHljs+pFD52HwpAxiNkWWBe7egjAvae8Z+jOBOA7U0wFrPVokIpE2RxokjVDc1D3fKHbckC3GBB6cLvjV6XsZNgVPbsxQG5i8vEDvA9UbO0TvMZ97A50iZ58GtGB/a8Jrkxv8t3fvIxaK7o7EHwnEzQ5xNPDC5IL/6tbneO/tNR/4xhV1VEyC
pouOlet4GKb8++EuD4bMw/XAfuLJNvPATfh/X32EXVexeW1K3GnkKxqxhfrlgXqzgYdLxH5ADIEUE2I6Qc5n+FszwklDe1fT3haIk0g17SmKkUM61INAMRGKE+X5I8Uj7qoN+Tl4fDbjl5v3sd9WnJ9OkFvB5OUZZhtRD/aj0H/pEcp7Tj4DaBhuTHhjesrDO3eRx2PW0nAqEac94qznfr3ku2/9Nvdu7/jwxy6pkmTuDUMKLIc9F7Hm08M9njjPgwcz+H/957KW/7zt2vOuPe/a864979rzrj3v2vOuPe/a897e3tHDs+Pj42e/llLy1//6X38nb/O29nf+zt/hR3/0R/mhH/ohfuInfgKAvu/5xCc+wc/8zM8wDAPf8z3fw0/91E9x8+bNd/QZmUwGJAIloBJjboGwA5SBrvb4lMlOQpSo2qLKgmw0ZBA+IrcRqT26HqDP9BRsi4pHkzmVhr0tSTKxVy1DVsxMRxSC24s1jfUMbSRPNFWKqEUmqwDrSHeZiLsMQSADxJ3CpQKXCrwv6fbQ9hlzDIWPzKYGFUBZEBPICvxEEmVGSIXOkonzOD9ws9jik8KaCGTSgYQQAoWgFAEjE8I6KD195XGTTPYCgkGVFlUXYDWQRw67iFABdeGQXWRQhl1R8ngyZ6cT+6JASNjLlg6YmIETsePWfIO2GdcGUmUpSJhpAuvJdaRfJuI6QwQZBGmvcKLAe4tLFcMGdj6j5ooqBYrGISJIC3ICKAgTCSKDUsgsmHjP4HtuVDtKFyiLQAZyHlmAQCEpRMTKgDAOSoerPf0kkwPgNdIaRF1AYcdbuWJC7geyiOgLhzpw2Bclj6czeu1piwKlJHvZMohEpTwn5Y6bsw1RS/zQEGxBoRK2SlB5chFxm4S7epNDbhXu0hJ7wyAa/Bq2KSMmkSoHqCMEgSrHehBaEGpBkgKkQohE7QLzOHB2uSO1krryh37BmxyExMpIoQJCeygcvnJ0kzRysBqpRw65sCAEpIzcOxAJfTmg+4BTmrYqOJ9MSVbT2gKrFHvV4USgUIGTYs+N6YZWFPhQ4UXA2kRhEkw9WUf8LuHEgYMX5F7iriypNwyywTeCbYZ84DBpMniBqkA2IAyESiCUJB04VD4yDwOnqz377ZpJMzwbH9LhELtEYESiVANCBygGQnB0TSKFDEYjlMa8hYPIGdE6pMwjB+fxUtHtCi4mE7SFnbWUWtPJgUEOaJk4LvbcmGxZxhWBEhcjpsmUMsI6kEUk7BNeZHLIiCBgkPilJXeGXjeEWrLNkJpEmQLlBJKTqEaMHCzEQuL1Uw6SMiRmYeB0s+f2as1s2iPEgcOhLiQSJTKVdEgVoehJIbBvEslkMAohFLq2iKIAKSGPeQhSZ/SVQ0VHRNL1BVf1hIdlYGMMjbEMcqCTPZA5tnvaxnL7aE1QFjdE1ATKHBHbSCYS24gTmewzwgtwAr8yhM7Sq4bQKHYJ/DRThoCdQzhV6CnIGkQJ0QqckSQpEUJRxMzMO473LbeqNUdVixBjIYz9YuQgBNTSIVWCoieHwL6JJJXJViGyQDcFFAVZjRxE75FbUMsBJRw5QecLVmXDg3pBrTUba3HS08qehOCoaPFZc/tozWALXBeRM0GZEnI3ckj7iJeZNIxzhvCCsDbE3jLIirAz7AMM60wxJOwO/LFCLUCWIGrGnSG1JIpxxdNGmHlH33XcKtcsbIuUGdJb5gwEigMHmRG2J5eBro4EMtmOc6duCmQ55ikBCBeQu4BaDSjrwEGXLOuy5mGzoFKSXVEQRGCvOnxWLGxHRHJ7saa1DtdG2EmqmJDbRBaBvIt055nYjf1eBEHYaJIrcKLCt5Z2gG6fKdpE0SeGuUJ5kMVYE1kLfCWJQiKkwiSYOYdzPTfD9h25xldDu/a8a8+79rxrz7v2vGvPu/a8a8+79ry3t3eUeQaQUuKll17iyZMnpJTe9u8+/vGP/4He61d/9Vf5U3/qTzGbzfiO7/iOZ1L1F//i
X+Rnf/Zn+a//6/+a+XzOX/krfwUpJb/0S7/0+37vp1kY/+dPfRMvzgdqEqWvEEmy6h2tz3x2e4tH/Yxff3KDN9YzxCMLS0N9CdUK1LpHbTqi6whuD0YgS4GdGJobNWKmEHc1cSro70l045nfXTIrW75u/hUWqueedFQ5Y0MBUXLeefYh8+/Xd3nUz/jNx2c8XE9RjwxqqcfPXmbEtkNuWmLoCa5FWoGsJXZimdysYSbJ9wxxCv09gZk4zm5fMitbPjZ9jbkauCsiEsGXU0mfBc/JPRMRKEOFipr1MND6yOd2N3mtW/DbF6d8eXmEODdwbilX0FyC3A3oZUv0A2HYgQJZCXStmdyskROFuG9IjaC/LxBNYnH3irrq+br5Vzg2LfekoyFifYlMisvOs/OR39re4ivdEV84P+HV5QJ1rlHnhnIF9WVG7gfkakcKDu92CC1QtcTUhsntGjFR5PuaNBF09wWqCZzduWRStXxs+jpHuuWOSJRkXk4lm6y4J/cshKcIJSYZNoNj7wJfak94aX/Ky6sFv31+Rl5qxKOCYgvNE5CtR1/uyd7h+y3IjKwEqlRMbtWoRiPua/JE0t8T5ElmfmdJ1fR8dP46N+yG28IxFxETCnTULAfPxkVe2p3yxf0Zry4XfOH8BLnU6EcGu4XJk4xsHXJ54DDsEApULVGlYnqnQTaKfN+QDxyYJM5uX9I0HV87eZ0zu+W2SEzJfCUVXGTNbdlxKgZstNhYsHeezeB5vZ/zW9ubvLGd8euPb5I2GvFGgdkLJo9AdQF9sSN7T+i3ZCKiligrmdyq0Y1G3DPkqWS4K4hTmN1eU81aXpw85F615IZwnIiAjhYTDBsXWA6Br7QLPre7xYPNlN98dIO80eg3LHYPk0cZ2QXU1ZbsPd5tgYRqFLKQTG436EaT72uYSrq7gjTLnNy6YjJt+VDzkLvlijMiRyLzMBkeZsupHLgtOkwyFKGk84F173jsJnx2c5vH+4ZPP7qN3xnEayWqFUwfgu4S+mILzuP7LTkHRCmRVjC5WWMmBu5oxFwy3Jb4hWB6Y0N9tOeFyRPeW59zKjxnwmOixoSCvY9c9p5Hw5RfX9/hyb7h3z+8Rdxq9OsW3QqmD/MYEnp5+Oxh/DtQtRo/+1YzfvZdTV5IhtsCv4CjsxWzox3vq57wfH3BMYkzkTjPmtdSwUI47skWkzRFKHEhseodV67kM5s7nHcNn3p4m25fIF4rkHvJ9BHoNqMvd4jeEfodKTpEKRBG0NyoKWYWbirEscLdkLhTQXOyY3K65V51xYvThyxE4LZw6KSwvqAPmfPOcekq/t36Hhddw2ce3KLfG8xrBWovmD7K6DYjrzaIweOHHSl5VC2RVlLfqChmBdxS5BOFuyFwpzA73rI4WXO/uuIDzSMWJG6JxCorXkkltQg8J/fYLCl9RYiZZT+w8YbPrO9wMTR86sEd1vsS8XqB2Ckmj8DuQV/tEK0jDDtSGBCFQFhBdVxSHZVwqhA3FP5YMNySVPOO6c01N8s1H5m9zkwG7gqHSRIbCnzInHeetTd8en2fi77mM2/cYrsv0a9b1E4yeQx2l5HLLaIbCK4lhgFZSWQhqU9KyuPxs/NNhT+G4aagme85ubnkRrHhaydvMBWROyLRZsmXU4kRiffIPUWGMlTkKFj2A3sv+MzmLudDw797eJvzXY14o4CNZnIOxQbUqkVtx7krum7kUAjKeUF9WsJCIW5rwlww3JXYycDszpLjcs/HZq8xV457YsBmQeELYhScd46tl/y79T3Ohwm//vAml9sa/cAi1+rw2Rm53iN2PcG3RN8jS4ksJeXCUp9WcKTIdzRhDv0dQTntObt1yXGx56PT15jKwHzn+I5v+I13ZeYZXHvetedde96151173rXnXXvetedde95b2zvaefbLv/zLfP/3fz+vvvoqv/PZmxCCGOPv+712ux1/9s/+Wf7BP/gH/O2//befvb5er/mH//Af8tM//dN853d+
JwD/+B//Yz784Q/zy7/8y3zrt37rH+jPLERmqgamMlBpEFkSjEPEjNUB6RIETVIFoh3zMfImkIngA2LXo7oesesRSoExpIlkGxr83rK3Nc4JdjMw2XFrSJyqMXOhkIG7xZqZjDTCIpBMomOTMg8XUwanqOpjzEaSC0NqLEEmfEpoBzJHZB8wW48QEqk0qRZsuhI/Nex9iZsJdjlTTD2DDJzWiiEasg1MywEjM/rpZ2vHkXSUGhSaZAdETBTGo6oMSZFEAc4gWkPuIomADBGxH1B9j9j2CCHAGqgMu1QTJ5ZdOcH3gu0cZA7cGhILvScisSJyy244UY6mtmgUj2aOVUpcdhU7Z2mmc8xSkGtNKgqiTQSfUNEhckK4gFkHBAK51OQStn1JnBh2ocJPBducUdPIID2LRvOBYJkXA5PSMVERKywgqZVnoTpKnTEEsA5CoLAeXSaEVMRYQNaIrSW5RCZAjMh9D4NDbMZjLRgDhWKfa1Jj2ZUNvldsp5Bz4mafmdmWDyWFFokbdsdt1VNJQyE0T6JnmQK73nDZ11wuasxUkC8VSVviVcYPCUXAXCWEH2//IYNcGYRV7IZi/OxQ4aeSXc7kaaIXntnE8LwvOa46qqJjoQOPhAY0lfRjToOOFCSU9cTSU5Y12maEESRfkKRGrC0p5nE1IKVxBa4fOeSUENaAMbSpIjcFO9vge82uAU/mxgJmpeH5fI4SkVPT8h69p5SGUmiuYuA8ecIAj7op222BqQVxpcjCEtcQuoiSCXmVIEb0JkBMyM24EtsOllwXBw6KXcyEeWafI/POcHexJNSKsggcGcdSSABKGcZ8ghwobcbGQCgcpTconRClIA3FeD380iIzZOnJOR3yUhxm3ZNjRBgDWtHFgm5SsdcNzht2FQwqczqVzCeaW3GFFmMuwvNmSyE0tTRsUmQSHcoHXj+e0e8VtgS3HTmkjSB0kbwDc5URMaJ3YbxWewNCKrrB0NUle1fhFpq9g2GbuRESx05xMtsSvcLayJF17BEgxlt35qbHZkVpM0NK+MLRedA6oTrIgyUZi1hayIIsAzmPuTFyP6A3Pdm5ZxyGYBl2DS0VQypoDXQFLCrNiZPMbIsiMVU977FrCqGohaXNiTo6qtDwlaMFoZcUZSbsFBlD2kjCkEBFzCpDiqh9QA0eudEICUNnGCYlbVcwbCxdB12XOekFbYTJpCcGjTaOReEIGITIGBGZ655CCCoDPmW8HQgholVCDpB7QyoKxLpAJEnWfpwzej9y2A7kvkcYDUrjB4PvG/pY0smSHtjXMNV7zlzGGocUmYlyPGeXlELQCIsjUwfHVSx4dbEgOCiKTLeXIAxpowg+IWTCbEGlhGwDsvOIrUZKcHuFW5f0W0vXFvQbaF1mvlf0JFQd8dEgTWZR9CihkQIUiZnuqUSmMpCywNuBHCRGR+SQyYMm2QKxHW/SSstx7hSDR+wH9L5Htj1Ca9CasFdsXDPmpZgaF2E3hUr03HSJpDKCTCU99+yaRiYaUZCAJjrWUfH60ZzgJGWZ0DsJypCmhpAikoRuQec45nLsPWKrkFIRNorNtsKtDPuhxC1gHzP1zDKIQKwFLhmEScyD/wN5xldTu/a8a8+79rxrz7v2vGvPu/a8a8+79ry3t3f08Owv/IW/wDd90zfxsz/7s9y+fXucXN9h+8t/+S/zJ/7En+C7v/u73yZVn/rUp/De893f/d3PXnvxxRd57rnn+OQnP/kflKphGBiG4dk/bzYbAHax4J9evYe9L3myXND3lnwpoZX4c0vcKfwTwdkmwcUSsXGIbY/cOYSPJB8RWqNmU3JVkucNYWoZbtb4qWR/WxKnmXTkUbVjWvRMzIAAIoIuZ0xOVAIMcCIVU5H5xuoh77PnnMotD47nvDK/wePdnPVzNY+vKszFFHMxwa4D9iqg9wG7cuADXCzR54n6y55Kw6QSyEIiji2bquIXz74BVQn+2ZnCzj1f8/HP05zs+WfLF9mFkvPl
gn1fjhz2gnBpiRuNuxCcLRMsN4hlj9g75KYfsxBcBKVQkwm5tOTFlNgYhlsNYXLgMDlwaDyTsmdqB6TIJAR9znQpUSrQAhZS0UhBri64Z5bclis+ODvh9aNjXr9zwm5Z8vi8Qa9q7OMKvY2U5x7VRYrlAD7Cco28jNSvepKCuhJIK5HHlrayfPLso+hK8LNnEj3JvPjHvsjRvSW/sHmBtWu43MzY7GtYSfJGEFaGuDT4leDsIsFmh7joEK1HrjsIiTQEkBLR1GAN+WhGqgz97YbYKPZ3FLERpGOPmASaamBme5RMJARDzrQ5YTIUAmZSUQoQxZJTveU9+oLnmisen8555cYZ+03J+XMNcltSPLiJ2ifKJwcOVwcO6x1yGale9xQyU1cCjEAfW1wl+dTph/hsI5EnEjmB93/Ly9x6/2P+ze45Lt2U1XbKcjeBzRhMGTeacGkJG8HJ4wz7DvHkcgxTXXaIEMlDIAuBqCqE0eSjKbk09HcaYqNp72r8RBBOPEwjVT1y0DKRDxz2OaFyohQwkRItNLbYMlVf4IGZc6tcc3k24Ysnt8at8XensLcUb9xAt4ny8RmqSyMHF2HXIdZbqoeeQmSqSpCNoDgyxFrx68cv8IXp+5FHCjETvOdjr/Gej77OZ/a3+LnhRbb7msvNnLwX5EtJ2iv8eUHcw+KhgHZAPFq+jUPqAwIQZYHQmryYkivLcKseMzJuK9xMEE4CLCJlPTAzHVYFUpZ4GDMBRKICKiG4rTSV6LDiJc7thFOz42po+MLiNm1bcnlnQtobyjdOUft84DDWg+gj9ANsW4rHfjyaUB44zDVxovn84j6vLd6DmEnkkeTOBx/zwje9zBf6U/5/yw+w7yvOVwtSK0cOrcQ/KUh7weShoGk94vEaOo9cdggXyEMgpYwoLLIqSfMJVAXDjZowt3Q3FP2xIJxFWIzHcWa2p9SehCAcOICgFlAIwS2lmQiHbL7MVVFypPZcuYYvzG6z7Uqubk8Je03x4AS1y5RPTtH7hF2Ogab0Azy5ongS0ESqQpAKgZ1q4kzzyuwW50d3EJOxb5w+d8WHvv0lXqPik+v3sR9KniyPiL0in0tyJ/FPLLGVFA8FZ/uAeLyB1iNXHWLwMMQxE8Ja5GxKnjXkpsKdlPijkv5U0p8JwlGEo4CZOmZFT6U9GYHP0OWEOHAwCG4qzUxGcvMKq6Jgca8dOUxvs+5qru5MGbaG4uECvZlTXAT0NmLXDrkLMDh4coV5EpC/FakszEuJqRXpyPCwOeH/c3KGrCXyTDK/ueXDH/880RT8N5v30PqCx1dH+EG/hUNBaiXikeBslxBPLmE3jpOi8+ACKSSEMePcOanJ0xo/L3BnFW4haW8J4iyRjwK6cUxtT2McCAgHDionapFRSM6kYibgW6rXWdtHHN3Z88RN+dL0JpftlNXdhuW6wD6Zo5cziquAWY0PJeTGg/NwcYU+jzRfCNQGpqVAVQqOLRfVlF84+yZUJQizAHz+HfvNf8nt2vOuPe/a864979rzrj3v2vOuPe/a897e3tHDsy9+8Yv803/6T3n/+9//Tv7zZ+1nfuZn+PSnP82v/uqv/q5/9+jRI6y1LBaLt71+8+ZNHj169B98zx/7sR/jb/2tv/W7Xo9ZcB5qlq7htXZB11nM2qD2kvIqYbYZezWgtoG0GUi7ntgNJDeQMyAF0khkWUBVQF2RK0WykmQEWURICek90ofxKWjKeKPptWEbLCiJ1hYjFFFEksiUOGbSc6PYIjTscslQSKIUuEJhC4mpFHamKCqB2gi0iuQ2EEMgO49oB2QGvZagNHld4KzhfDUjlZp2bShOHO/9li9TIbiMFRduyhvdnM2+wmw0aqsorxJ2PZ5fL1cjh7zriL0juh4yRCmQWpMrC2UBdUmuFLGQxAOHnDMyBJQ7cMgJrzW9MeyCRSlQ2lJKTRSJJBIWz1R5zoodXimc0Oy0BZPptUE3EmsVZqsoC5C78SaT3KU3Oex7
ZAa1Gc80s7aEQnGxmpNLQ7s2iHni7td/haMMy1DyyE952M252jcjh5WiuEoUVxm19kyuPHnvSNueODzlkIlSILRElQUUBuoSSk06cEgikciI4EcOfUCJRFCKIRh2tqDQCaEtUZpnHDSBmew4tXv2YgUKVrJCFpFOGORUYpVC7yWlHevXyEDuMjG+hUNMqO141l9sDMkqlssZqbZ0Z4Y4lZy++IhbwCYWPHBTzvsZT3Yz9Eahlxq7yRSXCb0NTJaO3DrStiO5QHAdOWWiEAilUKUBa8d6KA2pUEQjSeKQshED0kd0H9EqkqRkSJa9sWyMBWXJypJEIoqEzJGpHDi2e+6wQuvIBQ2mCuyzhVZRSIVqJYUxqDZhZEB0mZACuffQ9ogQMTuJQCLWmlQo1lcTVk1Jd6JxM019b8l7gF2yPHRTroaGh/s5cqswK4PeZarLhNpHJsuB3HnS7i0cYhrFUkpU8SYHSkMq9DhGyEwW8cAhoIaI7tLIAcNeWzZ2rAWhC7LIRBERBw5BS243awoTuEgTTD3e6BM7jRUa1QoKo9GtxKiIaCFeRlL2iLZH+HDgIJDrhlxKtrOGzaxmmGv6Y404anlfhi4ZHvopq77mjf0C9gq70qg9I4c2UV8NiD4Qtz158IShI4dIRoASIwdz4FAVpFITrSSqPK7YPeMQ0N343w3C0GrLpigIUqF0QSYTZSTnRCMHks7crteU1nORJqgh0EXL0CUKocd6sBq9zxgTkTuIy0TKHrphvEFsf+CwrMi1ZTep2C4muImiWxreayMfSIJBaB75Ceuh5rX9nNRpzMqgupGDbjPl1YDsDhz6AwcXRg4SlFVQFFCVz/pGLCRJQSJCeguHNiESDMrQacMmFHgpULpACogyEXOiFo6sE7erDaUJXMYJok70ycA0UyiF2kqKUmO2El0kVJFJ60xMh1vEeofei7Ff2IJ8VdDVBfurKaFWtGvDraD5YJR4LXgcmpFDu8B1BrvWyFYeOCTKK4fcH+bObiD2Pck5ch45SCuRxWHurEpypYlWEPXIIeeI9B7lPLobOTht6LVlGy1RZrQuUEIShSaIRCUcWQVulhuMiaxTRazBIYmNGG88m0iKWmEnAr3MKJNIO0eMo1yJdkAi0EiEsaRVwVBauuWMWGs2R7//3VVfbe3a864979rzrj3v2vOuPe/a864979rz3t7eUebZd37nd/LDP/zDfO/3fu8f+AOfttdee41v+qZv4ud//uf52Mc+BsAf/+N/nK//+q/nJ37iJ/jpn/5pfuAHfuBtq4sA3/It38J3fMd38Hf/7t/9Pd/391qRvH//Pn//U9/MnWnA5IwNNTJp5CDIQbDrPYNPPGinrIaSVzYLHrcNV+uK1aZAbRV6LTGdpNxJ1JAw+wTBE/uORCDkgSwzlBllBZO5whSC6alClwJ1yrgd+FSRSkjHHsrI6WxJXfbcKtbMdMeEQJUjJpaoUILPY+iei+yGyNVQ88Z+zkVX8aXVgrYzXF1UiF5irxTKCarNeN2y3UWIkdi3lIuej//vv8SNF7cciYGCiPE1KhmEGwP49r2nd5HH3YTLvub1/Yw3tlNWu4LLZYXaS/RKoXtBuVEolw+fEUh9R8oBnweyTFBkpBFMFgcOJyMHfQqignSsyLUgLsYA2ePphkm156zYcmT2NASaHNCxRIdqfDTtYPCJbRdYu4Kv7I5YDSVfXB6x7w2XFzX5KYdh5KA8mF1E+EQaWpQd+Pb/3cvc/6MrFmKgImBChYoW4cdAxm4ItC5w2dc87iY8aRte3izY7i3nVzWik+grhR4E1Xr8DLuNh5XKlpQO9SAiuRjDTJu5xBaSyYnCVgJ1PAadpiNFbiRx7kmTyKLZMm+2HNk9Z3ZLRWSaAyoZTKghAAP4kNh0gZ03vLI9YuNKvrA8Yt9bLi5qQq/exkEPAr0PKJ+IQwfZ8c1/6St84HvOmeJohMPEEh1KRBAIJxh8ZN97VkPJg3bKVV/x0vqYXWd4
ctGQeom51G+ydmC36bBC15FTwOd+jC4uM+hMM1PYUtIcScpGoo5ATiHNJHmmSJNAnAem1Z7j6YaZ7rhZrClFZJY9KmusbyAK6DMhZjZdoPWSV3bHbF3BF66O2QyWy4sa1ynsUqF6SXngYNqIGhLRdeTg+Nj3P+Cjf+YBDZ6pGNCxwMRq5DAIXIjsOs/OW17fz1gPJV9YnbDrDY8uGmKvMJcKOcixHhzYXUL6NHKInpCHcfIoRg7VVFHWknomqWYSOQM1hzyRpCNFqiNx4anLntPZkokeuF2uKERgnj06S4yfIJKEPpNiZt0Fei94dXfE2hV8aXXEVVexvCpp9wa7GiW03AlMJ9FdRPeJ5Aei6/ngn3jCN/9vX6OWnrkYUEljQ42IAjFIQkhsO0/rFa/t52xcwReXJ2wHy6OLBtcr9KVGHcYHPQjsLqJcJg0dKThidkQC2WYwmbKRVBNFORE0C4VsQB1DriXpWJLLRDz2FIXjdL6kNgN3iyWlPHAArJ8gooIhkw/10Ht4bb9gOVS8vJ5z3tasliW7ncVsFHorsa2g2B/G8zaSgiO5jvvftuTb/+rL1KXnSPSopA4cJHKQxJjZdo4+SF7bzdk6yxdXJ6yGgicXDW2nMVca2QnKrcT0ErNPI2vXk/xTDp6sMxQZW0rqmaKoBZNjhaxBn0AuBelEkctMPPFo6zlbXFGbgTvliko65nhszhjfIJOBIUOAbR/ofOLBfsZF3/D6dsYbuwnbTcF6XaB3ErNWmF6M85pL6H2EEIhDy9nXbPn4j36J6WLgSPToLLC+QaSRQ46Zbefpfeb1Qz18aX3MVV9xflWx3Vv0SqF2kmIvsa1EdwnTppH10JPwhOzIauSgi8PcWUmmJwpVgj4FSkE8VeQC4olDFoGz+ZLajv2iUQMzPEVOmFCjogWXwcN+iLQu8qSb8Kib8mjf8Op6zn5vWV6VyE5ilxI9SMqtRLmM2Y3HtoZ2yS++9PfflZln15537XnXnnftedeed+1515537XnXnvf29vveefbrv/7rz379gz/4g3ziE5/g0aNHfPSjH8UY87bf+1SS/ofapz71KZ48ecIf+SN/5NlrMUZ+8Rd/kZ/8yZ/k537u53DOsVqt3rYq+fjxY27duvUffN+iKCiK4ne9rkTirl0xk55jWWBRkCEBF9GxTwmGM1SY8njXIFpFuCrp1hPMlSJeKFjl8arujR8zEDqPudqTnUd3HeQ8bvHWmjidEEvL/mxBqg3t7QJfS9pOEBpIaUBOAi9ow6nYMzctp2LLXbPiltozl5aJtKQ8rmytU+QyBR74OcFFYr/AbBaIvcY9mcBOkR9p9B70w3HVQHct0kXUukMzrs4okblt1hypnqOqoBTjDVMZuIqebQoYdwJhznJfIHYL4qagv5yi1grzRJG3GSMSYhdg4xF9OASretS+fZODlKTZlMFq2hsLcm1pbxX4iaLdC/wEUnAwCzwvS27KLZV23LRrbukt9/WGiTTMxWFlgsQuRc5T4DxM8ENGuilmdYTsFP5xQ2oVPDSoFvTDTN4ndN8iXEJue7TskS4iyNzQW87UnnllacRYwzkL1smzjp43/JzgI22nkFtJ2ln68wlip7APNWk/cmAfyZuAdAF91YJz6H1LTgmEREhBnk5wheLidE5qSrqbFj/VdDcEwxzS4Mk+cC8vuacLpMzcLVacqh3vMysaoVnIEg4cupx4HD3LWOKGjPUNr64WdL0kPKpxnYGHGrUXzzgon8guIncDcuiQQwDgxOy5ozbMpGEq7DMOuxS4io4nYYJzCT+AXAtyZ+mfTEh7RWoMugUjE7QRth3CR/SqhcGh2o4cwnhLkZQwafCF5up4Rp5W9KeW4cjQnwj6Y8hHgZQ8N8OG563lHlfcK684Ui0fsFfUQnIsSwSQSAw58zh61knjh8Sln/CV1Zz9IAmPK4a2GPvFTqAeZ9hkVBz/32XrENsW2Y3n3Re64z16SSM1C1E849ClyEV0XMWS
fsjgI2p9Qu4Mw+MJodOkByPrkUMajz74iF530A2oriN7P95aJQSyqfFlyfJowtW8GRmcWtxc0LaQZ5GUHcfNnhcKzS3W3BNXzFTPB80VjcycyBKFIJLwBw7bLIl9oIwTHqym6F4QzyvcroInCrVWyIuEWGXE1SHXpveYVYvajxwmauC95pJaSI5kgUBAFvQ5ch4cm2Toh4zyDXp9DINmeNzQtxZ74KAfJcQuk7sOQkBue9S+Q3VjRsYzDlWJrw1uWrM6meKmmu5mgZ8K2m68VSqJgWnd88FCcZJ33C2uaOTA++0lExE5URUGSSQRSTyJYbyhbHDYMON8U7HsGtJlidvU5AtNWirEMqEuMqwdug2IIaCXLXLrIEMtPc/bK2oBx7JEHjj4nHgSHbskcUNEhwazPkL0Cvekpm8L4iON3krUk4RYZ9TQjxk6uwG13aOHgdQPzziIoiBMGnxTsjmdERpNe6vETwRtD6lOROGoKscHC8Vx3nOrWFNJz3vNJQvpOVYlBYo06jsXMbDJETvcQIcj1juL3k/JS4tbTkgrRT5XsAH1JJG3Ht2OeSp62aI24+4LKwP3zZKJTBw3WzTjDVMROA8D+wRxiJjY8Pp6hhgk/ryi29XYJxq1ksjLjFwmZHSwDci9Q632ZOfGuRMBUiCMIUwn+MqyO1sQa017u8Q3grYXxDqTGNC154PWcMyeU7ul1J7nzJJT1bGQBbUwjBekZ65SYJUir7gTsg90rURvF7A1uPMJcivhsSbtQD9K5H081ENEr/r/qMt8NbVrz7v2vGvP+8/reUeyfMbh2vPePZ73/kJz89rz+GChOL32vGvP+496Xkkt9bO58z+15/2+H559/dd/PUKItwXH/rk/9+ee/frpv/v9Bsl+13d9F5/97Gff9toP/MAP8OKLL/IjP/Ij3L9/H2MMv/ALv8D3fd/3AfD5z3+er3zlK3zbt33b7/eP/ax1yfCL2/fjkmbdzRm8pd8VRKeIu0waoN8U+F7TrgqGvaZew51Nh9h65NaPwXzbQzZG5xExQ4ggJbJpQKtxm7/V+KOaVCiGs5JYKrobklgJ3HEiVYlq4ihqT2MdlXIoMWYk+CzosxifrCaPEhKFZCJBAYXpaeQDntdLbpsVq2nNK80pu6Hk8dmCoTds79WkXqDOK2RfYi4qfNkRp09wqeeT+/fikmbTzeh9ybC3+EGPHHoYdhbXGvqtpd8Yih3cXnWIfUCuHKKPqM2A8IncekgJwngTl2waUBKaimw0flGTS81wWhArRX+mCJXAnSRinSmnDnvgUOvfzUHnTMHIQSOpJdxAUBuPFQ9ZmyvO1Ia1r3i5vsHeFTw6PWIYDJu7NbGXqPMS2RWYy4ICjz++JLHj0919hmjY9hNaX+Fai+sMqc3EFnxr6HcW12q6VYHew62rHtFF5NIhhohcDwgfofXkmMkhjpkIdTVKRF2B0cRFTSo17qQg1pr+VOGbkUOYZIqZw9aOpnDU2qNlJGVBQNBniSIzZI8SAo2iFIIzBRORUDxiayzHcs8mlHy5usHOlTw+WdD1lu3dml2nUJcFYl9glsV4zfjNLbDkt7pbfDo+x25o2LkG3xmG1pB6SDvwvabfFrhOsV+WiA5uXg7kPqEuB4SLyNVYDyOHRPYRkUEUBaKqxm3dRhPnFbmyuGNLmBiGI4mbCfwi4+eJYuap656mdNTKYeW41XvkIJBw4CAxKAqROVGaiQTBOTuz5EjuWYeSl8szNq7mycmcfVeyvSrZ7w3yyiJ3Fr0q0btA/1wPnPPScMpn93dpfcW2n+IHTb+35AHiFsKg6DYFoVfsrkpyLzi7cORhQF4OiCGi1iMPWj9mAISISBlhLaI8bOu2hjQtSU2Bnxv8zOLmkuFIEKaZcJTQjWfS9DTlcOAQyHk8ljRkgcqZPgfMoV8YITlWmUkGiku2acXRUcvSV7xanbIcGi5OZ2zamv2qYLW1yJVCrifobYlez2hfyGQe8bpb8Pn2Fn0o
WHczgtf0u4LkIG4gOkm3LgmDYndVEHrJyUUg9R559ZbxYYiI1o85QiFBSgitEYUdt7iXltQUpGlBmBjcUYGfSPoTQWwy/iih6kBTD0zKgVo7SjUKemTkoBH0OZCEwKDQKBYSGpGhWHLPbDmVO86bhjeqY877KVdnU1a7hnZbsFoVyK1ELhvUvkRfTTj68BOyesB5mPByf8oQLKtu/oxDdIK4geQF3aokOMX+qsT3ksVlYtp2yOUwjhNbh+wCsg+kISBigphAKeR0MgZxlyWpNvhZRaw1w0lJqAX9mSBWGXeUkVWkbgbqYqA2jkoFBJmUBS5L+gx9DiASGoVGMpeKKguE3XJTt9yUWz5QPuJxveDB0YJ123B5Z0q/s6yXJWIvUBcNsqswlw3l80uyecA6VvxC/yFcNKy6OT4Yum1BcoK4heQE3bogDOP44DrF9AqqfYdcu3He2PkxzHUIY25MesvcOZmA1eM2/9Lg5zWxUrjTklhJujNJqMAdJygj9cRRlO7AwSMOOUvuMEb0OaF4c+6cCo2VAm33LNTAPbXiOXvB5WTKa7MTdl3Jk5tzhs6wuaygE6jzGtmVyEcC/tkfWDf+i23XnveH43kT62h+D88rc8bmgEK8KzxvUgxM9ID+HRxMzjgRkAcOX+2eNykGGu0wv5fn8WY9fLV73qQcaPRAocLbPE8BLodn3v9u8LxGD5S/h+cNOWKEQKEwgq96z2uMo1QeBG/zvCEHOMzpX+2e1xQDE+Oon/rNWzxvyBF5yAV7N3heY8bvQPJ3eN6QIzqD4j+P5/2+H569/PLLf7B3/o+06XTKRz7ykbe91jQNJycnz17/83/+z/NX/+pf5fj4mNlsxg/+4A/ybd/2bX/gG5gAQlZ8xR2zDhUvb47ZDSW7q5rQa+yVRHWSYjlehVssI2aXKDY91c5B25P3HXlw41PklMgpkYVEKIVQh4wMa2DWkAtNmlfEQuJniliMq5CxyMQigY0YFbDCY3LEpARREIPECcWAGoVCghECKxQCSSkkWQYkO2o5oKVjaRukhaWv6UvFzpWsa0PoFaGxyEHCzBCMJJeShOR1t2AZGl7dHrHuK/arUSjMSqJ3ErvJYzbIJmHXEbN3lOseekfe7sk+kNpxBTbHOOYAKI1QGllYMBqmzTh5zEtSqfDTUaZ8DaEaOSSb0CpQSI8hotN4BjoFhUfSC4VJgkFlNAIhRw6FSEDkltoxkT1IzyZWCCNYh4q+UGxdybrUhMEQGovoJXlukCmQGk0GHvoZ527K67sFl11Duy4ZdgV6IzEbidll7Dpj9oliGdGtp1iNty/lzVs4pDRyQIy3jEiFtG9yyAcOsdT42YFDA77OxCKTbETpSCEDloBOERkhRolHMSBRcgzh1eKQ80GmQCBF4qbeM00duQpsU4HQIwdXSKSr2FaK0FvC1CJaRV7ocQv4bOTwJEx56OY82s94tJ/R7yzduhpX2FYK02bsoW+UVwHZBabLDgZP3uzBB2LbQYwHDiDUyEHYcryBZtKQC0Oel8Rq5OAbeeAAsUxkm5A6vJ1DysQoCVkyCIUad6+jBQg5ZhpYBIrEmWqZSohlZJcsUsMq1MQChAvsa4nvBGJmYK+xa03eQVyMq7DLUPPqcMJl1/D6dsHQWvarCtmO231VB+UVqD5RXQRkH5ledeACeb2DQz3kpxxyBqUPuxQUwlpoanJVkGYFsTGEmcLNJH4CoYZQjv1CmoiVASsiJkdUyqQoCUgGqZAJejWeaqiEQgiBBRSZU9UzkYksEhtr0TpxFSeIMpOHSD8Bv5eIhYaNxmw1eQPptAABm1jy5eGM5VDz6uaIobfsr+rxyNClRPVQXIIeMuWBQ3PVIYYAmx3ZeVLbk0N4k4NUYz0UdpSquhq/gE4scWYJjcRNFaEZOcQyj+ODjlgVsCKgc0LFRAqCmBWDUGiZ6YEsGG+oExKDQInEiXQ0ooMicmR2lDowbXpMHQizjN8lwkzBTsNCo/caFgXh
5gaEYJ8sLw1nrF3Fy+tjhsGyu6zJg8ReqvHYwmVG9ZnyMqD6RH3VIXoP23acL7qe7Pzv5mDMKNllCZMaakOcF4RaHsbKcXxIZSYVEWkCVo59w6SISokUJVFIBinp8zhnIKEU8vDgSFCKPN64JyKqCEx0y8Q4ysrzqBlwjSC1JWGioVP4qUZ1GuaWeLMnK8GQNF8eTsYvrutjOmfZXTbEQf0eHCK6i5SrHtl62LXkbiD3A2lw47yZEkhJlhqh1Th3lhamDak8zJ2lwE01sTyMkyUkGxFmzLwpRMDkcc4gCqKQOCHplRq/dEgohAShDhwkc+nR9BTCU6iBI9thisSFa+gqhRgKVrUh9Qo/scheY8rmf4Tl/JfXrj3vD8fzLGO9jjFAb/c8mxkXQd4FnmfzOJ/9Ts+zWTJk0EKAeDd4XsS8xW+eep6WggHQCOy7xPNsisj4uz1vQKDh4Lt8VXteId4+rz/1PJUEg4IsBFbIr37PkwGTAjolcpBv87zh6S2hQiGF+Kr2PCsDJo/fgXIURN70vAGBFOOc8W7wPJvHsTJHfpfn6Qz2bRz+03neO8o8+7Ef+zFu3rz5thVJgH/0j/4R5+fn/MiP/Mgf+A8Cb8/CAOj7nk984hP8k3/yTxiGge/5nu/hp37qp/4Ht/P/zrbZbJjP5/yfPvUtzBoxXjscCkgK4UpEVIgeRATXB6JP7HtF7xSrrmTbW666gqu2ZN9p1ntN7sebWZQX6FaiosT2ChlBO4HIeRxQSITsSCIRVCDLTCoS6IytQVuYTTJFAdNpoiwTRRMxZUbWAlEJcinJlSCXkVwFrHWUZUuhPY3t0CJSCo/MIJOEpMju6bltDRFSH8ki4W625PLwZZw8coga4QtE0IgBRAA/RIKLdIOiHRTbwbLqSja95cm+ousVq60hOUnaKaQXmFYig8AOChkFxglEghwD5LdzSHKcOPJbOEybTFVB0yTqKmHr8YcsBaIWUApSLcEmUuPRxlNXe4wKTGyLFolKOgSgooQkyb6GqFCDhihGDinhb3Skxr+Fg4WkEa5ABINwIDwEF/FDZHCSXa9oneWqrdg5w+NtRecUy40lekHeasTTeghirIck0E4g41s5eBLx7RxMxlagC2jqTFNBVScmTcKUiaJJiEIgJwKsIE0kmERqAkoH6no3cihajIiU0iPJI4csyK6GpJGDQgRJGiI5JMJpT5w5dE5oEjkaiAbh7fgjgBgg+oQbAs4Ldq2m85rLtqZ1hofbit4rrtaW4CV5q8ALTKvGeugVKh44JMghQk5EPDEHoopjUKjJJJMwJZhqXLibNJmyzEynEWMz5TQiLMiJBCPIU0nWmTTxSB1HDnqsByNHDoqETBKRBPiKHA3SjRzykEg+EY564tGAyglDIkcN0Y59wpWICKKHFDOu8/gg2LaaISgu9jWtNzzaVHRec7Wx+EO/wB3qwUvMIFFBor1ARUYBTb8HB32QCQu2HhfuZtOMtZn5LKIPHKQBOT1wmEiyyeRJgGccPBPbYVSglB5NQiYxZmeEghws0iuEV2SXSC4RZ45w2qHI6BwhKQgFRIV0JUSBfAuHcODgguRiX9P5sR5ap1luLINTpJ0iuzF7QzmBGRTKS3QQqADERE7xkCfkSfIwVqpxrFQG7Pj9jPksY01mNk8YkylnAfU2DuMtU7nxYBNVtccYR2N6Cu1GQRXxwEFALMi+QASFdIrsE2kYxxd/1iFlGr/wHpiNfaiAKMfrywO43hNCZtdqhiC5PNTD4+04Tqy3hrbX401WvUT3EjUIjJNor1ABtBfPvpglIiF7sowEFUkqkYqM1JmiAW1g/qwe0rN+oW0e+4Ud81SyFeQ6kG2krFoKO1CZgUoPWBGxIiCyQEYB0ZJ9+WzOyCGT+kgqA/5mi1AJm+P4hDKU49w5jL9fDEAA/1YOXrJsK3bOcrEvWfeWzV6z6wy5G2/zUoNA9xId5FgTh7mTlMnxd8ydepw7Y5EQv4sDzGaRwmaK
ZxwEohDkWpILQa4CuYwURU9RdpR6rAkjIsVTDklA1GR/mDt7DTGzu+z533zf//ddmXl27Xn/03peWWSm00RR5meep2qBqOSYefNu8LxJpi4zTZOpfofnyWbsr6kW7wrPm9SZqso0b/E8WYjDOC5IE0H+qve8zPTgeZNpepvnqYkCC2ny7vC8+TRhbWY2S2/zPHXwm/Ru8Dz7lAPMZm/3vDc5fPV7nnmr7/4enieejg/vEs8rbGY6S2/zPDWRB88bb1X9T+157+jh2fPPP89P//RP8+3f/u1ve/1XfuVX+NN/+k//T756+T+mPZWq/8unv5GvPVoykY4zBSVQSYsep1USmW1ydDnwJJYso+UNf8TDsOD17oivdMdc7WuebKekvSYtLboFeyUwHZSXCdUlysthPGe/3IPz5O0OQiAN40omOYMAYcerjsWkQVhDmjekusAtLKHRDHOBm0r8LOPmwCSQ555503FzvuXU7nj/5JxjveeF4jEz6bind5RCMJcWzbjNN5Pps2efBZ/sz1hHzXvtBXPZcyKhFiMHc/i9mcwuedrsuYgFl9HyOMx5zZ/weJjxpf0p667i4XpG7DTpyqI6QXEpUD1UFwk9JIoLhxwCarlHDJ60G1ds8jCQYxw5wLhCoTSiqRFFQZrVpKbEzwx+ZnBTwTCXhElmWAB1JB87JuXAraMNR7blg5MnLHTLB4pHTOXAfb2jEpmFtOjDtm8Q9NnRZ/jV4YSHoeQ95opjtedYwkRAKQxW6Gcc2hTYZccqGR6Hkss44RV3xqVr+ML+Jtu+5PXVnNhr4mWB7EcOuofyIqH7THHpxhuYVntE78m78fx3dm7MiHjKwRqENuNV4FVJmpSkaU2YaNzC4mtBfySJdaY/BqpIPvFUhePO8Zqp6Xlx+pi5bvlg+YiZ7LmndzQiMZcGKxSKceVqyB6XE/9uOOJlP+GuXnJTb5kLmEuwQlMK84xDnyObNLBLmgexYh0rvjTcYBVqfnt7i60reX21wB04iF6+yeEyo7tEeenGG7nWLbIbyPuW3A9k78eMiEMgizDm2WqNqGtSZUmLhlhphmNLLAXdiSRW0J9ALhP5xGGLwJ3jFVM78KHpYxam5QPFY+aq5Z7aM5WembSUBw5SjNvAfQ78ppvzOTfnhtpw16yYCjiWYISiEvYZB5cj6+Rok+SNWLFNJS8NN1mHit/e3WLjSl5fLugGS7q00KuxX3RQXuXDyq7D7CNy0yL3A7nryIe8kOzcOGjlDFqPq9rWICcNubCkowmxVAzHxcjhVBIKwXA6rlzlE4cuA7eP1kyLgQ9MnnBi97yveMKJ3nFb7TmSjok01EKPR2SExOWIy54v+SmfGY44Unue02PexqkcV7gqYQ/5I5mQE6s00GfBG6FilywvuZusQ81v726ycjVvrObs+oK0LMitolgK9B6KVcZuM2bjsZuA3PXIbTfWQrsfb3J6GgSe87jt3VowBvF0l8PRlFQqhpOCUEj6U0koBcNJJpYZThyiitxabJhVPe9rLrhRbHmPveCmXnNDdZyqnkZoJtI84+BzZMiBN0LFv+1PqOXAe80FjUycSrBCUIkCiRjzJnJmnQb6nHkQK7bJ8OXhBsvY8MXdDS7chEfrGauuIq0seaexG4HZMN5wts7oXaBYeUQ3oNbjKmbe7cc5w7mRwSFbSBYFaD1yMIZ0PCEVZuwXlaI/loRKMBxnQg0cOWgiZ7MtR03H/fqKe9WKO2bJPXPFsRq4pVoqoZhKg0SihSTkRJ89F9Hyyf4MSeAFe0EtAmdqvFa+EhaFJJFJObNJA31OPIwlm2R41Z1yEae8vD/lYT/nyW7C5a4hbw1pbTA7gV2B2efDTqBIceWQvUeufo+58ymHQ27I07lzrIcJuTQMRwWxVvRHcjwqtcj4CTDz5FngZLLnZLrndrnm+eaSG3rL8/acuRy4q/eHVUuLZFzJTDnzZNNz78U33pUPz64979rzrj3v2vOuPe/a864979rz
rj3v7e33fWzzre3Ro0fcvn37d71+dnbGw4cP38lb/qE3lxWf62+Ts2QIFTFp+qEiRk0YFClIQp9IPtP3Gu8kbVfSD5a2LRhai24lx7sIXSBvW+SQkNuAdAm19eN57zaQQyINfjzve8gFkVU1hgbacZtzbgqS0aRJQS40YWaJlcZNJaEW+JnAN5k4SaRJwtYeWw00haPRjlJ7tEjjOeAsCYcz8gJwOZIQFEIgEBihGEsFIpIv9jfJCJyvCEkzuIoQzJgL4iVxSESXGQaFGxT9UNB2BX1v6HclspccbRIMA3nTIYaEesYhIEJCtP5wK1FApMP575zHyVKI8dG6kuPWZqtJTUGuDGFiiLXBTyRhIsbOMcukKpFnCVUGyspRHzhUymNkfJYlEpG4LJAiMxBJh3wWCWihxi3wCDKCl90pX85n+FDio8H5Eh/syMFJosvEIeGdou8Vzhl2XYUbNN22JA+So3UaBWndI1xGbTzCZ9TWI31CtuHAwb95Dj4fshGKYtzyrxS5suTCEGtLqiyx0YSJHbf3zsSYHzIfJ408i8giUlUDlfU0esyLe8YhCwISlyWaxJATkMerrvOYFyAO2QEAr/tjXncn+GjxocAFi/Mlycuxb/hM6BMhSPpO47xmt69wXrPfVEQnma/zGIa7Hg4cAsIf6sJHRBvAJ7ILpBDf5GDMuM1dK9B63PJfWWJpxpyESuFnllhK3FwSS8GwgFRk0iwibaKsHEXhacybHMZMFYhZ4g65CUNOiAMHneVBMg2K8VjA4zDnsZ8fOIw1MbhqHBsGRQqZ0CVikHStJgTFdl/jvWKzqfBOMV0LahfIa4d46/iwC2M/6QK4OB4F8RHSOFkIrRHGvMnB6rFvFBo/LUiFIswLYiEZFpJUwHAE0SbiLCGKRFU7jA3POFgZUGI8XhGzwGd5yJdJKAJGKEQWKARWmMOXD7gMEy79hJAMLpSEaBiGajxO0CtShNBmYuQZh92+xnnFZlvhBkWzURRDHLf39xm1G7f9y31E9hE1BBgi2UeSCyOHlMfjUU/zdIwmG02uS7JRxFlJtgo/t6RCMiwU8cAhFZk4i+QiUdYeXQQaO44RhQxoMY7DEYlHjlu+RR63xh84SMRYG0IhgE2s+PfhPjHpcc6Imt5VpKgOHAShTaRnHCS7tsI5w3Zb4gZNtVWoLsJuT24zqo3INo75GG1Eugj90zlj3LXAYbu7rGuQEqwmK0VuRg5pWpKswi8KkpW4o/HI2LAYj0GEeSRXmaLx6Mof5oyBUv0ODoc5Q5IpDrszxoPQUAiNERoBtNny2e4uKSsG/5a5M2lCr8hBHPoF9J0iBMW+LRmcZbcrGHpDsVOctBH2nrzLYw3s4pgfsx/7iOgOOTrDoR5iHMOG65rxnMJhzmhK0oHDWA9j/3ALRSgFbn44MjZL5DphG4+p3JtzpxrnTkEmMo6VQ5YIxjwR/RYOVrwjRfqqaNeed+1515537XnXnnftedeed+1515739vaOzPD+/fv80i/9Eu9973vf9vov/dIvcefOnXfyln/ozWXNA3fGLha8tj9iFwrOtxNaZ/DbgjiM19qqXmB24yqC2Y+rCHofxytoO8987xCDR+z7cSWlH8bVhMGRc3omUVmIMSPCmHGgKCxo/SxYNE/LUaamhlhI3HScMPyEQ1ZCJtaZXCdyE0eZKD2V8VTKUx4GzLcWhj90EJfHfIpxuhgD5s2hTGKWvOaP2YaSN7oFa19yuWvYDQV+ZwmdRu8lqhXoFvReoLuM3WVUn7DbiOwD851DOI/YdeMW7a57tpqQ03+Ag5QIW7zJwRrypBhlYqKJpcJN5LOsiFALQp0Jk0wuM7mO2DJQFp7KeirtDwNFQjKGCcY8BgrKLHAHmTBCIQ4cOIhmQvDAL1iFmkfdnCtXs2xr1l1JaA2+NSOD3Xj23+wEuh8zQvQwZoRIF5ltBoQPiF0HIY6rS/GwqnLYqkzOIwd4ez0Y8yxYNDfFKFO1ItQaXwt8
MwYo+okglhk/zeRirAdtDxyMewuH8ZatfODgs8AJgSchDpOIZAxYlM/kEh77GZdhwvkw4Uk/ZdOVXLVjTozfWVQv0NtxC7PdiPHq+k1GDRm7DkifmGwGhBs5iBDIbT9uy+3dYev67+CgDxxKA8bC0/PwlSU2BbFUhIkeB8mpJBbgpoJUZNw8k20mNwlpxn5Rmqf14Mct2yTy4e/ZH2rC54QCNJIsxKFvSJQQAFyGhsd+ztLVPOxm7IaCi/2EMCj81iIGid5I1ADmwKHYZKTLFOtxYmiecth3CBfIXf/mqsohI2PkwLPsFKEUPJXs4lAThSFNxsnCTw2xELiZJFpwM0G0GT/LJJuhiQgbsU85HFg8lUsOXzb8oSZ8zngiEjFm6CGe5QYArGPFA7dg7Sve6Ba0zvJkOyE4hdsWCCfQa4V0Yz2M///5UBfj9fD19hAku+8QzpP7AZwfV9lCGMeIlMiHMUIoNebIPM2IMOYwRuhRIgqJnxqSlQxzSTIwzAXJ8mY9TCLCJkzpKe1b6uHwZQMgIQhZ4J6GVBMRiDHX6MBBI0HAPha87o7YhpLX9gtab3myneK9xm0s2Y1XgEv3Zj2M/QLsJqCGRLFzVH1AtAOiH0OHGdyzlfinHACSEKNMGTOuPFblOFY2FcJo4lOZmhmifVoPYqyHgrEeikyeJCgOHIo368GKgBJpXFnO46UL7lk4c8QIsGNVopHPJLtPhteGk2dz5z6M9TA4g9sUZDf2CzkI7FaM48Q2oXsw2zheW753lJ1HdA7RDePq+zCQ/bgSn1OGFN/G4dlYWRbPOKA16alcz8Z6+F0cpocvn5NIrhK6ClTF0zHCUciIJh2CeOWhHkAeOGSRMeNXrnHOeJe2a8+79rxrz7v2vGvPu/a8a8+79rxrz3t7e0fHNn/8x3+cH//xH+fv/b2/x3d+53cC8Au/8Av88A//MJ/4xCf40R/90T/wH+QPqz3dzv9//NS3UNaGjMAHS0oS70pS1ODMeN57EIgAeUgQxtWo6DN+kHgn6Z2i6zW9U2x7zeAF207jvaBtJTlA6iVEUMN4/l57OU5oUSOzQKbxL2tsgiQiSWSijCSRiDoR1ZgLkHQGm8eshCJjy0RpE5MqUprErAoUJjKpPFonqjKgNOhSIJVAlgoUpAKEiVSnlyg7kLIgZzGuQiaF9wUpGnAagkY6gfCAz2SXiD6Rhoz3AtcrnJe0nWEIks3hLPymVfgwckhBvJ1DBBUO2RBRIbJEJXlYI33KYQy+DDKRRCSqTNSJpBPJZDDjwKlNpqzSeBa8DliTmFfjz9PKoXWmqjxKgSkFQoMsFEILUgHoTHV8ia460jiz4aMhRkUIBTEY8Aa8RnoQTkDIY25CyMQ+EYJg6BQ+SPadwQfJ+vfgEIMg9QKCQA7jufOnHFRSyCRRWSLy04p4k0MUaayJpxxUJtoxRyUXGaUzZZ2wOjOrI0YnFo3H6sT0aT1UAa0yuhJjfmWpEEqQC8gaiqMlttmRsyBnCFEToiYGSwgWwtg3RAA5CIiZ3CdSHDnEIBg6Oa7EHDhsWoML4lAXI4fwOziIKFBBIqNEJzn2iSyRb+GQRSKJRBKZICNJpjc5FAkU5CI946APHKxJzOtAoROTymN1pKwCWmdMAdKIZ/WQ7cjBzjcUs/UzDjFpQtDEaPB+zIIYOYgDB8h9JMdxhTLFcSUmvo3Dm/XgvKTrJN4L0iBIXqCcQIQ3Oagkx5rIApnVWzjkZ2NEkJEsM0Effi4yqEwuE1JlyjofOISxf1Se0hw4mEhZRIyNaCtQViCNRFhJNpANmMmO8mj1bOd0TIoQDTFqvCvHTIjBIuKBQ4A8RPKhX7zJQbDvDM5Ltp1h8JJtr+idpO8FbpAkJ0hOIP3hRxyzQmSWhzFCjH0DMXIgj9kxJKI68DAjh2gzWWUox7p4Wg/TOlIcxsnSRppi/LkoAtZGlBXoQiC0RBaSrCFb0FVHeXIJZHIWpCwPc4bCu5J8qAfi+CV8
rIdEjmnkEGDoJcFL2l7jvGLXazqn2PeSdlAMg2DoJelQEzIIpBszGZQf60DHMThbpd/BQSSyyGOejshEHUlyDKXOKkORQWeKcsxOmVSRyiaacmRQ2Uh9YFAUEWU4cBj7RtaQC1DFQHVyCSqSkyBniX86Z7iSnBQMduQwvMmBOO5gSCHjDhyezpvtoNgPmm6Q7HuFc4Kul2Q/zp0ignw6dwaJSBJ96BcqqWccIBOfcpCRLBNBp0O+UCYdOOQDB1Nk6mJkUNnItAwUNjIpA9qMc6fUhznjLXNnGzx//X/9f39XHtu89rxrz7v2vGvPu/a8a8+79rxrz7v2vLe3d7Tz7K/9tb/G5eUlf+kv/SXc4fx2WZb8yI/8yP+shOqtzYrI+4oVtXScSkchEhOpMchxm/OzCZ7xjDyBfUp0ObFOBctUsIwNT8KMZWh4MIyreau2YXCG9a4hOkVYW4QTmLVEeYFdZ6TjbSsX2SXUdkA6j3z2xP5wW4f3z8I2n+Zm8HTl4pAT0JYFrTVc1RXZqvFJdSFxhyfVw1yOT+pngmQzbpZpmp4/Wf8yz9mO+3bFRA4cS0ctIo3QWPG7OXgCQw50ObNPkV02XMaKTap46OdsYsVrwzE7X7BqFzhvWG8b/v/t/XuwLFlZ5o9/1srMqtp7n2vfLzRNgy13QQHbhhlkgg6BcYx29KvoMIrGDAQKoYg6BsYgajhemAhlNBwNnQjQCYNxjFF0CHWCQcGfTstdAcGWS2tz6Xv3OWfvXVWZudZ6f3+8a+Wldu1z6ab77O6znhN1cmdWVlW+z3rXJd9817NcU+BPTSHyYBujkfoGJt2TC43YFzuqmWHnC8yyoaprfbrrHNK6ngfQp4lF0aV/z2cz5tWUE1ubPQ+TgvZoz4OfQDt4glNutHzzV32Ip2zMubo6wVG74HjRsmUcm6ZgZgoqU4wi0Q6dI7+UwE7wzKXkHr/BTpjyJXecbT/j9uXF7Lopp+bHaNuKU9tbtG2JOxmfXJyKApLpicW26BPOHU+59NjdlmLRYhc1Jq3cUi/1KW/bKgfxyQXpycVEn3IvNmYsyopTW8d1mfAjsxiprzTt+YjRJ1qH0RXBDgdkFnjZ0/6Gr9na4fLyJMeLXY7aliPWMTMFm6agNJZq0ER4dI58I4Ht4FlIwT1+g12Z8MX2InbClNuXF7PjpmzPj+HaCds7W9RNiTs1QeqCKj65qLZFn1jsCNUiPvWfe+yipdhtoW4o58rDZLkE73ttgHU8FAX1xoy6nLC9dUx56J7gxKfdhw0uPvX3G4Z2K+A3hK//ik/xwiMnubjc4dLiFIes47htmRrLpikpjWUy4CEgLKWhFeGUeJZiuMdvMg8VX3TH2fEzPl9fxCk3Y3d+FNdO2dnZZN5M8NsVYVFS7hiKhaXahXKuK1xNdoViEah2nOrI7DTYpqXcUa2MSVzpai0PxugUkaKg2ZjRlhU7W0c6HmRa0h4ucbOC9pCh3TS66lN66r8lPPfxn+Mlx05wrJhzRXmSLeu4yDZMjOVQ1BCZmqrjQRAW0uIInAqeWuAev8FcKr7UHmc7zPhCrSvfLedH2G1m7M432FlO8bslYV5R7BrKXUu5UF8olvo0q6hj9kPtKHZqbOM6HiSt+NU0oyd6HQ/xSZbbmOHLkt3Nw/qEd2tKmFW0h/Spv9s0NFsGv/LU/ymXf4mbj5/geLXLVeUJNozjkqJhAhyyZeShjB08CLCUBofWi1rgXj9jVyrudMc44Te5sznKfe0h2uUhtusZi8WUk/MNwqLE71TdU3+1Pz3VDF1baRuvfUbrKbbn4BzVkAef+ozIA3T+EGZT5lXF7sYWzDTzIWxOcJsF7aFCp8oc1qd5qc9ojwjXXHQv/9+x93NxueTq2QPKg62ZGGLfqdNA+iABLKXF4dkJgaUE7g8zToUJ97jD3O8PcU97mLuao0i9ycnlFot6womdDXxd4k9V2NpSnTIrfcaQh0C5rVkgdmeBabVeJC0Z8WFv
31lV+rR7MmE+nbA73eTezVnMhJngp9pGuPTUf8BDPVkAv39ug43HCPI4L4/z8jgvj/PyOC+P8/I4L4/z8jhvjAcVPDPG8Au/8Au86U1v4lOf+hQbGxtcf/31TKfTB/N1jwicFHypPYZF+EexIBbxU0QKfIw2B28J3uCdEJzQOsE5YelK6rZk0U7YbWYs24rt5QbLtsTMp1SN5ciu6mjIjodWKHaCaiLseqzTrXGCWahWBEtH8EFTfn1MfxedC05RxA7D6N+D+fFhUqpuwqQgbE4IkwK/VREmmvYcKmiOQKigPRyQCk333XBghYDlrvYI9yD8k8RVesIUCfoEJoSS4JSH4ATvBOeFthUaV7BoK2pXslNvULuKk0vVRJDdKaWzHN4RQuuRnTm0jHgwTjtP0wp26TCtILUjuIBpnQ4mEw/WwnSyl4eiQKYVVAV+Y6rbrQlSWdyhilCpdkSodBARJtAeCoQJsOkxM69PcTDc6w7zAFvc3vEwUR5CRfBV5MDqE7jIRdMKrbc6DcSXbNeb1K7k1GKTxhWE3RmFsxzagdB6ws5izMM8YBoVHbZNoFh6aAI0jtAGTYVvvXYUQTDG6hI0iQdrNbW1tMhUB5dhc4KUiYcCd6hCqugPE6Np8BNoD2n6t2x5TFzRBOB+v6VLwIvFiiFIhURf8H6CJB4CuDbgvfLgOh4KtpebNL7kZOTB7c6wzrK1A7M2EHaW0ILd1ZsL1QGIPNQBW3uoA7Se0HiM90jjIg+hGzRgUA6MhWrgD2VB2JwipVVdlaE/HC7wE6Md5xTcVlwyfTNgZgFTKQ8n/QZzP8EQVyuSkuAnhNhGSLDRH5SHEISmEVwwzOuKNvQ8nFqoX7TzKaYp2JxbqjoQ5nrTUMwDthbsMqhGxDJQLAO2DZil73RD8AFp2qgVoTzYyQQB1YswRvVUYn2R0hI2pqqZkPxhq1Qtja04ZWjT4DbAb4Db1AG22Qww0cHJbphye3PxgIeCEKZISG2l8iDRH4KHug14b6I/WHZSvVhqO1HPp5imZGNuKeqALBrColW7FxJ1QmKdWHhMGzALDy5qI/gw9gd0agxEf7BG64U1kYeCsKH2h80JMi3xmyV+WuA2LX5T9WXaTfCzyMNUMJseM9W08kWYcHtzERbDZ2KfEXzkwas/+LbQp/lNIARomoAPKA+uYKeZUbsJO8sZ83bCcjnBLCumy4KjC69P+Oet1oldLf9iN2DaQDnv+wzNitC+wjStcpDayrKMtid/KKI/VEhZEmZV1FapCLMKPyvwGyV+ZnFbFjeDdkszV9yhgEwENj3MPBiopeQLzUUYhM9JAWKUBynwbqo8OIsEg2uFEPsL74VFW1K7knkzZdFOmTdTduopy7rCLCZMY98pTYvsOkzHQ99naB8asHOP8QFZ6nQQU8dpIVGQW3lAfcBEfygsVGp/mEatnWmpA8upxW/pVJn2ULwBP6x9pzukq+NRukdsjHLQkMd5eZyXx3l5nJfHeXmcl8d5eZyXx3ljPCQ13EOHDvG85z3voXzFI4ZWCm5rLmIRJtxTH2LhJ9y33GLhK07OZyzbknZR4dsCMy8wjaFYGIpaVxcqllDUQrmAohHKhWh0dO6ZtJ6NeQvOYxY6959lnPteN9ogpHnPaS58vC6JFcEUBcYWUJXYsoRJhalK7TinOniSqc79dZslYWJoNwsdPG0adYQt3bZbmsbotzT9u9hqsTOHKQQvltvdxcz9lHubQ+z6CffXm+y2E7YXMxZNRbuscE2BWVpMXHK3WBiKBoo5FK1QzsG2QrUrGBf0iVIbOD5vVEg28bAY8BB87CCiRkRQFnQ+fOShLHRAVVXKQ1ViJhVSVTCLjeRGRUidRWVoNy2hMrjNZL8hlOC2hFCB3xIoA3bLYactptRB1ZfaY+yEGfc1W2y7GSebDbZjpd9dTnBNSbssMbXFLPTJarkw2IZoP1RzwbZCuStYJ5S7jqIVjs0bTOsxyxpcWPGH+LQ1LmOdOomk
C4CxmMLqALss49x4fRIrVQkb09hpVDp42KoIpWpnKB89D1JGf6jAb+qgqth0FBMPE9UiuNsd5aTf4ES7wcl2k+12yol6g3lTsb2c4ZuCZlFhWoOZ65L15a5Ofal2lQe1H6rdEHnw2DZwdN6qVsiyAedhGbUQGt0X50baCHt4sBYTB9N2qk8ezVTt73nQwYQ7VBJK2/HgNg2+0qduYQJuUwgT8Bua8ltsOOzUQ+xE7/eHuN+pL9zfbLHrJjxQb7JoS07NN3DO0s4n0BrMboFxyoN1UO4O/QHKFR4Odzy00DqoW2j1Ja2LPPixns6IB81KwFpdhabQLWWBbMx6fyjVH6QytFvFyB/cpsFPVGQzTHVgGWaCnXltHyIPJ/0m97pD7MY2YuEq7ltusXQlJ+cbOFfQzCuktdh5gWmJPJiOhzLyUM0DttGVuGwd2Fq2HK4dpnbQRA7qNvpEq0/WWrcPD0ZvMsoy2j/B2KiVUFhkY6aD641KB5ObZWwvLX6iKzX5Kbqdqeiqn0UR2o2AnXjspsNseDDCbphyd3sJCz/hnuYwC19x73KL2pWc6HiYEFoTeTCUc6NaIXMoEg8NlAvVjimWUStk6Ti0bKHxmFrrBHWsE81KnxE8cZGyrs8wcdCg+kITTGFhOsUUBbKhAtVhtsLDhtWB1BT8TLMU3AaEia7aFCrtM0wVKDZb7KYOqpZScXt78b48NG1Js6gInT8YirmJ7SXYGsql9H3oUrC1amRMG8/mXMvfLHVL0s8Z9Bkisn/fmXiIT6KZTjBlicx0FbMw1YCEnxX6miafUN2prs+otI2QyANVwHDhBs8S8jgvj/PyOC+P8/I4L4/z8jgvj/PyOE9xwSwlZU3g4nIHL5aL7Q4+FCwnE3woqDemeF8gbYm4AtNajDOYBqwDGoFWwAk0QTUvavDO4GuLj3OfvTfUtdG+Y2nw3rBYTgkemnqD4CG0IB79fgHrdd6vFRtXAlFdAGMsljjASAMO57HBU7YtwYI5KQQjFGXo5oQHC2ESkEIbC6xgZ4Gtww32Yo/dEo4XuxyxC47bXXyw1NUEF0qazQnOlYhLPBjlotWGEidQC3jdihNCja5GEu1tIg9qPyw7PiZ4D229gfcQGs26NM5gQuKB+FRMdTKsFB0PqYNVHgLWhxEPYhMPUFW69VM9HqJmgJ0GJjNHeaiBo8LRYsGWrTludnGTgmZS0fqK1lW0bYW4AnFl9AWrmhBN5KGRjg/xYx5C4sHBsrY9Dw6WdYVzFa2OrXQOuAfjDcajvuBNx4PBUkgRV5Iq1X4bBXG9DkQK5/RhxLYQrFAUgZB4KOi0AkLc2mmgmASqf1FjLoZDdsHMNFxkd2irktaXNDP1haadIN4ibaXXWKf56oBHnyL6MQ/iwS31qXYdeahrG/sNo2OKusK1FW0LrkXrlEM58GYND2bkD8bYMQ8SKE45fRixk+qFlr+vQqwXOkc+TPSmw04EUwWmX7eEK2DT1lSl42K7w1XlCZwvqKcTvC9ptiaIL5CmAq/+QOTBrPCAF3ytNrlatWGa2uBbQ90Y1VJpoG0NdVPqtI8W2ka5Cy1aF1zkIRhtG6IWgEU1EpQHg7ElYDBBMK2ubiMG2In1olC/8FW0vxJCqZ1HqARbCWYSmH3VHB4HM9twWXkKX1iuKE7hgqWeTLWt3NQnUBK1IExtIJgxDw59yt61D2g72RraxuBaQ9MY1dZpiX8XNE2Bd8pDiG1Ex4OohooZaCIoD1ovjDEYo+0EQseDteoPhSXWC7Xdl7oNlfpCmAim1Loxe9IO5nEwKR2Xldv4wnJpsT3qM5rNKSH5Q9B6gR/w0PT9RceDI67uphy0iYOuThhd6ayexbay9wciD3T1wmCjRkZB2fNhbOQhpvQ7r0/0FpGHAkob9ZYiD1Kqb2ifETCxjdi4ehtzdaDa8FxSbhOs5RKrfeiyiv6wodk80lSIt7Gd1BtP49B+s3sFQqP2hNbgGvWJpo51YqXvXC6neA9N
rX1FaIY8xD5D9vad1mj96PtO5cHOPaYGawVbQrBCWai+TlVp26htJcgk6u3YxSM+Vsl48MjjvDzOy+O8PM7L47w8zsvjvDzOezjHeRdM8KwynsdN7mNmWo7bBVPj2TJQGZiZghJLZcqRBkIQQRCceFrxtKLLQS/FsBMsC6k44TeYy5R73WHmoeKe5giLUHFPfYilqzi52KL2BdvzGc4VuN0ScZZit8A4KLvIrUauq4VGbbvX0lPUOifc1g4aR5Ei2E2jWglNo0/5nANEo9gJMf1549JA9byG4srA1dUpNmzLMbtgZhxbBiYGpvT6B5XpeRARAoKXQCMOh7AMnkYM28GylJIHwqba7Q6zDBPubo+w8JEHX7G92KR2FduLKa0rcPOK0FhsfNKnTzwNxULngZdLKDsOhKLxMc1X9RKs8xTxaSd1E1c+ijx41dAY8YDyUB0SJk9dYp4IV1Qn2bQ1R23NpmnYMDAzRH0U1cOYmCKSqDzIGh5aDKeCpZaC+/0mtVTc3R5mKRPubg6zCBPurbeY+wm7i00WrmJnMaVpS9yiJDSF8lAbimV8Cl4LxQKK9DQjPQVvA+Vcp4T0PCzBB4grP4W67v1Bkj/IiAdbweyqOTwDLq22OWSXHDYNh23N1MCm0dWKpoO6YfbwIDTiokaGpxU4FQoasdzvN1lKyb3uCPMw4d5Wn3DdF5/0LepNFs2E3XrCsq7wywJfl9jaYhdm8PRXxk93nK6MZuNTcOMCdt5gffQHp/4w1AgQr9MjZKirAmD1CdfmdBtuQPVAJnO2TMsRWzM1EnkwzKJOzGQNDwGhCcrDQgJOhO2gyyI/0PFwmN0w5X53iG0/5YFmk203o65nLGrNBJgvJ/imwC/KmBER60Zn/+Cpp4NyHrqn4Jru3GrnsVzDg3Nn5GHr5duYF8KRasnxaje2EUsqhEMWCgyzqIkx1EBIPAhCLQ4vwlL8iIeTYcY8TLjfb7HtNzjhNjnhNzjV6pNw10xZ1BvUTcnOckpoLG4exXt3bf+0Mz4F16fh8Sn4gAfjhGLRYNqgPLSOqolPPNPT36DThvbwYAzGWo68eI55cWBz0nBNdR9T4zheLJkQIg90/jA1ZSeILUL0BrReSKAWTyvCjliWwXAqzNgJU06GDU64Lbb9jPvdFjtuqk/B24rlcpPGlWzPp/jYZxhn9/DQ89H7Q9EKxUKnythli609tm6gaalafRou6elvx4PoqGXAA9Zw7Gsa7IsD0y3HVcUDTI3jIrtgYsKg7ywpoiZGp6MkqhcjCK04nASa+JqLYR4suzLhZNQTut/Hp9/tYea+4t5an4LXS50Ssj2f4Z3F7VaIM7HvTDwYtT/Wj6KNT79boVz4OFXIxVcbn/rq03CJPpHayr08gItLvmc8OpDHeXmcl8d5PQ95nEce563wkMd5eZyXeMjjPB70OO+CCZ4FMez4GQszYe5nWESf9GCwUoFYTKiAQjUQxKgeXQDvBR8CPkRdiGCovaHxJbt+QuNLttsNnQPfbND6kmU9o/UWu6iovGVjDsEF/MIhzmAXLaaFYimqkZC2taa/2ibOi29CfArqkUYrgWnSHGBtJE2IuZ5WB0I6HjL93OiywM4cWItg2A1TaqmY+ykFElc7sRgpMVJgpFQdgGAIoechBMF51QBoveAEls7ShoJdN6EN0f64bSIPjS8wiwmFK5gtDJUT5aE12CX6xHMZNRJq3RZN1ApodX68aQVqr4OHpB+S5sa3cS50SEKKNqbEJx7i3OiiwGwIpjgJwDxMaKRg7meUhO5JaOLASAFS6lzvdTyI+oNKm1jaYNl1U9pQcKrd1G29QRNK5vWMxhWwnFJGHspWCEtPaIPy0NDZbxtRTlpdOt44wdZBdVQiD9LxEefGO6f6GUEg0Efji2IvD5XBlNsALMMEL5Y5M+4/LQ+GEKxqmGpmLc57LQKvGgALZ/Fi2Ik8bLeqDbHdzKh9xbyZ0rgSlhOKtmRaW2wjhNoTatEnOrVO
FUn278vDUqeEJK0EmhYTRKdKhAA+YJI/2PjEasSD1TpSzQGd83/Sb7JL4ARb+rRHdNUwk9oIqXRlHr/Kg/pD6wJBlAcXDHM3oQklO63av9POWLqK3XZK3VaEuqJoSqaNhVoIjScsRevEgjhIkPF2qYOIYhkwXlQ7w0fNiCBQt5g0qI7txF4e+n2shcJiJg0YaKTgZNhgJ8w4ySYW068WJhVgMWGyLw8i0vlD7QxtMMxjO7njZiz9hN12yqKdsGgn1O0E35aUyxJpCzaXgrQBv2g1C2DEAxSLEPfVfrsMWC/qD37oD041VWI7ifcYkegDBYZC75dWeZgGwOCk4GTYpCCw4zd6Hkh9RdxKbB/EIF7bCBdCbCcEHyT2F4alr1h6bXvnbsrCTdhtpixdxbLRQVSxrKhay+YSQhsICwcdD2AXAdttIy9eb8CNE0zt49NQhzhtH42LNxd+HQ9EHgZ9RlFgphYweLFs+w3mJrDrN7AIZew7xzwU2mckHgScF0IIuKDaGI031MFQ+5KFm7D0FTtug9qX7DQb1K5k2UxxrsAuJrHvVG2qsGh1JbdFC50fjLfWaR9qXMDUAVrV15HWaz1oB9pTK33nOh6s9dA8TIOSjC87RAyNlAhQBmFhArWBAmFiCgoMpRmuhBmnCyF4CrwEHEJLoMWwMIaGkrYoCGIo8UzFcNgsmIYWUwitL9iqlrShYDGr8N7iD5WauRJXFitqnfZlY5ZbUUvcFwo3GO+1GjjC6TiPEKc3xXGOTnMK8QZA6IMlBmMMk6PC5KiPN3YlBLW9Np6lgQqhMpYCSxm3qzwEAk4KPEJDwBnloTUFDQXeWEoCE3EcMgsmojw0oWCzqml9wWKjwnlLOFQgTjNXjEurD2qwyLbR/lb7+6IVTBsomlh/oxaUaVpttzoeVqb3rPCAMZQzYeMSvSFqpMCGCgu0pmBhYIJQGkuJpYjbvTwITgoNmhBwwAJLayyNLXBiKapAFRyHzJJJ0CmztS/ZqmqaxIMr8E2BtBYTV1+0jVH728SFimZbF8d+Tiji+M7WadwfNbHSNK/Eh/d6xWHIA2AstoBDV+qUJCcFdaiwxhCwVMDcCIWxVFgKY0ZBZQY8tGKVBwl4YG4sDhN5KLBloBLHlqkpg2a1HA4lh6qapS+pm5LGlYTWEpqU7RhXYOzqRF83jNeAqvU69ct40aDyyB9atbmb9qYD9L5u9DwYA0ee0AJoRk2owOj9UAUsjFAYQ0WBjdt1PHT+IAGPsDCWFkNtS1oKLEIlnk1bY4Jgi8CmbzhcLZnPKppWdRWDUx66FVqdZjNZr1PHtc1Q+zs+lgHjAzbVi7rt7n96f4h9/FoetI04/pUNGMGLYSkVIWiYsASWIjFIovcApSkGLSUxVKLT49UvxjwsbUVrCr3ttJ4N2xAClIVnWrUcnVQcnc5pXcGyqVR/su55SG2liTwoH4INaL/u0XGOF0ztMEN/SIHDQQBRg4jrebj4GQ2mgIBVHrAUHkojKzwYKpNWgBzz4Eb1QlhiaEzPAwZK45kVDUcKS1U6qtKz9CVHJktab1lsaDaXP1TErLbYZywTH+oP5VLifsB6NGDmBNNokN20OiUYF6eKO68PGKKO3p6gEQZjDUee6LCVTnGvIw+nApQmUBvlQfuMdX2n8uClwBNo0f6zxlAbw5KKtigQYyiNZ2pbDtsFk9Bii0ATSg5NltpnzNQfVnmwS6N+UOv4N/WdRWwnR32ni/1nnC6vvjC4Fwrr+87WO3gP54QLJnjmpOCL7WEaKXmg3aQOJafaDepQstNOqX3JvJnQuJK6LnGuINSxw2t1fnNq6E1LbPBiOq8DGyu6XfQOjoNy6am8sLnUGxhihWfQAei85zhQcn40uNeeXAta3T46bpoXbg2mUME8W1UxGBAF9CYTDZhNK8zFLUzuxrPkDneMJpSccJssQ8V2O9ObmFZvZhZNRe0qmqagbUukiR2eM/0gqLO7b+jU0WPDH3mwy/7Grjwt
D67XRvBeBSQTB3FOfNLO0Jj/Cg9RR0N5mEQeIh+TKvJRYbcEZvcj7HC3O0IdKk75GXM/YcfNmLsJc1cxd5NOPFhTjfVJclgWMa09DgKj/bbbMuJjlQe7DMxcYKOOlbyplY9hw9f6rhPAh8HNf3yy2GmIpHnhpusQ0txwrMFWs4E/FP12UmImFjZPAie4zx1iESp2/IwdP2XuJjHIUbLbTmlcwW4zwbmCuq5UWHZZajr70o7sL5YmLnk+4MHFILHT4GAKgM2csFE7TKP2p5uGjovBoIAQ68hg0BwfAY15iB2C6stEHibV2B+itghViVQF5vASuFcDZ2HK3E855bROnGpnNKFku5nS+oLdZopzlrqudAC07Bt6BvYX9YCHuLUuBonjTYNpBdMEZq2w0fgY9PDdTcP6NiJ1BGfiAU1nJtaLTisgChEnfyhL1dOYlJijHriLHT/jjnCMpa846bStPNlu0PiC7XamPNQTnC9YLrXDk2WBJB5W7B+1m+mmqe45SNtpE5i2niO11n9Tt+PgcJP8QG8i+iDpGXgwg4BQYVU7IeqrJPuptI2QSYW56F6wd7CQCXc1R6ljW9n4khPthgaFmxltKNippzhvWS71plmWJeL7dtLW8SapQW8WmgEHkZOiHvDghMkyMHWBQ8s4OKxP7w96Uy2DPmM9D3oToUFjUxTKQxIuH7QNFNEfLj0FxR00UnKPO9z1Gb0/lGy3U32I1ExoY/vgnQ6IxcWbxTbeLEa7TWNW7F9tIzQoWi0DExfYWmoQlLrVPqMe+oOPukIaJO5uJk/LQ+o7Bzo7A9HZYZ9higAnvxwjkIxHAgHDSb8BwH3x2PDmz5g9HxlheI8F/fBaR2CGwng2TWBj0qhvTU33HjIIXYjZ+yWgDzFWjqW/zegL0vF1F7RybAgDxVRvhLbDBiDc77u3+tMeIg/WBGam5fJJq3ZPHiQPq+ZI5GH1Rx8ED3aif+6GGbtMv+w8gMEiTG3LpdaBwFWTE+fEg9q733FZe3x0QPb8sQc9D1PmTLrfG/GwemAV61yTngeAiXFcXO0AcOXEdO/J0IwHxcN+F3COPETd+YXofU5/5SvbB82DlntlHMfLXY6zi1T78BD9Y88lPwI8mFKfF9ZS0fgyXc3e7TnUjf14OFZ6jjIf8AAiZnDuPjywf1t5Vjzsd2z4/QXYidBKwQN+C4C703vD874MPFSF53Cx1Aes3efM6NyubpwlD3qdXz4enFhO+E0A7k3vDc97iDyUxnOoCGwVtfIw7d87333nYsfxOzl4th7GCDPbUonHlOooh02NC5ZloTeCTVnhfIGblDrPubXgoi5GfNk2BokaUS2AVoMiJu3XohlKDeCNaj548LWoRkCjT4jaJmYwNaLb1uoTy7bQeeBetQUI6EtUMwJB54XHF/EJQdIGMOjceGNiVoHVmwQpg940ADPTUlmPKQRnCw4bfWpaFxUuFDRVRet0xRLvNGhEazVo1OqcdNsoD6YRva5mwEMgagIoDxIg1Jq5lXjwja7W4VrN7EurdyQe0gpQEjTrAEH5FaPzoSMPiMGKBktUM8NgbRl5KOJNUhFvGgukCpplgjA1LYUNWAKbpuGwWdIUJW1Z0viStippnAYHXFuCt8hmDJ41UcOjMT0PcQ68CRL50P2OBw+hMXErOje+1a1z/UpP+rI4V+kKUO06HtA5+Ks8YLBxLryN2gDWJi2RngddrUSftk6swyAU6IC4sQV1MVctiFgnmkmF9wVuWkKwyEYx8IfIQwDTgPHR/ugHJkjvD1ELRhqDJPtbYnmjq351viB4b3BtiXhRnZbEQ1cvtJ4ZwATlwRC1AtbxgNHAQbx51mwjbawr49k0DWXhmeBobcExu8CFgmVR6dz/Sv2hnaomhmyUqk/QqF8WjdGgYkPnB3T+IZrF4gRx6Ks1qofiBN8Qy1v31X6Uj2DwTUkIgm+VB+/QHmvVH4KJgwzVCFB9BNVISJoRXXthhzyoP5QmsGUbJngq
PE4KjpiFakEUE9XPKXXrJpqZKRtF5MGqTkGT/CL6Qyvg0IChE2i17ex4cAZJ9tdoebe6r20ktG1APLRt5KHRFZBCG7MPzoaHqJHQ8xH3bbGHh4LApq2Z4CiKgLeWw2apT66LiWqElLoqVTvRp4eyocFU09q+XvgYNHIDHpwGzHADHloQH3lwsY1IbWUQ2shDx0dTIMHi2kLb2NhervLQ8YFqy+zlIXKReEhtZeTBEtiwDRPjsARc0D5jVC9K3bpp7Dsbq1opqb9oNZhqnGZZJLtHfWcjsW1Y03cmu7u+Atq4da2uoOhd7DsTD6mNSDzEPrTnQZVEjOn7iuQTFLGtzAsGPOogrNyUjN98SEjf3N9cyuqbBwbdTfv+bz5oZB56mPTtB5wH0BLb1+Qvg090324OPg+6XfvmQ0LmQTFuIzIPmYfBLxhZ9+Z5QxnO3bgLJnhW4rm8eoBJvEGu8GxaR4UwMYYSqNC0/gKd7x3DUqxGx4WAF9VEaOPc70Z0GuMiGFqMTuekYNvPaKTkpNugkZJTboMmFJxoN1mGklPtTKewNBOWrmQZ05vbptDlcRurq760emOqad4pu4WYzhm3MWBTpmyvGLgoll6F8azBEri8PMnEODZt5MF4KhOYGE1hLonp7NjIhOnmew+j44IQJERtCB81AYLyIIZWDLuhopWCU5GHU16z/bb9jDpUnGxnLPyE7Zj1VrcTFm3FMqY3u9bimhJpDSSRwtrGrL9BOueAh6KOab5Lie/H6V21p5i0iNVgyyXlDlPTsmEbpnhmxjE1gQqYGEOBprNbLGUUrSzW8ADqC70mgvLgEfUHMeyGCa1YtVtKtv0Gy1Cx46cswoRtN2XXTdh1E+ZuQusqFs1El41vNJujrUvEDXgYTAnp0pzDIN25jv7QTREJMQ0+UIjvgkbHi102yoaZaSMHnpnxlB0PJtaN3h/28AAgYx7CWh4qGinYDVO1P2Z6zcOEXT9h103ZdlMWvmKnneJj3WhdwaLW7J5mWSLeQq0BXc16M3vTnFNGSwPWC2Udp/01oUv3NS50QYIjxYKtsmZqHDPTMjGBTeMogOmAB4sZ+ENMa1/DgyA04mM7oRoZy2BoxLAIJbWUzMOke+34We8TvuKk22DZTXktWMTst3nkoY1ZTlJrwKboeEC3KfsvTYeI00K6aSKtxHRnTXuWUqcqbNklV9hlF0ysIg9l5MFimJwVDxAkEAi04nESaCM3SzE0wbCUgkWoWEjFrp+ylKprG065mWZ9tZvUvtAsQF+wrHVK8LzzB82GlC4LcDANwg2mhcTMpzRNSNvSOEWmiVOCG49U6g8z23CZPRV5qKmMRB4k+gPRH7R9SPXCrOkzAoEgASchaitJ1IgwLIOhloJ5KKlFy7+WqmsrTznNfnyg3aQJBSebGU2oaOopjbfM6wnOa/sQnEVqDVzZQZawbftpQt1UoS71fThFpG8rpdJpBxPrucRur/GHMQ9F9Ad7mr4z8aB1QvVSWgm0YmIbYWMbUbIdZmqv34x9qPJwImWON7OYMT7VKVJ1ResL2rrEt1b7Tqf9Zsr6K5rV6XMrbWXqOxvR5dJ352c9xsjIyMjIyMjIyHhs44IJnoFq2IgYMFAScFJRIJRxoG9ToEhKDBaizo/IIOVWjAqrBr0JdCFqHQXBCyyDwYlh4SucWHb9jFYKdpxud1ud7rNs9cbAt7qaB02FdQVFU1I6Xc3Cp+yMONA3te90Msxgm+bE21qzfcwyarnUIWpCeajqOO0PHIVmZASipkWgRGJwxPS3PqJHOh4ACWYPD2q/160IQWDpIw+hwgXLrp+q/V5vgHbdlCbyUPsS1+pqN7Qlpi0p2oKytZjWxpWPQJLdtR/YHacCxrnxxkfNCN9P/6EJukJOE6c8Oc3fdxJXsgkTHJ7WVCwJMYBqRzwYipjhVXT2I2mrukaC0HoNIiZ/qL2JQdXkD3rjn3iYuyl1KFm4KY3TFZq8q3T1p6bEuoKysVhnMXHl
I0l2L/3Y/u541IqIwVQTtQJME3Q59TZoSkerWRVeLA26ko8XS2OEmtAFA2wKjoiNPNh9eECDRiK0Xb1QDbA6RB58SSsFCz9Ru70G0RY+Thn2FU07xfmS0E4QV2DbCussZZw6TG3UHzq/N53daYWoLnjsNUBgnGa4qEZA9IdWpwfStCMeBEMQS2MCDVX0h7Tu0ZCH5A+ms5/YVgRRu12qFyH0LiiGZSjV/0NF7SsWYRI5iVOGfUntdIqkb3QFKNNUWG8p60JX4lmaGDzreaALqsuYhxgoSfXDxFVxcNLpQlGruFPA0khJQNu+0ggtPgbL0no/RVz5JgrrShFtH/CAZteKCC54vIhqX4nQBGjERA5KmlCyCGr3sG40oaCOU0W1rbSYusIG5cF4Cys8dFOqa2JALG3jlNk2aiW06hPGhZgRpnVDlnFKFpZGCkIMABVGaKMqTElaASl6hhSxzzgTDxo4crHNaGPdaIPqsDShYB60jdA+o28r6zhV1DdTQiig7v3BeAu16TL4cCEGz9I00aij1/bThhMH1gmmlkFbqTqbLOtYt6GVkiCqoVggOFOp9tmwfRjxkF6x74w8SOwvUt+ZfMIlHsSy8CVOiq6t3PEbUVdzGvtO3XfNhDCoF0VdxKC6oXAxgy0FiAcaS6nvtGka+Z62UtsI4wVZ5NU2MzIyMjIyMjIyFBdM8Mxhucsd6gblTiwLP8GJ7W/SvD65XsZt40paV+CdxTur03KcLlVLa+J0rDgVpE3TGNFpIi1x+k7c76Y5xmmHDd1UFZ3WI8y8MIs3tMY1elPrQ7c1Sfuq9Z0QuGoARc0fH3Vekt5LDJYZEeQSD0sNidzjjtCKZR4DWktfdQGNxpfUXm9mG6d/t065CN7inNUb1baI01ZV6ylN3zNp+eWmt1vtNd1Wp3nq+bR0S92aOA125oSZSzf2USPNidqbePC9/cqH9FpQSStuhQ8jIJsBducIcL8/pHYHDeTUoaQNZSfqPfIJpwK4TVvhg4lTpYxO7fWaDTa0r/OHoR9Enla3aj+d/cYBTphFnzjcqhim6fxB7TbRbtUEk27BAPWHeO5+PBSCnFJ9ihMxs2MZKpah0im8oYpBvYnesHq1f+l0am/dlIRuypghtBaCgcbEcu/9QafxDevDCg8OnRbdEv9W+5UzYdoI0yAcijf2ptPAUvtNsr/jIdaLTitOevvDoH7ErEHuPwnAdphxbzjcBXK6uhEXxHBSdPYv2kqnL3b+UPY8dO1DrPdhWP7E6a5x3/X2a10Yn4uX2FYI01aYemGrUeFLFUrt7TbtuvYhaYOt2L+Oh3t2QHQhjfuC1o25j3bHNmLXTXHBsnA6bTNt66bUaZxtQQh27A/RXuOjr8cy1qnw9PY7PYfUVnZtLGpfrdtJI0yCsDnyB4n+MKgXLmlhhY4P8WFg/4CD1GYg8PgFSGAZKh4Im5GHcZ+hPBQsvE5XTP6wjFudSmgITrPAaA0409ndcTBqC6L9o+0+fUYQqkaoPGy0IfpJ07cPPi6qkdrJbrGAEMVkBzzs6TsiD4d1Na9GSk62m2p3iHYHrQe7LvLidBpr1z60Jc4P+s7Yf+IGPIzsHu93fWaI/pD6kM4f+r6zaAOFh1kMipu20TJPL+8jF77rQ9JL3ErfOeQB0HXTMzIyMjIyMjIyMi6g4BnoPHSDUBiPCFTGYSiYmBZrA0ZEp51IoJWCqvBxdRWLR1eM6FZmw+qfxGCJiRpYVrVWbIHe8BSaDWYLPW4q0RuAKmpCxRtknG7FSdREkrhKhGqfhfh3CIHgQqf9JCGeGyBEzae00pkEG7MfoDhuCKXpeLAIhQmooKHHIARjMFYwovpXRaHT+1xRMBFPMMoDRr+h46EAa/QGz9rER7TbjnkwZXzCX0YeXAyaxYBJ4oFotzhBXOj/9tLZ7zseQuQh3vf4mA3lrS6vHEwkxBA2AmFixzwQKI0nGM1KDMYgVrV+rOjKckURcBRUISBGsxgJFpHEQ9T2
IfIQ/cHYaG9BZ3e3DWgGUOLBDXjwaRt9wguhDRAgtGO7Q6vZXokPaUXN9abzA9UAMnGlEYMthDCNPBjBivpDabzKyhmjW2so0NX5HBYrgjeWqvSEYPESg4jolDnVFYq6Y37oD2CCKA/JH4Z8xLI3UQ8q6QkS+q3EIIEkDbhVHgZ+QUiaecSXUR5EVKss8mAAv5H8ASyCNWHQRhgwwtQayhDAgsdjC8EbPRaMxYvX35Ci9wcbeRj5g2aDdf4Qy914wVT0elg++kay3/d+IdEPpA1reIDQ+n5fhNBqhugeHoJRv4hTLd2W7WZd2lg3SqNBhMpYDIIzltJa5cFYTFB/KMtACAYXV2UVYhBRVFPOmhhMtWALo/YXouK1yf42+khJDKLEOjKJ9k/W+EMAaVZ5SBqBfmU/tilp5dzURkZeOh4ON+qwCNaIrkiceLAOGwqcdZRooMmj+pHeWMoQ8EbrRTB21E5ijS7iaNV+W2gwyA7aCLxgo/22jH1GaiuqFX/oeJAYJE3tZIh9hr7EBe03vHTHxUm/OuiIh34Z8vaIR+KuMX1biYn+YIWptRRB/cFhsVbbiarwOArlAYukfjMt7mIY+ERqK8Z9Z+ozbeoryjP5Q2ov1/QXsQ8Nvuem618HfUaIfaZ4Ndz5AHc+2BFHRkZGRkZGRkbGYwkXTPCsIHBRuU1JYGpaShOY4LEIExPiVBziFBTTT+MUGyfoxKmdoivYpaBUpxEqdBpgIv30NSc6fc3F6Yx1iMkDwcYlk3WKyiJqg9Vx6s4yZv4sfcUiVKqLFrOhlk4F7ReujFkwJT4YmrYkeEMbBaSl0a1pLYdmNe3lJ7EsuajcocIzMY7SeCYECgKVCZRAAR0H3TROScpnZ+YBevtDZ3/iI/IgacaY1SWTQ9nbL2WXCVaHsuNi7jULau6qbuuCZd7GbJi2jNlARcyKsoRgkMYiQUW8J9axvHaOYZdjxZzKOKbGUZpAhackUBqh0nDQiAeDpRjwYDAUQx4gClOv50GQbppv4qEZ8ODEUItO51MOel9IWUA65XMyyIayzNsKJ8UKD4a6LaPgfp8dJlHk3wrMn9ICpzhiF0wKR2U8E+Mp8VQEigEPcSJzZ3khHSMDHugCEGbAQad9NeJBCAR8nN7axpcTSxuMZn1Fn0j2L/0kZkxOuiyoVqzyMPKHmU6Da0tcsDSRjy4bKPHgNCvqSc/8NPAAh+ySaaF1YmocBdpGJB4sUEGnfWcwlB0PsZWQsfbXOh5EdOqiRK08fYGP09caAS+GJtjYNvRthE9tRUhZYZYdF6dEx+yfecz+mbuZZgO1pWZNugIXs0eD1+wwcRr0xRmuvP52MPexaRsuKnYp1vAwiTz0bWWqB8WIF20vzGl4ECTq46VpfD5NdZU45TkoD63YmPVV4dDsLy+WeYj+77X8d1L7MGonrK6YGwpqV9C6QnWxYkaU98pBiAui4AxHrrmLUN7Dhm05ZuaUJjCljfY77ICHapUHdIr7sH0Y8oAwWjWoayMjD2G/PkMMjWhbmaaAL6L9XQZ1rBcpk3ruK5pQsnQldSioXcXSlzSu6HyhccUoo7jjoTWUlz5A2LiXyiw4Usy175w4CjP0h7DiD30vMeo749/78kB8MDTgwQ94SP4Q1vJQjbIjF35CE9tL7UO1H619ofb7ktZVtH7YRhQa/G2LmFlttQ/dbuDnz2aEkZGRkZGRkZGR8VjHBRM8Aw3wGKPZBCnDpIzZFap9ZigMnc6TRfVddPgvJNl8GQ36xwL6mtEiGAmAbvXGMd49hj7ghMQMIZEuEyJ02SGaIRJCnC4aCsQVcVtCKJC20kyBotIskkIFxCliFpDRbAKMhcp32RQGGfFQmBCzryIfkYde80oDKClTy0RuRkvTjhYSQO2PPBjQqaORmzEPMtDN0iyhIINsITHRfuUhhALxcYU/F7dl4kFXgtSt0RSGEHnwENMdYiZIzI7r7AoDHoQyap+Vp+GB6BdDkfQhD7qvwRKJ
wRI6/+h5kOQfQ5/oeDBjHsSqH8jAH6L9UkQ+isRHoVPGCr0xxsZsIKvZcWlVwSjrHbONNGhWdEFEvTmujGZRlbEexHUrsd32zDzYAQdaL1LUWT+b6gKhoylykexH7Q+9P6T6IGKRNm7LSrMCy7hCaqHZUF29GPKQ6kvXRsiAB/WHovMHKE0KpkrcMmgfej6GGomrPIBm+oUBD2bAQ/qTVGdC30boTMPeH0Lyh1Agvm8fJBTKRyh6v2ijP7QDHgoNGGENlHt5KOgzr4rYViZ/GLaP5TnygGjw1MS6YIZ1YsBDCjCp/at1g708yLid1PpRgi/AlcqRK8DZ/lXY3hcGPKS2wZDaynHdUD5MDB5JDJ71WXvDfmMPD5GLIIIZ8sDAJ2Kb0OunxWyxLmsu2r/SPoRUL3x6Rbt9Ba4AXypHzoKLOmFpurGJ022NgbhgQGoftPkMgz4j+YNQYbBd3ymxfqQ+M3J4Gh5S32ljUFXrQ5xC2vWhJvpCyqiV+LbpssdEDCGYQb9pe/tDGX2gVN/wRddGUBTjPsNqEFEmg04uIyMjIyMjIyPjgsYFEzwLWB7wG4SY3RPE0gadUtKEAi+W2mtmRxsKXLC0QTO7fLC69boNwWjGQjB4F0WRndFAiNMbHePQAIXXrXHoDeHoePoc4PQmiLgvPt7MeYO4dK5+TtJ3OCg9bMXpaYedjLSOaONdpgtsHG2YPMUTLjac8FvxCX6JF81u8WLUXrE00W7loYh/lx0PIRgVdA8Gl3jwetMl3ozsUrvp7DbR7j08eJ3ChDcD2wf7AV1xM/EQeh6khSLAptNAw6E23oC3DRKiIHjkoZp6Nh7fIlfBKb+BF0srBU4KnGg2oAsFbfSDdsCFl4Im2Z38whcEMTini1EEFxeYcMNrjfpuscx6/zCdf/S20pe7731jyGm3bWOgpe15sAMexEm0u1UeWul4sCawdXEN18FOmHU8tFJEn1C/SD7QBPWV1uu28bEO+UK30X7nlA/lAb1BlwEPq/Wis3eFhzV2d/sjfxjyQceHCbDhgCBspemPrtVMTKfTP/HqF0fKBTxNtb62ZUPrgES7Ih+pbtS+0sUEgmq+1SMexvUiBBMDGRrYSH4vyR/8Gh4G3DAsb1nDwx4/SOf3/mACzCIP4hIPDvG6miBdmyEcu2mOeRa64qfM8IO2sY18JD/QtlLbjM4fYpsZglE+gmZ2JR7CkAevbZwZcIDX6012j3gQBvbbgX8kH1vjD8P20sPEw8RLrGMSNdDcgAf1iYufs4P9GmjKgt1wWO0LJQG1N4jpeQjVqD40PtUT5ct5bSdGPPiBH8d6blLbMOShq/+pDR3bh4sLE7RDfqJf+DU8xOnxlYdqyIP3iHO6yEwbI3MucOlXblN8TaCdFsz9ob78MX3f6Uv1kyjy33ptJ1sf21Sv/tP1nd7gg93Lw7DP2NNWrvIw6DtlTd+5ykPXl/Rc4KAMcCj2nV096PTytG1wywWfO6sRRkZGRkZGRkZGxmMdF0zwTEAH8d0NgKWW/gbIxdXO2lDQxIF/40sNHnir0wuDTvvRm+M4Fc7Fm2NnY3DAdEECs7qNQRMThlsZ7dukd+Sj/o8D66MmULzZ6f52KhyNT0GRPjiSboAk3hDO2oZCAmD0Jp8+WNIMgmYqFl92f7dBb4SaFFCMN8Wtj1Ph3IAHGfDQpuCZwUgUDI83QmP7V3hItntdDY0A1gt4KGIQwMZt+gxtzNJwAx6kvwFKW/Gq6VVGMWjVsrO0UkYeYrnHlRCbOG3UhRRQ1GCJjzz4GDQKIU57EoOPfKQbfONsz0PnDwMe/LqXjPZtDHoYr/YWbeJBOh6IHHb7suIPARXMDqoJZKxQeR956P2hiVy0MYBYRz9Y+ioGTdT+ZQoWxGBZ48o+iBiUh3QDK6JTZhMPBBN9GhWRX8fBPnWDbl9XDTSS/GDAQwwSGp+CRdEP/JAP1T4iBCbexTYi8kBBI1UfSBZtG1ywYx5EBdK9mK4+tCl4
FtuJkLJf4g1+mipqYuBkDw+nqxvr9iXyEIY8sOIXqfwDo3YiiaZHHqauHfHgpKCWaG8MFi3jVLllXE048ZD8ohkEUzW4rNmzw3ZyHQ+9faav98Ot9BdGNnQAAEEzSURBVOVvU1vSyqBNXcNDai8H9Yf0SgtJ+AEPqa30GoUVMfgYVG+6vkL7jsTDovMHXTii9lXXTvoBD97bLmgkPmaJJfu7PmOFB7+Gh0E9GO6bYb0Y+Un0h9hv4Ac8dD4wrBteg0k+sOHaOLXS9H2nxCDysO9MU2olBdEsjUsPHTRo5vw4kCh+EFge9RmRBzmzP6Tyt+0KD37Aw2rfmfqM1Hd2dWHYNsS+U4S2rR+eAUlGRkZGRkZGRsajDhdM8KxAOFzMdYqJ8Z34cZpiYrqpJnRTjlS7RyetGOm1XHR6mtWgUNS0MWkaShhPT4o6090UJESnmoQQos6LrGyJmkhEvReDFxPvcVTrxYneiA2zALqgoGhgQ2/o9EZ26UuKKjC5ymEQDhdLLIGyDLqNPKRX4iFaOeYB02v6xGNI1L4SMJ32Ff3UK9ijBUaccpM0oEKcmpW2OoVV4n2NRLtNvN+znRZSlxUihiaovXqjb7pARxOzxOqg05Bm1zUYYMsudepR6XVKVpquN+JizIMR0/mG6ewe8tD7xVnxMJi6u56LyAOqiRXE0HY8mJgNU0SNrJj1EjSgVa/4QxN50YCHYetavTHctI1OTyy9biMPQ1/QOpJ4oPf7IQ9J02jAA5Gv0/FgoJ++LL1u4N6txJioRLt1+pf6hRnZHbqsqJ6XZQyAtiEGRLwGCI9ctQBgZloO2VrrxNo2Yi8Pqu0FJtq7jofEgZGHysPYL1KMVBNMlQeRoT/0WWPJL1woqFNWqWhwow0aOD5+6S4YmJiWTVt3U7m1ToToB+GMPCC9BhiS7Ld93RnyEOvIqiYaIx7UUB+n8fluX3lIcY/Eg+/8QadzNqKBfzfgoQ0aCNIHJYkbbTuOH9vFVFAZz4Ztuj7DoNN3++06HrqeQwN+kYmeh5U+4yx5YE0bud4fhDaYzh9C11+YLiieso3bri4Ug4cEKQu75NChJXYzUOA5VigPe+tFWGkf6PqL2FJ09aLTPOvsP03fmbhY5eFB9Z1xK6bzg2bAQ2oLUnDYi4kZhIZT2wL/noyMjIyMjIyMjIwLJ3gG0mlZzUxLEcXRC0IUSY+C+UYoY8CgNEnDJuocGUNJ0sNJiwr0QQS9/V37091G6IXCdSsEhgLiut8vOKjBEo/eDDkp9GYwZj944tQheuHkMkxwUlDEKUXiVavGTPVCCjylUR5KE6iMrhyX+KhMUKF4E0XzjekEoS26cl45sHsoFH42PAAEenuF0NkdBnzotl+AUnmw3baJ0y07HmI2xCKooHgVpn3WkFgIE0QMditEHgJVFEUvh2L5JlDiKZMgtlFNo6R5pfYqF6U5HQ+ckQvlgSgargLZfsBDz0VcZE+g6fzBRrv1xm+RsoTidhkqXSE0ComXkQcTBCcFdlN50GCRY2J0EYnKeF08IS2iELXPishDr4FGZ2/1ZeVBA0ceDQ74FR7i4qPUEoOHYgmYvvw7HmLQTFRQvQoT2lBSi2bT2VDpKqOzyIMRymj/tKsTawTSDd0CAknzKobVKc3Ycjtg4mx40NoAEu0PURusbyPo6kaajVrHIEnT8RADAVLF9iJlC1XdAgyNlDHDsqSQEhOEchrAKA+V8ZTGa1vZ8SCRh6CLjKz1h75e2IHl/VIjZ8tD9AkRPKHfgi4+soaHJgZI2n15iIEiKTv7l1KNfIIAZRVUDzHyUBCY2YYCYWoclsDUeKwRpvi1/lBEy0vThY2ApJ2YtAbPoc8QwZH6jGR/0KB67DuGPDgxOEzMFuuz5xop+kxb0QzLRsrYNsQ2Iq68WZVeV0UFysjDhh34AzLwi6QN2PuD1oveH4Y89P5w7jz4rj70PAz9IfWdYx5Ml2ndRh9wxKxr+kVJyrgIQxEX51ieWn9Z
GRkZGRkZGRkZFx4umOBZwLDjp0gcSAuGIJYAKjYdn9CHmM0UBq80ZSUES4j6YD6oxlO/tTFLKE7f81GnKAnCh7jvSXeGqs8S+r+Hx4fv9d/BYDGBqBUjuhUhCkabTjg5TZkTb5huNFx+0w6bl9Xshlm0X4MP/TZe+4CDVS4SRyFm+biR/SYe73noxc0fIg/C4Dt6HhBDcAOuQ+IBgtdsjxC1g0KwFKXnoq9fcPTaBfOg/hAw0e6BX8QAnQ+ag+WDHgsS7ZYCEZ3KG6L9AbV/yIf3Y79I19jxsGK3+PUcdO+t8iDJD5K9vf0qmL3CQzoP4dALHBc/eYdlqJgz7XjofEJSHVE/D8TyjvsS90OqB1hcsnuVh9CXSS9yHnkIZuwDEv8e+oj0+x0PcdEJ3e/tG9nf8ZUWnujLINWT6mtu4/KvPkktJYtQDdqGAQ9En5CB3xPtwuBCGduQqP22tn2IYuYSBc2Tv4rpeRjYd9q6MfAVrR+m4yFxK663P/Ez5qGvRyEY/FO/yFU3PkCDrnC7p42MwZjUViZ7+/Yh2W07nlL9CCG1G7EeRR4kibufiYfh/j7He/vVrzoefN9mEm3t21C7h4err72Hx73ofpyBk0GXAgjO9PanPmRY3tHeMCj/rt5Iaj9sz0PygZgZlXhIfi2i01lPy8OqX3iivQMeRv4/5kGGXKzUleANl1x+kqtf/ADFRuCU31jLQ6offtQuJj9Z7S9iX9r5wGob0f9278dR72zYZ/g1PKz4hXT1ZsDDvn2nGdmd3hcx1PPmLEYXGRkZGRkZGRkZFwIumOCZoE+eVQS7XNkWnZ5NmtLhuqk+ttP+8mmay1AwPthOQD4tJBCkX0ggDAbpKpQddV3iDU6niRZvglSvZaB/1B1j5ZyoBRZUEw1JWjCy530TIBxaEmqdVOMoRpk5bdBMtn7BgIFgvpQdD90iCmJ6TZ8olN4Lpseb5sFCAhJWeJCBzk/iIek+JRtDv9/Zn+wciKyrNs5A+0iG58v4fQ9V5fCLUjNYGOr4FHHhgMGCASs6aCqQHqfI+n4buq3yIGJoY5DI+YEWWhfIG/AQxaxN6Pk4LQ/RjmG5jzTCZMzXqv1JC8qagPsqrf6OOHWt46BfOGCoh+diHQnRR4KYTjBf+YhTJ4NqBIZB/ejqRbcC3vhGfg8P0e5ROQ+1sRIPq+We7B3Zn94fayKlOtU+aQLQTXF0KzxoG9Db32uADaaG+n6KaOjqx0q9CDHIOhTQH3CRgohEncBOC2w/Hvbzg/14GNaLNTw0l081uEq/mMiebdA2NC0YUKdpbqHo/GJYPxIPLm477a+gUwlDsIOARR/81ZSqVCeSBpZZKW8zaPfW+MGAn+T34/ZURpyk+rbc3NZguxhaifqY0e91sZmen2W3gELflo6mUPuiaztTf5H+HteJfisx8yvxQXw4kBadsW7QZg546XgY9h3hNDx070mvr+al+45D1Ho9EHkwNKGK294vgpg43THWi5CmCptOP9Ql7bMBB27QRkgYtpW2C/IqD4M+I9rdtZXBjOrFsH/cw0MMyO/pO7s6JGN9PQG/XD58g5KMjIyMjIyMjIxHFS6Y4JlF2LS1ThApkj6L6tfoJJoYYIjnGz3UTTPpdFmitg8SVcEkaTr1+2AgxKk50uu+dFpHQifEPNqX/jdF9L/9t702GMJI+8aH+FVx64NBSkEu0TuDDdNgjHDEPBQekl1n4mFg/7nwgP69n/3JZpUDCis86Dk+fj4MebAgV6kw+oZpu+lZQx70SuM2XQv9Ne3PQ/KPNTxEu/flIX3XWfKQbO14ACRIfwyJ2TwDHuJWM+xArlCh/NkKD+oL4bQ86DWbvvxGPqBsqo0PDw+r9hPtB1QDaZUHROvFgAfNmgG5VBdOmBjXc9HxIJEDGdWFPTwMfGGVh1HdOBMPoAGA0/CQjrGP/SMeNBo22g9hyEF6GeS4RiEqE5gahzF0tp8dDwzsPh0PK34xOK52ps/T76/4AaPX
PjzE40kbLJ3faYWt8NDVjcMBSo8xMDFLMGAL/XAx4CFdy7nxYAZtRL+v1zboW6TvO3of6P1jDw/peLJ1YP+wzwgDHtJ+GLSRIfQ8sBmQqfYZh6zyYIo5ZsSDGr0fD8O2/1x4UPsGfaicHQ8GNNMs1nt4cH1n4mNnx/NXv09GRkZGRkZGRkbGhRM8M4hqepkQFwzQG8Qi6jrpNlAYFdBXvS+JIslR28cw0EADa3rtq8L0+i1JDw3o9G2sGd1ajCHrDvX/QrwBGB/rtV6SXtSqBpAmFRgaDI0UfK69lF2ZUCW7O8031+lbJftLE6JgeL+IQGFU28cYdRzVtup5SBo/Qx56zSM9+lB50FhAOsJI+yYdc2t5gBbNoLvNXczJsKErbxrf8VDFv9V+3wmml0gUTu95SNo+hVHbCsAYQxktTv5gV3jo/ONB8wDJ2pCCZvGfW8PDUAspJiZGHgyfd8e5NxyiWLE76b8lTgok8qGBlW4hAdNrPRVGbSsjD129wGCM2ZcHY4Z6YOfGQ+cXSUicoZ7gmeqF8uDFcIc/yh3+iGp5GU8Rde+KqH1mBzykxUYSD2WsD5MYSCiNlmrfLuzlgdh+jOrIOfKg9oYz8DBoPwY8xMUXOx4cqg11TzjM591x1fIa2m0CU9MO7FeehjykhSUqM6wXYx6UF4s1vd3nxMM+XIQYRenaBRnXk05PMdWfqKMXFyyObSm6+IoYToRN/tFdDAgT6wb1QPsMa4b299sCOgH9stOLHPNgO020MQ9r2wpjz5GH2C6s8NDrbPbvhQEPAfq6IZp52IphW2Z8zl1CEMPEumini9pv7aAvlb5+ELCDvrOKPAz7zgKDNUMt0V4br+87B33og2wrex729iN+4CPKDfjYRkjnF4YHTu1T+TIyMjIyMjIyMi44XDDBM8GwkBIkai2tvKfHYkaORB0U4ksMEjVe0nshZk6k40kvLMTskhCfputx9Lx0PGZ6JF0awXT6VcRMi/6pOH2mQOj/RtKTdeJn6Z6uS9yX+F0hQFV5nnD9l9g6tGQpVfxu8yB5YGQ3Yvbs+5hFEOj56D7f/c2ev7vrCg+OByBmYNFl3iVNqBDAWuHaJ93J0eM71KJC4YIZ3YSt5QEz3nZ/r+MBJGr/hCFfXfn3/jLmYA0Pgk7zXMMD0b4+46Q/vo6HTiMt2vq4a+/m4stORiHxMvrz/jwgvT7cOh6AqPmE3joPyjt09WdvfeiP7fWLPTx09jKytzse7WbAU8dD+syABxG44ur7ueLq+7ppq/vzEOvqCg9an/v2gUF7MOKBqHM15G5QH8LZ8pD2O3sHtsrpeFA/CQPuOl0wgUsuO8nV196Dx7Irk7U80H2lGdTvwTa2i2END6z4Qd8mDHggTuEk6nGRrvFB8DAo/xEPAkSfCGt4OHp8l2ufdCfWBubh9DykuhG69i75R2oPzKh/OCMPxKm8+/BA3F/Lw5r2cdxeyJiXfXgAzbzaOrzkCV/xJarKr+VhXRvR1eWufgzKdejnaKbZat/ZlfOauhK6a9P3+77TDOxa01aGFR5SnzE6L/IwalsMy90GeAcZGRkZGRkZGRkZF1TwTPVZVI9Fn66rTpGTtGy9HekepX0XNVxUENl2mi4qkK03P23UgGo7EWTTvS+DrU+6LkPxYun/7m4EBhpIJmkhhaj9k/6OA/2kCURgrAWVtGACbG0sOf64k2weqrtrTzo2yf6kU+OT9lenfdWfk7SukhB0t7/Cg4sLByQh/U7fJwx0fmQgHO7PkgdBtaA6e01vbwySjTSP4vG0PykdL7tszpHjO2O7sSM/6Mo+2R2F0dvkI6HfV7vLngd6ras22Su9BtqQh5B0nwYC6sNXd0M7EFJPWk/JH/Bn4GGonxbPL4zw4qMf5eLLTvZ2U+yxO63o2Yai0wcMA356f9Dt6r4L8ft8z0Oy34c1PMiKoHv0jY4HMZ2Q+r48hN7/ifud5tGgXiROnj/9JFdcfV9vN6l+D9qKoDlFnZZT
0jbr7C7788R02lde9rYPIQx5GNSJ5AfDxUeSqLvQa2Gt8rCuXqQ6s4aH7vjQLwI8O3yOq669p7O7rxPRzugH2nYqP/WK3YkHF4XxEw9O+gUFfPKLQd1Q3cieB20rU9BkuAAIvbB+0sLq2sPEw8BeMdEvxu1m5wurnHi4/tov8bjr7gZLrPfaVoYhD93xgcZZXDAjDHQ0u/YgamTqYix9P9LpZg58pdMA2yOk3/sFQqcjycpiLKkeMLRXTFfWyT/2tJErPFx9+b1cfd3dFFUY2e/X+EFXD6IWnAt9u5HaCh/LX9uEfmGaro1YKf9u31vSIjRB+oUvQrQ39R3d4gCh94vT9p1DHgZ1YdhGhEXWPMvIyMjIyMjIyFBcMMEzg05FwsAmsuZ9VrJLVo/HLC3ps3BSForu95pXe96XpInVP1XvswXSb+mxkfbRIKsAodOdSsdWNW/SfnqSns6XAKb02MPbGIRJ5GGDvSuJnZaH9Nun5SHZZ0f7KXvADDIyEl8dD51tp+cBiaUx5AH2aOCs4wEbKC4+iYFumuaZeID++szK/thuBtlIfTkP93sekm/Y7nvGvJgxF6s8RHsfDA+aNSPYS0/BgAfzEHkY7p87D8N60Wd0dTyQfGIND11dGNSVjq8VHpBRvREBe/E2gE7RNSHl2T1IHujsi78W6/jpeUj2yYCD9Tygbcmg3hP63+41wxj4x5iHVP57eDi6i40nFiaQLNiDc+Ghy9Lar4wTD8S68DDwsGL7UD9OZC8P5tCSIq7CMDMBYwDqPTQYGbjgHh4Ge3IaHlb2U9amXuegz5AxV2Me+v4k/d7Y1rPjYbWumI0GW7Wx72wjD/EDxYCHzl7Oqe8c9QGk8td9SfurPMRMs45z6T9/Zh7itZy27xzzMN9teT0ZGRkZGRkZGRkZF1TwTAXBrREmuKjL4rFGOq2votNrCVHnSfW/hhouRdL0QbWOoux11Hoial5Bp2Mz0PpJWlf9+2ZwfQPdG2P2XP/ZYqjvkhAQFmL5i8VF3OurTrdGtc8k2h0GPAg2bot9dOBGPKCaNmpn1PgxQx2fpFsDZeIBEwXJHyYe0BuhIQ+C0Ijh/y2P8QU363SMVOcrYAe2llHLp+g0ndT+KvqMTvYUyiguX54lD2ao/XRaHvpj9iHwAFEgfYUHD3xweZTPtJtq/xoektbT6XhY5w8Wok1JGw/KfXkwkQlGGlCPFA8CfKw+zMebQyP9w1T+5Ujbqg+upXZhgses+MOZ6oUd8hDLX7Wg7ICbXvPp4edBufiHdpMPLY9GO/1I02rVH0rjY/l6NCAfNeASP2dRL5LeWWpDkybc6XiAgTaYOa062hkhUQNslYfPuxn/b3GMAFSpzzBuVA8mxo38QPPReu0vPY9YL3pNuMSDTVyYqJcYeSAe63TyBnba7owVHqIPPXgeet24hIBwt5/w/1scpxajGm8DvdBkdxV5KQmxHqzyMLDfyKh9sCMeiPau9p09D/v1nUN90YfKw2rfeWLb5+BZRkZGRkZGRkYGcAEFzwRUz0igjY/NdYhsiHkB9I+s9el+fM4/+J/uKX76xJ7sAYb746fpXaYJfTbFMENlmGXR6cgIDLN5dEuXpdBfA13GQH+c7nxrPRvHH6CYNDolFZ2SZL7cPJCe/A+zBej52Y+H+B2rWTl02RoPnQe9+MDGsROUs6VOz8Q+CB5ILOzDQ589AimrYsVPEg+DY/tlbY2O78vDIOtjyMOAjxEPCNOjJ5lszvFxKlYjJaa7dTxXHtLfK5l4Z+IhcrEfD+vqUsdDtHdYV7rPrfCQeFrlQQSmh7eZHt7WaXPEaVzxJImeYYdfFH/j3Hjoy7PLMmKFi9PyMLR/wMPA3lUehvpPKePmdDxMtnaZHj2JoNPjjEB9Jh5W+Opzr6BvEx4MDzDKQDoDDx0fp+MhXccqD8mkuF/NlsyOP4AxonVCoIldZeJhXPa9L9phXUls7LH9y8XD+LPD9iD5xqrt58JDMWnYuOh+
rA00UoAUKzx0LRg9UhsxPGMdD3S2y1oeenv7PmOVj/146K9jdE7XHp59n7HYboF/IiMjIyMjIyMjI+MCCp6pHougejNC0qtRvRXBdDfP6XgQFQZ3g30vptO26T4jvQZYf9zs1bGJ56meiw70Q9wmXZukjdVpYAlR88iMhLNVAyneMESNq24qaNKASscDHJos+fav+gBXT+7HieoSDe0NUduqu+bIU9KuSuenfS8qip00fdJ1t2kbCgR6ra+B1lVgoPMUeUqi2clufVlCoBNSDyP7Ex+MtcEwnTZY0k0zgXgcJtbxr5/+YZ48u6PTL+rtjf6AiXzEbdT40nPNSOss+Y7am8qbXvNreN6w/AeaV0Fsx4MM+BhyMdI+iuUe1vFA7xfD8pcYDEn7FuFfPvlv+ZrNf+x0zUL07a6cBz7SazUNjov6jsTPjfZT3QljHoJYXdUvamJ1umDBdNz6NTyk75TBfhjWIUE1wSIPcpY8IPCiJ/49Lzz897Hsex6GmmdDHjo76ctzeF6qB50t9NpvflDWfT0Y8ND5xKAtGJY/Yz/o7e+F5HtB9RgkiO93PMSgggljHp77uNt46ZGPIQb8oG74kb1ar13cT9pXXVsSxjw46evMUAvNDezr6lno29KujRzsp/aw+9yZeJDUPq7wEP/ew0NsM59y6R1889EPUxY+8mC6bbIv9SFjuwdtSRi2F6lt6XnwSfMsnqvC+b1O5p6+g6QFFu0naYBFbbw19aNfBGG8nzjZw8NgnwCPP3Yf33bkA2xOmpG/J73QYV+xpw1Nfj/YT+entjQMuBnqRQ55EPbykXgIYsa8JB4H+pEh2T/sO7rFeQZ956DtHPIQdpfAHz6MI5OMjIyMjIyMjIxHC8578OyLX/wiP/ZjP8af/MmfMJ/P+Yqv+Are9ra38dznPhcAEeHNb34zv/mbv8mJEyd4wQtewK/92q9x/fXXn9PvGHRKEUBpPAAzSc/M01N6PVdG+0bvLaV/Io7056xmCw2f+ENadS6tQje4kUnft/L5of5Td6OTzhucyyCzZhQQSL+9knFSWMelszkG4hSs0/CwYuND5wH6VSgH7w/+Tte9l4fBjc6XgQdrApdsqcZVmqKaeJCz5EGIN+aD80ZZQN023sQ9GB7SvsA442QQABgeX7FbhIFW2pAH3TcIFx15AFBOVOuKwbWt44FB2Q956I+PMl5Oy8PQT86Wh3Gmyl4ekq1nzwMCx47f97DwsM4v0uqr6UfW2d5/xo5+Y+QHg3aizzAbtxu9IZGHwX7PQb9/+MjJTteqjArqYvScpIC22laGc+WBviz1sys8DDlYaTtPz4P6QedjiZOuzeg/b+T0PGxu7VDaNP00xJ/2ykNxdjx02bIdD/EzQ5+K74dhWXV1pffvtJLpqG3sfsus4WHcVvY8DLg5Cx5mswWzwu3tOwWCadfyMGwr9e/9eEjHTs/DsPzTiqb79Z3jfuIseIif7fuMQfsQj9U7DW8h46HikRrnZWRkZGRkZGQ8nDivwbMHHniAF7zgBfyLf/Ev+JM/+RMuvfRSPv3pT3P8+PHunLe85S388i//Mr/1W7/Fddddx5ve9CZe8pKX8MlPfpLZbHZOv1eZ/kbAsKLT0unWhJG+kzV6C6D6LKHXcImC2jbeAvQaaEnrJ05EMcTzuokpdPo2ZkX7iFVNqLHGyx7tnyTwIv35+2ERLO9dHuVeXw70vPzIzrHm21jLp4icpcmGYw7p3lvlIWkcDXmg48N0HK3y0GkhnQ0PxC8X/bLV94dwYvjL5RE+7yZR36vXfOu13s7EQ7J3qAUmmOgbWv57eTD0mkfptcpDrwG1ho8VzZ/VEj8XHkQM768P8el21vn/yPdH254HzVlM5X86HvS9XuNoXC/OnodVLnrNr95n7IPmAeBv6y0+1mxGjSa/noc9fuCjppUftAM9L6lt2Y+HVC9WeRi3Af3+2N79eFjVi4taWDFocCaNsH9oNvhAfUiv1/hxPU/7gzZz
/H5qHyR+Xka8nYmHoX7k0CfADDjq28iR3SvaaHY/HgDEdP61H253U/5qcRgfy3foByOttwfBQ/KVYVvZt5371w1SfTBfLh7O3Gfc7SvetzhCLZZyRfOtrxd+T/s43Kb2r2tfV/rOM/EAq35xLn3n8PiD4+HUts/Bs4eIR3qcl5GRkZGRkZHxcOG8Bs9+4Rd+gWuuuYa3ve1t3bHrrruu+1tEeOtb38p//I//kZtvvhmA3/7t3+byyy/nne98J9/xHd+x5zvruqau+5XRTp061f3tB0+wjYFWH49j+mfg3d+D5+Ld5/sBvIyG2ul4v0e/smH3TjpuVj4V99cdT9ki6VeHWR3xvWEWWB+aon+ynzIjjLBTLcH6bspYkHQTISPdInuWPIAMfnH1vdPzoNlPq+yZc+eh43SYATG4wkGmQ3p7t1pC4eIUKolZI4XafA489L90Oh50b18euvfOxEPi7Ew8DMv99DzMqxqKNk5RFAwGt4aHFAxIf+/Pw1Dz6cvFw/DYsK70N7xdhhaDjJoBL7221F4eABZlA2WjYRDRZRxcKnvpv+lceDhTGxGr3dnzMLJjPQ/I8OrOnYe6aJGqVstFQzAei0G1vx4OHoCBvQ+Sh1hfHjwP9FlbQGMdblIDEqcjrvIwtDuM7Ey/9NB5GB5f5eAseVhpI86VB2c9dbVEjMQpzYkHMLK3/Mf959gnzp2HIRdnw8P4V1b7lzEPwysf8xCHBF0bOt9xwOfIePB4pMd5GRkZGRkZGRkPF85r8OyP/uiPeMlLXsK3fdu38b73vY+rr76a7//+7+dVr3oVALfddht33nknN910U/eZo0ePcsMNN3DLLbesHVT93M/9HD/1Uz+157gQdVrSED8w0jdK04g67ZmkV8NAkyqen6amDDWvkg5Np8ezZ9+sOY9OA2eP3hVpn7gda0ENNV9ksA9JA2isEbZV1nzT4/+WKzdP4qTU9zFI6PWqOh2aFR6Uo8E10Nst6PW5fXkYn59sSMdH+mYkfbOB1ld3jO7YSAuKXuMpjHhgjxaSCEys52WP+wRPOnKP6rPF218JwzLvy7nT4En2Y7ryDQN7ZXA82TnUflpf/sPvXlf2UdOIgcbP4Jxe+8d2/throQ14SMdD71MWePFVn+KZF32xsyVxlWwNg3ILsaw73SHsyF63lgdG9SPZEmTMaVjDg6xwMvSd7v1YvqqNtN4PJJbtXn70HBHDjZd9lhsv+1xXD4bfva6ch7ys54OuDP06vxjWtZHd47Zgbx3p91ffF4n+MPT7fdqJ1Cas8vCsiz7Pv7jyVjCqeTb67lFb2U/JG9V7MTjG/h5iIMOvsVHP6du1MVd725D97F71Dx/WvJ/a0bU8aAA2HfuKI3fzssd9gsKGjod0nqcY2Nnz4Ffqy2o563cwaGsHvt/xsdpG7u0rVuvBat/R+wujujXigTEn/YuBTxiu3nyAb3r8x9go207zLJ3XlfNK37mu/VvHw2r/urcejK8/nSfQ+XnXNg39Yk896dussGpnPIcVu4d9p9ttgD97EKObjIRHcpyXkZGRkZGRkfFw4rwGzz73uc/xa7/2a7zhDW/gx3/8x/ngBz/ID/zADzCZTHjlK1/JnXfeCcDll18++tzll1/evbeKN77xjbzhDW/o9k+dOsU111wDSNQ00sEzBgoCmDhwHqxx391gAd3NOH1gICHdkMvgfFnzWRgEtkb7A+2s9PmV72MwyO/0hMSwqim1dxVGvYFKWjiTwnG4bDDoFBoAK+a0PMjgeocBw/14WMfVeh728rFqd7+fsgAGN14D3bAxH8Pz+/dT0ETEUNjARdVS7Vexm44HiRpPYY99D4KHeA1heE0D+85c/kMO++8a8zC+Bhl+15CH9PkBTwbh8ulux4NBlB8Tf9f4kd2n44E95/V2rPLQ5aiM7DtN+a+8t8dXzsBDX/5jvxie+7iZZi1YBGPGPJRreYjBlnPgIf2dgpCco93789BfSwqa7eWB9X6xwsPl
mye66WyFCcqDXiol2jaEWFcS+rZyeI29r47PG/t0WMtDH5A6nW9oua5ZdZQ+eLaujRy2lfvxcNHGdjfFMvFAx0NAjGEiZmjeqM8YXuuD5uEsyv90bWfyxaHvr/OLPXUq9PuHp3Mqq6tJFCb0nxn1GXZfHk7fd8ag5X5956CurOdhbOe58HCmvnPIQ122/D0ZDwWP7DgvIyMjIyMjI+Phw3kNnoUQeO5zn8vP/uzPAvDVX/3VfOITn+DXf/3XeeUrX/mgvnM6nTKdTvccN0AV9WtMDBYkXZZ4GzXeN0mfJ07LMd0tXdRt6f+GXtsqfVcSWO6PM/iesfZX2vbnmPFnBneraV+/w8TPDu5eSFpoerOQ9pdi+WB9lAdCRRX1i2zHw8BOpNOnSTwkzZ69POhNVDon2Tu0u9e3GU9tSjyk10PhYZWP/XgwRrMTPtIc4U4/XeGBTu+s5yHZfToe9NykBdV9dg8PPWcgvZ1mhZcVTjr7zNnzsKrg02n6DHhA4GPtYf7JbXS6Ras8pOtPPDD0mciDNUO/38tDsi3xkTSgep721o11PHS+MfKJlfI+Ew+YznY67g1/327xD+1Wp33XaZUN/CG1G8kfuuDKYL8v76RxNeZguF3lYVTeZ8GDarn1dn85ePhHt8HHm8OkYEmv2TfUPBzysIYXBIafG9Sdc+GBkW+tf50ND53fdP9OwwPqX3f4KR+pjxBgXx60TYjaZ+naB9pf6f0UYi2ij+zHg439zfg4XR3r7djndRoekg+saimOeYhnDvqM+3zFh5qjNGIpjBv4Zq/ttn/fOeYhnZt46HxplQf6PuN0POzLx0PqO03UWet52N0RfouMh4JHcpyXkZGRkZGRkfFw4rwGz6688kqe9rSnjY499alP5X/9r/8FwBVXXAHAXXfdxZVXXtmdc9ddd/HsZz/7nH9PBEInmq1PqNMAHRgFkBD23OhAf2M7VtWh+45hsGA8LGeffbNnv/+u8RGBTq9l/N7qzVI6P54jBodhKUXkwYAhhkcMIoKJmj4wtFt5WL3ROQg86HX1x4dnn46HAOxE3aKeBwMIIpaA7T5xLjzs8ZMBD6Ci2OPjZ8vDXvtPz8P6bzcrPACcpOj46bJkkDhtquehtzvxsFJXoNPO28PPqFR64fgz89Bf91nxMLDrXHm4v+NhkPWEYCTJ20ebRLpsRbPCQ3fOeeGBwS+Nedj76XU86LEHYm1O70nkxJCyhtbwEIkr+tbktDyMuXhoPIz3B+36OfKQPpN42B6WaswwSzyEYZ8hgje9f4z7jFUehr5w5rZy3fZseUhHzp6H/lgqf8SwoJ+OHaQP4MOD6DvN2M6z5WGv7eO2cszT+j7j3Hno68Z8Gch4aHikx3kZGRkZGRkZGQ8Xzmvw7AUveAG33nrr6Ng//MM/cO211wIqKnvFFVfwnve8pxtEnTp1ive///183/d93zn9lgAu6tWkaR29ble8UUzHo45LfwPZf6bXMrIgDLRedCC+Oq0t6dXI4LzR9w2mIApDzav+d0dTgqTXdNmzL3TfwcCeIJaZbfm6iz/LxZMdHFanxaTzh3o0w8+tHh9ex/Ba6afnhE6HBhhw6/e1M31uyMtD5AFAUpmMeSiN53nHb+OqjRORh4H+Db2uTucXsYxD5ytrphqtKdfOtwZln64h7MMDkYewhoehrw21i/r9Vbv76WBhwFl3Iwx89bHbuW7rnqg/RMfxfjyonavHhzwM7V4p9/18Z42d695P34WwZz/dtvsBLwx46vgb+WBv79MPf4mnHvnSHh722tuX214exuW8Wv5duZ+Bh6SL1fGAWakbe+0Y8zBuc0jnd/WIPb+X7H3S1j08+9jt0b6Bj0Z7of+NsJaHwbS3kb+P26YuZ2llup7HqL/Sc7a+7Ry8v1K/9+wP/GWVBxlx2+9fPXuA5110G4UR1TgLvT1DDcDEA/SabrLCz7ic+99fV66n5WGlvNM1w7o+Zc3+4HpG+/vwIGK4eLLD1138
Waa21b7zNDwM+85VHlJbtI6H1bYz+VhfXj03fm15933NufKwX9+Z3gNDvdMAHyHjweORHOdlZGRkZGRkZDycOK/Bsx/6oR/i+c9/Pj/7sz/Lt3/7t/OBD3yA3/iN3+A3fuM3ADDG8PrXv56f+Zmf4frrr++WML/qqqv45m/+5gf1mzHEAPSaV90APQr+WKS7wYM+I4W4Tdo/YgY3pCbp7/Q3a+wZjDO6cQBGg3d9f/2NTffbg5ub4Q0XjIMjDH9TDBPrqYyPVwWCoBk6GiYzxuzhoV+JMv3WKg+DG1bT37gPeUjnDrVv0o3+Xm2cs+dhv+84HQ8iBmuEqXUDHhhYCMaE+JuiR0wKKElv9yoPqVyH3I3s6fkbBqXOjYdxwG0vD+NgSSqDdT6YtjPbsgo74mHwGWOixzwEHsy58ADd5NfVG9tkb/cdg/IfBUtiyZ6GB8GwWTQjDowB25X/Oh6Up3X2p32f2g+zNzgx5CEFrhiUYRjw0AW7MGt5QHqO03Ws1gNZ8cXu+1fakMPFcoUHAUnTI5WHlL1j9+PBrPhivHZZ5WH0+/vzMPIt2ctD+o2hD3WfGdm/jod0neN29Wi5HLT8puMBtP3QrMQxDyZqf+3lIf5Wal9N/zvBrOFhT/ntrzOYvr+r70N/OQce9qsrR8t51x4M/UHtHvMgHQ/StRVDHjrf3MPDsO/seUjBsOHxkZ1reBjpEK7lYR0f63jo6/uy2ttGZpwbzsc4LyMjIyMjIyPj4cB5DZ4973nP4w/+4A944xvfyE//9E9z3XXX8da3vpVXvOIV3Tn/4T/8B3Z3d3n1q1/NiRMn+Gf/7J/xp3/6p8xms3P6LQO9XpXV4fdQ1ysN99OtXfe3WXOse6+/ZV2n+QWMtJBY/TwMXtJNi0lBHYbvjc4dW7aqn9YHhPqJKS0Ft7ZH2ZGqn2ITebD0WkZr7TwDD5ixltc6PtZNbxzZvg8PY3tOx8NY42jMUM+Dx/IZd4QHwvSceQCJmjhnKu99fOkR4mFor9nnuGC4zR3m7rCxwoNOwep52GvH0M79eDitL63lYT0nPefr/PvcedCAz3ia2xf8Fl/wW+Mpd1a/X4XR9/fr/XgY27lfmQ/r1aqvDTkd8zBsI8bnj3k6Wx7SOXeFDW5zh4E0zViwQx7W2Lu+PM/k9/vwYfazP/2Wvvfl5mHcVhruD1M+444QML3dXRvRa/09nDyss3/Mw/C7eh56u6Wz/3TtwOl42JaKf2iP0mL3tJX7aT+e2c7176+192HuO4eT/ffrO3e3A+8g46HgkRznZWRkZGRkZGQ8nDAi6bnwYxMnT57k2LFj/Of3PYetQ5Zh0Gsocq03RGE0oB8GEEaT9vaIXEsUB+8FovvvHHyu+x5WBMYHCwt0ATlF/53phml8Uzi8KVi9WdRr0uBaLQUfXV7GCZn01ziwrQuYxGsYa/f0545uCs+RB7rP7rV3VTzaniMP0AtI78eDQTPjPlZfwt1hY49t63jo/eN0PGjAqS/PnoeRLw2+c/zeQ+Ah2neuPIDh7+qL+II/tFJ245vjzvYVke9h8GDvTfHqeQNezpKHbn/kk2Me9twYm/HN8aqg+DoeDPDp5hifdUdXyvPB80BXbiv1Y/C93XeuCa6tBi7HPPTl/1B50O/qefin9jB/314Uv3colD9uK8dtRx9UG9u73/Gz56FvY3oe6Pjofau3U0Y22hUeUp04Ew93uS0+1lyMx3TXtsrDuraj84OVdrHna+DnpIDPwJe6c9cFnRh8x9Av9vYZY3219TwMuRjx02VfwwN+xkebS2ik2GOXHfWde9vFYZn217P6HTLgYegTq/au9JVn2XeeLQ+rbWVfhwy7O4Gvft49nDhxgqNHj5JxMDEc520cKs735WRkZGRkZGQ8CrDY8fzo13/4nMZ5j/ng2Re+8IW8hHlGRkZGRkbGg8LnP/95Hve4x53vy8jYB3mcl5GRkZGRkfFgcS7jvMd88CyEwK233srTnvY0Pv/5
z3PkyJHzfUkZA5w6dYprrrkml80BRC6bg41cPgcXuWwOLs6lbESE7e1trrrqKqxdzeXNOCjI47yDjdweHlzksjnYyOVzcJHL5uDi4R7nnVfNs0cC1lquvvpqAI4cOZId/IAil83BRS6bg41cPgcXuWwOLs62bPJ0zYOPPM57dCCXzcFFLpuDjVw+Bxe5bA4uHq5xXn6UmpGRkZGRkZGRkZGRkZGRkZGRsQ9y8CwjIyMjIyMjIyMjIyMjIyMjI2MfXBDBs+l0ypvf/Gam0+n5vpSMFeSyObjIZXOwkcvn4CKXzcFFLpvHJnK5Hlzksjm4yGVzsJHL5+Ail83BxcNdNo/5BQMyMjIyMjIyMjIyMjIyMjIyMjIeLC6IzLOMjIyMjIyMjIyMjIyMjIyMjIwHgxw8y8jIyMjIyMjIyMjIyMjIyMjI2Ac5eJaRkZGRkZGRkZGRkZGRkZGRkbEPcvAsIyMjIyMjIyMjIyMjIyMjIyNjHzzmg2e/+qu/yhOe8ARmsxk33HADH/jAB873JV1w+Mmf/EmMMaPXU57ylO795XLJa1/7Wi6++GIOHTrEt37rt3LXXXedxyt+bOMv/uIv+KZv+iauuuoqjDG8853vHL0vIvzET/wEV155JRsbG9x00018+tOfHp1z//3384pXvIIjR45w7Ngx/t2/+3fs7Ow8glY8NnGmsvme7/mePXXppS996eicXDYPD37u536O5z3veRw+fJjLLruMb/7mb+bWW28dnXM2bdntt9/ON37jN7K5uclll13Gj/7oj+KceyRNeczhbMrmRS960Z6685rXvGZ0Ti6bRy/yWO/8I4/1Dg7yOO/gIo/zDi7yOO/g4iCN8x7TwbPf/d3f5Q1veANvfvOb+chHPsKznvUsXvKSl3D33Xef70u74PD0pz+dO+64o3v95V/+ZffeD/3QD/G///f/5vd+7/d43/vex5e+9CW+5Vu+5Txe7WMbu7u7POtZz+JXf/VX177/lre8hV/+5V/m13/913n/+9/P1tYWL3nJS1gul905r3jFK/i7v/s73v3ud/Oud72Lv/iLv+DVr371I2XCYxZnKhuAl770paO69I53vGP0fi6bhwfve9/7eO1rX8tf//Vf8+53v5u2bfmGb/gGdnd3u3PO1JZ57/nGb/xGmqbh//2//8dv/dZv8fa3v52f+ImfOB8mPWZwNmUD8KpXvWpUd97ylrd07+WyefQij/UODvJY72Agj/MOLvI47+Aij/MOLg7UOE8ew/jar/1aee1rX9vte+/lqquukp/7uZ87j1d14eHNb36zPOtZz1r73okTJ6SqKvm93/u97tinPvUpAeSWW255hK7wwgUgf/AHf9DthxDkiiuukP/8n/9zd+zEiRMynU7lHe94h4iIfPKTnxRAPvjBD3bn/Mmf/IkYY+SLX/ziI3btj3Wslo2IyCtf+Uq5+eab9/1MLptHDnfffbcA8r73vU9Ezq4t++M//mOx1sqdd97ZnfNrv/ZrcuTIEanr+pE14DGM1bIREfn6r/96+cEf/MF9P5PL5tGLPNY7GMhjvYOJPM47uMjjvIONPM47uDif47zHbOZZ0zR8+MMf5qabbuqOWWu56aabuOWWW87jlV2Y+PSnP81VV13FE5/4RF7xildw++23A/DhD3+Ytm1H5fSUpzyFxz/+8bmczgNuu+027rzzzlF5HD16lBtuuKErj1tuuYVjx47x3Oc+tzvnpptuwlrL+9///kf8mi80vPe97+Wyyy7jyU9+Mt/3fd/Hfffd172Xy+aRw8mTJwG46KKLgLNry2655Rae+cxncvnll3fnvOQlL+HUqVP83d/93SN49Y9trJZNwu/8zu9wySWX8IxnPIM3vvGNzOfz7r1cNo9O5LHewUIe6x185HHewUce5x0M5HHewcX5HOeVD/HaDyzuvfdevPcjggAuv/xy/v7v//48XdWFiRtuuIG3v/3tPPnJT+aOO+7gp37qp/jn//yf84lPfII7
77yTyWTCsWPHRp+5/PLLufPOO8/PBV/ASJyvqzfpvTvvvJPLLrts9H5Zllx00UW5zB5mvPSlL+VbvuVbuO666/jsZz/Lj//4j/Oyl72MW265haIoctk8Qggh8PrXv54XvOAFPOMZzwA4q7bszjvvXFu30nsZDx3rygbg3/ybf8O1117LVVddxcc+9jF+7Md+jFtvvZXf//3fB3LZPFqRx3oHB3ms9+hAHucdbORx3sFAHucdXJzvcd5jNniWcXDwspe9rPv7q77qq7jhhhu49tpr+Z//83+ysbFxHq8sI+PRhe/4ju/o/n7mM5/JV33VV/GkJz2J9773vbz4xS8+j1d2YeG1r30tn/jEJ0Z6PhkHA/uVzVAP5pnPfCZXXnklL37xi/nsZz/Lk570pEf6MjMyHnPIY72MjIeOPM47GMjjvIOL8z3Oe8xO27zkkksoimLPChh33XUXV1xxxXm6qgyAY8eO8ZVf+ZV85jOf4YorrqBpGk6cODE6J5fT+UHi/HT15oorrtgjxOyc4/77789l9gjjiU98Ipdccgmf+cxngFw2jwRe97rX8a53vYs///M/53GPe1x3/GzasiuuuGJt3UrvZTw07Fc263DDDTcAjOpOLptHH/JY7+Aij/UOJvI479GFPM575JHHeQcXB2Gc95gNnk0mE57znOfwnve8pzsWQuA973kPN95443m8soydnR0++9nPcuWVV/Kc5zyHqqpG5XTrrbdy++2353I6D7juuuu44oorRuVx6tQp3v/+93flceONN3LixAk+/OEPd+f82Z/9GSGErqHKeGTwhS98gfvuu48rr7wSyGXzcEJEeN3rXscf/MEf8Gd/9mdcd911o/fPpi278cYb+fjHPz4a+L773e/myJEjPO1pT3tkDHkM4kxlsw5/8zd/AzCqO7lsHn3IY72DizzWO5jI47xHF/I475FDHucdXByocd45L2/wKML/+B//Q6bTqbz97W+XT37yk/LqV79ajh07NlplIePhxw//8A/Le9/7Xrntttvkr/7qr+Smm26SSy65RO6++24REXnNa14jj3/84+XP/uzP5EMf+pDceOONcuONN57nq37sYnt7Wz760Y/KRz/6UQHkF3/xF+WjH/2o/NM//ZOIiPz8z/+8HDt2TP7wD/9QPvaxj8nNN98s1113nSwWi+47XvrSl8pXf/VXy/vf/375y7/8S7n++uvlO7/zO8+XSY8ZnK5stre35Ud+5Efklltukdtuu03+7//9v/I1X/M1cv3118tyuey+I5fNw4Pv+77vk6NHj8p73/teueOOO7rXfD7vzjlTW+ack2c84xnyDd/wDfI3f/M38qd/+qdy6aWXyhvf+MbzYdJjBmcqm8985jPy0z/90/KhD31IbrvtNvnDP/xDeeITnygvfOELu+/IZfPoRR7rHQzksd7BQR7nHVzkcd7BRR7nHVwcpHHeYzp4JiLyK7/yK/L4xz9eJpOJfO3Xfq389V//9fm+pAsOL3/5y+XKK6+UyWQiV199tbz85S+Xz3zmM937i8VCvv/7v1+OHz8um5ub8q//9b+WO+644zxe8WMbf/7nfy7AntcrX/lKEdFlzN/0pjfJ5ZdfLtPpVF784hfLrbfeOvqO++67T77zO79TDh06JEeOHJHv/d7vle3t7fNgzWMLpyub+Xwu3/AN3yCXXnqpVFUl1157rbzqVa/ac4OYy+bhwbpyAeRtb3tbd87ZtGX/+I//KC972ctkY2NDLrnkEvnhH/5hadv2EbbmsYUzlc3tt98uL3zhC+Wiiy6S6XQqX/EVXyE/+qM/KidPnhx9Ty6bRy/yWO/8I4/1Dg7yOO/gIo/zDi7yOO/g4iCN80y8oIyMjIyMjIyMjIyMjIyMjIyMjIwVPGY1zzIyMjIyMjIyMjIyMjIyMjIyMh4qcvAsIyMjIyMjIyMjIyMjIyMjIyNjH+TgWUZGRkZGRkZGRkZGRkZGRkZGxj7IwbOMjIyMjIyMjIyM
jIyMjIyMjIx9kINnGRkZGRkZGRkZGRkZGRkZGRkZ+yAHzzIyMjIyMjIyMjIyMjIyMjIyMvZBDp5lZGRkZGRkZGRkZGRkZGRkZGTsgxw8y8jIyMjIyMjIyMjIyMjIyMjI2Ac5eJaRkXHB4b3vfS/GGE6cOHHa857whCfw1re+9RG5poyMjIyMjIyMjIeOPM7LyMh4OGBERM73RWRkZGQ8kmiahvvvv5/LL78cYwxvf/vbef3rX79nkHXPPfewtbXF5ubm+bnQjIyMjIyMjIyMc0Ie52VkZDwcKM/3BWRkZGQ80phMJlxxxRVnPO/SSy99BK4mIyMjIyMjIyPjy4U8zsvIyHg4kKdtZmRkHEi86EUv4nWvex2ve93rOHr0KJdccglvetObSMmyDzzwAN/93d/N8ePH2dzc5GUvexmf/vSnu8//0z/9E9/0Td/E8ePH2dra4ulPfzp//Md/DIzT+d/73vfyvd/7vZw8eRJjDMYYfvInfxLYm85/++23c/PNN3Po0CGOHDnCt3/7t3PXXXd17//kT/4kz372s/nv//2/84QnPIGjR4/yHd/xHWxvbz/8hGVkZGRkZGRkPEqQx3kZGRmPNuTgWUZGxoHFb/3Wb1GWJR/4wAf4L//lv/CLv/iL/Lf/9t8A+J7v+R4+9KEP8Ud/9EfccsstiAj/8l/+S9q2BeC1r30tdV3zF3/xF3z84x/nF37hFzh06NCe33j+85/PW9/6Vo4cOcIdd9zBHXfcwY/8yI/sOS+EwM0338z999/P+973Pt797nfzuc99jpe//OWj8z772c/yzne+k3e96128613v4n3vex8///M//zCwk5GRkZGRkZHx6EUe52VkZDyakKdtZmRkHFhcc801/NIv/RLGGJ785Cfz8Y9/nF/6pV/iRS96EX/0R3/EX/3VX/H85z8fgN/5nd/hmmuu4Z3vfCff9m3fxu233863fuu38sxnPhOAJz7xiWt/YzKZcPToUYwxp03xf8973sPHP/5xbrvtNq655hoAfvu3f5unP/3pfPCDH+R5z3seoIOvt7/97Rw+fBiA7/qu7+I973kP/+k//acvGy8ZGRkZGRkZGY925HFeRkbGowk58ywjI+PA4uu+7uswxnT7N954I5/+9Kf55Cc/SVmW3HDDDd17F198MU9+8pP51Kc+BcAP/MAP8DM/8zO84AUv4M1vfjMf+9jHHtK1fOpTn+Kaa67pBlQAT3va0zh27Fj3m6BTANKACuDKK6/k7rvvfki/nZGRkZGRkZHxWEMe52VkZDyakINnGRkZj0n8+3//7/nc5z7Hd33Xd/Hxj3+c5z73ufzKr/zKw/67VVWN9o0xhBAe9t/NyMjIyMjIyLhQkMd5GRkZjzRy8CwjI+PA4v3vf/9o/6//+q+5/vrredrTnoZzbvT+fffdx6233srTnva07tg111zDa17zGn7/93+fH/7hH+Y3f/M31/7OZDLBe3/aa3nqU5/K5z//eT7/+c93xz75yU9y4sSJ0W9mZGRkZGRkZGScGXmcl5GR8WhCDp5lZGQcWNx+++284Q1v4NZbb+Ud73gHv/Irv8IP/uAPcv3113PzzTfzqle9ir/8y7/kb//2b/m3//bfcvXVV3PzzTcD8PrXv57/83/+D7fddhsf+chH+PM//3Oe+tSnrv2dJzzhCezs7PCe97yHe++9l/l8vuecm266iWc+85m84hWv4CMf+Qgf+MAH+O7v/m6+/uu/nuc+97kPKw8ZGRkZGRkZGY815HFeRkbGowk5eJaRkXFg8d3f/d0sFgu+9mu/lte+9rX84A/+IK9+9asBeNvb3sZznvMc/tW/+lfceOONiAh//Md/3KXTe+957Wtfy1Of+lRe+tKX8pVf+ZX81//6X9f+zvOf/3xe85rX8PKXv5xLL72Ut7zlLXvOMcbwh3/4hxw/fpwXvvCF3HTTTTzxiU/kd3/3dx8+AjIyMjIyMjIyHqPI47yMjIxHE4yIyPm+iIyM
jIxVvOhFL+LZz342b33rW8/3pWRkZGRkZGRkZHwZkcd5GRkZjzbkzLOMjIyMjIyMjIyMjIyMjIyMjIx9kINnGRkZGRkZGRkZGRkZGRkZGRkZ+yBP28zIyMjIyMjIyMjIyMjIyMjIyNgHOfMsIyMjIyMjIyMjIyMjIyMjIyNjH+TgWUZGRkZGRkZGRkZGRkZGRkZGxj7IwbOMjIyMjIyMjIyMjIyMjIyMjIx9kINnGRkZGRkZGRkZGRkZGRkZGRkZ+yAHzzIyMjIyMjIyMjIyMjIyMjIyMvZBDp5lZGRkZGRkZGRkZGRkZGRkZGTsgxw8y8jIyMjIyMjIyMjIyMjIyMjI2Ac5eJaRkZGRkZGRkZGRkZGRkZGRkbEP/v9YtM6iI3+LwQAAAABJRU5ErkJggg==", + "text/plain": [ + "
" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "b = 1\n", + "s = 256\n", + "n_heads = 1\n", + "head_dim = 64\n", + "q = torch.ones((b, s, n_heads, head_dim))\n", + "\n", + "nx = 32\n", + "ny = 8\n", + "px = torch.arange(nx).expand(ny, -1)\n", + "py = torch.arange(ny).unsqueeze(-1).expand(-1, nx)\n", + "pos_idx = torch.stack([py, px], dim=-1).reshape(-1, 2)\n", + "\n", + "p1 = 1\n", + "p2 = 0.5\n", + "base = 100\n", + "\n", + "pe = RotaryPositionalEmbedding2D(head_dim, p1, base=base)\n", + "q_pe1 = pe(q, pos_idx).squeeze() # [s, head_dim]\n", + "\n", + "pe = RotaryPositionalEmbedding2D(head_dim, p2, base=base)\n", + "q_pe2 = pe(q, pos_idx).squeeze() # [s, head_dim]\n", + "\n", + "f, (ax1, ax2) = plt.subplots(1, 2, figsize=(15, 5))\n", + "ax1.imshow(q_pe1.T) \n", + "ax1.set_title(f\"p={p1}\")\n", + "ax1.set_xlabel(\"position\")\n", + "ax1.set_ylabel(\"channel\")\n", + "ax2.imshow(q_pe2.T) \n", + "ax2.set_title(f\"p={p2}\")\n", + "ax2.set_xlabel(\"position\")\n", + "ax2.set_ylabel(\"channel\")\n", + "plt.show()" + ] + }, + { + "cell_type": "markdown", + "id": "1b3fab3b-9240-411a-833a-eed89fa8453f", + "metadata": {}, + "source": [ + "## Learned position encoding" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "499e00df-c400-4fd1-a174-b35056a00847", + "metadata": {}, + "outputs": [], + "source": [ + "#| export\n", + "class LearnedPositionalEmbedding(nn.Module):\n", + " \"\"\"\n", + " This class implements a Learned Positional Embedding, e.g. 
used for spatial circuit dimension.\n", + " \"\"\"\n", + " \n", + " def __init__(self, dim: int, max_seq_len: int = 64) -> None:\n", + " super().__init__()\n", + "\n", + " self.dim = dim\n", + " self.max_seq_len = max_seq_len\n", + " \n", + " _pos_encoding = torch.zeros((self.max_seq_len, self.dim), dtype=torch.float32)\n", + " self.pos_encoding = nn.Parameter(_pos_encoding)\n", + " \n", + " self._init_weights()\n", + " \n", + " def _init_weights(self) -> None:\n", + " nn.init.orthogonal_(self.pos_encoding)\n", + " \n", + " def forward(self, x: torch.Tensor) -> torch.Tensor:\n", + " \"\"\"\n", + " Assumes channel last.\n", + " \n", + " Shapes:\n", + " x ... [b, s, t, ch]\n", + " \"\"\"\n", + "\n", + " seq_len = x.shape[1]\n", + " \n", + " pos_encoding = self.pos_encoding[:seq_len].view(1, seq_len, 1, -1) # to [1, s, 1, ch]\n", + " x = x + pos_encoding\n", + " \n", + " return x" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "beb34075-d6e2-4946-a19e-2dbf57e67778", + "metadata": {}, + "outputs": [ + { + "data": { + "image/png": 
"iVBORw0KGgoAAAANSUhEUgAABMQAAADQCAYAAADoOL1kAAAAOnRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjEwLjMsIGh0dHBzOi8vbWF0cGxvdGxpYi5vcmcvZiW1igAAAAlwSFlzAAAPYQAAD2EBqD+naQAAKk9JREFUeJzt3Xl0VPX9//HXZJnJvkEIQQKkEGRHIIKYWhEoIqLF9qjli5avSy1t2H9t/XFaoKgFqq0ilQNFrdrvKWLVL64/QETApewpSgTZlcgW2bJnkszc3x8e5jRFJEzeMCTzfJwz55A7Ny/eM/O5n3vvOzN3XI7jOAIAAAAAAADCRESoCwAAAAAAAAAuJRpiAAAAAAAACCs0xAAAAAAAABBWaIgBAAAAAAAgrNAQAwAAAAAAQFihIQYAAAAAAICwQkMMAAAAAAAAYSUq1AU0ht/v1+HDh5WYmCiXyxXqcgAAAAAAABBCjuOorKxMbdq0UUTEud8H1qQbYocPH1ZWVlaoywAAAAAAAMBlpKioSG3btj3n/U26IZaYmChJav9/pyvCE9OorNijNu8wK72yziRHkr7X+zOTnI9f6mGSU3Z1lUlO2prGvVZntNhwzCRn770ZJjmSlLTfJudUH5txlJxebpJTciLeJCcupdokp92vTpnkSFLSCzY17XjjSpOc2K8ck5ySm2xe+5qTsSY5rdbbfUK/ItMmq7KtzyQnfbPN/qM4z6aejLY224fvf9NNciSp272fmuR8UNDVJCfjnyYxSrj3sEnOseU2f9xL/NJmDEnSsVtrTHKcEx6TnPStJjE63dFm/kg6YDNXS5KnxOZ1+/Jmv0lOaqsykxx3pM3jOlloMxe17FlskiNJJ8vjTHIe6PqhSc6S+cNNcsram8So7YBDJjllS64wyZGk1O2nTXL2TbGZ0+K22oyhCKNTz/gjNtvr4RF258Lp69wmOae6m8QopesJk5wT+1NNcmT0Abmoljbn+JLk/leCSU7q7saPo7q6am15d3agZ3QuTbohduZjkhGeGEXENK7JEumxGVERsXaTgDvBZhKIdNs0oCLibA7+rOqJirTZITV27Py7SJuXzGwcRcbVmuREVNo8R5FGYygqwua1l6ToeJsTiMhGNuUDOW6j7SzOZgxFVFk9LruGWKTHJisi1ubgL9Jttf8wqifeaPswmqslu/1ZRKzR/iPaJEZRRs+11fwRFW3XEIuIs9nOnAqj58joNYuMsXlcVnO1ZPe6RcQa7c/ibJqhkVFGj8voOM1qe5WkSL9NTbEJNqdiZsf6RtO+2dxouB8yO2+IMzouMpr3v+WTYBfEbh6yOxeOdBsdOxgNo8g4ozFkdCxj1RCzOseXLI9n7MbR+S6txUX1AQAAAAAAEFZoiAEAAAAAACCs0BADAAAAAABAWLksGmILFixQhw4dFBMTowEDBmjTpk2hLgkAAAAAAADNVMgbYi+99JKmTp2qmTNnqqCgQL1799aNN96o4mK7b4IBAAAAAAAAzgh5Q+zxxx/XT3/6U91zzz3q1q2bFi1apLi4OP31r389a12v16vS0tJ6NwAAAAAAAOBChLQhVlNTo61bt2ro0KGBZRERERo6dKjWr19/1vpz5sxRcnJy4JaVlXUpywUAAAAAAEAzENKG2PHjx+Xz+ZSRkVFveUZGho4ePXrW+tOmTVNJSUngVlRUdKlKBQAAAAAAQDMRFeoCLoTH45HH4wl1GQAAAAAAAGjCQvoOsZYtWyoyMlLHjh2rt/zYsWNq3bp1iKoCAAAAAABAcxbShpjb7Va/fv20evXqwDK/36/Vq1dr4MCBIawMAAAAAAAAzVXIPzI5depUjR07Vrm5uerfv7/mzZuniooK3XPPPaEuDQAAAAAAAM1QyBtid955p7766ivNmDFDR48e1VVXXaUVK1acdaF9AAAAAAAAwELIG2KSNH78eI0fPz7UZQAAAAAAACAMhPQaYgAAAAAAAMCldlm8Q6yx3FeWKjL
O26iM6L3JJrVE1Nj1GLefyDTJiT/qs8l5zW2SE+m1qcf9bKVJTtKLJjGSpFO9/CY5iZ9Fm+RkPlpjkuP9sc1rX1kZaZKza2qiSY4kuY+Um+RE27z0qo23yXH2JJjkxFW4THK+6mf0BEmKqnJMctynbOZrd4XNnCbH6Lk+ZbN9OEOrTXIk6dOTRt8c7bd5jqbPec4kZ/a+ESY5Va1txrQ31WaOlaScP1SZ5Phj6kxyDg63GddjfvieSc7Svw82yZGksvY2h+PR8RUmOSeP2hwTt8k6YZLT4hOb7eO41+4b7L+z9CuTnOcHjjTJqW5pMze2X2FzbL23fbpJTkqcSYwkKXJ+iUmO82WsSY7VvF+bXmuSU9HW5twjNrFx5+T/rjjPZp+WucbmeO9IcopJTmStzfYaXWLzuFqtjDHJkaSEbV+Y5Oye0K7RGf7qSGnF+dfjHWIAAAAAAAAIKzTEAAAAAAAAEFZoiAEAAAAAACCs0BADAAAAAABAWKEhBgAAAAAAgLBCQwwAAAAAAABhhYYYAAAAAAAAwgoNMQAAAAAAAIQVGmIAAAAAAAAIKzTEAAAAAAAAEFZoiAEAAAAAACCs0BADAAAAAABAWKEhBgAAAAAAgLBCQwwAAAAAAABhhYYYAAAAAAAAwgoNMQAAAAAAAIQVGmIAAAAAAAAIK1GhLsBC7NtJinTHNCoj+e4vTWpxXmxrkiNJyT2rTXL2DnVMcmJaVpnkeI/EmeQcP97SJOe2cR+a5EjSkm39TXIq2rpMcvwHD9nkRGeY5LhSakxyEtfHmuRIUt2RJJOcyGqb7awu3ua1r2lt81xHJtvMQwkf2jzPklTewW+S0+HNWpOcU53dJjlpBSYxqhvhNclpPWqnSY4k7Xm+n0lO++U+k5zHut9oknP6nUyTnIz9No/ry2E285AkHXvEJsu/KtEkJ/3jOpOc/y2+wSSnpoPdcx1dYjPvR+6KN8lpdc1Rk5yTH7Q2yVFHmxh1LTMKko4OsjkGTT5gM65jT9iMoT1jo01yoo7YnGJWZto8LknatzbbJMeVYHMMEt/1lEmO+5VUk5wBk7aY5Lz/wtUmOZIUV2Uzzx692ea4KHlL4/oNZ5RfU2mSU51ks72WfWXXEnrgTxtMcv74WeOPHXyVDXvdeYcYAAAAAAAAwgoNMQAAAAAAAIQVGmIAAAAAAAAIKzTEAAAAAAAAEFZoiAEAAAAAACCshLQhNmfOHF199dVKTExUq1atNGrUKO3atSuUJQEAAAAAAKCZC2lDbN26dcrPz9eGDRu0atUq1dbWatiwYaqoqAhlWQAAAAAAAGjGokL5n69YsaLez88//7xatWqlrVu36nvf+16IqgIAAAAAAEBzFtKG2H8qKSmRJKWlpX3j/V6vV16vN/BzaWnpJakLAAAAAAAAzcdlc1F9v9+vyZMnKy8vTz169PjGdebMmaPk5OTALSsr6xJXCQAAAAAAgKbusmmI5efnq7CwUEuXLj3nOtOmTVNJSUngVlRUdAkrBAAAAAAAQHNwWXxkcvz48Xrrrbf0/vvvq23btudcz+PxyOPxXMLKAAAAAAAA0NyEtCHmOI4mTJigZcuWae3atcrOzg5lOQAAAAAAAAgDQTXEKioqNHfuXK1evVrFxcXy+/317t+/f3+DcvLz87VkyRK9/vrrSkxM1NGjRyVJycnJio2NDaY0AAAAAAAA4FsF1RC7//77tW7dOt19993KzMyUy+UK6j9fuHChJGnQoEH1lj/33HP67//+76AyAQAAAAAAgG8TVENs+fLlevvtt5WXl9eo/9xxnEb9PgAAAAAAAHChgvqWydTUVKWlpVnXAgAAAAAAAFx0QTXEHn74Yc2YMUOVlZXW9QAAAAAAAAAXlcsJ4nOLffr00b59++Q4jjp06KDo6Oh69xcUFJgV+G1KS0uVnJysrEUzFREb06isFi3LTGo68UWqSY4kKb7OJMZ90G2SU2f0PQcpu21yKoaVm+REr080yZGkpIM
+kxx/VHDX5ftP3jEnTXIqN7c0yfngp4+Z5Nww/1cmOZIUc8Lmo9uOzUum4RM+NMl542/XmeR0+eEuk5xdx1uZ5EhSy4QKk5wyr8ckp84X1N+WzuKsbGGSUxdvEqMom6dZklT5PZv5OvKTBJOc6hyvSU6Ux2Y/vbj//5jkTFg0ziRHktzfO26SU/Evm3Fdm2gzVyd3PGWSU7bT7pMSCV/Y7EAy3zlikrNrfIZJTvoWkxh91dcmx4m2u1RLfFGkSU5lpv/8KzVA8m6bMeSLscnxDP3KJOf453bbWVymzX4o+UWb/ZATYfNcp4w7aJKz/yujufqQ0UGIpCsX2eyH9sy0ec1cn9ucDCd+YRKjmFM2c5o3yegkRtKJa2pNcjq83PiMurpq/fPd36mkpERJSUnnXC+oa4iNGjUq2LoAAAAAAACAkAqqITZz5kzrOgAAAAAAAIBLIqiG2Blbt27Vzp07JUndu3dXnz59TIoCAAAAAAAALpagGmLFxcX68Y9/rLVr1yolJUWSdPr0ad1www1aunSp0tPTLWsEAAAAAAAAzAR1JeAJEyaorKxMn376qU6ePKmTJ0+qsLBQpaWlmjhxonWNAAAAAAAAgJmg3iG2YsUKvfvuu+ratWtgWbdu3bRgwQINGzbMrDgAAAAAAADAWlDvEPP7/YqOjj5reXR0tPx+m68PBgAAAAAAAC6GoBpigwcP1qRJk3T48OHAskOHDmnKlCkaMmSIWXEAAAAAAACAtaAaYk899ZRKS0vVoUMHdezYUR07dlR2drZKS0v15z//2bpGAAAAAAAAwExQ1xDLyspSQUGB3n33XX322WeSpK5du2ro0KGmxQEAAAAAAADWgmqISZLL5dL3v/99ff/737esBwAAAAAAALioGtwQmz9/vh544AHFxMRo/vz537ruxIkTG10YAAAAAAAAcDE0uCH2xBNPaMyYMYqJidETTzxxzvVcLhcNMQAAAAAAAFy2GtwQO3DgwDf+GwAAAAAAAGhKgvqWyYceekiVlZVnLa+qqtJDDz3U6KIAAAAAAACAiyWohtisWbNUXl5+1vLKykrNmjWr0UUBAAAAAAAAF0tQDTHHceRyuc5a/vHHHystLa3RRQEAAAAAAAAXS4OvISZJqampcrlccrlc6ty5c72mmM/nU3l5ucaNG2de5HlFOF/fGqF8c0uTUuLP/iRp0KpbBNWvPEtdx2qTHM+nsSY55VmNe63O8B1IMMmpy7V70TyDbbKqPrQZj1M6vW+S82F6jknOgpNXm+RkjfjcJEeSvvpbe5ugs/9GEJS3n77OJKd8gM12f6I63iSn8rMUkxxJGj5yk0nOwjVDTXJ69PrCJOdAfAuTnEibl15VrWxyJClmg818XXG1zRzr3muzP0vJLTHJmfWLe01yar5ns3+VpNhXbcZjdLrN5Og5aZNTe8TmcWXurjPJkaSjP7bZaCPusMlxDtaY5PhHl9nkHEoxyYk6fUGnPd8qvcBrknM6x22SU/odkxgl77GZQ6pXp5vkJNsMRUlSwrs2+6FjA2zOz7L/73qTHP++HiY5kcMSTXKi7aZG7Zlp85rFbYwzybE61o+qtNnOoqr8JjnF/SJNciTJ5bapachj/2x0RnV5rf757vnXu6A9w7x58+Q4ju69917NmjVLycnJgfvcbrc6dOiggQMHXnCxAAAAAAAAwKVyQQ2xsWPHSpKys7N17bXXKjo6+qIUBQAAAAAAAFwsDW6IlZaWKikpSZLUp08fVVVVqaqq6hvXPbMeAAAAAAAAcLlpcEMsNTVVR44cUatWrZSSkvKNF9U/c7F9n89nWiQAAAAAAABgpcENsffeey/wDZJr1qwxL2Tu3LmaNm2aJk2apHnz5pnnAwAAAAAAANIFNMSuv/76b/y3hc2bN+svf/mLevXqZZoLAAAAAAAA/Kegvjd2xYoV+vDDDwM/L1iwQFdddZX+67/+S6dOnbqgrPLyco0ZM0ZPP/20UlNTgykHAAAAAAA
AaLCgGmK/+tWvVFpaKknavn27pk6dqhEjRujAgQOaOnXqBWXl5+fr5ptv1tChQ8+7rtfrVWlpab0bAAAAAAAAcCEa/JHJf3fgwAF169ZNkvTqq6/qlltu0ezZs1VQUKARI0Y0OGfp0qUqKCjQ5s2bG7T+nDlzNGvWrGBKBgAAAAAAACQF+Q4xt9utyspKSdK7776rYcOGSZLS0tIa/K6toqIiTZo0SX//+98VExPToN+ZNm2aSkpKAreioqJgygcAAAAAAEAYC+odYt/97nc1depU5eXladOmTXrppZckSbt371bbtm0blLF161YVFxerb9++gWU+n0/vv/++nnrqKXm9XkVGRtb7HY/HI4/HE0zJAAAAAAAAgKQg3yH21FNPKSoqSq+88ooWLlyoK664QpK0fPlyDR8+vEEZQ4YM0fbt27Vt27bALTc3V2PGjNG2bdvOaoYBAAAAAAAAFoJ6h1i7du301ltvnbX8iSeeaHBGYmKievToUW9ZfHy8WrRocdZyAAAAAAAAwEpQDTHp6483vvbaa9q5c6ckqXv37rr11lt5ZxcAAAAAAAAua0E1xPbu3asRI0bo0KFDuvLKKyV9/Q2QWVlZevvtt9WxY8egilm7dm1QvwcAAAAAAAA0VFDXEJs4caI6duyooqIiFRQUqKCgQAcPHlR2drYmTpxoXSMAAAAAAABgJqh3iK1bt04bNmxQWlpaYFmLFi00d+5c5eXlmRUHAAAAAAAAWAuqIebxeFRWVnbW8vLycrnd7kYXdcFOR0ve6EZFxBY7JqVE1JrESJJS9vpNcm4d+b5JzrP7hpnkxB8yiZHP4zLJaTfgqEmOJPVJKTLJedW53iTnied/aJLT9sYvTHLWr+xpkpP+sc8kR5IiY222/eJrbHJcPptxHfWlxyRnvy/dJMeV6TXJkaTlkweZ5KRnBvUm6bMc+Ve2SU6MbMZQq7VHTHJ2/p8MkxxJcpfaXF/Uf9xmXPuMDlWO72hpkhPfzWYsttpqNzceu73aJCdpdZxJzsneNsdEPxv0nknOC0u/b5IjSdGfJJjkFK+IN8mJ6GkzF9Vssdl/ZB2sM8mptNlcJUnHe9nMRT1+tNMkZ/MHXUxyKtqaxMibaXRCVGtzTCRJJZ2Nshyb7cN/XR+TnMPXxZrk1BkdD8++8+8mOZL0y/fvMMmpGlhuktNpnM35UMnSFiY5fdIPmuQceu9qkxxJijxqMze+9uQNjc7w1VRLWnne9YI6Ahs5cqQeeOABbdy4UY7jyHEcbdiwQePGjdOtt94aTCQAAAAAAABwSQTVEJs/f746duyogQMHKiYmRjExMbr22mvVqVMnPfnkk9Y1AgAAAAAAAGaC+shkSkqKXn/9de3du1c7duyQJHXr1k2dOnUyLQ4AAAAAAACwFlRDTJKeffZZPfHEE9qzZ48kKScnR5MnT9b9999vVhwAAAAAAABgLaiG2IwZM/T4449rwoQJGjhwoCRp/fr1mjJlig4ePKiHHnrItEgAAAAAAADASlANsYULF+rpp5/W6NGjA8tuvfVW9erVSxMmTKAhBgAAAAAAgMtWUBfVr62tVW5u7lnL+/Xrp7o6m69GBgAAAAAAAC6GoBpid999txYuXHjW8sWLF2vMmDGNLgoAAAAAAAC4WBp1Uf133nlH11xzjSRp48aNOnjwoH7yk59o6tSpgfUef/zxxlcJAAAAAAAAGAmqIVZYWKi+fftKkvbt2ydJatmypVq2bKnCwsLAei6Xy6BEAAAAAAAAwE5QDbE1a9ZY1wEAAAAAAABcEkFdQwwAAAAAAABoqmiIAQAAAAAAIKzQEAMAAAAAAEBYoSEGAAAAAACAsEJDDAAAAAAAAGGFhhgAAAAAAADCistxHCfURQSrtLRUycnJ6nH/7xXpjmlUlue0zdNworfLJEeSImqMcrw2NfnijIaK3yam56A9Jjm7l3U2yZGkss51Jjm35P7LJOf/7e5ukuP+NM4kJ2bACZOcqFfTTHI
sxZz2meQUj6kyyen46xKTnM8eaWGS4y+PNsmRpIhqm7/lpH1iMzf6b7MZ13XvtTTJSfjSZiweHmY0WUuKTvSa5CTE2eSc/jzFJMdzItIkxxdjs391X1lqkiNJCa8mmuRU3m4zF0W6bJ6jyu2pJjmdFn5hkiNJe8a3twkyOgR99Ef/Y5Iz68mfmOQkfWFzbHXqvjKTHEmqrnKb5AzP2WGS89anPU1yEpJtjkHKS2JNcjr+1e40tfgqm5oqBlSa5Owd9LxJTvabPzXJcdXaHFvFFdnsFyWp4kqbk+FWa22OQf1GD62irc1kXXWFzdwY/3mUSY4k1Rn1C+KuOtnoDF+lV4V3/lElJSVKSko653q8QwwAAAAAAABhhYYYAAAAAAAAwgoNMQAAAAAAAIQVGmIAAAAAAAAIKzTEAAAAAAAAEFZC3hA7dOiQ7rrrLrVo0UKxsbHq2bOntmzZEuqyAAAAAAAA0EzZfcdmEE6dOqW8vDzdcMMNWr58udLT07Vnzx6lptp8JTYAAAAAAADwn0LaEPvDH/6grKwsPffcc4Fl2dnZ51zf6/XK6/UGfi4tLb2o9QEAAAAAAKD5CelHJt944w3l5ubq9ttvV6tWrdSnTx89/fTT51x/zpw5Sk5ODtyysrIuYbUAAAAAAABoDkLaENu/f78WLlyonJwcrVy5Uj//+c81ceJEvfDCC9+4/rRp01RSUhK4FRUVXeKKAQAAAAAA0NSF9COTfr9fubm5mj17tiSpT58+Kiws1KJFizR27Niz1vd4PPJ4PJe6TAAAAAAAADQjIX2HWGZmprp161ZvWdeuXXXw4MEQVQQAAAAAAIDmLqQNsby8PO3atavest27d6t9+/YhqggAAAAAAADNXUgbYlOmTNGGDRs0e/Zs7d27V0uWLNHixYuVn58fyrIAAAAAAADQjIW0IXb11Vdr2bJlevHFF9WjRw89/PDDmjdvnsaMGRPKsgAAAAAAANCMhfSi+pI0cuRIjRw5MtRlAAAAAAAAIEyE9B1iAAAAAAAAwKVGQwwAAAAAAABhxeU4jhPqIoJVWlqq5ORk/fz92+RJiG5U1v9+3NekpoTUSpMcScr8o9skp2bmaZOc0mqPTU55rElO2js2Ocf7+U1yJEmJdSYx7V626VUX923cdnFGym6b56jVLw6Y5BRuyTbJkaQu/b4wyTmw0qamqjY+k5zkdiUmOVqVZhJT0sXmcUlS/BVlJjltp9uM6z1jU01yrrpmj0nOJx/lmOS4T7tMciQpttjmUCPnvs9schKKTXLWj8s1ydk7OsYkJz7LZtuwFPt6sknOqWFVJjn+ozbPtT/JZn8vSR3/bjMXHe9p9NgGn7LJ2WAzN/r62YzrqM2JJjmS5LM5JJYvxmZurM3ymuRkvmlz3Hg6J9Ikp8WndttZ3CGbc7TK31eY5Hz5WYZJTruuR01yqv/W2iTHF2137BBZY7N9xJ6wOQY9fJ3N1aZiim2eo7jhx0xyaupstldJqtza0iSnzmBu9FdX6/OZv1FJSYmSkpLOuR7vEAMAAAAAAEBYoSEGAAAAAACAsEJDDAAAAAAAAGGFhhgAAAAAAADCCg0xAAAAAAAAhBUaYgAAAAAAAAgrNMQAAAAAAAAQVmiIAQAAAAAAIKzQEAMAAAAAAEBYoSEGAAAAAACAsEJDDAAAAAAAAGGFhhgAAAAAAADCCg0xAAAAAAAAhBUaYgAAAAAAAAgrNMQAAAAAAAAQVmiIAQAAAAAAIKxEhbqAxnAcR5JUU1Hb6Cx/VXWjMyTJ5/Ga5EhSXZ3fJqfCpiaf0UPzV7pMcnw1Njn+KpvnWZIUVWcSU1dr06v2eX0mOXW1Ns9RbUWNSY6/2mZ7lexq8nltavJX2bxmvkqjDbbm8npckt1jq/PZjGur8Xi5bR8+r80cK0m+Gsckx+o58qrxxw2SVFdntX2YxNht94Z8VnNIpVGO0e7DH22zv5fsjvfsjtNsgvxG+0Wf0Wvv8kab5EiS1R7NL5u
50V9ltF+sNToG8Uaa5NTVGm5nPptxZHVeZXXuaXaeZzRX+xy7YwcZHTtYjWt/tU3rxOr4ymf12vtstldJ8hkdg1rMjWeOh8/0jM7F5ZxvjcvYl19+qaysrFCXAQAAAAAAgMtIUVGR2rZte877m3RDzO/36/Dhw0pMTJTL9c2d1tLSUmVlZamoqEhJSUmXuELg4mBcozliXKO5YUyjOWJco7lhTKM5Cvdx7TiOysrK1KZNG0VEnPvTV036I5MRERHf2u37d0lJSWE5ENC8Ma7RHDGu0dwwptEcMa7R3DCm0RyF87hOTk4+7zpcVB8AAAAAAABhhYYYAAAAAAAAwkqzb4h5PB7NnDlTHo8n1KUAZhjXaI4Y12huGNNojhjXaG4Y02iOGNcN06Qvqg8AAAAAAABcqGb/DjEAAAAAAADg39EQAwAAAAAAQFihIQYAAAAAAICwQkMMAAAAAAAAYaXZN8QWLFigDh06KCYmRgMGDNCmTZtCXRLQYO+//75uueUWtWnTRi6XS6+99lq9+x3H0YwZM5SZmanY2FgNHTpUe/bsCU2xQAPMmTNHV199tRITE9WqVSuNGjVKu3btqrdOdXW18vPz1aJFCyUkJOhHP/qRjh07FqKKgW+3cOFC9erVS0lJSUpKStLAgQO1fPnywP2MZzQHc+fOlcvl0uTJkwPLGNtoan73u9/J5XLVu3Xp0iVwP2MaTdGhQ4d01113qUWLFoqNjVXPnj21ZcuWwP2cL367Zt0Qe+mllzR16lTNnDlTBQUF6t27t2688UYVFxeHujSgQSoqKtS7d28tWLDgG+9/9NFHNX/+fC1atEgbN25UfHy8brzxRlVXV1/iSoGGWbdunfLz87VhwwatWrVKtbW1GjZsmCoqKgLrTJkyRW+++aZefvllrVu3TocPH9YPf/jDEFYNnFvbtm01d+5cbd26VVu2bNHgwYP1gx/8QJ9++qkkxjOavs2bN+svf/mLevXqVW85YxtNUffu3XXkyJHA7cMPPwzcx5hGU3Pq1Cnl5eUpOjpay5cv144dO/SnP/1JqampgXU4XzwPpxnr37+/k5+fH/jZ5/M5bdq0cebMmRPCqoDgSHKWLVsW+Nnv9zutW7d2HnvsscCy06dPOx6Px3nxxRdDUCFw4YqLix1Jzrp16xzH+XoMR0dHOy+//HJgnZ07dzqSnPXr14eqTOCCpKamOs888wzjGU1eWVmZk5OT46xatcq5/vrrnUmTJjmOw1yNpmnmzJlO7969v/E+xjSaogcffND57ne/e877OV88v2b7DrGamhpt3bpVQ4cODSyLiIjQ0KFDtX79+hBWBtg4cOCAjh49Wm+MJycna8CAAYxxNBklJSWSpLS0NEnS1q1bVVtbW29cd+nSRe3atWNc47Ln8/m0dOlSVVRUaODAgYxnNHn5+fm6+eab641hibkaTdeePXvUpk0bfec739GYMWN08OBBSYxpNE1vvPGGcnNzdfvtt6tVq1bq06ePnn766cD9nC+eX7NtiB0/flw+n08ZGRn1lmdkZOjo0aMhqgqwc2YcM8bRVPn9fk2ePFl5eXnq0aOHpK/HtdvtVkpKSr11Gde4nG3fvl0JCQnyeDwaN26cli1bpm7dujGe0aQtXbpUBQUFmjNnzln3MbbRFA0YMEDPP/+8VqxYoYULF+rAgQO67rrrVFZWxphGk7R//34tXLhQOTk5WrlypX7+859r4sSJeuGFFyRxvtgQUaEuAAAQnvLz81VYWFjv+h1AU3TllVdq27ZtKikp0SuvvKKxY8dq3bp1oS4LCFpRUZEmTZqkVatWKSYmJtTlACZuuummwL979eqlAQMGqH379vrHP/6h2NjYEFYGBMfv9ys3N1ezZ8+WJPXp00eFhYVatGiRxo4dG+LqmoZm+w6xli1bKjIy8qxvBjl27Jhat24doqoAO2fGMWMcTdH48eP11ltvac2aNWrbtm1geevWrVVTU6PTp0/XW59xjcuZ2+1Wp06
d1K9fP82ZM0e9e/fWk08+yXhGk7V161YVFxerb9++ioqKUlRUlNatW6f58+crKipKGRkZjG00eSkpKercubP27t3LfI0mKTMzU926dau3rGvXroGPAnO+eH7NtiHmdrvVr18/rV69OrDM7/dr9erVGjhwYAgrA2xkZ2erdevW9cZ4aWmpNm7cyBjHZctxHI0fP17Lli3Te++9p+zs7Hr39+vXT9HR0fXG9a5du3Tw4EHGNZoMv98vr9fLeEaTNWTIEG3fvl3btm0L3HJzczVmzJjAvxnbaOrKy8u1b98+ZWZmMl+jScrLy9OuXbvqLdu9e7fat28vifPFhmjWH5mcOnWqxo4dq9zcXPXv31/z5s1TRUWF7rnnnlCXBjRIeXm59u7dG/j5wIED2rZtm9LS0tSuXTtNnjxZjzzyiHJycpSdna3p06erTZs2GjVqVOiKBr5Ffn6+lixZotdff12JiYmB6xckJycrNjZWycnJuu+++zR16lSlpaUpKSlJEyZM0MCBA3XNNdeEuHrgbNOmTdNNN92kdu3aqaysTEuWLNHatWu1cuVKxjOarMTExMC1Hc+Ij49XixYtAssZ22hqfvnLX+qWW25R+/btdfjwYc2cOVORkZEaPXo08zWapClTpujaa6/V7Nmzdccdd2jTpk1avHixFi9eLElyuVycL55PqL/m8mL785//7LRr185xu91O//79nQ0bNoS6JKDB1qxZ40g66zZ27FjHcb7+Kt3p06c7GRkZjsfjcYYMGeLs2rUrtEUD3+KbxrMk57nnngusU1VV5fziF79wUlNTnbi4OOe2225zjhw5ErqigW9x7733Ou3bt3fcbreTnp7uDBkyxHnnnXcC9zOe0Vxcf/31zqRJkwI/M7bR1Nx5551OZmam43a7nSuuuMK58847nb179wbuZ0yjKXrzzTedHj16OB6Px+nSpYuzePHievdzvvjtXI7jOCHqxQEAAAAAAACXXLO9hhgAAAAAAADwTWiIAQAAAAAAIKzQEAMAAAAAAEBYoSEGAAAAAACAsEJDDAAAAAAAAGGFhhgAAAAAAADCCg0xAAAAAAAAhBUaYgAAAAAAAAgrNMQAAABC5PPPP5fL5dK2bdtCXUqDDBo0SJMnTw51GQAAAI1GQwwAAAAAAABhhYYYAAAAAAAAwgoNMQAAgIvM7/fr0UcfVadOneTxeNSuXTv9/ve/D9y/f/9+3XDDDYqLi1Pv3r21fv36wH0nTpzQ6NGjdcUVVyguLk49e/bUiy++WC9/0KBBmjhxon79618rLS1NrVu31u9+97t667hcLj3zzDO67bbbFBcXp5ycHL3xxhv11iksLNRNN92khIQEZWRk6O6779bx48ftnxAAAIAQoyEGAABwkU2bNk1z587V9OnTtWPHDi1ZskQZGRmB+3/zm9/ol7/8pbZt26bOnTtr9OjRqqurkyRVV1erX79+evvtt1VYWKgHHnhAd999tzZt2lTv/3jhhRcUHx+vjRs36tFHH9VDDz2kVatW1Vtn1qxZuuOOO/TJJ59oxIgRGjNmjE6ePClJOn36tAYPHqw+ffpoy5YtWrFihY4dO6Y77rjjIj87AAAAl57LcRwn1EUAAAA0V2VlZUpPT9dTTz2l+++/v959n3/+ubKzs/XMM8/ovvvukyTt2LFD3bt3186dO9WlS5dvzBw5cqS6dOmiP/7xj5K+foeYz+fTBx98EFinf//+Gjx4sObOnSvp63eI/fa3v9XDDz8sSaqoqFBCQoKWL1+u4cOH65FHHtEHH3yglStXBjK+/PJLZWVladeuXercubMGDRqkq666SvPmzTN7fgAAAEIhKtQFAAAANGc7d+6U1+vVkCFDzrlOr169Av/OzMyUJBUXF6tLly7y+XyaPXu2/vGPf+jQoUOqqamR1+tVXFzcOTPO5BQXF59znfj4eCUlJQXW+fjjj7VmzRolJCScVd++ffvUuXPnBj5iAACAyx8NMQAAgIsoNjb2vOt
ER0cH/u1yuSR9fd0xSXrsscf05JNPat68eerZs6fi4+M1efJk1dTUnDPjTM6ZjIasU15erltuuUV/+MMfzqrvTJMOAACguaAhBgAAcBHl5OQoNjZWq1evPusjkw3x0Ucf6Qc/+IHuuusuSV83ynbv3q1u3bqZ1tm3b1+9+uqr6tChg6KiOEQEAADNGxfVBwAAuIhiYmL04IMP6te//rX+9re/ad++fdqwYYOeffbZBv1+Tk6OVq1apX/+85/auXOnfvazn+nYsWPmdebn5+vkyZMaPXq0Nm/erH379mnlypW655575PP5zP8/AACAUOLPfwAAABfZ9OnTFRUVpRkzZujw4cPKzMzUuHHjGvS7v/3tb7V//37deOONiouL0wMPPKBRo0appKTEtMY2bdroo48+0oMPPqhhw4bJ6/Wqffv2Gj58uCIi+BsqAABoXviWSQAAAAAAAIQV/twHAAAAAACAsEJDDAAAAAAAAGGFhhgAAAAAAADCCg0xAAAAAAAAhBUaYgAAAAAAAAgrNMQAAAAAAAAQVmiIAQAAAAAAIKzQEAMAAAAAAEBYoSEGAAAAAACAsEJDDAAAAAAAAGGFhhgAAAAAAADCyv8H1ozCy5ne6uEAAAAASUVORK5CYII=", + "text/plain": [ + "
" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "b = 1\n", + "s = 8\n", + "t = 1\n", + "dim = 64\n", + "\n", + "pe = LearnedPositionalEmbedding(dim)\n", + "\n", + "q = torch.zeros((b, s, t, dim))\n", + "q = pe(q).squeeze() # [s, dim]\n", + "\n", + "plt.figure(figsize=(15, 5))\n", + "plt.imshow(q.detach()) \n", + "plt.xlabel(\"channel\")\n", + "plt.ylabel(\"position\")\n", + "plt.show()" + ] + }, + { + "cell_type": "markdown", + "id": "bac81d96-41b7-41e7-bec8-e4ac5922a840", + "metadata": {}, + "source": [ + "# Export -" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "d9bdbe86-6fa0-453a-bcab-f460dbca9274", + "metadata": {}, + "outputs": [], + "source": [ + "#| hide\n", + "import nbdev; nbdev.nbdev_export()" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "python3", + "language": "python", + "name": "python3" + }, + "widgets": { + "application/vnd.jupyter.widget-state+json": { + "state": {}, + "version_major": 2, + "version_minor": 0 + } + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/src/models/transformers/attention.ipynb b/src/models/transformers/attention.ipynb new file mode 100644 index 0000000..93fb0da --- /dev/null +++ b/src/models/transformers/attention.ipynb @@ -0,0 +1,285 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "id": "a8980c24-d62e-462b-ba89-3195cfdcc374", + "metadata": {}, + "source": [ + "# Transformers and attention\n", + "\n", + "> Common transformer and attention blocks." 
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "a8832bdd-f61c-44e1-8619-a9cb352ba768", + "metadata": {}, + "outputs": [], + "source": [ + "#| default_exp models.transformers.attention" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "06272f6f-b4e3-4504-a90a-feebbf6ad821", + "metadata": {}, + "outputs": [], + "source": [ + "#| export\n", + "from genQC.imports import *" + ] + }, + { + "cell_type": "markdown", + "id": "b377198c-3676-4086-81d7-7608a48c0500", + "metadata": {}, + "source": [ + "## Feed-forward" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "cc6ea7ff-988c-4cab-96ba-fa32e3b6da2c", + "metadata": {}, + "outputs": [], + "source": [ + "#| export\n", + "class FeedForwardBlock(nn.Module):\n", + " \"\"\"\n", + " A small dense feed-forward network as used in `transformers`. Assumes channel last.\n", + " Inspired by https://arxiv.org/pdf/2401.11605.\n", + " From https://arxiv.org/pdf/2002.05202 a modification to SiGLU\n", + " \"\"\"\n", + "\n", + " def __init__(self, in_dim: int, hidden_dim: int, dropout: float = 0.0) -> None:\n", + " super().__init__()\n", + " self.hidden_dim = hidden_dim\n", + " self.proj_in = nn.Linear(in_dim, 2*hidden_dim) # factor two for GLU part split\n", + " self.proj_out = nn.Linear(hidden_dim, in_dim) \n", + " self.act = nn.SiLU()\n", + " self.drop = nn.Dropout(dropout)\n", + " \n", + " def siglu(self, x: torch.Tensor) -> torch.Tensor:\n", + " x = self.proj_in(x) \n", + " return x[..., :self.hidden_dim] * self.act(x[..., self.hidden_dim:])\n", + "\n", + " #@torch.compile\n", + " def forward(self, x: torch.Tensor) -> torch.Tensor:\n", + " x = self.siglu(x)\n", + " x = self.drop(x)\n", + " x = self.proj_out(x)\n", + " return x" + ] + }, + { + "cell_type": "markdown", + "id": "6919a4e5-d667-4bc0-93c8-e6f767586239", + "metadata": {}, + "source": [ + "## Attention blocks" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": 
"a88bad1d-1ce4-4dab-92c0-6e3af703ae5a", + "metadata": {}, + "outputs": [], + "source": [ + "#| export\n", + "class BasisSelfAttnBlock(nn.Module):\n", + " \"\"\"A self attention block, i.e. a `transformer` encoder.\"\"\"\n", + " def __init__(self, ch, num_heads, dropout=0.0, batch_first=False):\n", + " super().__init__()\n", + " self.self_att = nn.MultiheadAttention(ch, num_heads=num_heads, batch_first=batch_first) #[t, b, c]\n", + " self.ff = FeedForwardBlock(ch, 2*ch) \n", + " self.norm1 = nn.LayerNorm(ch)\n", + " self.norm2 = nn.LayerNorm(ch)\n", + " self.drop = nn.Dropout(dropout)\n", + " \n", + " def forward(self, x, attn_mask=None, key_padding_mask=None, need_weights=False):\n", + " #x ... [ t, batch, ch] \n", + " \n", + " self_out = self.norm1(x) \n", + " self_out, _ = self.self_att(self_out, key=self_out, value=self_out, attn_mask=attn_mask, key_padding_mask=key_padding_mask, need_weights=need_weights)\n", + " self_out = self.drop(self_out) + x \n", + " \n", + " feed_out = self.norm2(self_out) \n", + " feed_out = self.ff(feed_out)\n", + " feed_out = self.drop(feed_out) + self_out \n", + " \n", + " return feed_out" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "e7291e33-7ca6-48a2-8556-f5a2cf3e476c", + "metadata": {}, + "outputs": [], + "source": [ + "#| export\n", + "class BasisCrossAttnBlock(nn.Module):\n", + " \"\"\"A cross attention block, i.e. 
a `transformer` decoder.\"\"\"\n", + " def __init__(self, ch, num_heads, dropout=0.0, batch_first=False):\n", + " super().__init__()\n", + " self.self_att = nn.MultiheadAttention(ch, num_heads=num_heads, batch_first=batch_first) #[t, b, c]\n", + " self.cross_att = nn.MultiheadAttention(ch, num_heads=num_heads, batch_first=batch_first) \n", + " self.ff = FeedForwardBlock(ch, 2*ch) \n", + " self.norm1 = nn.LayerNorm(ch)\n", + " self.norm2 = nn.LayerNorm(ch)\n", + " self.norm3 = nn.LayerNorm(ch)\n", + " self.drop = nn.Dropout(dropout)\n", + " \n", + " def forward(self, x, c_emb, attn_mask=None, key_padding_mask=None, need_weights=False):\n", + " #x ... [ t, batch, ch] \n", + " #c_emb ... [seq, batch, ch]\n", + " \n", + " self_out = self.norm1(x) \n", + " self_out, _ = self.self_att(self_out, key=self_out, value=self_out, attn_mask=attn_mask, key_padding_mask=key_padding_mask, need_weights=need_weights)\n", + " self_out = self.drop(self_out) + x \n", + " \n", + " cross_out = self.norm2(self_out) \n", + " cross_out, _ = self.cross_att(cross_out, key=c_emb, value=c_emb, need_weights=need_weights)\n", + " cross_out = self.drop(cross_out) + self_out \n", + " \n", + " feed_out = self.norm3(cross_out) \n", + " feed_out = self.ff(feed_out)\n", + " feed_out = self.drop(feed_out) + cross_out \n", + " \n", + " return feed_out" + ] + }, + { + "cell_type": "markdown", + "id": "cf12278b-c905-48ce-8f6a-1ebf717b7aa7", + "metadata": {}, + "source": [ + "## Spatial residual transformers" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "42ff2a79-66b6-4e1f-9a86-527894be54ad", + "metadata": {}, + "outputs": [], + "source": [ + "#| export\n", + "class SpatialTransformerSelfAttn(nn.Module):\n", + " \"\"\"A spatial residual `transformer`, only uses self-attention.\"\"\"\n", + " def __init__(self, ch, num_heads, depth, dropout=0.0, num_groups=32):\n", + " super().__init__() \n", + " self.norm = torch.nn.GroupNorm(num_groups=num_groups, num_channels=ch, eps=1e-6, 
affine=True)\n", + " self.transformer_blocks = nn.ModuleList([BasisSelfAttnBlock(ch=ch, num_heads=num_heads, dropout=dropout) for d in range(depth)])\n", + " \n", + " def forward(self, x, attn_mask=None, key_padding_mask=None):\n", + " #x ... [batch, ch, space, time] \n", + " #c_emb ... [batch, seq, ch]\n", + " b, ch, space, time = x.shape\n", + " \n", + " x_in = x\n", + " \n", + " #-------------------------\n", + " x = self.norm(x) \n", + " \n", + " x = torch.reshape(x, (b, ch, space*time))\n", + " x = torch.permute(x, (2, 0, 1))#.contiguous() # to [t, batch, ch] \n", + " \n", + " #------------------------- \n", + " for transformer_block in self.transformer_blocks:\n", + " x = transformer_block(x, attn_mask, key_padding_mask)\n", + " \n", + " #-------------------------\n", + " \n", + " x = torch.permute(x, (1, 2, 0)) # back to [batch, ch, t] \n", + " x = torch.reshape(x, (b, ch, space, time))#.contiguous()\n", + " \n", + " return x + x_in" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "bcbc5dca-1f77-4276-85e2-b99afeee1128", + "metadata": {}, + "outputs": [], + "source": [ + "#| export\n", + "class SpatialTransformer(nn.Module):\n", + " \"\"\"A spatial residual `transformer`, uses self- and cross-attention on conditional input.\"\"\"\n", + " \n", + " def __init__(self, ch, cond_emb_size, num_heads, depth, dropout=0.0, num_groups=32):\n", + " super().__init__() \n", + " self.cat_proj = nn.Linear(cond_emb_size, ch) \n", + " self.norm = torch.nn.GroupNorm(num_groups=num_groups, num_channels=ch, eps=1e-6, affine=True)\n", + " self.transformer_blocks = nn.ModuleList([BasisCrossAttnBlock(ch=ch, num_heads=num_heads, dropout=dropout) for d in range(depth)])\n", + " \n", + " def forward(self, x, c_emb, attn_mask=None, key_padding_mask=None):\n", + " #x ... [batch, ch, space, time] \n", + " #c_emb ... 
[batch, seq, ch]\n", + " b, ch, space, time = x.shape\n", + " \n", + " x_in = x\n", + " \n", + " #-------------------------\n", + " x = self.norm(x) \n", + " \n", + " x = torch.reshape(x, (b, ch, space*time))\n", + " x = torch.permute(x, (2, 0, 1))#.contiguous() # to [t, batch, ch] \n", + " \n", + " c_emb = self.cat_proj(c_emb) \n", + " c_emb = torch.permute(c_emb, (1, 0, 2))#.contiguous() # to [seq, batch, ch]\n", + " \n", + " #------------------------- \n", + " for transformer_block in self.transformer_blocks:\n", + " x = transformer_block(x, c_emb, attn_mask, key_padding_mask)\n", + " \n", + " #-------------------------\n", + " \n", + " x = torch.permute(x, (1, 2, 0)) # back to [batch, ch, t] \n", + " x = torch.reshape(x, (b, ch, space, time))#.contiguous()\n", + " \n", + " return x + x_in" + ] + }, + { + "cell_type": "markdown", + "id": "f41f26a8-ac40-4e91-8c0e-1ef07a0fd4f4", + "metadata": {}, + "source": [ + "# Export -" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "a0474216-8e0c-4ba7-9a37-571ac7d8e82c", + "metadata": {}, + "outputs": [], + "source": [ + "#| hide\n", + "import nbdev; nbdev.nbdev_export()" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "python3", + "language": "python", + "name": "python3" + }, + "widgets": { + "application/vnd.jupyter.widget-state+json": { + "state": {}, + "version_major": 2, + "version_minor": 0 + } + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/src/models/transformers/cirdit_multimodal.ipynb b/src/models/transformers/cirdit_multimodal.ipynb new file mode 100644 index 0000000..01b7eeb --- /dev/null +++ b/src/models/transformers/cirdit_multimodal.ipynb @@ -0,0 +1,978 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "id": "44135ab8-3722-438e-b0d7-2889011e1581", + "metadata": {}, + "source": [ + "# CirDiT - Circuit Diffusion Transformer\n", + "\n", + "> The multimodal circuit generation model: *Circuit Diffusion Transformer* (CirDiT)." 
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "cf9ba622-3ba1-451d-b465-20d4480c81c1", + "metadata": {}, + "outputs": [], + "source": [ + "#| default_exp models.transformers.cirdit_multimodal" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "27070f09-14a3-4603-8ea4-0b3f3284eae1", + "metadata": {}, + "outputs": [], + "source": [ + "#| export\n", + "from genQC.imports import *\n", + "from genQC.models.config_model import *\n", + "from genQC.models.position_encoding import RotaryPositionalEmbedding, LearnedPositionalEmbedding\n", + "from genQC.models.layers import PositionalEncoding" + ] + }, + { + "cell_type": "markdown", + "id": "f48c8865-3103-4e42-a5f4-8a5e18b3e80a", + "metadata": {}, + "source": [ + "## RotaryMultiheadAttention" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "7cf35439-5f9a-48bc-ae72-ffd835c9f053", + "metadata": {}, + "outputs": [], + "source": [ + "#| export\n", + "class RotaryMultiheadAttention(nn.Module):\n", + " \"\"\"\n", + " MultiheadAttention described in the paper: Attention Is All You Need (https://arxiv.org/abs/1706.03762).\n", + " We add a rotary position encoding (RoPE). \n", + "\n", + " The attention core is `F.scaled_dot_attention` from pytorch. 
\n", + " Could be switched to `https://github.com/Dao-AILab/flash-attention` or `xFormers`.\n", + " \"\"\"\n", + "\n", + " def __init__(self,\n", + " in_dim: int,\n", + " embed_dim: int, \n", + " num_heads: int, \n", + " bias: bool = True, \n", + " p_rope: float = 1.0, \n", + " max_seq_len: int = 4096, \n", + " base_rope: float = 10_000,\n", + " enable_qk_norm: bool = False) -> None:\n", + " \n", + " super().__init__()\n", + "\n", + " self.num_heads = num_heads\n", + " self.bias = bias\n", + " self.head_dim = embed_dim // num_heads \n", + "\n", + " self.q_proj = nn.Linear(in_dim, embed_dim, bias=bias)\n", + " self.k_proj = nn.Linear(in_dim, embed_dim, bias=bias)\n", + " self.v_proj = nn.Linear(in_dim, embed_dim, bias=bias)\n", + " \n", + " self.out_proj = nn.Linear(embed_dim, embed_dim, bias=bias)\n", + "\n", + " self.enable_qk_norm = enable_qk_norm\n", + " if self.enable_qk_norm:\n", + " self.q_norm = nn.RMSNorm(self.head_dim)\n", + " self.k_norm = nn.RMSNorm(self.head_dim)\n", + " \n", + " self.rope = RotaryPositionalEmbedding(head_dim=self.head_dim, p=p_rope, max_seq_len=max_seq_len, base=base_rope)\n", + " \n", + " self._init_weights()\n", + " \n", + " def _init_weights(self) -> None:\n", + " nn.init.xavier_normal_(self.q_proj.weight)\n", + " nn.init.xavier_normal_(self.k_proj.weight)\n", + " nn.init.xavier_normal_(self.v_proj.weight)\n", + " nn.init.xavier_normal_(self.out_proj.weight)\n", + "\n", + " if self.bias:\n", + " nn.init.zeros_(self.q_proj.bias)\n", + " nn.init.zeros_(self.k_proj.bias)\n", + " nn.init.zeros_(self.v_proj.bias)\n", + " nn.init.zeros_(self.out_proj.bias)\n", + "\n", + " \n", + " def forward(self, query: torch.Tensor, key: torch.Tensor, value: torch.Tensor, pos_idx: Optional[torch.Tensor] = None) -> torch.Tensor:\n", + " \"\"\"\n", + " Assumes batch first. When `pos_idx` is provided we use RoPE, else NOT!\n", + "\n", + " Shapes:\n", + " query ... [b, n1, c]\n", + " key/value ... 
[b, n2, c]\n", + " \"\"\"\n", + "\n", + " assert key.shape == value.shape\n", + " \n", + " b, n1, _ = query.shape\n", + " _, n2, _ = key.shape\n", + "\n", + " q = self.q_proj(query)\n", + " k = self.k_proj(key)\n", + " v = self.v_proj(value)\n", + "\n", + " q = q.view(b, n1, self.num_heads, self.head_dim)\n", + " k = k.view(b, n2, self.num_heads, self.head_dim)\n", + " v = v.view(b, n2, self.num_heads, self.head_dim)\n", + "\n", + " if self.enable_qk_norm:\n", + " q = self.q_norm(q)\n", + " k = self.k_norm(k)\n", + " \n", + " if exists(pos_idx):\n", + " q = self.rope(q, pos_idx=pos_idx)\n", + " k = self.rope(k, pos_idx=pos_idx)\n", + "\n", + " # scaled_dot_product_attention takes [b, num_heads, seq, head_dim]\n", + " q = q.permute((0, 2, 1, 3)) \n", + " k = k.permute((0, 2, 1, 3)) \n", + " v = v.permute((0, 2, 1, 3)) \n", + " \n", + " # see https://pytorch.org/docs/stable/generated/torch.nn.functional.scaled_dot_product_attention.html\n", + " attn = F.scaled_dot_product_attention(query=q, \n", + " key=k, \n", + " value=v, \n", + " attn_mask=None, \n", + " dropout_p=0.0,\n", + " is_causal=False, \n", + " scale=None, \n", + " #enable_gqa=False\n", + " )\n", + "\n", + " # back to [b, seq, num_heads, head_dim]\n", + " attn = attn.permute((0, 2, 1, 3)) \n", + "\n", + " # pack heads together\n", + " attn = attn.reshape(b, n1, self.num_heads * self.head_dim)\n", + " attn = self.out_proj(attn)\n", + " return attn" + ] + }, + { + "cell_type": "markdown", + "id": "4d686473-f80e-4c69-a9eb-f91670418811", + "metadata": {}, + "source": [ + "## Transformer blocks" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "1cbc7b56-dfc7-4922-b7e4-f3ee94ccd311", + "metadata": {}, + "outputs": [], + "source": [ + "#| export\n", + "class FeedForwardBlock(nn.Module):\n", + " \"\"\"\n", + " A small dense feed-forward network as used in `transformers`. 
Assumes channel last.\n", + " Inspired by https://arxiv.org/pdf/2401.11605 and added \n", + " from https://arxiv.org/pdf/2002.05202 a modification to SiGLU structure.\n", + " \"\"\"\n", + "\n", + " def __init__(self, \n", + " in_dim: int, \n", + " hidden_dim: int, \n", + " out_dim: Optional[int] = None, \n", + " dropout: float = 0.0) -> None:\n", + " super().__init__()\n", + " out_dim = default(out_dim, in_dim)\n", + " \n", + " self.hidden_dim = hidden_dim\n", + " self.proj_in = nn.Linear(in_dim, 2*hidden_dim) # factor two for GLU part split\n", + " self.proj_out = nn.Linear(hidden_dim, out_dim) \n", + " self.act = nn.SiLU()\n", + " self.drop = nn.Dropout(dropout)\n", + "\n", + " self._init_weights()\n", + " \n", + " def _init_weights(self) -> None:\n", + " nn.init.zeros_(self.proj_out.bias)\n", + " # nn.init.xavier_normal_(self.proj_out.weight)\n", + " \n", + " def siglu(self, x: torch.Tensor) -> torch.Tensor:\n", + " x = self.proj_in(x) \n", + " return x[..., :self.hidden_dim] * self.act(x[..., self.hidden_dim:])\n", + "\n", + " #@torch.compile\n", + " def forward(self, x: torch.Tensor) -> torch.Tensor:\n", + " x = self.siglu(x)\n", + " x = self.drop(x)\n", + " x = self.proj_out(x)\n", + " return x" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "3b028c09-74ad-4db8-bf7f-c1f1a9b259d5", + "metadata": {}, + "outputs": [], + "source": [ + "#| export\n", + "class SelfAttnBlock(nn.Module):\n", + " \"\"\"A self-attention block which includes the time condition `t_emb`, see https://arxiv.org/pdf/2312.02139.\"\"\"\n", + " \n", + " def __init__(self, ch: int, t_emb_size: int, num_heads: int, dropout: float = 0.0, p_rope: float = 1.0, base_rope: float = 10_000) -> None:\n", + " super().__init__()\n", + "\n", + " self.self_att = RotaryMultiheadAttention(in_dim=ch+t_emb_size, embed_dim=ch, num_heads=num_heads, p_rope=p_rope, base_rope=base_rope)\n", + " \n", + " self.ff = FeedForwardBlock(in_dim=ch, hidden_dim=2*ch) \n", + " self.norm_self = 
nn.RMSNorm(ch)\n", + " self.norm_ff = nn.RMSNorm(ch)\n", + " self.drop = nn.Dropout(dropout)\n", + "\n", + " self._init_weights()\n", + " \n", + " def _init_weights(self) -> None:\n", + "\n", + " # note a bonus of res-pos-norm is that we can init as identity!\n", + " nn.init.zeros_(self.norm_self.weight) \n", + " nn.init.zeros_(self.norm_ff.weight)\n", + " \n", + " def forward(self, x: torch.Tensor, t_emb: torch.Tensor, pos_idx: torch.Tensor) -> torch.Tensor:\n", + " \"\"\"\n", + " Assumes batch first.\n", + " \n", + " Shapes:\n", + " x ... [b, n, ch] \n", + " t_emb ... [b, 1, t_emb_size]\n", + " pos_idx ... [b, n] or [n]\n", + " \"\"\"\n", + " \n", + " t_emb_self = t_emb.expand(x.shape[0], x.shape[1], -1)\n", + "\n", + " # Self-attention part\n", + " self_out = torch.cat([x, t_emb_self], dim=-1) # concat time tokens\n", + " self_out = self.self_att(query=self_out, key=self_out, value=self_out, pos_idx=pos_idx)\n", + " self_out = self.norm_self(self_out) \n", + " self_out = self.drop(self_out) + x \n", + "\n", + " # Feed-Forward part\n", + " feed_out = self.ff(self_out)\n", + " feed_out = self.norm_ff(feed_out) \n", + " feed_out = self.drop(feed_out) + self_out \n", + " return feed_out" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "7d503e52-2c26-4efc-b501-1fa01f84a1f0", + "metadata": {}, + "outputs": [], + "source": [ + "#| export\n", + "class AdaptiveSelfAttnBlock(nn.Module):\n", + " \"\"\"A self-attention block which includes the time condition `t_emb`, see https://arxiv.org/pdf/2312.02139.\"\"\"\n", + " \n", + " def __init__(self, \n", + " ch: int, \n", + " mod_ch: int,\n", + " t_emb_size: int, \n", + " num_heads: int, \n", + " dropout: float = 0.0,\n", + " p_rope: float = 1.0, \n", + " base_rope: float = 10_000) -> None:\n", + " super().__init__()\n", + "\n", + " self.self_att = RotaryMultiheadAttention(in_dim=ch+t_emb_size, embed_dim=ch, num_heads=num_heads, p_rope=p_rope, base_rope=base_rope)\n", + " \n", + " self.ff = 
FeedForwardBlock(in_dim=ch, hidden_dim=2*ch) \n", + " self.norm_self = nn.RMSNorm(ch)\n", + " self.norm_ff = nn.RMSNorm(ch)\n", + " self.drop = nn.Dropout(dropout)\n", + " \n", + " self.adaRMS_modulation = nn.Linear(mod_ch, 6*ch)\n", + "\n", + " self._init_weights()\n", + " \n", + " def _init_weights(self) -> None:\n", + " nn.init.zeros_(self.adaRMS_modulation.bias) \n", + " \n", + " def forward(self, x: torch.Tensor, mod: torch.Tensor, t_emb: torch.Tensor, pos_idx: torch.Tensor) -> torch.Tensor:\n", + " \"\"\"\n", + " Assumes batch first.\n", + " \n", + " Shapes:\n", + " x ... [b, n, ch] \n", + " mod ... [b, n, mod_ch] \n", + " t_emb ... [b, 1, t_emb_size]\n", + " pos_idx ... [b, n] or [n]\n", + " \"\"\"\n", + "\n", + " scale_att, shift_att, gate_attn, scale_mlp, shift_mlp, gate_mlp = self.adaRMS_modulation(mod).chunk(6, dim=-1)\n", + " \n", + " t_emb_self = t_emb.expand(x.shape[0], x.shape[1], -1)\n", + "\n", + " # Self-attention part\n", + " self_out = x * (1.0 + scale_att) + shift_att\n", + " self_out = torch.cat([self_out, t_emb_self], dim=-1) # concat time tokens\n", + " self_out = self.self_att(query=self_out, key=self_out, value=self_out, pos_idx=pos_idx)\n", + " self_out = self.norm_self(self_out) * gate_attn.tanh()\n", + " self_out = self.drop(self_out) + x \n", + "\n", + " # Feed-Forward part\n", + " feed_out = self_out * (1.0 + scale_mlp) + shift_mlp\n", + " feed_out = self.ff(feed_out)\n", + " feed_out = self.norm_ff(feed_out) * gate_mlp.tanh()\n", + " feed_out = self.drop(feed_out) + self_out \n", + " return feed_out" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "6fea48d2-7167-4cfb-8c20-6cc39c4c10f5", + "metadata": {}, + "outputs": [], + "source": [ + "#| export\n", + "class CrossAttnBlock(nn.Module):\n", + " \"\"\"A cross-attention block which includes the time condition `t_emb`, see https://arxiv.org/pdf/2312.02139\"\"\"\n", + " \n", + " def __init__(self, ch: int, t_emb_size: int, num_heads: int, dropout: float = 0.0, 
p_rope: float = 1.0, base_rope: float = 10_000) -> None:\n", + " super().__init__()\n", + " \n", + " self.self_att = RotaryMultiheadAttention(in_dim=ch+t_emb_size, embed_dim=ch, num_heads=num_heads, p_rope=p_rope, base_rope=base_rope)\n", + " self.multi_att = RotaryMultiheadAttention(in_dim=ch+t_emb_size, embed_dim=ch, num_heads=num_heads, p_rope=p_rope, base_rope=base_rope)\n", + "\n", + " self.ff = FeedForwardBlock(in_dim=ch, hidden_dim=2*ch) \n", + " self.norm_self = nn.RMSNorm(ch)\n", + " self.norm_multi = nn.RMSNorm(ch)\n", + " self.norm_ff = nn.RMSNorm(ch)\n", + " self.drop = nn.Dropout(dropout)\n", + "\n", + " self._init_weights()\n", + " \n", + " def _init_weights(self) -> None:\n", + " nn.init.zeros_(self.norm_self.weight) \n", + " nn.init.zeros_(self.norm_multi.weight) \n", + " nn.init.zeros_(self.norm_ff.weight)\n", + " \n", + " def forward(self, x: torch.Tensor, c_emb: torch.Tensor, t_emb: torch.Tensor, pos_idx: torch.Tensor) -> torch.Tensor:\n", + " \"\"\"\n", + " Assumes batch first.\n", + " \n", + " Shapes:\n", + " x ... [b, n1, ch] \n", + " c_emb ... [b, n2, ch]\n", + " t_emb ... [b, 1, t_emb_size]\n", + " pos_idx ... [b, n1] or [n1]\n", + " \"\"\"\n", + " \n", + " t_emb_self = t_emb.expand( x.shape[0], x.shape[1] , -1)\n", + " t_emb_multi = t_emb.expand(c_emb.shape[0], x.shape[1] + c_emb.shape[1], -1)\n", + " \n", + " # Self-attention part\n", + " self_out = torch.cat([x, t_emb_self], dim=-1) # concat time tokens\n", + " self_out = self.self_att(query=self_out, key=self_out, value=self_out, pos_idx=pos_idx)\n", + " self_out = self.norm_self(self_out)\n", + " self_out = self.drop(self_out) + x \n", + "\n", + " # Multimodial-attention part\n", + " multi_out = torch.cat([self_out, c_emb], dim=1) # concat latents with condition ... 
[b, n1+n2, ch]\n", + "\n", + " multi_out = torch.cat([multi_out, t_emb_multi], dim=-1) # concat time tokens \n", + " multi_out = self.multi_att(query=multi_out, key=multi_out, value=multi_out, pos_idx=None)\n", + "\n", + " multi_out, multi_out_gate = multi_out[:, :x.shape[1]], multi_out[:, x.shape[1]:]\n", + " multi_out_gate = multi_out_gate.mean(dim=1, keepdim=True) # ... [b, 1, ch]\n", + " \n", + " multi_out = self.norm_multi(multi_out) * multi_out_gate.tanh()\n", + " multi_out = self.drop(multi_out) + self_out \n", + "\n", + " # Feed-Forward part\n", + " feed_out = self.ff(multi_out)\n", + " feed_out = self.norm_ff(feed_out) \n", + " feed_out = self.drop(feed_out) + multi_out \n", + " return feed_out" + ] + }, + { + "cell_type": "markdown", + "id": "55783ef0-2043-442f-872d-e466acc61d69", + "metadata": {}, + "source": [ + "## Main transformer" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "18b30977-f8d2-4f5d-91f7-f96d4e4edf11", + "metadata": {}, + "outputs": [], + "source": [ + "#| export\n", + "class CoreTransformer(nn.Module):\n", + " \"\"\"\n", + " The main transformer of the CirDiT model, intakes time (attn-concat) and condition encodings (cross-attn). 
\n", + " Applies a RoPE for time dimension.\n", + " \"\"\"\n", + "\n", + " def __init__(self,\n", + " ch: int, \n", + " c_emb_size: int,\n", + " t_emb_size: int,\n", + " depth: int,\n", + " num_heads: int, \n", + " dropout: float = 0.0, \n", + " p_rope: float = 1.0, \n", + " base_rope: float = 10_000) -> None:\n", + " super().__init__()\n", + "\n", + " self.norm = nn.RMSNorm(ch)\n", + " \n", + " self.c_proj = nn.Linear(c_emb_size, ch) \n", + " self.blocks = nn.ModuleList([\n", + " CrossAttnBlock(ch=ch, \n", + " t_emb_size=t_emb_size, \n", + " num_heads=num_heads, \n", + " dropout=dropout, \n", + " p_rope=p_rope, \n", + " base_rope=base_rope)\n", + " for d in range(depth)\n", + " ])\n", + "\n", + " def forward(self, x: torch.Tensor, c_emb: torch.Tensor, t_emb: torch.Tensor) -> torch.Tensor:\n", + " \"\"\"\n", + " Shapes:\n", + " x ... [b, t, ch]\n", + " c_emb ... [b, seq, c_emb_size]\n", + " t_emb ... [b, 1, t_emb_size]\n", + " \"\"\"\n", + " \n", + " c_emb = self.c_proj(c_emb)\n", + " pos_idx = torch.arange(x.shape[1], device=x.device, dtype=torch.int32) \n", + "\n", + " x = self.norm(x)\n", + " \n", + " for block in self.blocks:\n", + " x = block(x=x, c_emb=c_emb, t_emb=t_emb, pos_idx=pos_idx)\n", + "\n", + " return x" + ] + }, + { + "cell_type": "markdown", + "id": "011fff02-e423-4b32-b7fe-efedc6078cdf", + "metadata": {}, + "source": [ + "## Packing blocks" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "96923c30-ede7-4d24-a4a8-d80423bece98", + "metadata": {}, + "outputs": [], + "source": [ + "#| export\n", + "class PackingTransformer(ConfigModel):\n", + " \"\"\"\n", + " The first stage packing/unpacking transformers of the CirDiT model, intakes time (attn-concat). 
\n", + " Applies a RoPE for time dimension only, not on spatial dimension.\n", + " \"\"\"\n", + " \n", + " def __init__(self,\n", + " ch: int, \n", + " t_emb_size: int,\n", + " depth: int,\n", + " num_heads: int, \n", + " dropout: float = 0.0, \n", + " p_rope: float = 1.0, \n", + " base_rope: float = 10_000) -> None:\n", + " super().__init__()\n", + "\n", + " self.norm = nn.RMSNorm(ch)\n", + " self.blocks = nn.ModuleList([\n", + " SelfAttnBlock(ch=ch, \n", + " t_emb_size=t_emb_size, \n", + " num_heads=num_heads, \n", + " dropout=dropout, \n", + " p_rope=p_rope, \n", + " base_rope=base_rope)\n", + " for d in range(depth)\n", + " ])\n", + " \n", + " def forward(self, x: torch.Tensor, t_emb: torch.Tensor, return_penultimate: bool = False) -> torch.Tensor:\n", + " \"\"\"\n", + " Shapes:\n", + " x ... [b, s, t, ch]\n", + " t_emb ... [b, 1, t_emb_size]\n", + " \"\"\"\n", + "\n", + " b, s, t, ch = x.shape\n", + "\n", + " # create pos_idx such that they only depend on the time position\n", + " pos_idx = torch.arange(t, device=x.device, dtype=torch.int32).expand(b, s, -1)\n", + " pos_idx = pos_idx.reshape(b, -1)\n", + "\n", + " # flatten spatial and time into seq\n", + " x = x.reshape(b, s*t, ch)\n", + " x = self.norm(x)\n", + "\n", + " if return_penultimate:\n", + " for block in self.blocks[:-1]:\n", + " x = block(x=x, t_emb=t_emb, pos_idx=pos_idx)\n", + "\n", + " penultimate = x\n", + " x = self.blocks[-1](x=x, t_emb=t_emb, pos_idx=pos_idx) \n", + " \n", + " else:\n", + " for block in self.blocks:\n", + " x = block(x=x, t_emb=t_emb, pos_idx=pos_idx)\n", + "\n", + " # undo flatten\n", + " x = x.reshape(b, s, t, ch)\n", + " \n", + " if return_penultimate:\n", + " penultimate = penultimate.reshape(b, s, t, ch)\n", + " return x, penultimate\n", + " \n", + " return x" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "4a16f88c-36fd-4888-a631-4e61f91af3b1", + "metadata": {}, + "outputs": [], + "source": [ + "#| export\n", + "class 
UnpackingTransformer(ConfigModel):\n", + " \"\"\"\n", + " The first stage packing/unpacking transformers of the CirDiT model, intakes time (attn-concat). \n", + " Applies a RoPE for time dimension only, not on spatial dimension.\n", + " \"\"\"\n", + " \n", + " def __init__(self,\n", + " ch: int, \n", + " mod_ch: int,\n", + " t_emb_size: int,\n", + " depth: int,\n", + " num_heads: int, \n", + " dropout: float = 0.0, \n", + " p_rope: float = 1.0, \n", + " base_rope: float = 10_000) -> None:\n", + " super().__init__()\n", + "\n", + " self.norm = nn.RMSNorm(ch)\n", + " self.blocks = nn.ModuleList([\n", + " AdaptiveSelfAttnBlock(ch=ch, \n", + " mod_ch=mod_ch,\n", + " t_emb_size=t_emb_size, \n", + " num_heads=num_heads, \n", + " dropout=dropout, \n", + " p_rope=p_rope, \n", + " base_rope=base_rope)\n", + " for d in range(depth)\n", + " ])\n", + " \n", + " def forward(self, x: torch.Tensor, mod: torch.Tensor, t_emb: torch.Tensor) -> torch.Tensor:\n", + " \"\"\"\n", + " Shapes:\n", + " x ... [b, s, t, ch]\n", + " t_emb ... 
[b, 1, t_emb_size]\n", + " \"\"\"\n", + "\n", + " b, s, t, ch = x.shape\n", + " *_, mod_ch = mod.shape\n", + " \n", + " # create pos_idx such that they only depend on the time position\n", + " pos_idx = torch.arange(t, device=x.device, dtype=torch.int32).expand(b, s, -1)\n", + " pos_idx = pos_idx.reshape(b, -1)\n", + "\n", + " # flatten spatial and time into seq\n", + " x = x.reshape(b, s*t, ch)\n", + " mod = mod.reshape(b, s*t, mod_ch).contiguous()\n", + "\n", + " x = self.norm(x)\n", + "\n", + " for block in self.blocks:\n", + " x = block(x=x, mod=mod, t_emb=t_emb, pos_idx=pos_idx)\n", + "\n", + " # undo flatten\n", + " x = x.reshape(b, s, t, ch)\n", + " return x" + ] + }, + { + "cell_type": "markdown", + "id": "f79129a0-6f1c-49bd-be82-f471dcf2cbc3", + "metadata": {}, + "source": [ + "## Time embedding" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "00e9e5f9-45f5-475d-8daf-537c851b9cb6", + "metadata": {}, + "outputs": [], + "source": [ + "#| export\n", + "class TimeEmbedding(PositionalEncoding):\n", + " \"\"\"A time embedding layer.\"\"\"\n", + " \n", + " def __init__(self, d_model: int, \n", + " dropout: float = 0.0, \n", + " max_len: int = 5000, \n", + " freq_factor: float = 10_000.0) -> None:\n", + " super().__init__(d_model=d_model, dropout=dropout, max_len=max_len, freq_factor=freq_factor) \n", + " \n", + " self.ff = FeedForwardBlock(in_dim=d_model, hidden_dim=2*d_model) \n", + " \n", + " def forward(self, t: torch.Tensor) -> torch.Tensor: \n", + " x = self.pe[t] \n", + " x = self.ff(x) \n", + " return self.dropout(x)" + ] + }, + { + "cell_type": "markdown", + "id": "6d9433e8-c980-420f-9a22-3ce93c44d7dd", + "metadata": {}, + "source": [ + "## CirDiT architecture " + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "f0a55971-2132-4b61-b673-e39bed65f679", + "metadata": {}, + "outputs": [], + "source": [ + "#| export\n", + "@dataclass\n", + "class CirDiTConfig: \n", + " clr_dim: int\n", + " ch_packing: int\n", + " 
ch_core: int\n", + " c_emb_size: int\n", + " t_emb_size: int \n", + " depth_packing: int\n", + " depth_core: int \n", + " num_heads_packing: int\n", + " num_heads_core: int \n", + " dropout: float \n", + " p_rope: float \n", + " base_rope: float" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "c97230bd-8f12-423f-a4ca-baab7c9ca7c9", + "metadata": {}, + "outputs": [], + "source": [ + "#| export\n", + "class CirDiT(ConfigModel):\n", + " \"\"\"\n", + " The proposed Circuit Diffusion Transformer (CirDiT).\n", + " \"\"\"\n", + "\n", + " def __init__(self,\n", + " clr_dim: int,\n", + " ch_packing: int, \n", + " ch_core: int,\n", + " c_emb_size: int,\n", + " t_emb_size: int, \n", + " depth_packing: int,\n", + " depth_core: int, \n", + " num_heads_packing: int,\n", + " num_heads_core: int, \n", + " dropout: float = 0.0, \n", + " p_rope: float = 1.0, \n", + " base_rope: float = 10_000) -> None:\n", + " super().__init__()\n", + " \n", + " self.ch_packing = ch_packing\n", + " self.ch_core = ch_core\n", + " \n", + " self.params_config = CirDiTConfig(clr_dim=clr_dim,\n", + " ch_packing=ch_packing, \n", + " ch_core=ch_core,\n", + " c_emb_size=c_emb_size,\n", + " t_emb_size=t_emb_size, \n", + " depth_packing=depth_packing,\n", + " depth_core=depth_core, \n", + " num_heads_packing=num_heads_packing,\n", + " num_heads_core=num_heads_core, \n", + " dropout=dropout, \n", + " p_rope=p_rope, \n", + " base_rope=base_rope)\n", + " \n", + " self.packing = PackingTransformer(ch=ch_packing, \n", + " t_emb_size=t_emb_size, \n", + " depth=depth_packing, \n", + " num_heads=num_heads_packing, \n", + " dropout=dropout, \n", + " p_rope=p_rope, \n", + " base_rope=base_rope)\n", + " \n", + " self.unpacking = UnpackingTransformer(ch=ch_packing, \n", + " mod_ch=ch_core,\n", + " t_emb_size=t_emb_size, \n", + " depth=depth_packing, \n", + " num_heads=num_heads_packing, \n", + " dropout=dropout, \n", + " p_rope=p_rope, \n", + " base_rope=base_rope)\n", + "\n", + " self.core = 
CoreTransformer(ch=ch_core, \n", + " c_emb_size=c_emb_size, \n", + " t_emb_size=t_emb_size, \n", + " depth=depth_core, \n", + " num_heads=num_heads_core, \n", + " dropout=dropout, \n", + " p_rope=p_rope, \n", + " base_rope=base_rope)\n", + "\n", + " self.proj_in = nn.Linear(clr_dim, ch_packing)\n", + " self.proj_out = nn.Linear(ch_packing, clr_dim)\n", + " self.core_proj = nn.Linear(ch_packing, ch_core)\n", + "\n", + " self.t_emb = TimeEmbedding(d_model=t_emb_size, max_len=5000) #here max number of timetseps\n", + " self.qubit_pos_enc = LearnedPositionalEmbedding(dim=ch_packing, max_seq_len=64) #here max number of qubits\n", + "\n", + " self._init_weights()\n", + " \n", + " def _init_weights(self) -> None:\n", + " nn.init.orthogonal_(self.core_proj.weight)\n", + " nn.init.zeros_(self.core_proj.bias)\n", + " nn.init.zeros_(self.proj_out.bias)\n", + " \n", + " def main_pass(self, x: torch.Tensor, t_emb: torch.Tensor, c_emb: torch.Tensor) -> torch.Tensor:\n", + " b, s, t, _ = x.shape\n", + "\n", + " x = self.proj_in(x)\n", + " x = self.qubit_pos_enc(x)\n", + "\n", + " # Pack spatial into tokens\n", + " x_main, x = self.packing(x=x, t_emb=t_emb, return_penultimate=True)\n", + "\n", + " # Downsample, reduce spatial, ... [b, t, ch_core]\n", + " x_main = x_main.mean(dim=1) \n", + " x_main = self.core_proj(x_main) \n", + "\n", + " # Core transformer\n", + " x_main = self.core(x=x_main, c_emb=c_emb, t_emb=t_emb) - x_main # subtraction such that if core=ident at init we cancel the signal\n", + " x_main = x_main.unsqueeze(1).expand(b, s, t, self.ch_core) \n", + "\n", + " # Unpack tokens into spatial\n", + " x = self.unpacking(x=x, mod=x_main, t_emb=t_emb)\n", + " x = self.proj_out(x) \n", + "\n", + " return x\n", + " \n", + " def forward(self, x: torch.Tensor, t: torch.Tensor, c_emb: torch.Tensor, micro_cond: Optional[torch.Tensor] = None) -> torch.Tensor:\n", + " \"\"\"\n", + " Assumes a `channel_last` embedding of circuits.\n", + " \n", + " Shapes:\n", + " x ... 
[b, s, t, ch] \n", + " t ... [b]\n", + " c_emb ... [b, seq, c_emb_size]\n", + " micro_cond ... [b]\n", + " \"\"\"\n", + "\n", + " t_emb = self.t_emb(t) #.detach()\n", + " t_emb = t_emb.unsqueeze(1) # to [b, 1, ch]\n", + " \n", + " x = self.main_pass(x, t_emb, c_emb)\n", + " return x" + ] + }, + { + "cell_type": "markdown", + "id": "fe13c33a-d57f-4695-9c81-5ea3356ec1bc", + "metadata": {}, + "source": [ + "## UnitaryCLIPPartialNoiseCompilationCirDiT" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "ba7c3c61-2044-4fb2-a00a-eafeaf647664", + "metadata": {}, + "outputs": [], + "source": [ + "#| export\n", + "@dataclass\n", + "class UnitaryCLIPPartialNoiseCompilationCirDiTConfig(CirDiTConfig): \n", + " unitary_encoder_config: dict" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "a3bda09d-ca1a-4406-b630-08b346e7a27a", + "metadata": {}, + "outputs": [], + "source": [ + "#| export\n", + "class UnitaryCLIPPartialNoiseCompilationCirDiT(CirDiT):\n", + " \"\"\"Extends `CirDiT` to the multimodal unitary compilation model.\"\"\"\n", + " \n", + " def __init__(self, \n", + " clr_dim: int,\n", + " ch_packing: int, \n", + " ch_core: int,\n", + " c_emb_size: int,\n", + " t_emb_size: int, \n", + " depth_packing: int,\n", + " depth_core: int, \n", + " num_heads_packing: int,\n", + " num_heads_core: int, \n", + " dropout: float = 0.0, \n", + " p_rope: float = 1.0, \n", + " base_rope: float = 10_000,\n", + " unitary_encoder_config: Optional[dict] = None, \n", + " unitary_encoder: Optional[nn.Module] = None) -> None:\n", + " \n", + " super().__init__(clr_dim=clr_dim, \n", + " ch_packing=ch_packing,\n", + " ch_core=ch_core,\n", + " c_emb_size=c_emb_size,\n", + " t_emb_size=t_emb_size,\n", + " depth_packing=depth_packing,\n", + " depth_core=depth_core,\n", + " num_heads_packing=num_heads_packing,\n", + " num_heads_core=num_heads_core,\n", + " dropout=dropout,\n", + " p_rope=p_rope,\n", + " base_rope=base_rope)\n", + "\n", + " if 
exists(unitary_encoder_config): #load a trained encoder\n", + " self.unitary_encoder = ConfigModel.from_config(unitary_encoder_config, device=None, silent=True)\n", + " \n", + " elif exists(unitary_encoder):\n", + " self.unitary_encoder = unitary_encoder\n", + " unitary_encoder_config = self.unitary_encoder.get_config()\n", + " \n", + " unitary_encoder_config = {\"target\": unitary_encoder_config[\"target\"],\n", + " \"params\": unitary_encoder_config[\"params\"]}\n", + " \n", + " else: \n", + " raise RuntimeError(\"Provide either `unitary_encoder_config` to load a pretrained encoder or a `unitary_encoder` model directly!`\")\n", + "\n", + " self.params_config = UnitaryCLIPPartialNoiseCompilationCirDiTConfig(\n", + " clr_dim=clr_dim, \n", + " ch_packing=ch_packing,\n", + " ch_core=ch_core,\n", + " c_emb_size=c_emb_size,\n", + " t_emb_size=t_emb_size,\n", + " depth_packing=depth_packing,\n", + " depth_core=depth_core,\n", + " num_heads_packing=num_heads_packing,\n", + " num_heads_core=num_heads_core,\n", + " dropout=dropout,\n", + " p_rope=p_rope,\n", + " base_rope=base_rope,\n", + " unitary_encoder_config=unitary_encoder_config\n", + " )\n", + "\n", + " #--------\n", + "\n", + " self.empty_cond = nn.Parameter(torch.randn((1, 1, c_emb_size)))\n", + "\n", + " self.t_emb = TimeEmbedding(d_model=t_emb_size, max_len=5000) #here max number of timetseps\n", + " self.t_emb2 = TimeEmbedding(d_model=t_emb_size, max_len=5000) #here max number of timetseps\n", + " \n", + " def forward(self, \n", + " x: torch.Tensor, \n", + " t_h: torch.Tensor, \n", + " t_w: torch.Tensor, \n", + " c_emb: torch.Tensor, \n", + " U: torch.Tensor, \n", + " rnd: Optional[torch.Tensor] = None) -> torch.Tensor:\n", + " \"\"\"\n", + " Assumes a channel_last embedding of circuits.\n", + " \n", + " Shapes:\n", + " x ... [b, s, t, ch] \n", + " t_h ... [b]\n", + " t_w ... [b]\n", + " c_emb ... [b, seq, c_emb_size]\n", + " U ... [b, 2, N, N]\n", + " rnd ... 
[b]\n", + " \"\"\"\n", + " \n", + " t_emb = self.t_emb(t_h) + self.t_emb2(t_w)\n", + " t_emb = t_emb.unsqueeze(1) # to [b, 1, ch]\n", + "\n", + " #------\n", + " \n", + " u_emb = self.unitary_encoder(y_emb=c_emb, U=U, penultimate=True).detach() # [batch, seq1+seq2, ch] \n", + "\n", + " if not_exists(rnd):\n", + " # one means we dont drop, so U is not all zero\n", + " rnd = 1-torch.isclose(U, torch.zeros_like(U)).all(dim=(1, 2, 3)).type(torch.int64) \n", + " rnd = rnd.view(-1, 1, 1)\n", + " \n", + " # Note: we ignore text drop and unitary drop, we replace all with a learned uncond token here\n", + " u_emb = u_emb * rnd + (1-rnd) * self.empty_cond.expand(u_emb.shape)\n", + "\n", + " #------\n", + " \n", + " x = self.main_pass(x, t_emb, u_emb) \n", + " return x" + ] + }, + { + "cell_type": "markdown", + "id": "743ecbca-4011-4786-9cea-a141571bb341", + "metadata": {}, + "source": [ + "# Export -" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "b510ff53-a1a8-4b18-875b-54395567d838", + "metadata": {}, + "outputs": [], + "source": [ + "#| hide\n", + "import nbdev; nbdev.nbdev_export()" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "python3", + "language": "python", + "name": "python3" + }, + "widgets": { + "application/vnd.jupyter.widget-state+json": { + "state": {}, + "version_major": 2, + "version_minor": 0 + } + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/src/models/transformers.ipynb b/src/models/transformers/transformers.ipynb similarity index 97% rename from src/models/transformers.ipynb rename to src/models/transformers/transformers.ipynb index 92b034d..3fa4d66 100644 --- a/src/models/transformers.ipynb +++ b/src/models/transformers/transformers.ipynb @@ -15,7 +15,7 @@ "metadata": {}, "outputs": [], "source": [ - "#| default_exp models.transformers" + "#| default_exp models.transformers.transformers" ] }, { @@ -234,6 +234,13 @@ "display_name": "python3", "language": "python", "name": "python3" + }, + 
"widgets": { + "application/vnd.jupyter.widget-state+json": { + "state": {}, + "version_major": 2, + "version_minor": 0 + } } }, "nbformat": 4, diff --git a/src/models/unet_qc.ipynb b/src/models/unet_qc.ipynb index 6399c9e..e4a6e4c 100644 --- a/src/models/unet_qc.ipynb +++ b/src/models/unet_qc.ipynb @@ -35,9 +35,9 @@ "source": [ "#| export\n", "from genQC.imports import *\n", - "from genQC.models.config_model import Config_Model\n", + "from genQC.models.config_model import ConfigModel\n", "import genQC.models.layers as layers\n", - "import genQC.models.transformers as transformers\n", + "import genQC.models.transformers.transformers as transformers\n", "from genQC.models.unitary_encoder import Unitary_encoder, Unitary_encoder_config" ] }, @@ -64,7 +64,7 @@ " \n", " self.resBlocks = nn.ModuleList() \n", " for i in range(num_res_blocks): \n", - " self.resBlocks.append(layers.ResBlock2D_Conditional(ch_in, ch_out, t_emb_size, kernel_size=(1, 3)))\n", + " self.resBlocks.append(layers.ResBlock2DConditional(ch_in, ch_out, t_emb_size, kernel_size=(1, 3)))\n", " ch_in = ch_out \n", " \n", " self.transformer_depth = transformer_depth\n", @@ -196,14 +196,16 @@ "outputs": [], "source": [ "#| export\n", - "class QC_Cond_UNet(Config_Model):\n", + "class QC_Cond_UNet(ConfigModel):\n", " \"\"\"Conditional U-Net model for quantum circuits. 
Implemets `embedd_clrs` and `invert_clr` functions to embed and decode color-tensors.\"\"\"\n", + "\n", + " channel_last = False\n", " \n", " def __init__(self, model_features=[32,32,64], clr_dim=8, num_clrs=8, t_emb_size=128, cond_emb_size=512, \n", " num_heads=[8,8,2], num_res_blocks=[2, 2, 4], transformer_depths=[1,2,1]):\n", " \n", " super().__init__() \n", - " \n", + "\n", " self.clr_dim = clr_dim \n", " self.num_clrs = num_clrs\n", " \n", @@ -245,7 +247,7 @@ " \n", " #--------------------------------------------\n", " \n", - " def embedd_clrs(self, x):\n", + " def embed(self, x):\n", " sign = torch.sign(x + 0.1) #trick: add 0.1 so that the sign of 0 is +1, else the 0 token would be all 0s. \n", " clr = self.emb_clr(torch.abs(x)) \n", " x = clr * sign[:, :, :, None] \n", @@ -253,7 +255,7 @@ " return x\n", " \n", " @torch.no_grad()\n", - " def invert_clr(self, x):\n", + " def invert(self, x):\n", " #collaps clr to gate ... use cos sim\n", " \n", " clrs = self.emb_clr.weight.detach() # is [clr_num, clr_dim]\n", @@ -286,7 +288,7 @@ " \n", " #--------------------------------------------\n", " \n", - " def forward(self, x, t, c_emb, attn_mask=None, key_padding_mask=None):\n", + " def forward(self, x, t, c_emb, attn_mask=None, key_padding_mask=None, **kwargs):\n", " if attn_mask is None: attn_mask = [None] * len(self.enc_chs)\n", " if key_padding_mask is None: key_padding_mask = [None] * len(self.enc_chs)\n", " \n", @@ -344,10 +346,10 @@ " self.unitary_encoder = Unitary_encoder(**unitary_encoder_config)\n", " self.params_config = QC_Compilation_UNet_config(model_features, self.clr_dim, self.num_clrs, self.t_emb_size, self.cond_emb_size, num_heads, num_res_blocks, transformer_depths, self.unitary_encoder.params_config)\n", " \n", - " def forward(self, x, t, c_emb, U, attn_mask=None, key_padding_mask=None):\n", + " def forward(self, x, t, c_emb, U, attn_mask=None, key_padding_mask=None, **kwargs):\n", " u_emb = self.unitary_encoder(U) # [batch, seq2, ch] \n", " c_emb 
= torch.cat([c_emb, u_emb], dim=1) # [batch, seq1+seq2, ch] \n", - " out = super().forward(x, t, c_emb, attn_mask, key_padding_mask)\n", + " out = super().forward(x, t, c_emb, attn_mask, key_padding_mask, **kwargs)\n", " return out" ] }, diff --git a/src/models/unitary_encoder.ipynb b/src/models/unitary_encoder.ipynb index 4242f1e..721ac13 100644 --- a/src/models/unitary_encoder.ipynb +++ b/src/models/unitary_encoder.ipynb @@ -27,9 +27,9 @@ "source": [ "#| export\n", "from genQC.imports import *\n", - "from genQC.models.config_model import Config_Model\n", + "from genQC.models.config_model import ConfigModel\n", "import genQC.models.layers as layers\n", - "import genQC.models.transformers as transformers" + "import genQC.models.transformers.transformers as transformers" ] }, { @@ -65,7 +65,7 @@ "outputs": [], "source": [ "#| export\n", - "class Unitary_encoder(Config_Model):\n", + "class Unitary_encoder(ConfigModel):\n", " \"\"\"Encoder for unitary conditions.\"\"\"\n", " def __init__(self, cond_emb_size, model_features=None, num_heads=8, transformer_depths=[4, 4], dropout=0.1):\n", " super().__init__() \n", diff --git a/src/nbdev.yml b/src/nbdev.yml index 003816e..e87b5b1 100644 --- a/src/nbdev.yml +++ b/src/nbdev.yml @@ -4,6 +4,6 @@ project: website: title: "genQC" site-url: "https://FlorianFuerrutter.github.io/genQC" - description: "Generating quantum circuits with diffusion models" + description: "Generative quantum circuits" repo-branch: main repo-url: "https://github.com/FlorianFuerrutter/genQC" diff --git a/src/pipeline/callbacks.ipynb b/src/pipeline/callbacks.ipynb new file mode 100644 index 0000000..0040b69 --- /dev/null +++ b/src/pipeline/callbacks.ipynb @@ -0,0 +1,150 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "id": "a8980c24-d62e-462b-ba89-3195cfdcc374", + "metadata": {}, + "source": [ + "# Callbacks" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "a8832bdd-f61c-44e1-8619-a9cb352ba768", + "metadata": {}, + "outputs": [], 
+ "source": [ + "#| default_exp pipeline.callbacks" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "06272f6f-b4e3-4504-a90a-feebbf6ad821", + "metadata": {}, + "outputs": [], + "source": [ + "#| export\n", + "from genQC.imports import *\n", + "from operator import attrgetter, itemgetter" + ] + }, + { + "cell_type": "markdown", + "id": "812f168d-41c6-4b00-83c6-690f5ae92fe1", + "metadata": {}, + "source": [ + "## Base" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "3f9e18f9-4d9f-4165-baeb-982dc4d60af9", + "metadata": {}, + "outputs": [], + "source": [ + "#|export\n", + "class CancelFitException(Exception): pass\n", + "class CancelBatchException(Exception): pass\n", + "class CancelEpochException(Exception): pass" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "36032308-bd0e-4409-9db0-9d89fc258e5a", + "metadata": {}, + "outputs": [], + "source": [ + "#| export\n", + "class Callback(): order=0" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "5395ac61-82d2-4da8-a665-3c4ad3ae8df5", + "metadata": {}, + "outputs": [], + "source": [ + "#|export\n", + "def run_cbs(cbs, method_nm, pipeline=None):\n", + " if not exists(cbs): return\n", + " for cb in sorted(cbs, key=attrgetter('order')):\n", + " method = getattr(cb, method_nm, None)\n", + " if method: method(pipeline)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "c04867ce-7ce5-44cf-b7b4-1429006f34fa", + "metadata": {}, + "outputs": [], + "source": [ + "class CompletionCB(Callback):\n", + " def before_fit(self, pipeline): self.count = 0\n", + " def after_batch(self, pipeline): self.count += 1\n", + " def after_fit(self, pipeline): print(f'Completed {self.count} batches')" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "22f1e33f-da95-4a3f-80b8-7ef20a94a0c8", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Completed 1 batches\n" + ] + } 
+ ], + "source": [ + "cbs = [CompletionCB()]\n", + "run_cbs(cbs, 'before_fit')\n", + "run_cbs(cbs, 'after_batch')\n", + "run_cbs(cbs, 'after_fit')" + ] + }, + { + "cell_type": "markdown", + "id": "f41f26a8-ac40-4e91-8c0e-1ef07a0fd4f4", + "metadata": {}, + "source": [ + "# Export -" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "a0474216-8e0c-4ba7-9a37-571ac7d8e82c", + "metadata": {}, + "outputs": [], + "source": [ + "#| hide\n", + "import nbdev; nbdev.nbdev_export()" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "python3", + "language": "python", + "name": "python3" + }, + "widgets": { + "application/vnd.jupyter.widget-state+json": { + "state": {}, + "version_major": 2, + "version_minor": 0 + } + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/src/pipeline/compilation_diffusion_pipeline.ipynb b/src/pipeline/compilation_diffusion_pipeline.ipynb new file mode 100644 index 0000000..47af364 --- /dev/null +++ b/src/pipeline/compilation_diffusion_pipeline.ipynb @@ -0,0 +1,181 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "id": "47bf35e5-4b34-4c20-a07f-5934af0bae44", + "metadata": {}, + "source": [ + "# Compilation Diffusion Pipeline\n", + "\n", + "> Special extension to `DiffusionPipeline`." 
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "e5236e26-0e43-40dc-add4-77ef5496f3b2", + "metadata": {}, + "outputs": [], + "source": [ + "#| default_exp pipeline.compilation_diffusion_pipeline" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "8a149fd0-f4b1-4c8c-be07-ef1f07ae3814", + "metadata": {}, + "outputs": [], + "source": [ + "#| export\n", + "from genQC.imports import *\n", + "from genQC.pipeline.diffusion_pipeline import DiffusionPipeline" + ] + }, + { + "cell_type": "markdown", + "id": "8c7bf156-2d2c-4886-a2ac-5b7f9fa58c6b", + "metadata": {}, + "source": [ + "## Diffusion Pipeline - Compilation" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "5307df45-9b01-4d0b-98ee-97bf23609001", + "metadata": {}, + "outputs": [], + "source": [ + "#| export\n", + "class DiffusionPipeline_Compilation(DiffusionPipeline): \n", + " \"\"\"A special `DiffusionPipeline` that accounts for unitary conditions, i.e. compilation.\"\"\"\n", + " \n", + " #------------------------------------\n", + " \n", + " @torch.no_grad()\n", + " def __call__(self, latents, c, U, g, negative_c=None, negative_u=None, no_bar=False):\n", + " \n", + " latents = latents.to(self.device)\n", + " c = c.to(self.device)\n", + " U = U.to(self.device)\n", + " \n", + " return self.denoising(latents, c=c, U=U, negative_c=negative_c, negative_u=negative_u, enable_guidance=True, g=g, no_bar=no_bar)\n", + "\n", + " #------------------------------------\n", + "\n", + " def empty_unitary_fn(self, U):\n", + " # U ... 
[b , 2, n, n]\n", + " \n", + " u = torch.zeros_like(U)\n", + " return u\n", + " \n", + " def get_guidance_U(self, U: torch.Tensor, enable_guidance: bool = True, negative_u: Optional[torch.Tensor] = None):\n", + " if not exists(U): return U \n", + " U = U.to(self.device) \n", + " if enable_guidance: \n", + " if exists(negative_u): u = negative_u.to(self.device)\n", + " else: u = self.empty_unitary_fn(U).to(self.device) \n", + " U = torch.cat([u, U]) \n", + " return U\n", + " \n", + " @torch.no_grad()\n", + " def denoising(self, latents, c, U, negative_c=None, negative_u=None, enable_guidance=True, g=1.0, t_start_index=0, no_bar=False, return_predicted_x0=False): \n", + " U = self.get_guidance_U(U, enable_guidance, negative_u) \n", + " return super().denoising(latents, c, negative_c, enable_guidance, g, t_start_index=t_start_index, \n", + " no_bar=no_bar, return_predicted_x0=return_predicted_x0, U=U)\n", + "\n", + " def denoising_step(self, latents: torch.Tensor, ts: Union[int, torch.IntTensor], c_emb: torch.Tensor=None, enable_guidance=False, g=7.5, U: torch.Tensor=None): \n", + " if enable_guidance:\n", + " x = torch.cat([latents] * 2) #uses batch layer combine here\n", + " \n", + " if ts.numel() > 1: chunk_ts = torch.cat([ts] * 2)\n", + " else: chunk_ts = ts\n", + " \n", + " eps_u, eps_c = self.model(x, chunk_ts, c_emb, U=U).chunk(2) \n", + " \n", + " eps = self.CFG(eps_u, eps_c, g)\n", + " \n", + " else:\n", + " eps = self.model(latents, ts, c_emb, U=U) \n", + " \n", + " x = self.scheduler.step(eps, ts, latents) \n", + " return x.prev_sample, x.pred_original_sample\n", + " \n", + " #------------------------------------\n", + " \n", + " def train_step(self, data, train, **kwargs): \n", + " latents, y, U = data \n", + " b, s, t = latents.shape \n", + " \n", + " #start async memcpy\n", + " latents = latents.to(self.device, non_blocking=self.non_blocking) \n", + " latents = self.embedder.embed(latents) \n", + " \n", + " #do the cond embedding with CLIP \n", + " y = 
y.to(self.device, non_blocking=self.non_blocking) \n", + " U = U.to(self.device, non_blocking=self.non_blocking) \n", + " \n", + " if self.enable_guidance_train and train: \n", + " rnd_y, rnd_U = torch.empty((2*b,), device=self.device).bernoulli_(p=1.0-self.guidance_train_p).type(torch.int64).chunk(2, dim=0)\n", + "\n", + " y = self.cfg_drop(y, self.empty_token_fn(y) , rnd_y) \n", + " U = self.cfg_drop(U, self.empty_unitary_fn(U), rnd_U) \n", + "\n", + " \n", + " y_emb = self.text_encoder(y, pool=False)\n", + " \n", + " #sample timesteps\n", + " timesteps = torch.randint(low=0, high=self.scheduler.num_train_timesteps, size=(b,), device=self.device, dtype=torch.int64)\n", + "\n", + " #forward noising \n", + " noise = torch.randn(latents.shape, device=self.device) \n", + " noisy_latents = self.scheduler.add_noise(latents, noise, timesteps, train=train)\n", + "\n", + " #predict eps\n", + " eps = self.model(noisy_latents, timesteps, y_emb, U=U)\n", + " \n", + " #comp mse\n", + " loss = self.loss_fn(eps, noise)\n", + " \n", + " #log the loss\n", + " return loss" + ] + }, + { + "cell_type": "markdown", + "id": "9451ff6c-6087-4292-8eab-72959a40a0db", + "metadata": {}, + "source": [ + "# Export -" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "e47b0b2e-a662-43aa-a6a4-3461bc95f537", + "metadata": {}, + "outputs": [], + "source": [ + "#| hide\n", + "import nbdev; nbdev.nbdev_export()" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "python3", + "language": "python", + "name": "python3" + }, + "widgets": { + "application/vnd.jupyter.widget-state+json": { + "state": {}, + "version_major": 2, + "version_minor": 0 + } + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/src/pipeline/diffusion_pipeline.ipynb b/src/pipeline/diffusion_pipeline.ipynb index 48651e4..c76b518 100644 --- a/src/pipeline/diffusion_pipeline.ipynb +++ b/src/pipeline/diffusion_pipeline.ipynb @@ -29,10 +29,8 @@ "from genQC.imports import *\n", "from 
genQC.scheduler.scheduler import Scheduler\n", "from genQC.pipeline.pipeline import Pipeline\n", - "from genQC.config_loader import *\n", - "from genQC.models.config_model import Config_Model\n", - "\n", - "from huggingface_hub import snapshot_download" + "from genQC.utils.config_loader import *\n", + "from genQC.models.config_model import ConfigModel" ] }, { @@ -51,6 +49,7 @@ " scheduler: Scheduler, \n", " model: nn.Module,\n", " text_encoder: nn.Module,\n", + " embedder: nn.Module, # clr embeddings or a VAE for latent diffusion\n", " device: torch.device,\n", " enable_guidance_train = True,\n", " guidance_train_p = 0.1,\n", @@ -58,20 +57,24 @@ " ): \n", " super().__init__(model, device)\n", " self.scheduler = scheduler\n", - " self.scheduler.to_device(device) \n", + " self.scheduler.to(device) \n", " \n", " self.text_encoder = text_encoder\n", - " self.text_encoder.eval()\n", + " # self.text_encoder.eval()\n", + " self.trainables.append(self.text_encoder)\n", + " \n", + " self.embedder = embedder\n", + " self.trainables.append(self.embedder)\n", " \n", " self.enable_guidance_train = enable_guidance_train \n", " self.guidance_train_p = guidance_train_p\n", " \n", " self.cached_text_enc = cached_text_enc \n", " self.empty_token = self.text_encoder.empty_token\n", - " \n", + "\n", " if cached_text_enc: \n", " def cached_empty_token_fn(c):\n", - " if c.dim() == 1: return self.text_encoder.cached_empty_token_index # yields then a list of ints \n", + " if c.dim() == 1: return self.text_encoder.cached_empty_token_index.expand(c.shape) # yields then a list of ints \n", " elif c.dim() == 2: return self.empty_token.expand(c.shape) # tokenized input \n", " else: raise NotImplementedError(\"\")\n", " \n", @@ -81,15 +84,16 @@ " self.empty_token_fn = lambda c: self.empty_token.expand(c.shape) # for own clip \n", "\n", " #------------------------------------\n", - " \n", + "\n", " add_config = {}\n", " \n", " def params_config(self, save_path: str): \n", " params_config = 
{}\n", " \n", " params_config[\"scheduler\"] = self.scheduler.get_config()\n", - " params_config[\"model\"] = self.model.get_config(save_path=save_path+\"model.pt\")\n", - " params_config[\"text_encoder\"] = self.text_encoder.get_config(save_path=save_path+\"text_encoder.pt\")\n", + " params_config[\"model\"] = self.model.get_config(save_path=save_path+\"model\")\n", + " params_config[\"text_encoder\"] = self.text_encoder.get_config(save_path=save_path+\"text_encoder\")\n", + " params_config[\"embedder\"] = self.embedder.get_config(save_path=save_path+\"embedder\")\n", " \n", " params_config[\"device\"] = str(self.device)\n", " params_config[\"enable_guidance_train\"] = self.enable_guidance_train\n", @@ -105,57 +109,72 @@ " save_dict_yaml(config, config_path+\"config.yaml\")\n", " \n", " #only store weights of these submodels\n", - " self.model.store_model(config_path=None, save_path=save_path+\"model.pt\")\n", - " self.text_encoder.store_model(config_path=None, save_path=save_path+\"text_encoder.pt\")\n", + " self.model.store_model(config_path=None, save_path=save_path+\"model\")\n", + " self.text_encoder.store_model(config_path=None, save_path=save_path+\"text_encoder\")\n", + " self.embedder.store_model(config_path=None, save_path=save_path+\"embedder\")\n", " \n", " @staticmethod\n", - " def from_config_file(config_path, device: torch.device): \n", + " def from_config_file(config_path, device: torch.device, save_path: Optional[str] = None): \n", " config = load_config(config_path+\"config.yaml\") \n", " config = config_to_dict(config)\n", "\n", + " def _get_save_path(config_save_path, appendix):\n", + " _save_path = default(save_path, config_path) + appendix\n", + " if \"save_path\" in config_save_path:\n", + " if exists(config_save_path[\"save_path\"]):\n", + " _save_path = config_save_path[\"save_path\"]\n", + " else:\n", + " config_save_path.pop(\"save_path\")\n", + " return _save_path \n", + " \n", " if exists(device):\n", " config[\"params\"][\"device\"] = 
device\n", - " config[\"params\"][\"scheduler\"][\"params\"][\"device\"] = device\n", - " \n", - " config[\"params\"][\"scheduler\"] = instantiate_from_config(config[\"params\"][\"scheduler\"])\n", - " \n", - " model_path = config_path+\"model.pt\" if config[\"params\"][\"model\"][\"save_path\"] is None else config[\"params\"][\"model\"][\"save_path\"]\n", - " config[\"params\"][\"model\"] = Config_Model.from_config(config[\"params\"][\"model\"], device, model_path)\n", "\n", - " config[\"params\"][\"text_encoder\"] = Config_Model.from_config(config[\"params\"][\"text_encoder\"], device, config[\"params\"][\"text_encoder\"][\"save_path\"]) \n", + " config[\"params\"][\"scheduler\"] = Scheduler.from_config(config[\"params\"][\"scheduler\"], device, _get_save_path(config[\"params\"][\"scheduler\"], \"\"))\n", + " \n", + " config[\"params\"][\"model\"] = ConfigModel.from_config(config[\"params\"][\"model\"], device, _get_save_path(config[\"params\"][\"model\"], \"model\"))\n", + " config[\"params\"][\"text_encoder\"] = ConfigModel.from_config(config[\"params\"][\"text_encoder\"], device, _get_save_path(config[\"params\"][\"text_encoder\"], \"text_encoder\")) \n", + " \n", + " if \"embedder\" in config[\"params\"]:\n", + " config[\"params\"][\"embedder\"] = ConfigModel.from_config(config[\"params\"][\"embedder\"], device, _get_save_path(config[\"params\"][\"embedder\"], \"embedder\")) \n", + " else:\n", + " config[\"params\"][\"embedder\"] = config[\"params\"][\"model\"] #for legacy loading model\n", + " \n", " add_config = config[\"params\"].pop(\"add_config\", None)\n", "\n", " pipeline = instantiate_from_config(config)\n", " \n", " if exists(pipeline.add_config):\n", - " pipeline.gate_pool = [gate for gate in add_config[\"dataset\"][\"params\"][\"gate_pool\"]] \n", " pipeline.add_config = add_config\n", - " \n", - " return pipeline\n", - "\n", + " \n", + " params = add_config[\"dataset\"][\"params\"]\n", + " \n", + " if \"gate_pool\" in params: \n", + " # 
pipeline.gate_pool = [get_obj_from_str(gate) for gate in params[\"gate_pool\"]] \n", + " pipeline.gate_pool = [gate for gate in params[\"gate_pool\"]] \n", "\n", - " @classmethod\n", - " def from_pretrained(cls, repo_id: str, device: torch.device, **kwargs): \n", - " \"\"\"Load a model pipeline directly from Huggingface.\"\"\"\n", - " model_path = snapshot_download(repo_id=repo_id, repo_type=\"model\", allow_patterns=[\"*.pt\", \"*.yaml\", \"*.safetensors\"], **kwargs) \n", - " pipeline = cls.from_config_file(model_path+\"/\", device) \n", " return pipeline\n", - " \n", + " \n", " #------------------------------------\n", " # Inference functions\n", - " \n", - " @torch.no_grad()\n", - " def __call__(self, latents=None, c=None, seed=None, timesteps=None, no_bar=False, enable_guidance=True, g=7.5): \n", + "\n", + " # @torch.no_grad()\n", + " @torch.inference_mode() \n", + " def __call__(self, latents=None, c=None, negative_c=None, seed=None, timesteps=None, no_bar=False, enable_guidance=True, g=7.5, micro_cond=None): \n", " if exists(seed): torch.manual_seed(seed)\n", " if exists(timesteps): self.scheduler.set_timesteps(self.timesteps)\n", + "\n", + " self.text_encoder.eval()\n", + " self.model.eval()\n", " \n", " latents = latents.to(self.device) \n", - " x0 = self.denoising(latents, c=c, no_bar=no_bar, enable_guidance=enable_guidance, g=g) \n", + " x0 = self.denoising(latents, c=c, negative_c=negative_c, no_bar=no_bar, enable_guidance=enable_guidance, g=g, micro_cond=micro_cond) \n", " \n", " return x0\n", " \n", - " @torch.no_grad()\n", - " def latent_filling(self, org_latents: torch.Tensor, mask: torch.Tensor, c=None, enable_guidance=True, g=7.5, \n", + " # @torch.no_grad()\n", + " @torch.inference_mode()\n", + " def latent_filling(self, org_latents: torch.Tensor, mask: torch.Tensor, c=None, negative_c=None, enable_guidance=True, g=7.5, \n", " t_start_index=0, no_bar=False, return_predicted_x0=False, **kwargs):\n", " \"\"\"mask: area with ones is going to be 
filled\"\"\"\n", " if mask.dim() == 4: assert list(org_latents.shape) == list(mask.shape) # diff mask per sample and channel\n", @@ -165,9 +184,9 @@ " \n", " self.model.eval()\n", " self.text_encoder.eval() \n", - " self.scheduler.to_device(self.device)\n", + " self.scheduler.to(self.device)\n", " \n", - " c_emb = self.prepare_c_emb(c, enable_guidance, **kwargs)\n", + " c_emb = self.prepare_c_emb(c, enable_guidance, negative_c, **kwargs)\n", " \n", " org_latents = org_latents.to(self.device, non_blocking=self.non_blocking)\n", "\n", @@ -216,74 +235,82 @@ " #------------------------------------\n", " # Helper functions\n", " \n", - " def get_guidance_condition(self, c, enable_guidance):\n", + " def get_guidance_condition(self, c: torch.Tensor, enable_guidance: bool = True, negative_c: Optional[torch.Tensor] = None):\n", " if not exists(c): return c \n", " c = c.to(self.device) \n", " if enable_guidance: \n", - " u = self.empty_token_fn(c).to(self.device) \n", + " if exists(negative_c): u = negative_c.to(self.device)\n", + " else: u = self.empty_token_fn(c).to(self.device) \n", " c = torch.cat([u, c]) \n", - " c = c.type(torch.int64) \n", + " c = c.type(torch.int64) #to token dtype\n", " return c\n", "\n", - " def prepare_c_emb(self, c, enable_guidance, **kwargs):\n", - " c = self.get_guidance_condition(c, enable_guidance) \n", + " def prepare_c_emb(self, c: torch.Tensor, enable_guidance: bool = True, negative_c: Optional[torch.Tensor] = None, **kwargs):\n", + " c = self.get_guidance_condition(c, enable_guidance, negative_c) \n", " c_emb = self.text_encoder(c, pool=False)\n", " return c_emb\n", " \n", - " @torch.no_grad()\n", - " def denoising(self, latents: torch.Tensor, c=None, enable_guidance=True, g=7.5, t_start_index=0, no_bar=False, return_predicted_x0=False, **kwargs):\n", + " # @torch.no_grad()\n", + " @torch.inference_mode()\n", + " def denoising(self, latents: torch.Tensor, c=None, negative_c=None, enable_guidance=True, g=7.5, t_start_index=0, no_bar=False, 
\n", + " return_predicted_x0=False, micro_cond=None, **kwargs):\n", " self.model.eval()\n", " self.text_encoder.eval()\n", - " self.scheduler.to_device(self.device)\n", + " self.scheduler.to(self.device)\n", " \n", - " c_emb = self.prepare_c_emb(c, enable_guidance, **kwargs)\n", + " c_emb = self.prepare_c_emb(c, enable_guidance, negative_c, **kwargs)\n", " \n", " latents = latents.to(self.device, non_blocking=self.non_blocking)\n", " \n", " if return_predicted_x0: predicted_x0 = list()\n", " \n", " for i, t in enumerate(tqdm(self.scheduler.timesteps[t_start_index:], disable=no_bar)):\n", - " timesteps = (torch.ones((1)) * t).type(torch.int64).to(self.device, non_blocking=self.non_blocking)\n", - " \n", - " latents, x0 = self.denoising_step(latents, timesteps, c_emb=c_emb, enable_guidance=enable_guidance, g=g, **kwargs)\n", - " \n", - " if return_predicted_x0: predicted_x0.append(x0.cpu())\n", - " \n", - " if return_predicted_x0: return latents.cpu(), predicted_x0 \n", - " return latents.cpu()\n", - " \n", - " # @torch.no_grad()\n", - " def denoising_step(self, latents: torch.Tensor, ts: Union[int, torch.IntTensor], c_emb: torch.Tensor=None, enable_guidance=True, g=7.5, **kwargs): \n", + " timesteps = torch.tensor([t], device=self.device)\n", + " \n", + " latents, x0 = self.denoising_step(latents, timesteps, c_emb=c_emb, enable_guidance=enable_guidance, g=g, micro_cond=micro_cond, **kwargs)\n", + "\n", + " if return_predicted_x0: \n", + " predicted_x0.append(x0)\n", + "\n", + " if return_predicted_x0: \n", + " predicted_x0 = torch.stack(predicted_x0, dim=0) # [timesteps, *latents.shape]\n", + " return latents, predicted_x0 \n", + " \n", + " return latents\n", + " \n", + " def denoising_step(self, latents: torch.Tensor, ts: Union[int, torch.IntTensor], c_emb: torch.Tensor=None, enable_guidance=True, g=7.5, micro_cond=None, **kwargs): \n", " if enable_guidance:\n", " x = torch.cat([latents] * 2) #uses batch layer combine here\n", " \n", " if ts.numel() > 1: chunk_ts = 
torch.cat([ts] * 2)\n", " else: chunk_ts = ts\n", - " \n", - " eps_u, eps_c = self.model(x, chunk_ts, c_emb).chunk(2) \n", + " \n", + " eps_u, eps_c = self.model(x, chunk_ts, c_emb, micro_cond=micro_cond).chunk(2) \n", " \n", " eps = self.CFG(eps_u, eps_c, g)\n", - " \n", + "\n", + " x = self.scheduler.step(eps, ts, latents, uncond_model_output=eps_u) \n", + " \n", " else:\n", " eps = self.model(latents, ts, c_emb) \n", - " \n", - " x = self.scheduler.step(eps, ts, latents) \n", + " x = self.scheduler.step(eps, ts, latents) \n", + " \n", " return x.prev_sample, x.pred_original_sample\n", " \n", - " guidance_sample_mode = \"rescaled\" # one of: normal, fastai, rescaled\n", + " guidance_sample_mode = \"normal\" # one of: normal, fastai, rescaled\n", " \n", " def CFG(self, eps_u, eps_c, g):\n", " \"\"\"Apply Classifier-free-guidance sampling\"\"\"\n", " dim = list(range(1, eps_u.dim())) # reduce all but batches\n", "\n", - " if self.guidance_sample_mode == \"normal\": # from https://arxiv.org/pdf/2207.12598.pdf, w=g+1 \n", + " if self.guidance_sample_mode == \"normal\": # from https://arxiv.org/pdf/2207.12598.pdf, w=g+1 s=g+1\n", " eps = eps_u + g * (eps_c-eps_u) \n", "\n", " elif self.guidance_sample_mode == \"fastai\": # from fastAi less 11\n", " eps = eps_u + g*(eps_c-eps_u) * torch.linalg.vector_norm(eps_u, dim=dim, keepdim=True) / torch.linalg.vector_norm(eps_c-eps_u, dim=dim, keepdim=True) \n", " eps = eps * torch.linalg.vector_norm(eps_u, dim=dim, keepdim=True) / torch.linalg.vector_norm(eps, dim=dim, keepdim=True)\n", "\n", - " elif self.guidance_sample_mode == \"rescaled\": # from https://arxiv.org/pdf/2305.08891.pd\n", + " elif self.guidance_sample_mode == \"rescaled\": # from https://arxiv.org/pdf/2305.08891.pdf\n", " phi = 0.7\n", "\n", " eps_cfg = eps_u + g * (eps_c-eps_u) \n", @@ -297,36 +324,55 @@ " #------------------------------------\n", " # Training functions\n", "\n", + " def sample_timesteps_low_variance(self, b: int, scheduler: Scheduler, 
shuffle: bool = False, continuous_time: bool = False) -> torch.Tensor:\n", + " \"\"\"Low variance sampling, see https://arxiv.org/abs/2406.07524 and originaly https://arxiv.org/abs/2107.00630.\"\"\"\n", + " \n", + " start = torch.linspace(0, 1.0-1.0/b, b, device=self.device, dtype=torch.float32)\n", + " ts = start + torch.rand_like(start) / b\n", + "\n", + " if continuous_time:\n", + " ts = ts.clamp(0., 1.)\n", + " else:\n", + " ts = (ts * scheduler.num_train_timesteps).floor().clamp(0, scheduler.num_train_timesteps-1).to(torch.int64)\n", + "\n", + " if shuffle:\n", + " return ts[torch.randperm(b)]\n", + " return ts\n", + " \n", " def train_on_epoch(self, data_loader: DataLoader, train=True): \n", - " self.scheduler.to_device(self.device, non_blocking=self.non_blocking) \n", + " self.scheduler.to(self.device, non_blocking=self.non_blocking) \n", " super().train_on_epoch(data_loader, train)\n", "\n", - " #@torch.autocast(device_type=device.type)\n", - " def train_step(self, data, **kwargs): \n", + " def cfg_drop(self, y, y_drop, rnd):\n", + " \"\"\"A value of `rnd` one means we take `y`. A value of `rnd` zero means we drop `y` and use `empty_token_fn`.\"\"\"\n", + " rnd = self.scheduler.unsqueeze_vector_to_shape(rnd, y.shape) # e.g. 
[b, 1, 1] \n", + " y = y * rnd + (1-rnd) * y_drop\n", + " return y\n", + "\n", + " def train_step(self, data, train, **kwargs): \n", " latents, y = data \n", " b, s, t = latents.shape \n", " \n", " #start async memcpy\n", " latents = latents.to(self.device, non_blocking=self.non_blocking) \n", - " latents = self.model.embedd_clrs(latents) #this is only new tensor\n", - " \n", + " latents = self.embedder.embed(latents) \n", + " \n", " #do the cond embedding with CLIP \n", " y = y.to(self.device, non_blocking=self.non_blocking) \n", + " U = U.to(self.device, non_blocking=self.non_blocking) \n", " \n", - " if self.enable_guidance_train: \n", - " rnd = torch.rand((b,), device=self.device) \n", - " rnd = (rnd > self.guidance_train_p).type(torch.int64) # todo: change to bernoulli dist fn\n", - " rnd = self.scheduler.unsqueeze_vector_to_shape(rnd, y.shape) # e.g. [b, 1, 1] \n", - " y = y * rnd + (1-rnd) * self.empty_token_fn(y)\n", - " \n", + " if self.enable_guidance_train and train: \n", + " rnd_y = torch.empty((b,), device=self.device).bernoulli_(p=1.0-self.guidance_train_p).type(torch.int64)\n", + " y = self.cfg_drop(y, self.empty_token_fn(y), rnd_y) \n", + " \n", " y_emb = self.text_encoder(y, pool=False)\n", - " \n", + " \n", " #sample timesteps\n", " timesteps = torch.randint(low=0, high=self.scheduler.num_train_timesteps, size=(b,), device=self.device, dtype=torch.int64)\n", "\n", " #forward noising \n", " noise = torch.randn(latents.shape, device=self.device) \n", - " noisy_latents = self.scheduler.add_noise(latents, noise, timesteps)\n", + " noisy_latents = self.scheduler.add_noise(latents, noise, timesteps, train=train)\n", "\n", " #predict eps\n", " eps = self.model(noisy_latents, timesteps, y_emb)\n", diff --git a/src/pipeline/diffusion_pipeline_special.ipynb b/src/pipeline/diffusion_pipeline_special.ipynb index 2c9c34e..62aab05 100644 --- a/src/pipeline/diffusion_pipeline_special.ipynb +++ b/src/pipeline/diffusion_pipeline_special.ipynb @@ -31,59 +31,11 @@ ] 
}, { - "cell_type": "code", - "execution_count": null, - "id": "1393a437-2284-4d66-a0d2-3fedd2b62154", + "cell_type": "markdown", + "id": "0b0b42c2-2456-4d93-a6ff-a58be60afc58", "metadata": {}, - "outputs": [], "source": [ - "#| export\n", - "class DiffusionPipeline_attnPadded(DiffusionPipeline): \n", - " \"\"\"A special `DiffusionPipeline` with attention masking.\"\"\"\n", - " def train_step(self, data, **kwargs): \n", - " latents, y, key_padding_mask_list = data \n", - " b, s, t = latents.shape \n", - " \n", - " #start async memcpy\n", - " loss_mask = (key_padding_mask_list[0].to(self.device, non_blocking=self.non_blocking)>-1.0).float().unsqueeze(1)\n", - " \n", - " shaped_mask = []\n", - " for key_padding_mask in key_padding_mask_list:\n", - " key_padding_mask = key_padding_mask.to(self.device, non_blocking=self.non_blocking) \n", - " key_padding_mask = key_padding_mask.reshape((b, -1)) #from [b, s, t] to [b, -1] aka [N, L] \n", - " shaped_mask.append(key_padding_mask) \n", - " \n", - " latents = latents.to(self.device, non_blocking=self.non_blocking) \n", - " latents = self.model.embedd_clrs(latents) #this is only new tensor\n", - " self.scheduler.to_device(self.device, non_blocking=self.non_blocking) \n", - " \n", - " #do the cond embedding with CLIP \n", - " y = y.to(self.device, non_blocking=self.non_blocking) \n", - " \n", - " if self.enable_guidance_train: \n", - " rnd = torch.rand((b,), device=self.device) \n", - " rnd = (rnd > self.guidance_train_p).type(torch.int64) # todo: change to bernoulli dist fn\n", - " rnd = self.scheduler.unsqueeze_vector_to_shape(rnd, y.shape) # e.g. 
[b, 1, 1] \n", - " y = y * rnd + (1-rnd) * self.empty_token_fn(y)\n", - " \n", - " y_emb = self.text_encoder(y, pool=False)\n", - " \n", - " #sample timesteps\n", - " timesteps = torch.randint(low=0, high=self.scheduler.num_train_timesteps, size=(b,), device=self.device, dtype=torch.int64)\n", - "\n", - " #forward noising \n", - " noise = torch.randn(latents.shape, device=self.device) \n", - " noisy_latents = self.scheduler.add_noise(latents, noise, timesteps)\n", - "\n", - " #predict eps\n", - " eps = self.model(noisy_latents, timesteps, y_emb, key_padding_mask=shaped_mask)\n", - " \n", - " #comp mse\n", - " loss = self.loss_fn(eps*loss_mask, noise*loss_mask)\n", - " # loss = self.loss_fn(eps, noise)\n", - " \n", - " #log the loss\n", - " return loss" + "This file is for **legacy support**, it will be removed in future versions." ] }, { @@ -120,12 +72,12 @@ " return U\n", " \n", " @torch.no_grad()\n", - " def denoising(self, latents, c, U, enable_guidance, g, no_bar=False, return_predicted_x0=False): \n", + " def denoising(self, latents, c, U, enable_guidance=True, g=0, no_bar=False, return_predicted_x0=False): \n", " U = self.get_guidance_U(U, enable_guidance) \n", " # self.unitary_encoder.eval() \n", - " return super().denoising(latents, c, enable_guidance, g, no_bar=no_bar, return_predicted_x0=return_predicted_x0, U=U)\n", + " return super().denoising(latents, c, enable_guidance=enable_guidance, g=g, no_bar=no_bar, return_predicted_x0=return_predicted_x0, U=U)\n", "\n", - " def denoising_step(self, latents: torch.Tensor, ts: Union[int, torch.IntTensor], c_emb: torch.Tensor=None, enable_guidance=False, g=7.5, U: torch.Tensor=None): \n", + " def denoising_step(self, latents: torch.Tensor, ts: Union[int, torch.IntTensor], c_emb: torch.Tensor=None, enable_guidance=False, g=7.5, U: torch.Tensor=None, micro_cond=None): \n", " if enable_guidance:\n", " x = torch.cat([latents] * 2) #uses batch layer combine here\n", " \n", diff --git a/src/metrics.ipynb 
b/src/pipeline/metrics.ipynb similarity index 78% rename from src/metrics.ipynb rename to src/pipeline/metrics.ipynb index c958494..88ea28d 100644 --- a/src/metrics.ipynb +++ b/src/pipeline/metrics.ipynb @@ -5,15 +5,9 @@ "id": "a8980c24-d62e-462b-ba89-3195cfdcc374", "metadata": {}, "source": [ - "# Metrics" - ] - }, - { - "cell_type": "markdown", - "id": "f7055695-c1f0-4a83-bc23-7f629725331d", - "metadata": {}, - "source": [ - "Definition of metrics used during training." + "# Metrics\n", + "\n", + "> Definition of metrics used during training." ] }, { @@ -23,7 +17,7 @@ "metadata": {}, "outputs": [], "source": [ - "#| default_exp metrics" + "#| default_exp pipeline.metrics" ] }, { @@ -34,8 +28,7 @@ "outputs": [], "source": [ "#| export\n", - "from genQC.imports import *\n", - "from genQC.util import virtual" + "from genQC.imports import *" ] }, { @@ -46,20 +39,20 @@ "outputs": [], "source": [ "#| export\n", - "class Metric:\n", - " \"\"\"Base metric class.\"\"\"\n", + "class Metric(abc.ABC):\n", + " \"\"\"Base metric class.\"\"\" \n", " def __init__(self, name: str, device): \n", " self.name = name\n", " self.device = torch.device(device)\n", - " self.reset_state() \n", + " self.reset_state() \n", " def __repr__(self): return f\"{self.name}={self.result()}\"\n", - "\n", " def update_state(self, inp, tar=None): self.empty=False\n", " def reset_state(self): self.empty=True \n", " \n", - " @virtual\n", + " @abc.abstractmethod\n", " def _eval(self, inp, tar): pass\n", - " @virtual\n", + " \n", + " @abc.abstractmethod\n", " def result(self): pass " ] }, @@ -72,22 +65,30 @@ "source": [ "#| export\n", "class Mean(Metric):\n", - " \"\"\"Mean metric, used for loss ..\"\"\"\n", - " def __init__(self, name: str, device): super().__init__(name, device) \n", + " \"\"\"Mean metric, used for loss.\"\"\"\n", + " \n", + " def __init__(self, name: str, device): \n", + " super().__init__(name, device) \n", + " \n", " @torch.inference_mode()\n", - " def update_state(self, inp: 
torch.Tensor, tar: torch.Tensor=None, weight: float=1):\n", + " def update_state(self, inp: torch.Tensor, tar: torch.Tensor = None, weight: float = 1):\n", " super().update_state(inp, tar) \n", " val = self._eval(inp, tar) \n", " self.weighted_sum += torch.sum(val * weight)\n", - " self.weight += weight * torch.numel(val) \n", + " self.weight += weight * torch.numel(val) \n", + " \n", " @torch.inference_mode()\n", " def reset_state(self): \n", " super().reset_state()\n", " self.weighted_sum = torch.tensor(0.0, device=self.device)\n", " self.weight = torch.tensor(0.0, device=self.device) \n", - " def _eval(self, inp, tar): return inp \n", + " \n", + " def _eval(self, inp, tar): \n", + " return inp \n", + " \n", " @torch.inference_mode()\n", - " def result(self): return (self.weighted_sum/self.weight).cpu()" + " def result(self): \n", + " return (self.weighted_sum/self.weight).cpu()" ] }, { @@ -168,6 +169,13 @@ "display_name": "python3", "language": "python", "name": "python3" + }, + "widgets": { + "application/vnd.jupyter.widget-state+json": { + "state": {}, + "version_major": 2, + "version_minor": 0 + } } }, "nbformat": 4, diff --git a/src/pipeline/multimodal_diffusion_pipeline.ipynb b/src/pipeline/multimodal_diffusion_pipeline.ipynb new file mode 100644 index 0000000..eaf5e5d --- /dev/null +++ b/src/pipeline/multimodal_diffusion_pipeline.ipynb @@ -0,0 +1,487 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "id": "a32597ab-f4af-4489-b591-8e184d3e2534", + "metadata": {}, + "source": [ + "# Multimodal Diffusion Pipeline\n", + "\n", + "> Multimodal extension to `DiffusionPipeline`." 
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "b23755da-6458-4c60-b54d-bc803616e98a", + "metadata": {}, + "outputs": [], + "source": [ + "#| default_exp pipeline.multimodal_diffusion_pipeline" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "dcc36a6d-f39b-4e47-b5e7-1755d0935d75", + "metadata": {}, + "outputs": [], + "source": [ + "#| export\n", + "from genQC.imports import *\n", + "from genQC.pipeline.compilation_diffusion_pipeline import DiffusionPipeline_Compilation\n", + "\n", + "from genQC.scheduler.scheduler import Scheduler\n", + "from genQC.utils.config_loader import *\n", + "from genQC.models.config_model import ConfigModel" + ] + }, + { + "cell_type": "markdown", + "id": "45e498f6-2f99-4eab-a05f-a69d67e12e10", + "metadata": {}, + "source": [ + "## Multimodal Diffusion Pipeline - Compilation" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "cf5a5dbb-54d6-48ff-b32a-e7c1c5d7c342", + "metadata": {}, + "outputs": [], + "source": [ + "#| export\n", + "class MultimodalDiffusionPipeline_ParametrizedCompilation(DiffusionPipeline_Compilation): \n", + " \"\"\"A special `DiffusionPipeline_Compilation` that accounts for multimodal parametrized gates.\"\"\"\n", + "\n", + " def __init__(self, *args, scheduler_w, **kwargs):\n", + " super().__init__(*args, **kwargs)\n", + " self.scheduler_w = scheduler_w\n", + " self.scheduler_w.to(self.device) \n", + "\n", + " def params_config(self, *args, **kwargs):\n", + " params_config = super().params_config(*args, **kwargs)\n", + " params_config[\"scheduler_w\"] = self.scheduler_w.get_config()\n", + " return params_config\n", + "\n", + " @staticmethod\n", + " def from_config_file(config_path, device: torch.device, save_path: Optional[str] = None): \n", + " config = load_config(config_path+\"config.yaml\") \n", + " config = config_to_dict(config)\n", + "\n", + " def _get_save_path(config_save_path, appendix):\n", + "\n", + " _save_path = default(save_path, 
config_path) + appendix\n", + " if \"save_path\" in config_save_path:\n", + " if exists(config_save_path[\"save_path\"]):\n", + " _save_path = config_save_path[\"save_path\"]\n", + " else:\n", + " config_save_path.pop(\"save_path\")\n", + " return _save_path \n", + " \n", + " if exists(device):\n", + " config[\"params\"][\"device\"] = device\n", + " config[\"params\"][\"scheduler\"][\"params\"][\"device\"] = device\n", + " \n", + " config[\"params\"][\"scheduler\"] = Scheduler.from_config(config[\"params\"][\"scheduler\"] , device, _get_save_path(config[\"params\"][\"scheduler\"] , \"\"))\n", + " config[\"params\"][\"scheduler_w\"] = Scheduler.from_config(config[\"params\"][\"scheduler_w\"], device, _get_save_path(config[\"params\"][\"scheduler_w\"], \"\"))\n", + " \n", + " config[\"params\"][\"model\"] = ConfigModel.from_config(config[\"params\"][\"model\"], device, _get_save_path(config[\"params\"][\"model\"], \"model\"))\n", + " config[\"params\"][\"text_encoder\"] = ConfigModel.from_config(config[\"params\"][\"text_encoder\"], device, _get_save_path(config[\"params\"][\"text_encoder\"], \"text_encoder\")) \n", + " config[\"params\"][\"embedder\"] = ConfigModel.from_config(config[\"params\"][\"embedder\"], device, _get_save_path(config[\"params\"][\"embedder\"], \"embedder\")) \n", + " \n", + " add_config = config[\"params\"].pop(\"add_config\", None)\n", + "\n", + " pipeline = instantiate_from_config(config)\n", + " \n", + " if exists(pipeline.add_config):\n", + " pipeline.add_config = add_config\n", + " \n", + " params = add_config[\"dataset\"][\"params\"]\n", + " \n", + " if \"gate_pool\" in params: \n", + " # pipeline.gate_pool = [get_obj_from_str(gate) for gate in params[\"gate_pool\"]] \n", + " pipeline.gate_pool = [gate for gate in params[\"gate_pool\"]] \n", + "\n", + " return pipeline\n", + " \n", + " #------------------------------------\n", + "\n", + " # @torch.no_grad()\n", + " @torch.inference_mode()\n", + " def denoising(self, latents, c, U, 
negative_c=None, negative_u=None, enable_guidance=True, g=1.0, t_start_index=0, no_bar=False, return_predicted_x0=False):\n", + " return super().denoising(latents=latents, c=c, U=U, negative_c=negative_c, negative_u=negative_u, enable_guidance=enable_guidance, g=g, t_start_index=t_start_index,\n", + " no_bar=no_bar, return_predicted_x0=return_predicted_x0)\n", + "\n", + " #------------------------------------\n", + "\n", + " sample_type = \"joint\"\n", + " \n", + " def denoising_step(self, \n", + " latents: torch.Tensor, \n", + " ts: Union[int, torch.IntTensor], \n", + " c_emb: torch.Tensor = None, \n", + " enable_guidance = False, \n", + " g: float = 7.5, \n", + " U: torch.Tensor = None,\n", + " **kwargs) -> Tuple[torch.Tensor, torch.Tensor]: \n", + "\n", + " match self.sample_type:\n", + " case \"joint\":\n", + " x_tm1, x0 = self.denoising_step_joint(latents, ts, c_emb, enable_guidance, g, U)\n", + "\n", + " case \"w\":\n", + " # Here the single mode denoising functions\n", + " x_tm1, x0 = self.denoising_step_single_mode_w(latents, ts, c_emb, enable_guidance, g, U)\n", + "\n", + " case _:\n", + " raise NotImplementedError(\"\")\n", + " \n", + " return x_tm1, x0\n", + "\n", + " #------------------------------------\n", + " # Cleaned steps\n", + "\n", + " def _get_guidance_scales(self, g: float, ts_h: torch.Tensor, ts_w: torch.Tensor):\n", + " g_h , g_w = g, g\n", + " lambda_h, lambda_w = g, g\n", + " \n", + " if hasattr(self, \"g_h\"): \n", + " if isinstance(self.g_h, Callable):\n", + " assert ts_h.numel() == 1\n", + " g_h = self.g_h(ts_h)\n", + " else:\n", + " g_h = self.g_h\n", + " \n", + " if hasattr(self, \"g_w\"): \n", + " if isinstance(self.g_w, Callable): \n", + " assert ts_w.numel() == 1\n", + " g_w = self.g_w(ts_w)\n", + " else:\n", + " g_w = self.g_w\n", + "\n", + " if hasattr(self, \"lambda_h\"): \n", + " if isinstance(self.lambda_h, Callable):\n", + " assert ts_h.numel() == 1\n", + " lambda_h = self.lambda_h(ts_h)\n", + " else:\n", + " lambda_h = 
self.lambda_h\n", + " \n", + " if hasattr(self, \"lambda_w\"): \n", + " if isinstance(self.lambda_w, Callable): \n", + " assert ts_w.numel() == 1\n", + " lambda_w = self.lambda_w(ts_w)\n", + " else:\n", + " lambda_w = self.lambda_w\n", + " \n", + " return g_h, g_w, lambda_h, lambda_w\n", + "\n", + " def denoising_step_joint(self, \n", + " latents: torch.Tensor, \n", + " ts: Union[int, torch.IntTensor], \n", + " c_emb: torch.Tensor = None, \n", + " enable_guidance = False, \n", + " g: float = 7.5, \n", + " U: torch.Tensor = None,\n", + " ) -> Tuple[torch.Tensor, torch.Tensor]:\n", + "\n", + " # Prepare variables\n", + " g_h, g_w, lambda_h, lambda_w = self._get_guidance_scales(g, ts_h=ts, ts_w=ts)\n", + "\n", + " # assert enable_guidance\n", + " c_emb_u, c_emb_c = c_emb.chunk(2)\n", + " U_u , U_c = U.chunk(2)\n", + "\n", + " ts_expanded = ts.expand(latents.shape[0])\n", + " T_h_expanded = torch.ones_like(ts_expanded) * (self.scheduler.num_train_timesteps-1)\n", + " T_w_expanded = torch.ones_like(ts_expanded) * (self.scheduler_w.num_train_timesteps-1)\n", + "\n", + " # Get latents of modes\n", + " noisy_latents = torch.randn_like(latents)\n", + " latents_h, latents_w = latents[..., :self.embedder.clr_dim], latents[..., self.embedder.clr_dim:]\n", + " noisy_latents_h, noisy_latents_w = noisy_latents[..., :self.embedder.clr_dim], noisy_latents[..., self.embedder.clr_dim:]\n", + " \n", + " # Get all combinations\n", + " latents_chunked_h = torch.cat([\n", + " latents_h, # sh_h\n", + " latents_h, # sh_hw\n", + " latents_h, # sh_hwc\n", + "\n", + " noisy_latents_h, # sw_w\n", + " latents_h, # sw_hw\n", + " latents_h, # sw_hwc \n", + " ])\n", + " \n", + " latents_chunked_w = torch.cat([\n", + " noisy_latents_w, # sh_h\n", + " latents_w, # sh_hw\n", + " latents_w, # sh_hwc\n", + "\n", + " latents_w, # sw_w\n", + " latents_w, # sw_hw\n", + " latents_w, # sw_hwc \n", + " ])\n", + "\n", + " t_h_chunked = torch.cat([\n", + " ts_expanded, # sh_h\n", + " ts_expanded, # sh_hw\n", + 
" ts_expanded, # sh_hwc\n", + "\n", + " T_h_expanded, # sw_w\n", + " ts_expanded, # sw_hw\n", + " ts_expanded, # sw_hwc \n", + " ])\n", + "\n", + " t_w_chunked = torch.cat([\n", + " T_w_expanded, # sh_h\n", + " ts_expanded, # sh_hw\n", + " ts_expanded, # sh_hwc\n", + "\n", + " ts_expanded, # sw_w\n", + " ts_expanded, # sw_hw\n", + " ts_expanded, # sw_hwc \n", + " ])\n", + "\n", + " c_emb_chunked = torch.cat([\n", + " c_emb_u, # sh_h\n", + " c_emb_u, # sh_hw\n", + " c_emb_c, # sh_hwc\n", + "\n", + " c_emb_u, # sw_w\n", + " c_emb_u, # sw_hw\n", + " c_emb_c, # sw_hwc \n", + " ])\n", + "\n", + " U_chunked = torch.cat([\n", + " U_u, # sh_h\n", + " U_u, # sh_hw\n", + " U_c, # sh_hwc\n", + "\n", + " U_u, # sw_w\n", + " U_u, # sw_hw\n", + " U_c, # sw_hwc \n", + " ])\n", + "\n", + " # Make all predictions we need\n", + " latents_chunked = torch.cat([latents_chunked_h, latents_chunked_w], dim=-1)\n", + " \n", + " pred = self.model(latents_chunked, t_h=t_h_chunked, t_w=t_w_chunked, c_emb=c_emb_chunked, U=U_chunked)\n", + " pred_h, pred_w = pred[..., :self.embedder.clr_dim], pred[..., self.embedder.clr_dim:]\n", + " \n", + " sh_h, sh_hw, sh_hwc, _, _, _ = pred_h.chunk(6)\n", + " _, _, _, sw_w, sw_hw, sw_hwc = pred_w.chunk(6)\n", + " \n", + " # Combine into CFG \n", + " sh_bar = sh_h + g_h * (sh_hw - sh_h) + lambda_h * (sh_hwc - sh_hw)\n", + " sw_bar = sw_w + g_w * (sw_hw - sw_w) + lambda_w * (sw_hwc - sw_hw)\n", + " \n", + " # Do denoise step with CFG++\n", + " x_h = self.scheduler.step(sh_bar, ts, latents_h, uncond_model_output=sh_h) \n", + " x_w = self.scheduler_w.step(sw_bar, ts, latents_w, uncond_model_output=sw_w) \n", + " \n", + " return torch.cat([x_h.prev_sample, x_w.prev_sample], dim=-1), torch.cat([x_h.pred_original_sample, x_w.pred_original_sample], dim=-1)\n", + " \n", + " #------------------------------------\n", + "\n", + " def denoising_step_single_mode_w(self,\n", + " latents: torch.Tensor,\n", + " ts: Union[int, torch.IntTensor], \n", + " c_emb: torch.Tensor = 
None,\n", + " enable_guidance = False, \n", + " g: float = 7.5, \n", + " U: torch.Tensor = None\n", + " ) -> Tuple[torch.Tensor, torch.Tensor]:\n", + "\n", + " assert enable_guidance # TODO: remove this\n", + "\n", + " chunk_latents = torch.cat([latents] * 2, dim=0)\n", + " \n", + " if ts.numel() > 1: chunk_ts = torch.cat([ts] * 2, dim=0)\n", + " else: chunk_ts = ts\n", + "\n", + " T = torch.ones_like(chunk_ts) * (self.scheduler.num_train_timesteps-1)\n", + " TZero = torch.zeros_like(chunk_ts) \n", + " \n", + " #------------------------\n", + " # 1. Get: s(h|w), s(w|h) and s(h|w,c), s(w|h,c)\n", + " # Note here we set t_h=0\n", + " \n", + " def f1(chunk_latents, chunk_ts):\n", + " x = chunk_latents.clone()\n", + " \n", + " s_hw, s_hwc = self.model(x, t_h=TZero, t_w=chunk_ts, c_emb=c_emb, U=U).chunk(2) \n", + " \n", + " sw_hw, sw_hwc = s_hw[..., self.embedder.clr_dim:], s_hwc[..., self.embedder.clr_dim:]\n", + " \n", + " return sw_hw, sw_hwc\n", + " \n", + " #------------------------\n", + " # 2. 
Get: s(w), s(w|c)\n", + "\n", + " def f2(chunk_latents, chunk_ts):\n", + " x = chunk_latents.clone()\n", + " x[..., :self.embedder.clr_dim] = torch.randn_like(x[..., :self.embedder.clr_dim]) #remove h\n", + " \n", + " s_w, s_wc = self.model(x, t_h=T, t_w=chunk_ts, c_emb=c_emb, U=U).chunk(2) \n", + " \n", + " sw_w, sw_wc = s_w[..., self.embedder.clr_dim:], s_wc[..., self.embedder.clr_dim:]\n", + "\n", + " return sw_w, sw_wc\n", + " \n", + " #------------------------------------------------\n", + "\n", + " sw_hw, sw_hwc = f1(chunk_latents, chunk_ts)\n", + " sw_w, sw_wc = f2(chunk_latents, chunk_ts)\n", + "\n", + " g_w = g\n", + " \n", + " if hasattr(self, \"g_w\"): \n", + " if isinstance(self.g_w, Callable): \n", + " assert ts.numel() == 1\n", + " g_w = self.g_w(chunk_ts)\n", + " else:\n", + " g_w = self.g_w\n", + " \n", + " gamma_w = g_w #was no/2\n", + " lambda_w = g_w\n", + "\n", + " if hasattr(self, \"lambda_w\"): \n", + " if isinstance(self.lambda_w, Callable): \n", + " assert ts.numel() == 1\n", + " lambda_w = self.lambda_w(chunk_ts)\n", + " else:\n", + " lambda_w = self.lambda_w\n", + "\n", + " sw_bar = sw_w + gamma_w * (sw_hw - sw_w) + lambda_w * (sw_hwc - sw_hw)\n", + "\n", + " latents_h, latents_w = latents[..., :self.embedder.clr_dim], latents[..., self.embedder.clr_dim:]\n", + " \n", + " #CFG++\n", + " x_h = latents_h\n", + " x_w = self.scheduler_w.step(sw_bar, ts, latents_w, uncond_model_output=sw_w) \n", + "\n", + " return torch.cat([x_h, x_w.prev_sample], dim=-1), torch.cat([x_h, x_w.pred_original_sample], dim=-1)\n", + "\n", + " #------------------------------------\n", + " \n", + " def train_step(self, data, train, **kwargs): \n", + " target_tokens, y, params, U = data \n", + " b, s, t = target_tokens.shape \n", + "\n", + " #start async memcpy\n", + " target_tokens = target_tokens.to(self.device, non_blocking=self.non_blocking) \n", + " params = params.to(self.device, non_blocking=self.non_blocking) \n", + " \n", + " latents = 
self.embedder(h=target_tokens, w=params) \n", + "\n", + " #do the cond embedding with CLIP \n", + " U = U.to(torch.float32)\n", + " \n", + " y = y.to(self.device, non_blocking=self.non_blocking) \n", + " U = U.to(self.device, non_blocking=self.non_blocking) \n", + " \n", + " if self.enable_guidance_train and train: #CFG training\n", + " rnd = torch.empty((b,), device=self.device).bernoulli_(p=1.0-self.guidance_train_p).type(torch.int64)\n", + "\n", + " y_drop = self.cfg_drop(y, self.empty_token_fn(y) , rnd) \n", + " U_drop = self.cfg_drop(U, self.empty_unitary_fn(U), rnd) \n", + " \n", + " else:\n", + " rnd = torch.ones((b,), dtype=torch.int64, device=self.device)\n", + " y_drop, U_drop = y, U\n", + " \n", + " y_emb = self.text_encoder(y_drop, pool=False)\n", + " \n", + " #--------------------\n", + "\n", + " shuffle = torch.tensor(0, dtype=bool).bernoulli_(p=0.95)\n", + " \n", + " timesteps_h = self.sample_timesteps_low_variance(b, self.scheduler)\n", + " timesteps_w = self.sample_timesteps_low_variance(b, self.scheduler_w, shuffle=shuffle)\n", + "\n", + " \n", + " noise = torch.randn_like(latents) \n", + " noisy_latents_h = self.scheduler.add_noise( latents[..., :self.embedder.clr_dim], noise[..., :self.embedder.clr_dim], timesteps_h, train=train) \n", + " noisy_latents_w = self.scheduler_w.add_noise(latents[..., self.embedder.clr_dim:], noise[..., self.embedder.clr_dim:], timesteps_w, train=train)\n", + " \n", + " noisy_latents = torch.cat([noisy_latents_h, noisy_latents_w], dim=-1)\n", + "\n", + " #-------------------- \n", + " model_output = self.model(x=noisy_latents, t_h=timesteps_h, t_w=timesteps_w, c_emb=y_emb, U=U_drop, rnd=rnd)\n", + " \n", + " #--------------------\n", + "\n", + " if self.scheduler.prediction_type == \"epsilon\":\n", + " pred_target = noise\n", + " raise NotImplementedError()\n", + "\n", + " elif self.scheduler.prediction_type == \"v-type\":\n", + " alphas_cumprod_h = 
self.scheduler.unsqueeze_vector_to_shape(self.scheduler.alphas_cumprod[timesteps_h], latents.shape)\n", + " alphas_cumprod_w = self.scheduler_w.unsqueeze_vector_to_shape(self.scheduler_w.alphas_cumprod[timesteps_w], latents.shape)\n", + "\n", + " pred_target_h = alphas_cumprod_h.sqrt() * noise[..., :self.embedder.clr_dim] - (1-alphas_cumprod_h).sqrt() * latents[..., :self.embedder.clr_dim]\n", + " pred_target_w = alphas_cumprod_w.sqrt() * noise[..., self.embedder.clr_dim:] - (1-alphas_cumprod_w).sqrt() * latents[..., self.embedder.clr_dim:]\n", + " \n", + " else:\n", + " raise NotImplementedError(f\"{self.scheduler.prediction_type} does is not implemented for {self.__class__}\")\n", + " \n", + " #--------------------\n", + " \n", + " t_h = timesteps_h / (self.scheduler.num_train_timesteps-1)\n", + " # t_h = torch.sin(t_h*(torch.pi/2))**2 \n", + " # t_h = torch.sin(t_h*(torch.pi/2))\n", + " # -> else linear\n", + " \n", + " t_h = self.scheduler.unsqueeze_vector_to_shape(t_h, latents.shape)\n", + " SNR_h = (1.0-t_h) / (t_h+1e-8) + 1e-8 # flip prob to snr\n", + " mse_loss_weight_h = (1.0 - alphas_cumprod_h) * F.sigmoid(SNR_h.log())\n", + "\n", + " SNR_w = alphas_cumprod_w / (1.0-alphas_cumprod_w+1e-8) + 1e-8\n", + "\n", + " #comp mse\n", + " mse_flat = lambda out, target: (out-target).square().mean(dim=list(range(1, len(out.shape))))\n", + " loss_h = mse_flat(model_output[..., :self.embedder.clr_dim], pred_target_h.detach()) * mse_loss_weight_h.squeeze().detach()\n", + " loss_w = mse_flat(model_output[..., self.embedder.clr_dim:], pred_target_w.detach()) * mse_loss_weight_w.squeeze().detach()\n", + " \n", + " loss = loss_h.mean() + loss_w.mean()\n", + " return loss" + ] + }, + { + "cell_type": "markdown", + "id": "da3d5b04-0fdd-4951-9646-bbdecddd1cd8", + "metadata": {}, + "source": [ + "# Export -" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "cda0e6ab-0299-4f66-aadd-1343a547cce0", + "metadata": {}, + "outputs": [], + "source": [ + "#| 
hide\n", + "import nbdev; nbdev.nbdev_export()" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "python3", + "language": "python", + "name": "python3" + }, + "widgets": { + "application/vnd.jupyter.widget-state+json": { + "state": {}, + "version_major": 2, + "version_minor": 0 + } + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/src/pipeline/pipeline.ipynb b/src/pipeline/pipeline.ipynb index 278e3f5..80c4039 100644 --- a/src/pipeline/pipeline.ipynb +++ b/src/pipeline/pipeline.ipynb @@ -5,15 +5,9 @@ "id": "a8980c24-d62e-462b-ba89-3195cfdcc374", "metadata": {}, "source": [ - "# Pipeline" - ] - }, - { - "cell_type": "markdown", - "id": "330d8f41-d236-461e-b885-06a70caaf3e4", - "metadata": {}, - "source": [ - "Basic PyTorch pipeline for general training." + "# Pipeline\n", + "\n", + "> Basic PyTorch pipeline for general training. " ] }, { @@ -35,9 +29,12 @@ "source": [ "#| export\n", "from genQC.imports import *\n", - "from genQC.util import virtual, number_of_paramters, DataLoaders\n", - "from genQC.metrics import *\n", - "from genQC.config_loader import *" + "from genQC.utils.misc_utils import DataLoaders\n", + "from genQC.pipeline.metrics import *\n", + "from genQC.utils.config_loader import *\n", + "from genQC.pipeline.callbacks import run_cbs, Callback\n", + "\n", + "from huggingface_hub import snapshot_download" ] }, { @@ -56,23 +53,36 @@ "outputs": [], "source": [ "#| export\n", - "nn.Loss = Callable[[torch.Tensor, torch.Tensor], torch.Tensor]" + "Loss = Callable[[torch.Tensor, torch.Tensor], torch.Tensor]" ] }, { - "cell_type": "markdown", - "id": "f473fc41-0fc7-4153-b7b9-8f85750ba85f", + "cell_type": "code", + "execution_count": null, + "id": "6e686a8e-7175-4a27-a9e3-2587fe98e936", "metadata": {}, + "outputs": [], "source": [ - "## Pipeline" + "#|export\n", + "class CheckpointCB(Callback):\n", + " def __init__(self, ck_interval=None, ck_path=None): \n", + " super().__init__()\n", + " self.ck_interval = ck_interval\n", + " 
self.ck_path = ck_path\n", + " \n", + " def after_epoch(self, pipeline): \n", + " if exists(self.ck_interval) and exists(self.ck_path):\n", + " if (pipeline.epoch%self.ck_interval) == 0 and pipeline.epoch>0:\n", + " store_dir = f\"{self.ck_path}ck_{pipeline.epoch}/\"\n", + " pipeline.store_pipeline(config_path=store_dir, save_path=store_dir) " ] }, { "cell_type": "markdown", - "id": "146f78c1-7ded-4e89-9fec-2bbed9dd95c0", + "id": "f473fc41-0fc7-4153-b7b9-8f85750ba85f", "metadata": {}, "source": [ - "Note, uses functions that require: python>=3.9" + "## Pipeline" ] }, { @@ -83,8 +93,9 @@ "outputs": [], "source": [ "#| export\n", - "class Pipeline_IO: \n", + "class PipelineIO(abc.ABC): \n", " \"\"\"A class providing basic IO functionality.\"\"\"\n", + " \n", " def get_config(self, save_path: str, without_metadata=False): \n", " params_config = self.params_config(save_path) \n", " \n", @@ -110,9 +121,10 @@ " self.config = config \n", " return config\n", " \n", - " @virtual\n", + " @abc.abstractmethod\n", " def params_config(self, save_path: str): return None\n", - " \n", + "\n", + " @abc.abstractmethod\n", " def store_pipeline(self, config_path: str, save_path: str): \n", " if exists(config_path): os.makedirs(config_path, exist_ok=True) \n", " if exists(save_path): \n", @@ -120,9 +132,16 @@ " if hasattr(self, \"fit_losses\"): np.savetxt(save_path + \"fit_losses.txt\", self.fit_losses) \n", " if hasattr(self, \"fit_valid_losses\"): np.savetxt(save_path + \"fit_valid_losses.txt\", self.fit_valid_losses) \n", " \n", - " @virtual\n", " @staticmethod\n", - " def from_config_file(config_path, device: torch.device, save_path: str=None): return None " + " @abc.abstractmethod\n", + " def from_config_file(config_path, device: torch.device, save_path: str=None): return None \n", + "\n", + " @classmethod\n", + " def from_pretrained(cls, repo_id: str, device: torch.device, use_auth_token: bool = False, **kwargs): \n", + " \"\"\"Load a model pipeline directly from 
Huggingface.\"\"\"\n", + " model_path = snapshot_download(repo_id=repo_id, repo_type=\"model\", allow_patterns=[\"*.pt\", \"*.yaml\", \"*.safetensors\"], use_auth_token=use_auth_token, **kwargs) \n", + " pipeline = cls.from_config_file(model_path+\"/\", device) \n", + " return pipeline" ] }, { @@ -133,37 +152,55 @@ "outputs": [], "source": [ "#| export\n", - "class Pipeline(Pipeline_IO):\n", - " \"\"\"A `Pipeline_IO` class providing basic pytorch model training functionality.\"\"\"\n", + "class Pipeline(PipelineIO):\n", + " \"\"\"A `PipelineIO` class providing basic pytorch model training functionality.\"\"\"\n", " def __init__(self, \n", " model: nn.Module,\n", " device: torch.device):\n", - " self.model = model\n", + " self.model = model.to(device)\n", " self.device = device\n", - " \n", + "\n", + " self.trainables = []\n", + " self.trainables.append(self.model)\n", + " \n", " #------------------------------------\n", " \n", - " @virtual\n", + " @abc.abstractmethod\n", " def __call__(self, inp): pass\n", " \n", - " @virtual\n", + " @abc.abstractmethod\n", " def train_step(self, data, train=True, **kwargs): pass\n", "\n", " #------------------------------------\n", - " \n", - " def compile(self, optim_fn: type(torch.optim.Optimizer), loss_fn: nn.Loss, metrics: Union[Metric, list[Metric]]=None, lr=None, **kwargs): \n", + "\n", + " def _get_parameters(self):\n", + " parameters = itertools.chain(*[trainable.parameters() for trainable in self.trainables]) \n", + " return parameters\n", + " \n", + " def compile(self, optim_fn: type(torch.optim.Optimizer), loss_fn: Loss, metrics: Union[Metric, list[Metric]]=None, lr=None, cbs=None, compile_model=False, **kwargs): \n", " self.loss_fn = loss_fn()\n", " self.optim_fn = optim_fn \n", - " self.optimizer = optim_fn(self.model.parameters(), lr=lr, **kwargs) if lr else None\n", + "\n", + " if lr: self._reset_opt(lr, **kwargs)\n", + " else: self.optimizer = None\n", " \n", " metrics = {m.name:m for m in metrics} if metrics else 
{}\n", " #metrics |= {f\"{m.name}_valid\":m for m in metrics.values()}\n", " metrics[\"loss\"] = Mean(\"loss\", self.device) \n", " metrics[\"loss_valid\"] = Mean(\"loss_valid\", self.device) \n", " \n", - " self.metrics = metrics \n", - " \n", - " def _reset_opt(self, lr, **kwargs): self.optimizer = self.optim_fn(self.model.parameters(), lr, **kwargs) \n", + " self.metrics = metrics \n", + " self.cbs = cbs\n", + " \n", + " if platform.system() == \"Linux\" and compile_model:\n", + " print(\"[INFO]: Linux, compile model with torch\")\n", + " torch._dynamo.reset()\n", + " #self.model = torch.compile(self.model) #, fullgraph=True, mode =\"max-autotune\")\n", + "\n", + " for model in self.trainables:\n", + " model.compile()\n", + "\n", + " def _reset_opt(self, lr, **kwargs): self.optimizer = self.optim_fn(self._get_parameters(), lr=lr, **kwargs)\n", " \n", " def _set_opt_param(self, lr, **kwargs):\n", " '''at least lr: Does not reset existing optimizer, only changes learn rate.'''\n", @@ -187,21 +224,26 @@ " #backprob\n", " loss.backward()\n", "\n", + " # torch.nn.utils.clip_grad_norm_(self.model.parameters(), 1)\n", + " \n", " #update weights\n", " self.optimizer.step()\n", " \n", " return loss.detach()\n", " \n", " def train_on_epoch(self, data_loader: DataLoader, train=True): \n", - " self.model.train(train)\n", + " # self.model.train(train)\n", + " for model in self.trainables:\n", + " model.train(train)\n", " \n", " mode = \"\" if train else \"_valid\"\n", - " \n", + "\n", " with self.progress_bar(total=len(data_loader), epoch=self.epoch, unit=\" batch\") as batch_prgb: \n", - " for batch, data in enumerate(data_loader): \n", + " for self.batch, data in enumerate(data_loader): \n", + "\n", " loss = self.train_on_batch(data, train=train) \n", " self.metrics[\"loss\"+mode].update_state(loss) \n", - " \n", + " \n", " if train:\n", " self.fit_losses.append(loss.item()) \n", " if self.lr_sched: self.lr_sched.step()\n", @@ -209,27 +251,33 @@ " #pack up metrics\n", " 
self.out_metric_dict = {m.name:m.result().tolist() for m in self.metrics.values() if not m.empty} \n", " self.end_batch_metrics(batch_prgb, **self.out_metric_dict) \n", - " \n", + " # run_cbs(self.cbs, \"after_batch\", self) # e.g. if max-number of batches is needed\n", + " \n", " #run on train and one on valid\n", " def fit(self, num_epochs: int, data_loaders: DataLoaders, lr: float=None, lr_sched=None, log_summary=True):\n", " if not hasattr(self, \"loss_fn\"): raise RuntimeError(\"'compile' has to be called first\") \n", " \n", - " self._set_opt_param(lr=lr) \n", - " if lr_sched: self.lr_sched = lr_sched(self.optimizer)\n", - " else: self.lr_sched = None\n", - " \n", + " self._set_opt_param(lr=lr) \n", + " if not hasattr(self, \"lr_sched\"):\n", + " if lr_sched: self.lr_sched = lr_sched(self.optimizer)\n", + " else: self.lr_sched = None\n", + "\n", + " self.epoch = 0\n", " self.num_epochs = num_epochs\n", - " self.epochs = range(num_epochs)\n", + " \n", " self.fit_losses = []\n", " self.fit_valid_losses = [] \n", " self.batch_size = data_loaders.train.batch_size \n", " self.dataset_size_train = len(data_loaders.train)\n", " if data_loaders.valid: self.dataset_size_valid = len(data_loaders.valid)\n", + "\n", + " run_cbs(self.cbs, \"before_fit\", self)\n", + "\n", + " self.epochs = range(self.epoch, num_epochs) #after callback so we could resume training on a specific self.epoch\n", " \n", - " \n", - " with self.progress_bar(total=num_epochs, desc=\"Fit\", unit=\" epoch\") as epoch_prgb: \n", + " with self.progress_bar(total=len(self.epochs), desc=\"Fit\", unit=\" epoch\") as epoch_prgb: \n", " for self.epoch in self.epochs: \n", - " \n", + "\n", " #reset all metrics\n", " for m in self.metrics.values(): m.reset_state() \n", " \n", @@ -243,23 +291,36 @@ " self.out_metric_dict[\"loss_valid\"] ]) \n", " \n", " self.end_epoch_metrics(epoch_prgb, **self.out_metric_dict)\n", - "\n", + " run_cbs(self.cbs, \"after_epoch\", self)\n", + " \n", " 
self.fit_summary(log_summary=log_summary)\n", + " run_cbs(self.cbs, \"after_fit\", self)\n", " \n", " #------------------------------------\n", " \n", - " def summary(self): print(\"Number of model parameters:\", number_of_paramters(self.model))\n", + " def summary(self): \n", + "\n", + " cnt_params = lambda parameters: sum([p.numel() for p in parameters])\n", + "\n", + " s = \"Pipeline stats of explicit trainables\"\n", + " \n", + " for trainable in self.trainables:\n", + " name = str(trainable.__class__)\n", + " all_params = trainable.parameters()\n", + " trainable_params = filter(lambda p: p.requires_grad, trainable.parameters())\n", + " s += \"\\n\" + f\" - {name}: Total={cnt_params(all_params):0.2e} Trainable={cnt_params(trainable_params):0.2e}\"\n", + " return s\n", "\n", " def fit_summary(self, figsize=(12,2), log_summary=True, return_fig=False):\n", - " fig = plt.figure(figsize=figsize, constrained_layout=True) \n", - " plt.xlabel(\"Batches\")\n", + " fig = plt.figure(figsize=figsize, constrained_layout=True, dpi=150) \n", + " plt.xlabel(\"Number of batches / update steps\")\n", " plt.ylabel(\"Loss\")\n", " if log_summary: plt.yscale('log') \n", " plt.plot(self.fit_losses, label=\"train\")\n", " if len(self.fit_valid_losses) > 0: \n", " data = np.array(self.fit_valid_losses)\n", - " plt.plot(data[:,0],data[:,1], label=\"valid\", color=\"tab:orange\")\n", - " plt.plot(data[:,0],data[:,1], \".\", color=\"tab:orange\")\n", + " plt.plot(data[:, 0],data[:, 1], label=\"valid\", color=\"tab:orange\")\n", + " plt.plot(data[:, 0],data[:, 1], \".\", color=\"tab:orange\")\n", " plt.legend()\n", " if return_fig: return fig\n", " plt.show()\n", @@ -300,8 +361,7 @@ " def end_epoch_metrics(self, prgb:tqdm, epoch: int=None, **metrics): self.end_progress_bar_iteration(prgb, False, \"Epoch\", epoch, **metrics) \n", " def end_batch_metrics(self, prgb:tqdm, batch: int=None, **metrics): self.end_progress_bar_iteration(prgb, False, \"Batch\", batch, **metrics)\n", " \n", - " 
#------------------------------------\n", - " " + " #------------------------------------" ] }, { diff --git a/src/pipeline/unitary_clip_pipeline.ipynb b/src/pipeline/unitary_clip_pipeline.ipynb new file mode 100644 index 0000000..ed06785 --- /dev/null +++ b/src/pipeline/unitary_clip_pipeline.ipynb @@ -0,0 +1,197 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "id": "a8980c24-d62e-462b-ba89-3195cfdcc374", + "metadata": {}, + "source": [ + "# Unitary CLIP Pipeline\n", + "\n", + "> Pipeline for contrastive pre-training of an unitary encoder" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "a8832bdd-f61c-44e1-8619-a9cb352ba768", + "metadata": {}, + "outputs": [], + "source": [ + "#| default_exp pipeline.unitary_clip_pipeline" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "06272f6f-b4e3-4504-a90a-feebbf6ad821", + "metadata": {}, + "outputs": [], + "source": [ + "#| export\n", + "from genQC.imports import *\n", + "from genQC.pipeline.pipeline import Pipeline\n", + "from genQC.utils.config_loader import *\n", + "from genQC.models.config_model import ConfigModel" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "9ea10ee8-e231-421c-8fca-3c5dc3e57743", + "metadata": {}, + "outputs": [], + "source": [ + "#| export\n", + "class UnitaryCLIPPipeline(Pipeline):\n", + " def __init__(self, \n", + " model: nn.Module,\n", + " device: torch.device) -> None:\n", + " super().__init__(model, device) \n", + " \n", + " self.loss = nn.CrossEntropyLoss()\n", + " self.device = device\n", + "\n", + " #------------------------------------\n", + " \n", + " add_config = {}\n", + " \n", + " def params_config(self, save_path: str) -> dict: \n", + " params_config = {}\n", + " \n", + " params_config[\"model\"] = self.model.get_config(save_path=save_path+\"model\")\n", + " params_config[\"unitary_text_encoder\"] = self.model.unitary_text_encoder.get_config(save_path=None)\n", + " params_config[\"circuit_encoder\"] = 
self.model.circuit_encoder.get_config(save_path=None)\n", + " \n", + " params_config[\"device\"] = str(self.device)\n", + " params_config[\"add_config\"] = self.add_config\n", + " \n", + " return params_config\n", + "\n", + " def store_pipeline(self, config_path: str, save_path: str):\n", + " super().store_pipeline(config_path, save_path)\n", + " config = self.get_config(save_path)\n", + " save_dict_yaml(config, config_path+\"config.yaml\")\n", + " \n", + " self.model.store_model(config_path=None, save_path=save_path+\"model\")\n", + "\n", + " @staticmethod\n", + " def from_config_file(config_path, device: torch.device, save_path: str=None): \n", + " config = load_config(config_path+\"config.yaml\") \n", + " config = config_to_dict(config)\n", + "\n", + " def _get_save_path(config_save_path, appendix):\n", + " _save_path = default(save_path, config_path) + appendix\n", + " if \"save_path\" in config_save_path:\n", + " _save_path = config_save_path[\"save_path\"]\n", + " return _save_path \n", + "\n", + " if exists(device):\n", + " config[\"params\"][\"device\"] = device\n", + " config[\"params\"][\"model\"][\"params\"][\"text_encoder_config\"][\"device\"] = device\n", + "\n", + " unitary_text_encoder = ConfigModel.from_config(config[\"params\"].pop(\"unitary_text_encoder\", None), device, None) \n", + " circuit_encoder = ConfigModel.from_config(config[\"params\"].pop(\"circuit_encoder\", None), device, None) \n", + " \n", + " config[\"params\"][\"model\"][\"params\"][\"unitary_text_encoder\"] = unitary_text_encoder\n", + " config[\"params\"][\"model\"][\"params\"][\"circuit_encoder\"] = circuit_encoder\n", + " config[\"params\"][\"model\"] = ConfigModel.from_config(config[\"params\"][\"model\"], device, _get_save_path(config[\"params\"][\"model\"], \"model\"))\n", + "\n", + " add_config = config[\"params\"].pop(\"add_config\", None)\n", + "\n", + " pipeline = instantiate_from_config(config)\n", + " \n", + " if exists(pipeline.add_config):\n", + " 
pipeline.add_config = add_config\n", + " \n", + " return pipeline\n", + " \n", + " #------------------------------------\n", + " # Inference functions\n", + " \n", + " @torch.no_grad() \n", + " def __call__(self, tokens: torch.Tensor, params: torch.Tensor, y: torch.Tensor, U: torch.Tensor, softmax=True) -> torch.Tensor:\n", + " #compute the score of img-label pairs for classification!!\n", + " self.model.eval()\n", + " \n", + " scores = self.model(tokens=tokens, params=params, y=y, U=U) #[b, b] \n", + " \n", + " if softmax: \n", + " scores = F.softmax(scores, dim-1)\n", + " \n", + " return scores\n", + " \n", + " #------------------------------------\n", + " # Training functions\n", + " \n", + " def get_loss(self, tokens: torch.Tensor, params: torch.Tensor, y: torch.Tensor, U: torch.Tensor) -> torch.Tensor:\n", + " \n", + " scores = self.model(tokens=tokens, params=params, y=y, U=U) #[b, b]\n", + " \n", + " #scores is: I=unitary_text T=circuit\n", + " #--------------------------------\n", + " #| I1*T1 I1*T2 I1*T3 ...\n", + " #| I2*T1\n", + " #| I3*T1\n", + " # ...\n", + " #--------------------------------\n", + "\n", + " target = torch.arange(scores.shape[0], device=scores.device)\n", + " \n", + " loss_unitary_text = self.loss(scores , target)\n", + " loss_circuit = self.loss(scores.T, target)\n", + " \n", + " #symmetric loss\n", + " loss = (loss_unitary_text + loss_circuit) / 2.0\n", + " \n", + " return loss\n", + " \n", + " def train_step(self, data, **kwargs): \n", + " tokens, y, params, U = data \n", + " \n", + " tokens = tokens.to(self.device) \n", + " params = params.to(self.device) \n", + " y = y.to(self.device) \n", + " U = U.to(torch.float32).to(self.device)\n", + " \n", + " loss = self.get_loss(tokens=tokens, params=params, y=y, U=U)\n", + "\n", + " return loss" + ] + }, + { + "cell_type": "markdown", + "id": "f41f26a8-ac40-4e91-8c0e-1ef07a0fd4f4", + "metadata": {}, + "source": [ + "# Export -" + ] + }, + { + "cell_type": "code", + "execution_count": 
null, + "id": "a0474216-8e0c-4ba7-9a37-571ac7d8e82c", + "metadata": {}, + "outputs": [], + "source": [ + "#| hide\n", + "import nbdev; nbdev.nbdev_export()" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "python3", + "language": "python", + "name": "python3" + }, + "widgets": { + "application/vnd.jupyter.widget-state+json": { + "state": {}, + "version_major": 2, + "version_minor": 0 + } + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/src/platform/backends/base_backend.ipynb b/src/platform/backends/base_backend.ipynb new file mode 100644 index 0000000..3bf36b2 --- /dev/null +++ b/src/platform/backends/base_backend.ipynb @@ -0,0 +1,100 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "id": "abb1587f-e8df-41df-97ca-a7b338d217d0", + "metadata": {}, + "source": [ + "# Base backend\n", + "\n", + "> Base class of corresponding backends." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "15d245ad-f61a-4c43-aae6-a2054f1ab9ee", + "metadata": {}, + "outputs": [], + "source": [ + "#| default_exp platform.backends.base_backend" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "e8dd5677-66ab-40f7-93d2-ac88b5dfc4e0", + "metadata": {}, + "outputs": [], + "source": [ + "#| export\n", + "from genQC.imports import *" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "009a348f-06d0-48ed-9e99-ca2d7dbe0cbd", + "metadata": {}, + "outputs": [], + "source": [ + "#| export\n", + "class BaseBackend(abc.ABC):\n", + " \"\"\"Backends implement at least these functions.\"\"\"\n", + "\n", + " BASIC_BACKEND_TYPE = type[Any]\n", + " \n", + " @abc.abstractmethod\n", + " def backend_to_genqc(self, *args, **kwargs):\n", + " raise NotImplementedError()\n", + "\n", + " @abc.abstractmethod\n", + " def genqc_to_backend(self, *args, **kwargs):\n", + " raise NotImplementedError()\n", + "\n", + " @abc.abstractmethod\n", + " def get_unitary(self, *args, **kwargs):\n", + " raise NotImplementedError()\n", + 
"\n", + " @abc.abstractmethod\n", + " def draw(self, *args, **kwargs) -> None:\n", + " raise NotImplementedError()" + ] + }, + { + "cell_type": "markdown", + "id": "39e56f2e-a161-46ca-8239-a61e6f86cd44", + "metadata": {}, + "source": [ + "# Export -" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "f321b61f-afe7-4b3d-9a50-b0a0b1bd6139", + "metadata": {}, + "outputs": [], + "source": [ + "#| hide\n", + "import nbdev; nbdev.nbdev_export()" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "python3", + "language": "python", + "name": "python3" + }, + "widgets": { + "application/vnd.jupyter.widget-state+json": { + "state": {}, + "version_major": 2, + "version_minor": 0 + } + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/src/platform/backends/circuits_cudaq.ipynb b/src/platform/backends/circuits_cudaq.ipynb new file mode 100644 index 0000000..22b8e6c --- /dev/null +++ b/src/platform/backends/circuits_cudaq.ipynb @@ -0,0 +1,472 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "id": "5fb2a609-e10c-4af6-9740-676112148336", + "metadata": {}, + "source": [ + "# CUDA-Q circuits backend\n", + "\n", + "> [CUDA-Q](https://github.com/NVIDIA/cuda-quantum) based quantum circuit backend." 
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "a7a65d39-ffb3-441b-bb84-81e0f18e0769", + "metadata": {}, + "outputs": [], + "source": [ + "#| default_exp platform.backends.circuits_cudaq" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "d8d25f86-82c1-482e-997e-cb08141fbcf5", + "metadata": {}, + "outputs": [], + "source": [ + "#| export\n", + "from genQC.imports import *\n", + "from genQC.platform.backends.base_backend import BaseBackend\n", + "from genQC.platform.circuits_instructions import CircuitInstructions\n", + "\n", + "import cudaq" + ] + }, + { + "cell_type": "markdown", + "id": "56ceb33f-7ea8-429a-9a1e-0a232d34e049", + "metadata": {}, + "source": [ + "## Utils" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "93cd70f8-cc88-4776-9150-ef7e1346bd75", + "metadata": {}, + "outputs": [], + "source": [ + "#| export\n", + "@dataclass\n", + "class ParametrizedCudaqKernel:\n", + " kernel: cudaq.kernel\n", + " params: list[float] # currently only support 1 angle per gate" + ] + }, + { + "cell_type": "markdown", + "id": "ce9ba9e0-b6ed-45cb-9c02-dcdbf37f8fb1", + "metadata": {}, + "source": [ + "## Backend" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "31d639ab-a606-4d19-a998-60eca46dd0a2", + "metadata": {}, + "outputs": [], + "source": [ + "#| export\n", + "class CircuitsCudaqBackend(BaseBackend):\n", + "\n", + " BASIC_BACKEND_TYPE = type[cudaq.kernel]\n", + "\n", + " def __init__(self, target: str = \"qpp-cpu\") -> None:\n", + " cudaq.reset_target()\n", + " cudaq.set_target(target) # 'nvidia'\n", + " \n", + " def backend_to_genqc(self):\n", + " raise NotImplementedError(\"Not implemeted cudaq to genQC.\")\n", + "\n", + " # Has to match with insides of belows kernel\n", + " KERNEL_VOCABULARY = {\"h\":1, \n", + " \"cx\":2, \n", + " \"z\":3, \n", + " \"x\":4, \n", + " \"y\":5, \n", + " \"ccx\":6, \n", + " \"swap\":7,\n", + " \"rx\":8,\n", + " \"ry\":9,\n", + " \"rz\":10,\n", + 
" \"cp\":11,} \n", + "\n", + " def _construct_kernel(self,\n", + " gate_list: List[str],\n", + " target_1_nodes_list: List[int],\n", + " target_2_nodes_list: List[int],\n", + " control_1_nodes_list: List[int],\n", + " control_2_nodes_list: List[int]\n", + " ) -> cudaq.kernel:\n", + " \"\"\"Construct a `cudaq.kernel` from provided paramters.\"\"\"\n", + " \n", + " num_gates = len(gate_list)\n", + " gate_list = [self.KERNEL_VOCABULARY[g] for g in gate_list]\n", + "\n", + " # Note: `@cudaq.kernel` decorator has a overhead of 20ms, regardless of the for-loop inside\n", + " \n", + " @cudaq.kernel\n", + " def place_gate_kernel(gate: int, \n", + " qvector: cudaq.qview,\n", + " target_1: int, \n", + " target_2: int, \n", + " control_1: int, \n", + " control_2: int,\n", + " theta: float): \n", + " \n", + " if gate == 1: h(qvector[target_1])\n", + " elif gate == 2: cx(qvector[control_1], qvector[target_1])\n", + " elif gate == 3: z(qvector[target_1])\n", + " elif gate == 4: x(qvector[target_1])\n", + " elif gate == 5: y(qvector[target_1])\n", + " elif gate == 6: x.ctrl(qvector[control_1], qvector[control_2], qvector[target_1])\n", + " elif gate == 7: swap(qvector[target_1], qvector[target_2])\n", + " \n", + " elif gate == 8: rx(theta, qvector[target_1])\n", + " elif gate == 9: ry(theta, qvector[target_1])\n", + " elif gate == 10: rz(theta, qvector[target_1])\n", + " \n", + " elif gate == 11: \n", + " # R1 applies the unitary transformation; i.e. 
it is a phase gate\n", + " # R1(λ) = | 1 0 |\n", + " # | 0 exp(iλ) |\n", + " r1.ctrl(theta, qvector[target_1], qvector[target_2])\n", + " \n", + " \n", + " @cudaq.kernel \n", + " def kernel(input_state: list[complex], thetas: list[float]):\n", + " qvector = cudaq.qvector(input_state)\n", + " for i in range(num_gates):\n", + " place_gate_kernel(gate_list[i], qvector, target_1_nodes_list[i], target_2_nodes_list[i], control_1_nodes_list[i], control_2_nodes_list[i], thetas[i])\n", + " \n", + " return kernel\n", + "\n", + " def check_error_circuit(self, \n", + " gate: str, \n", + " num_target_nodes: int, \n", + " num_control_nodes: int) -> bool:\n", + " \"\"\"Check number of connections of given gate. Used to check for error circuits.\"\"\"\n", + "\n", + " if gate not in self.KERNEL_VOCABULARY:\n", + " raise NotImplementedError(f\"Unknown gate {gate}, not in `self.KERNEL_VOCABULARY`.\")\n", + " \n", + " if gate in [\"h\", \"z\", \"x\", \"y\", \"rx\", \"ry\", \"rz\"]:\n", + " if num_target_nodes != 1 or num_control_nodes !=0: return False\n", + "\n", + " elif gate in [\"cx\"]:\n", + " if num_target_nodes != 1 or num_control_nodes !=1: return False\n", + "\n", + " elif gate in [\"ccx\"]:\n", + " if num_target_nodes != 1 or num_control_nodes !=2: return False\n", + "\n", + " elif gate in [\"swap\", \"cp\"]:\n", + " if num_target_nodes != 2 or num_control_nodes !=0: return False\n", + "\n", + " else:\n", + " raise NotImplementedError(f\"Unknown gate {gate}, implemetation is faulty!\")\n", + "\n", + " return True\n", + " \n", + " def genqc_to_backend(self, instructions: CircuitInstructions) -> cudaq.kernel:\n", + " \"\"\"Convert given genQC `CircuitInstructions` to a `cudaq.kernel`.\"\"\"\n", + "\n", + " _params = torch.tensor([\n", + " instruction.params if instruction.params else torch.nan \n", + " for instruction in instructions.data\n", + " ]) # ... 
[seq, nP]\n", + " \n", + " assert _params.shape[1] == 1 #only support nP=1 for now\n", + " _params = _params.squeeze()\n", + "\n", + " #--------------------\n", + " \n", + " # num_qubits = instructions.num_qubits\n", + " num_gates = instructions.length\n", + "\n", + " # @cudaq.kernel can only take list[int] and no str directly\n", + " # -> we have to map everything to list[int] \n", + " # set default value to 9999 so an error wil be raised if we have a faulty tensor encoding\n", + " \n", + " gate_list = []\n", + " target_1_nodes_list = [9999] * num_gates\n", + " target_2_nodes_list = [9999] * num_gates\n", + " control_1_nodes_list = [9999] * num_gates\n", + " control_2_nodes_list = [9999] * num_gates\n", + "\n", + " for i, instruction in enumerate(instructions.data):\n", + "\n", + " gate = instruction.name.lower()\n", + " control_nodes = instruction.control_nodes\n", + " target_nodes = instruction.target_nodes\n", + " \n", + " num_target_nodes = len(target_nodes)\n", + " num_control_nodes = len(control_nodes)\n", + " \n", + " if not self.check_error_circuit(gate, num_target_nodes, num_control_nodes):\n", + " return None\n", + " \n", + " gate_list.append(gate)\n", + " \n", + " if num_target_nodes > 0:\n", + " target_1_nodes_list[i] = target_nodes[0]\n", + " if num_target_nodes > 1: \n", + " target_2_nodes_list[i] = target_nodes[1] \n", + " \n", + " if num_control_nodes > 0:\n", + " control_1_nodes_list[i] = control_nodes[0] \n", + " if num_control_nodes > 1: \n", + " control_2_nodes_list[i] = control_nodes[1] \n", + " \n", + " #--------------------\n", + " _kernel = self._construct_kernel(gate_list, target_1_nodes_list, target_2_nodes_list, control_1_nodes_list, control_2_nodes_list)\n", + "\n", + " return ParametrizedCudaqKernel(kernel=_kernel, params=_params.tolist())\n", + " \n", + " def get_unitary(self, parametrizedCudaqKernel: ParametrizedCudaqKernel, num_qubits: int) -> np.ndarray:\n", + " \"\"\"Return the unitary matrix of a `cudaq.kernel`. 
Currently relies on simulation, could change in future releases of cudaq.\"\"\"\n", + "\n", + " kernel, thetas = parametrizedCudaqKernel.kernel, parametrizedCudaqKernel.params\n", + " \n", + " N = 2**num_qubits\n", + " U = np.zeros((N, N), dtype=np.complex128)\n", + " \n", + " for j in range(N): \n", + " state_j = np.zeros((N), dtype=np.complex128) \n", + " state_j[j] = 1\n", + " \n", + " U[:, j] = np.array(cudaq.get_state(kernel, state_j, thetas), copy=False)\n", + " \n", + " return U\n", + "\n", + " def draw(self, kernel: cudaq.kernel, num_qubits: int, **kwargs) -> None:\n", + " \"\"\"Draw the given `cudaq.kernel` using cudaq.\"\"\" \n", + " c = [0] * (2**num_qubits)\n", + " c[0] = 1\n", + " print(cudaq.draw(kernel, c))" + ] + }, + { + "cell_type": "markdown", + "id": "53c81cbb-6660-415f-84af-6f7c1db3bbc1", + "metadata": {}, + "source": [ + "## Test " + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "ebd4e5fc-d36b-426a-a25d-1ac9b6f512e2", + "metadata": {}, + "outputs": [], + "source": [ + "from genQC.platform.tokenizer.circuits_tokenizer import CircuitTokenizer" + ] + }, + { + "cell_type": "markdown", + "id": "808e0504-771f-4e75-b9eb-ec49002f45a1", + "metadata": {}, + "source": [ + "### genqc <-> backend" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "85c251bf-8b3f-444c-b965-710f8960246b", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "CircuitInstruction(name='h', control_nodes=[], target_nodes=[0], params=[0.0])\n", + "CircuitInstruction(name='ry', control_nodes=[], target_nodes=[2], params=[0.10000012069940567])\n", + "CircuitInstruction(name='ccx', control_nodes=[0, 2], target_nodes=[1], params=[0.0])\n", + "CircuitInstruction(name='swap', control_nodes=[], target_nodes=[1, 2], params=[0.0])\n", + "CircuitInstruction(name='rx', control_nodes=[], target_nodes=[1], params=[2.299999713897705])\n", + "CircuitInstruction(name='cp', control_nodes=[], target_nodes=[0, 
1], params=[0.7000001072883606])\n" + ] + } + ], + "source": [ + "tensor = torch.tensor([\n", + " [1, 0, -2, 0, 0, 5],\n", + " [0, 0, 2, 3, 4, 5],\n", + " [0, 6, -2, 3, 0, 0],\n", + " ], dtype=torch.int32)\n", + "\n", + "params_tensor = torch.tensor([[0, 0.1, 0, 0, 2.3, 0.7]])/(2*np.pi) - 1\n", + "\n", + "vocabulary = {\"h\":1, \"ccx\":2, \"swap\":3, \"rx\":4, \"cp\": 5, \"ry\":6}\n", + "tokenizer = CircuitTokenizer(vocabulary)\n", + "instructions = tokenizer.decode(tensor, params_tensor)\n", + "\n", + "instructions.print()" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "397b03a8-4abe-4557-97a4-0729384788d6", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + " ╭───╮ \n", + "q0 : ───┤ h ├─────●─────────────────────●─────\n", + " ╰───╯ ╭─┴─╮ ╭─────────╮╭────┴────╮\n", + "q1 : ───────────┤ x ├─╳─┤ rx(2.3) ├┤ r1(0.7) ├\n", + " ╭─────────╮╰─┬─╯ │ ╰─────────╯╰─────────╯\n", + "q2 : ┤ ry(0.1) ├──●───╳───────────────────────\n", + " ╰─────────╯ \n", + "\n", + "Measurement distribution:{ 000:85 010:424 100:83 101:1 110:406 111:1 }\n", + "\n" + ] + } + ], + "source": [ + "N = 2**instructions.num_qubits\n", + "\n", + "backend = CircuitsCudaqBackend()\n", + "parametrizedCudaqKernel = backend.genqc_to_backend(instructions)\n", + "\n", + "kernel, thetas = parametrizedCudaqKernel.kernel, parametrizedCudaqKernel.params\n", + "\n", + "c = [0] * N\n", + "c[0] = 1\n", + "\n", + "print(cudaq.draw(kernel, c, thetas))\n", + "\n", + "results = cudaq.sample(kernel, c, thetas)\n", + "print(\"Measurement distribution:\" + str(results))" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "1c07e877-6aaa-4e1a-98e1-3275c86084bd", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "[[ 0.29-0.03j 0.29-0.03j 0. +0.j 0. +0.j -0.01-0.64j -0.01-0.64j 0. +0.j 0. +0.j ]\n", + " [ 0.29+0.j -0.29+0.j 0. -0.03j 0. +0.03j -0.01+0.j 0.01+0.j 0. -0.64j 0. 
+0.64j]\n", + " [ 0.01-0.64j 0.01-0.64j 0. +0.j 0. +0.j 0.29+0.03j 0.29+0.03j 0. +0.j 0. +0.j ]\n", + " [ 0.42-0.49j -0.42+0.49j 0.01+0.01j -0.01-0.01j -0.02+0.02j 0.02-0.02j 0.22+0.19j -0.22-0.19j]\n", + " [ 0. +0.j 0. +0.j 0.29-0.03j 0.29-0.03j 0. +0.j 0. +0.j -0.01-0.64j -0.01-0.64j]\n", + " [ 0. -0.03j 0. +0.03j 0.29+0.j -0.29+0.j 0. -0.64j 0. +0.64j -0.01+0.j 0.01+0.j ]\n", + " [ 0. +0.j 0. +0.j 0.01-0.64j 0.01-0.64j 0. +0.j 0. +0.j 0.29+0.03j 0.29+0.03j]\n", + " [ 0.01+0.01j -0.01-0.01j 0.42-0.49j -0.42+0.49j 0.22+0.19j -0.22-0.19j -0.02+0.02j 0.02-0.02j]]\n" + ] + } + ], + "source": [ + "U = backend.get_unitary(parametrizedCudaqKernel, instructions.num_qubits)\n", + "print(np.round(U, 2))" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "e85cbaf0-9768-47ce-9799-a937b240472f", + "metadata": {}, + "outputs": [], + "source": [ + "U = np.matrix(U)\n", + "assert np.allclose(U.H@U, np.eye(N)) and np.allclose(U@U.H, np.eye(N))" + ] + }, + { + "cell_type": "markdown", + "id": "552d2a7b-9921-4463-8513-40bdefe45cf2", + "metadata": {}, + "source": [ + "## Time targets" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "c2e89596-1465-4349-a9d5-e0c1cf7cad92", + "metadata": {}, + "outputs": [], + "source": [ + "def time_target(target):\n", + " if cudaq.has_target(target):\n", + " cudaq.reset_target()\n", + " cudaq.set_target(target)\n", + " res = %timeit -o -q backend.get_unitary(parametrizedCudaqKernel, instructions.num_qubits)\n", + " print(f\"Timeit {target=}: {str(res)}\")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "2f4c1c71-13b0-4fe9-9736-1f5bca7de801", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Timeit target='qpp-cpu': 705 μs ± 758 ns per loop (mean ± std. dev. of 7 runs, 1,000 loops each)\n", + "Timeit target='nvidia': 4.68 ms ± 23.7 μs per loop (mean ± std. dev. 
of 7 runs, 100 loops each)\n" + ] + } + ], + "source": [ + "targets = [\"qpp-cpu\", \"nvidia\"]\n", + "for target in targets:\n", + " time_target(target)" + ] + }, + { + "cell_type": "markdown", + "id": "2f0718e3-8282-4fd3-b883-4f2be50e4912", + "metadata": {}, + "source": [ + "# Export -" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "1f12d3ca-ebf9-4752-9d61-2fbd26f027db", + "metadata": {}, + "outputs": [], + "source": [ + "#| hide\n", + "import nbdev; nbdev.nbdev_export()" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "python3", + "language": "python", + "name": "python3" + }, + "widgets": { + "application/vnd.jupyter.widget-state+json": { + "state": {}, + "version_major": 2, + "version_minor": 0 + } + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/src/platform/backends/circuits_pennylane.ipynb b/src/platform/backends/circuits_pennylane.ipynb new file mode 100644 index 0000000..ca02532 --- /dev/null +++ b/src/platform/backends/circuits_pennylane.ipynb @@ -0,0 +1,305 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "id": "5fb2a609-e10c-4af6-9740-676112148336", + "metadata": {}, + "source": [ + "# Pennylane circuits backend\n", + "\n", + "> [PennyLane](https://pennylane.ai/) based quantum circuit backend." 
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "a7a65d39-ffb3-441b-bb84-81e0f18e0769", + "metadata": {}, + "outputs": [], + "source": [ + "#| default_exp platform.backends.circuits_pennylane" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "d8d25f86-82c1-482e-997e-cb08141fbcf5", + "metadata": {}, + "outputs": [], + "source": [ + "#| export\n", + "from genQC.imports import *\n", + "from genQC.platform.backends.base_backend import BaseBackend\n", + "from genQC.platform.circuits_instructions import CircuitInstructions\n", + "from genQC.platform.tokenizer.base_tokenizer import Vocabulary\n", + "from genQC.utils.config_loader import get_obj_from_str\n", + "\n", + "import pennylane as qml\n", + "import pennylane.ops as pennylane_ops" + ] + }, + { + "cell_type": "markdown", + "id": "1e6ecfe9-0365-49e3-bdf3-3902c5f52fcd", + "metadata": {}, + "source": [ + "## Utils" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "dc3b8e1c-c756-4673-960a-c867d1248ab1", + "metadata": {}, + "outputs": [], + "source": [ + "#| export\n", + "def instruction_name_to_pennylane_name(name: str) -> str:\n", + " \"\"\"Maps instruction names to PennyLane names.\"\"\"\n", + "\n", + " _maps = {\n", + " \"ccx\": \"Toffoli\",\n", + " \"cp\": \"CPhase\",\n", + " \"cx\": \"CNOT\",\n", + " }\n", + " \n", + " if name in _maps: \n", + " name = _maps[name] \n", + " else:\n", + " name = name.upper()\n", + " \n", + " return name" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "c9aa20aa-ae43-4d25-99e6-cff04e7fd897", + "metadata": {}, + "outputs": [], + "source": [ + "#| export\n", + "@dataclass\n", + "class ParametrizedPennylaneCircuit:\n", + " circuit: qml.QNode\n", + " params: torch.Tensor" + ] + }, + { + "cell_type": "markdown", + "id": "7cd858b0-e512-43d0-acf2-b76e9f06e5af", + "metadata": {}, + "source": [ + "## Backend" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": 
"31d639ab-a606-4d19-a998-60eca46dd0a2", + "metadata": {}, + "outputs": [], + "source": [ + "#| export\n", + "class CircuitsPennylaneBackend(BaseBackend):\n", + " \"\"\"A backend for [PennyLane](https://pennylane.ai/).\"\"\"\n", + " \n", + " def backend_to_genqc(self, qc: ParametrizedPennylaneCircuit, ignore_barriers: bool = True) -> CircuitInstructions:\n", + " \"\"\"Convert a given Pennylane `ParametrizedPennylaneCircuit` to genQC `CircuitInstructions`.\"\"\"\n", + " raise NotImplementedError()\n", + "\n", + " def genqc_to_backend(self, \n", + " instructions: CircuitInstructions, \n", + " flip_qubit_order: bool = True,\n", + " place_barriers: bool = False,\n", + " ignore_errors: bool = False,\n", + " place_error_placeholders: bool = False) -> ParametrizedPennylaneCircuit: \n", + " \"\"\"\n", + " Convert given genQC `CircuitInstructions` to a `ParametrizedPennylaneCircuit`.\n", + " - flip_qubit_order ... e.g. needed when using little-endian definition.\n", + " \"\"\"\n", + "\n", + " _params = torch.tensor([\n", + " instruction.params if instruction.params else torch.nan \n", + " for instruction in instructions.data\n", + " ]) # ... [seq, nP]\n", + " \n", + " assert _params.shape[1] == 1 #only support nP=1 for now\n", + " _params = _params.squeeze() # swap so we have batched [1, seq]\n", + "\n", + " N = instructions.num_qubits\n", + " dev = qml.device(\"default.qubit\", wires=N)\n", + "\n", + " @qml.qnode(dev, interface='torch')\n", + " def _circuit(params):\n", + " for i, instruction in enumerate(instructions.data): \n", + " \n", + " _name = instruction_name_to_pennylane_name(instruction.name)\n", + " \n", + " op = getattr(pennylane_ops, _name)\n", + " \n", + " # The first wire provided corresponds to the control qubit.\n", + " # e.g. 
is qml.H(0)\n", + " _wires = (*instruction.control_nodes, *instruction.target_nodes)\n", + " \n", + " if flip_qubit_order:\n", + " _wires = [N-n-1 for n in _wires]\n", + " \n", + " try:\n", + " if op.num_params > 0:\n", + " op(params[i], wires=_wires) \n", + " else:\n", + " op(wires=_wires) \n", + " except Exception as err:\n", + " if ignore_errors: continue\n", + " elif place_error_placeholders:\n", + " qml.Identity(wires=_wires)\n", + " raise err\n", + "\n", + " if place_barriers: qml.Barrier(wires=list(range(N)))\n", + " \n", + " # dummy return, as we only care about the unitary\n", + " # return qml.expval(qml.PauliZ(0))\n", + " return qml.state()\n", + "\n", + " #run once to test for errors\n", + " try:\n", + " _circuit(_params)\n", + " except Exception as err:\n", + " raise err\n", + " \n", + " return ParametrizedPennylaneCircuit(circuit=_circuit, params=_params)\n", + "\n", + " def get_unitary(self, qc: ParametrizedPennylaneCircuit) -> torch.Tensor:\n", + " \"\"\"Return the unitary matrix of a `ParametrizedPennylaneCircuit`.\"\"\"\n", + " return qml.matrix(qc.circuit)(qc.params)\n", + " \n", + " def draw(self, qc: ParametrizedPennylaneCircuit, style:str = \"black_white\", **kwargs) -> None:\n", + " \"\"\"Draw the given Pennylane `ParametrizedPennylaneCircuit`\"\"\" \n", + " fig, ax = qml.draw_mpl(qc.circuit, decimals=2, show_all_wires=True, style=style, **kwargs)(qc.params.cpu().numpy())\n", + " return fig" + ] + }, + { + "cell_type": "markdown", + "id": "890c5b01-6a2c-4f10-8510-8c19077ca12a", + "metadata": {}, + "source": [ + "## Test" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "0bd1a012-1802-4b4c-9c63-d58f918cd734", + "metadata": {}, + "outputs": [], + "source": [ + "from genQC.platform.tokenizer.circuits_tokenizer import CircuitTokenizer" + ] + }, + { + "cell_type": "markdown", + "id": "78f03bed-23d1-47db-8497-5fe6943c722d", + "metadata": {}, + "source": [ + "### genqc <-> backend" + ] + }, + { + "cell_type": "code", + 
"execution_count": null, + "id": "5b8d9d74-5b66-465d-95ff-7a80111e7d29", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "CircuitInstruction(name='rx', control_nodes=[], target_nodes=[0], params=[12.566370964050293])\n", + "CircuitInstruction(name='rx', control_nodes=[], target_nodes=[2], params=[12.566370964050293])\n", + "CircuitInstruction(name='ccx', control_nodes=[0, 2], target_nodes=[1], params=[12.566370964050293])\n", + "CircuitInstruction(name='rx', control_nodes=[], target_nodes=[2], params=[12.566370964050293])\n", + "CircuitInstruction(name='cp', control_nodes=[], target_nodes=[0, 1], params=[11.9380521774292])\n" + ] + } + ], + "source": [ + "tensor = torch.tensor([\n", + " [3, 0, -2, 0, 1],\n", + " [0, 0, 2, 0, 1],\n", + " [0, 3, -2, 3, 0],\n", + " ], dtype=torch.int32)\n", + "\n", + "params_tensor = torch.tensor([ # ... [max_params, time]\n", + " [1, 1, 1, 1, 0.9],\n", + " ])\n", + "\n", + "vocabulary = {\"cp\":1, \"ccx\":2, \"rx\":3}\n", + "tokenizer = CircuitTokenizer(vocabulary)\n", + "instructions = tokenizer.decode(tensor, params_tensor)\n", + "\n", + "instructions.print()" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "37a72460-592e-49ed-91ec-f51c8bd1c15f", + "metadata": {}, + "outputs": [ + { + "data": { + "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAmsAAAGjCAYAAABzH1KgAAAAOnRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjEwLjAsIGh0dHBzOi8vbWF0cGxvdGxpYi5vcmcvlHJYcgAAAAlwSFlzAAAPYQAAD2EBqD+naQAAX9ZJREFUeJzt3XtcVNX+P/7XADKAirdESz0iiBqY5iX9aopUXrJj5g0s08ILigqpJ46oHbUsE9TKAg3SFPNyUlSO2PmUgKmoxzL1pAZqDF7yiooKyP2yf3/4Y057ZoBhmJm99/B6Ph4+Hu41e/a8Zy/W4s3ee62lEgRBABERERHJkp3UARARERFR1ZisEREREckYkzUiIiIiGWOyRkRERCRjTNaIiIiIZIzJGhEREZGMMVkjIiIikjEma0REREQyxmSNiIiISMaYrBERERHJGJM1IiIiIhljskZEREQkY0zWiIiIiGSMyRoRERGRjDFZIyIiIpIxJmtEREREMsZkjYiIiEjGmKwRERERyRiTNSIiIiIZY7JGREREJGNM1oiIiIhkjMkaERERkYwxWSMiIiKSMSZrRERERDLGZI2IiIhIxpisEREREckYkzUiIiIiGWOyRkRERCRjTNaIiIiIZIzJGhEREZGMMVkjIiIikjEma0REREQyxmSNiIiISMaYrBERERHJGJM1IiIiIhljskZEREQkY0zWiIiIiGSMyRoRERGRjDFZIyIiIpIxJmtEREREMsZkjYiIiEjGmKwRERERyRiTNSIiIiIZY7JGREREJGNM1oiIiIhkjMkaERERkYwxWSMiIiKSMSZrRERERDLGZI2IiIhIxpisEREREckYkzUiIiIiGWOyRkRERCRjTNaIiIiIZIzJGhEREZGMMVkjIiIikjEma0REREQyxmSNiIiISMaYrBERERHJGJM1IiIiIhljskZEREQkY0zWiIiIiGSMyRoRERGRjDFZIyIiIpIxJmtEREREMsZkjYiIiEjGmKwRERERyRiTNSIiIiIZY7JGREREJGMOUgdAZA3FxcW4dOkSMjIyoNFocO/ePRQVFaG4uFjq0ETUajWcnJzwxBNPoGPHjvDy8oKHhwfUarXUoRGRlbC/Il1M1sgmlZWV4fDhw4iPj8f+/ftx9epVCIIgdVgmUalUaN++PYYNGwZ/f38MGjQIDg5sukS2gv0V1UQlKPUngsgAjUaDTz75BLt378bdu3elDsciWrZsibFjxyIsLAyenp5Sh0NEJmJ/RcZiskY2oaCgABEREYiMjERJSYnU4ViFWq3G/PnzsWDBAri4uEgdDhEZif0V+6vaYrJGipeUlITp06fj6tWrNe7bqlUrdOzYEe3atYOLiwscHR1hZyePcTYVFRUoKSlBQUEBrl27Bo1Gg6ysrBrf5+7ujtjYWAwdOtQKURJRXbC/Yn9lCiZrpGgJCQkICAhAWVmZwdd79eoFf39/DB48GF5eXnB1dbVyhHWTm5uLjIwMpKSkYOfOnTh9+rTB/RwcHLBz506MHj3ayhESkbHYXz3G/qr2mKyRYlXV8dnb2yM0NBShoaHw8PCQKDrLyMzMRHR0NKKiolBeXi56jR0gkXyxv2J/VRdM1kiRkpKS8Ne//lWv4/P19cXatWvRtWtXiSKzjt9++w2zZ89GamqqqNzBwQH//ve/eYuBSEbqQ3915MgRDBw40OBr7K/qjskaKU5BQQG8vb31nvkICgpCTEyMbJ7psLSKigoEBwdj/fr1onJ3d3ekp6fD2dlZosiIqJKt91cVFRVYtGgRsrOz9foi3f3YX5lO2T8lVC9FRETYbMdXG3Z2doiJiUFQUJCo/MqVK4iIiJAoKiL6M1vurwoLCzF+/HhERkaiX79+1e7L/qpueGWNFEWj0aBr166imbx9fX1x8OBBxXd8pqqoqICfnx+OHDmiLVOr1UhLS+O8RkQSsuX+KisrCyNHjsSJEycAAOnp6Xj66adrfB/7K9Mo+6eF6p1PPvl
E1PHZ29tj7dq1iu/46sLOzg5r166Fvb29tqy4uBirV6+WMCoistX+Ki0tDX379tUmak2bNkXnzp2Nei/7K9Mo+yeG6pWysjLs3r1bVBYaGmoTD+fW1TPPPIPQ0FBR2Z49e6qcIoCILMtW+6uUlBT0799fdGu3b9++tUpA2V/VHpM1UozDhw/rLcmi2+Drs5CQENH2nTt39EZfEZF12GJ/tWHDBgwfPhy5ubmi8pqeVzOE/VXtMFkjxYiPjxdt9+rVy+bmJaoLT09P9OzZU1Sme86IyDpsqb+qqKhAeHg4goKCDF79MiVZY39VO0zWSDH2798v2vb395coEvnSPSe654yIrMNW+qvCwkIEBARg5cqVBl9XqVTo27evScdmf2U8JmukCMXFxXrD3wcPHixRNPI1ZMgQ0fbVq1dFDzgTkeXZSn+VlZUFPz8/vWfv/szb2xtNmjQx6fjsr4zHZI0U4dKlS9CdZaZTp04SRSNfXl5eou2KigpcvnxZomiI6idb6K90R3xWxZRboJXYXxmPyRopQkZGhmi7VatWaNy4sUTRyJerqyvc3NxEZbrnjogsS+n9VXJyst6ITwBo0qSJ3qjPuiRr7K+Mx2SNFEGj0Yi2O3bsKFEk8qf71yo7PyLrUnJ/tX79eoMjPt3d3bFz505UVFSIyuuSrAHsr4zFZI0U4d69e6Ltdu3aSRSJ/LVt21a0nZ2dLVEkRPWTEvuryhGf06dPR3l5uei1vn374qeffkJOTo6ovDaT4VaF/ZVxHKQOgMgYRUVFom0XFxeJIpE/3XOje+6IyLKU1l8VFBTgrbfeMjiQwN/fH5s3b4azszOOHz8ueq22k+Eawv7KOEzWSBF0Rwg5OjpKFIn8qdVq0TY7PyLrUlJ/df/+fQwfPrzKgQStW7eGs7MzAOgla3W9BQqwvzIWb4OSIllrbb24uDioVCrtPwcHB7Rp0waBgYG4ceOGdr87d+6gefPmePHFF/WOUVpaimeeeQbu7u7Iz8+3eMxKX3eQyNbIuU02bdoUwcHBaNmypcHXo6KioFKpkJycjNOnT4teM0eyJudzIyc8S0RGWLZsGbZs2YKYmBgMHz4cW7duxaBBg7R/Bbq5uSEyMhIHDx7E5s2bRe/95JNP8NtvvyE6OhoNGzaUInwiIoPs7OwwefJkXLx4EbNnz64yeRo6dChKSkq023WZDJdqj8kakRGGDx+OiRMnYtq0adiwYQPCwsKQmZmJxMRE7T7Tpk3DgAEDEBYWpn1I9vLly1i2bBnGjBmDESNGSBU+EVG1mjVrhujoaJw8edKo/esyGS7VHpM1IhMMHDgQAJCZmaktU6lUiImJQU5ODsLCwgAAs2bNgoODA7744gtJ4iQiqg3dEZ9VMcctUDIeBxgQmeDKlSsAHv81+mc+Pj4ICwvDihUr0LhxY/zwww/4/PPP0aZNGwmirH9KS0uRl5cHAGjcuDEaNGggcUT1G+tDWQRBwAsvvGDUvkzWrIvJGpERcnJycO/ePRQVFeHnn3/GBx98ALVabfDW5uLFi7Fjxw5ERUWhV69emD17tgQR1x+//vorNm7ciOPHj+Ps2bPa52ocHR3RrVs39OvXD1OnTkX37t0ljrR+YH3IS1FREdLT05GWloa8vDztc7ZOTk5o3LgxfHx84OPjA7VajTlz5hg8xunTpzF79mzRaFAma9bFZI3ICLqLMLu7u2Pr1q16EzoCj38pVT7L8dJLL8He3t4qMdY3Z8+eRUhICI4cOWLw9ZKSEpw8eRInT55EVFQUBg4ciOjoaHTr1s3KkdYPrA95yMvLQ3x8PA4cOIAzZ87gwoULepPc6rK3t4eXlxcuXLig95pGo4GnpyeOHj2KzZs3Izw8HKWlpXWeDJdqh8+sERlh7dq1SE5Oxq5du/DKK6/g3r17evMDVfr888/x3//+F127dsUXX3yht/QM1Y0gCIiIiEDv3r2rTAwMOXLkCHr37o2IiAi9RbbJdKwP6QmCgMOHDyM
wMBCtW7fG1KlTsX37dqSlpdWYqAFAeXm5wUSte/fu8PDwACAeNfrpp59yyg0r49kmMkKfPn0wePBgjB07FomJiejatSsmTJiAR48eifa7du0ali5dilGjRiEpKQmOjo68DWpGgiAgNDQUCxcuRGlpaa3fX1paioULFyI0NJQJghmwPqSXkpICHx8f+Pn5YfPmzSgoKDDbsc+cOQMfHx+kpKRoy5o1a4bJkyeb7TPIOPUiWfvll1/wyiuvoGnTpmjYsCH+3//7f9i5c6fUYZFC2dvbY8WKFbh58yaio6NFr4WEhAAAvvjiCzz55JNYvnw5kpKS8O2330oRqs2JjIzE2rVr63yctWvXIjIy0gwR1W+sD+ncvHkTr7/+OoYMGYLz58/XuL+npyd8fX0xZMgQDBkyBL6+vvD09KzxfefPn8eQIUPwxhtv4NatW+YInUxg88nawYMH8fzzz+Po0aMICAhAcHAwbt++jfHjx+OTTz6ROjxSKD8/P/Tp0wdr1qzRPrCbkJCAxMRELFu2TLtw86xZs9CrVy/87W9/Q25urpQhK97Zs2exZMkSsx1vyZIlOHv2rNmOV9+wPqQhCAJiY2PRpUsX7Nixw+A+9vb2GDlyJNatW4djx44hNzcXGo0Ghw8fRlJSEpKSknD48GFoNBrMmDHDqM/99ttv0blzZ8TGxprz65CRbDpZKysrQ1BQEOzs7JCamoqvvvoKn3zyCc6cOYNOnTph0aJFuHr1qtRhkkL9/e9/R1ZWFuLi4pCXl4d33nkHPXr0wDvvvKPdx87ODjExMcjKysJ7770nYbTKFxISYtKttqqUlpZqr4RS7bE+rK+srAyhoaEIDg7WTonyZ126dMGqVatw/fp17N27FzNnzkT//v3RuHFjg8d7+PChweQrPDwcXbp00SvPy8tDcHAwQkNDUVZWVvcvREaz6WTtxx9/RGZmJiZMmIBnn31WW96kSRMsWrQIJSUleksDERlrzJgx8PT0xOrVq7Fo0SLcvHkTsbGxeqM/e/fujVmzZmHdunU4deqURNEq26+//lqrh9eNdeTIEZw5c8bsx7V1rA/rKykpQUBAgMHbzs2aNUNsbCzS0tIQFhaG1q1bG3XMJ598Uq/M19cXERERSEtLQ0xMDJo2baq3T3R0NAICAkTLT5Fl2XSydujQIQCP1zTTNWzYMADA4cOHrRkSKUxgYCAEQUDv3r31XrOzs4NGo4FGo0FUVBTKy8vx3HPPGTxO5eu9evWydMg2aePGjYo8tq1ifVhXaWkpxo0bh4SEBL3X3n77bVy4cAHTp0+v1QjNQ4cOaR/h+LPKwQR2dnaYMWMGLl68iLfffltvv4SEBIwbN86sV1epajadrGVkZAAAvLy89F5r3bo1GjVqpN2HiOTrz5NxKunYtor1YV2LFy/Gvn37RGWOjo7Yvn074uLi4ObmVqvjVbVSwYYNG/RWmXBzc0NcXBy2bdsGR0dH0Wv79u0z63OLVDWbnhS3co2zqhabdXV1NXodNLkQBMGsQ7OVgn+9ma60tBT5+flSh2Gy0tJSiz54fvbsWeTk5MDBwaa7Q7NhfdTMnP1VcnKy3kjZhg0b4rvvvoOfn59Jx6xqpYKpU6dW+Z4JEybgqaeewogRI0T9SWRkJF566SW9icPJvJTbGuqpgoICNGrUSOowSEHWr1+P9evXSx2GbBUXFxt8Loekwfr4nzt37mDSpEmisgYNGuCHH37AgAEDTDrmw4cPERUVpVduzOTdfn5++OGHH/Diiy9qE1JBEDBp0iScOXOm1lf4yHg2fRu08opaVVfPcnNzq7zqRkREJKXg4GBkZWWJylauXGlyogZUPajAmDnXAGDAgAF6V/pu376NmTNnmhwT1cymr6xVPquWkZGh92D37du38ejRI/Tp00eK0Ezm4uKiN2t+fTBv3jxeHTJRUFAQPvvsM6nDMFlpaSlatWplsZFnarUaWVlZir7tZk2sj5qZo79KS0vTG1DwyiuvVHk
L0xg1DSow1pw5c5CcnIzvv/9eW7Znzx6kp6fD29vb5PioasptDUYYNGgQVqxYgaSkJLz++uui1/bv36/dR0lUKhUaNmwodRhWp/vQqzFWrlyJjRs3Ij09XfHr2C1YsAAHDx7Ezz//XOv3NmjQQPE/M926dcPJkyctdmxeYa8d1kf1TOmvdH366aei7ebNmyMuLg4qlcqk49VmUEFN7OzsEBcXhy5duuDBgweimDds2GBSfFQ9Zf8Gq8FLL70EDw8PbN++Hb/++qu2PCcnBx9//DEcHR3x1ltvSRcgWUxubi4iIyMRHh6uTdR27NiBiRMnwsvLCyqVqsqHc3/55ReEhITAx8cHDRs2xF/+8hcEBATg999/N+qzKztUQ/9u376t3e/QoUNV7qdSqbB8+XLtvnPnzsWZM2eQmJho+klRsH79+iny2LaK9WFZt2/fxtatW0Vls2bNQsuWLU0+pimDCqrj5uaGWbNmicq2bNki6uPIfGz6ypqDgwM2bNiAYcOGwdfXF6+//joaN26M3bt34+rVq1i9ejXc3d2lDpMsYOPGjSgrK8Mbb7yhLfvyyy9x6tQpPPfcc8jOzq7yvZGRkTh27Bj8/f3RrVs33L59G9HR0ejZsyd++ukndO3a1agYli1bhg4dOojK/vzg9NNPP40tW7bovW/Lli1ISkoSzQ/YunVrvPbaa1i9ejVGjhxp1OfbkilTphh8KNpcx6baYX1YVmxsrOg2s6OjY51Wd6jLoILqhISEYNWqVdpYS0pKEBsbi6VLl9bpuGSAUA/8/PPPwssvvyy4uroKzs7OQp8+fYRvv/1W6rCoFmbNmiUA0P6bNWtWtft369ZNmDhxoqjsjz/+EMrLywVBEAQfHx9h0KBBBt977Ngxobi4WFT2+++/C2q1WnjzzTdrjHXTpk0CAOGXX36pcV9DOnbsKHh5eemV79q1S1CpVEJmZma176/tuVKKgQMHir6XOf4NHDhQ6q+lWKyPqtW1Dfr6+oreP3Xq1DrF4+TkpHeufX1963TMSlOmTBEdt6p+tSq22l+Zm03fBq3Up08ffP/998jJyUFBQQF+/vlnjB8/XuqwyEIuX76Ms2fP6s37065dO6OeXevfv7/e5I9eXl7w8fHB+fPnaxVLXl4eysvLjd7/xIkT0Gg0ePPNN/Veq/w+e/furVUMtiI6OtoszwJVatCggcGle8g4rA/LKCsr03secNSoUSYfz1yDCqqiG9vJkye5bqgF1ItkjeqX//znPwCAnj17mu2YgiAgKysLTzzxhNHveeGFF+Dq6goXFxeMHDnSqNUytm3bBgAGk7UmTZrA09MTx44dMz5wG9KtWzcsW7bMbMdbtmwZnnnmGbMdr75hfVjGb7/9pjfxed++fU06lmDGQQVV0Y0tPz8faWlpZjk2/Q+TNbI5Fy5cAAC958XqYtu2bbhx44ZRV2RdXFwQGBiItWvXIiEhAfPnz8eBAwfQv39/XLt2rcr3lZeXY8eOHejTpw86duxocB8PDw+kp6eb/D2ULjw8HLNnz67zcUJCQhAeHm6GiOo31of5/fTTT6JtT09PkwcWmHtQgSFubm7w8PAQlel+B6o7Jmtkc7Kzs+Hg4GC2lR4uXLiA2bNno1+/fgYXNNYVEBCATZs24a233sKoUaPw4YcfYv/+/cjOzhaN8NR14MABZGVlGbyqVqlZs2a4d++eSd/DFqhUKkRFRWHFihUmXRlo0KABVqxYgS+++MLkKRDof1gf5vfHH3+Itnv06GHScSw1qMAQ3Rh1vwPVHZM1omrcvn0bf/3rX9GkSRPs2rUL9vb2Jh1nwIAB6Nu3b7XPiWzbtg329vbVXr0TBKHe/1JTqVRYsGABTp48iYEDBxr9voEDB+LUqVNYsGBBvT+H5sT6MK/CwkLRtqlzztV1pYLa0I1R9ztQ3dn01B1UP7Vo0QJlZWXIy8tD48aNTT5OTk4Ohg8fjocPH+LIkSN46qmn6hRXu3btcPHiRYOvFRY
WIiEhAYMHD0arVq2qPMaDBw9q9dycLevWrRtSU1Nx5swZbNy4EcePH8eZM2e00wio1Wp069YN/fr1w5QpU9C9e3eJI7ZtrA/zePfdd/H666+jsLAQhYWFJvU7lh5UoCs0NBTjxo2Ds7MznJ2d0aZNG4t8Tn3GZI1sTpcuXQA8HhXarVs3k45RVFSEV199Fb///jtSUlLMsoTKpUuXqnz2JDExEXl5edXeAgUefyf+khPr3r07Pv/8cwCPE+zKueyysrIUPxO+ErE+6qZt27Zo27atye+3xqACXc8++yyeffZZixybHuNtULI5lTOgm7ocTnl5OcaPH4/jx48jPj6+2hnVb926hQsXLqC0tFRbdvfuXb39/u///g+nTp3Cyy+/bPA427dvh4uLC0aPHl3lZ+Xk5CAzMxP9+/evxbepX/68nqSS15a0FawP67PGoAKyPrYesjkeHh7o2rUrUlJSRLOhp6amIjU1FcDjhCo/Px8fffQRgMfPcvj6+gJ4fBsiMTERr776Ku7fv6+37MvEiRO1/1+4cCE2b96My5cva1fD6N+/P3r06IHevXujSZMmOH36NDZu3Ih27dph0aJFevHev38f33//PcaOHVvtoIiUlBQIgoDXXnvNtBNDRDbNmoMKyLqYrJFNmjJlCpYsWYLCwkI4OzsDAH788Ud88MEHov0WL14MAFi6dKk2WatcR3bfvn3Yt2+f3rH/nKwZMn78ePz73/9GUlISCgoK8OSTTyIoKAhLly41+DxafHw8SktLMWHChGqPGx8fjwEDBljkAWEiUj5rDiog6+JtULJJU6ZMgaOjI7Zv364te//99yEIgsF/77//vna/Q4cOVbmfIAiiz4mLi4MgCKI1Zj/66CP897//xcOHD1FSUoKrV69i3bp1VQ4cmDFjBgRBwKuvvlrl97l9+zb27t2LsLAw004IEdk0aw8qIOtiskaKVFFRUe3rTZo0wfz587Fq1aoa91WCNWvW4JlnnjHqFqgtfF8iW2LpNinFoAJzYX9lHCZrpAhqtVq0XTkdQHXCw8Nx4cIFo9YDlbuIiAicOHHCqH2Li4tF205OTpYIiYiqYEp/VRdKHlTA/so4yv8tRvWCbgPWXTuP/kf33LDzI7Iua/ZXSh9UwP7KOEzWSBF0J4Ktbo3N+u769eui7RYtWkgUCVH9ZM3+SumDCthfGYfJGimC7sLmSvmrUQoZGRmibS8vL4kiIaqfrNVfpaenK35QAfsr4zBZI0XQbcBZWVnIzc2VKBr5ys3NxZ07d0Rl7PyIrMsa/ZUgCAgJCdErV8Kggkrsr4zHZI0UwcPDQ2+xZ92/yEj/nNjZ2aFDhw4SRUNUP1mjvyopKYG3t7doANXzzz+viEEFldhfGY/JGimCWq1G+/btRWVKutRvLcnJyaLt9u3b641MIyLLskZ/pVarER0djZMnT6Jfv35wdXXFrl27zPoZlsb+ynhM1kgxhg0bJtqOj4+XKBL50j0nuueMiKzDWv1Vjx49cPToURw/fhytW7e2yGdYCvsr4zFZI8Xw9/cXbZ86dQqXLl2SKBr5yczMxOnTp0VluueMiKzDmv2VnZ0dvL29LXJsS2F/VTtM1kgxBg0ahJYtW4rKDM0vVF9FR0eLtt3c3LTrnRKRdbG/qh77q9phskaK4eDggLFjx4rKoqKi8Ntvv0kUkXycO3dO7xfBmDFj4ODgIFFERPUb+6uqsb+qPSZrpChhYWGiB1DLy8sxe/bser2+XEVFBWbPno3y8nJtmVqt5qLvRBJjf6WP/ZVpmKyRonh6emL+/PmistTUVAQHB9fLDrCiogLBwcE4cuSIqDw8PFwxM5gT2Sr2V2Lsr0ynEgRBkDoIotooKCiAt7c3rl69KioPCgpCTEyMTSzcbozKjm/9+vWicnd3d6Snp8PZ2VmiyKSTn5+PRo0aAQAePXqEhg0bShxR/cb6YH9Vif1V3dSPnxKyKS4uLvjqq6/0nm9Yv349/Pz8cO7cOYk
is55z587Bz89Pr+NzcHBAbGwsOz4imWB/xf7KHJiskSINHToUO3fu1OsAjxw5gh49emDevHnIzMyUKDrLyczMxLx589CjRw+9WwkODg7YuXMnhg4dKlF0RGQI+yv2V3XF26CkaAkJCQgICEBZWZnB13v27Al/f38MGTIEXl5ecHV1tXKEdZObm4uMjAwkJycjPj5eb16iSpUd3+jRo60cobzwtpu8sD7E2F89xv6q9piskeIlJSVhxowZuHLlSo37urm5wcvLC23btoWLiwvUarVsnhmpqKhAcXExCgoKcP36dWRkZOgtcmyIu7s7YmNj+RcqmBzIDetDH/sr9lemYLJGNqGwsBARERGIjIxEcXGx1OFYhVqtRnh4OBYsWMBnPv5/TA7khfVhGPsr9le1xWSNbEpmZiZWr16N3bt34+7du1KHYxFubm4YM2YMwsLCONxdB5MDeWF9VI/9FRmLyRrZpLKyMqSmpiI+Ph779+/HlStXoNQfdZVKBXd3dwwbNgz+/v7w9fXlTN9VYHIgL6wP47C/opowWaN6obi4GJcvX0ZGRgYyMjKQnZ2NoqIiFBUVSR2aiJOTE5ycnNCiRQt4eXnBy8sLHTp0EM2CTlVjciAvrA/TsL8iXUzWiMhmMDmQF9YHkXnIY1gJERERERnEZI2IiIhIxpisEREREckYkzUiIiIiGWOyRkRERCRjTNaIiIiIZIzJGhEREZGMMVkjIiIikjEma0REREQyxmSNiIiISMaYrBERERHJGJM1IiIiIhljskZEREQkY0zWiIiIiGSMyRoRERGRjDFZIyIiIpIxJmtEREREMsZkjYiIiEjGmKwRERERyZiD1AEQEZG0iouLcenSJWRkZECj0eDevXsoKipCcXFxnY5bWlqq/f+8efPQoEGDOh1PrVbDyckJTzzxBDp27AgvLy94eHhArVbX6bhyY6n6MLf6Uh9yoBIEQZA6CCIic8jPz0ejRo0AAI8ePULDhg0ljkieysrKcPjwYcTHx2P//v24evUqlPqrQKVSoX379hg2bBj8/f0xaNAgODgo6zoE64NqwmSNiGwGk7XqaTQafPLJJ9i9ezfu3r0rdTgW0bJlS4wdOxZhYWHw9PSUOpxqsT7IWEzWiMhmMFkzrKCgABEREYiMjERJSYnU4ViFWq3G/PnzsWDBAri4uEgdjgjrQ171oQRM1ojIZjBZ05eUlITp06fj6tWrNe7bqlUrdOzYEe3atYOLiwscHR1hZyePcWgVFRUoKSlBQUEBrl27Bo1Gg6ysrBrf5+7ujtjYWAwdOtQKUdaM9SGv+lAKJmtEZDOYrIklJCQgICAAZWVlBl/v1asX/P39MXjwYHh5ecHV1dXKEdZNbm4uMjIykJKSgp07d+L06dMG93NwcMDOnTsxevRoK0coxvp4TC71oSRM1ojIZjBZ+5+qEgN7e3uEhoYiNDQUHh4eEkVnGZmZmYiOjkZUVBTKy8tFr0mdILA+5FUfiiMQEdmIR48eCQAEAMKjR4+kDkcy+/fvFxwcHLTnovKfr6+vcO7cOanDs7hz584Jvr6+et/fwcFB2L9/v9XjqQ/1kZqaWuVrcqsPJeKVNSKyGXl5edpbR7m5uWjcuLHEEVlfQUEBvL299Z6JCgoKQkxMjGyeebK0iooKBAcHY/369aJyd3d3pKenw9nZ2Spx2Hp9VFRUYNGiRcjOztY717r7yaE+lErZPyVEVC89ePAABw4cQGRkJAICAuDp6QlnZ2fRMz6urq5wdnaGp6cnAgICsHLlShw4cAAPHjyQMHLLi4iIsNnEoDbs7OwQExODoKAgUfmVK1cQERFhtThsuT4KCwsxfvx4REZGol+/ftXuK5f6UCpeWSMiRbh48SK+/PJL7Nu3D5cuXarTsTw8PPDqq69i5syZ6Ny5s5kilJ5Go0HXrl1FM937+vri4MGDik8MTFVRUQE/Pz8cOXJEW6ZWq5GWlmbxeb9suT6ysrIwcuRInDhxAgCQnp6
Op59+usb3SVkfSsZkjYhkq6ysDImJiVi3bh0OHDggeq1Dhw7o1asXevfujV69esHT0xOCIGg7/MzMTKhUKmRmZuLUqVM4efIkTp06hcuXL4uO89JLL2HWrFkYOXKk4mdanzlzJmJiYrTb9vb2+PXXX9G1a1cJo5LeuXPn0KNHD9FD7sHBwfjyyy8t+rm2Wh9paWn461//qr1i2LRpU2RnZxudgEpVH4om4fNyREQGFRYWCsuXLxfatGmjfRjZzs5OGDlypJCYmChkZ2cbfJ8xAwyys7OFxMREYeTIkYKdnZ12/zZt2ggff/yxUFRUZMmvZjGlpaVCy5YtRQ9wz507V+qwZGPu3Lmic+Pm5iaUlpZa7PNstT6Sk5MFV1dX0fcaNmxYrY9j7fpQOiZrRCQrP//8s+Dt7a3txFu2bCksWrRIuHLlSo3vre1o0CtXrgiLFi0S/VL19vYWTpw4YY6vYlUpKSl6o+0yMzOlDks2NBqN3vk5cOCAxT7PFutj/fr1Bke1vv/++7U+lrXrQ+mUfdOciGxGUVERFixYgH79+iE9PR2tWrXCN998g2vXrmH58uVo37692T+zffv2WL58Oa5du4ZvvvkGrVq1Qnp6Ov7f//t/WLhwIYqKisz+mZYSHx8v2u7Vq5fNzdtVF56enujZs6eoTPecmZMt1UdFRQXCw8MRFBRkcELfmgYXGGLt+lA6JmtEJLkTJ06gZ8+eiIyMREVFBSZMmIC0tDRMmjQJarXa4p+vVqsxadIkpKWlYcKECaioqEBERAR69eqlfYBa7vbv3y/a9vf3lygS+dI9J7rnzJxspT4KCwu1o6kNUalU6Nu3r0nHtmZ9KB2TNSKS1K5duzBgwACcP38erVq1QkJCArZt24YWLVpYPZYWLVpg27ZtSEhIgJubG9LT0zFgwADs3r3b6rHURnFxsd70EIMHD5YoGvkaMmSIaPvq1auikZrmYiv1kZWVBT8/v2p//r29vdGkSROTjm+t+rAFTNaISDIbN27E+PHjUVpaitdeew1paWkYNWqU1GFh1KhRSE9Px2uvvYbS0lIEBARg06ZNUodVpUuXLkHQGdjfqVMniaKRLy8vL9F2RUWF3uhgc7CF+khLS0Pfvn1rvLJsyi3QStaqD1vAZI2IJLFp0yZMnToVFRUVmDp1Knbv3i3J1bSqtGjRArt379bGOGXKFNkmbBkZGaLtVq1a1cvVG2ri6uoKNzc3UZnuuTMHpddHcnIy+vfvr3d1sEmTJnrTc9QlWbNWfdgCJmtEZHW7d+/GtGnTAABz5szB+vXrYW9vL3FU+uzt7bF+/XrMmTMHADBt2jTs2bNH4qj0aTQa0XbHjh0likT+dK/mWCI5UHJ9rF+/HsOHD0dubq6o3N3dHTt37kRFRYWovC7JGmCd+rAFTNaIyKpOnDiBN954AxUVFZg2bRo+++wzqFQqqcOqkkqlwmeffaa9wvb666/jl19+kToskXv37om227VrJ1Ek8te2bVvRdnZ2ttk/Q4n1UTnic/r06aLJagGgb9+++Omnn5CTkyMqb9q0aZ1XALFGfdgCJmtEZDVFRUUIDAzUPqMWExMj60StkkqlQmxsrPYZtsDAQFlN66Ebi4uLi0SRyJ/uubFEPSqtPgoKCqoc8env74+DBw+iVatWOH78uOi1vn371nnZLGvUhy1gskZEVvPBBx9oR31+/fXXsrz1WRV7e3ts2LBBO0p02bJlUoekpTuCztHRUaJIjOPn5wc/Pz9JPlt3KhhLJAdKqo/79+/jhRdeqHLEZ+vWreHs7AwAeslaXW+BAtapD1vAZI2IrOLEiRPav9xjYmJkNZjAWE888YR2rcfIyEjZ3Q6tZK1FwuPi4qBSqbT/HBwc0KZNGwQGBuLGjRtWiaG2pFhAXc6Ltjdt2hTBwcFo2bKlwdejoqKgUqmQnJyM06dPi14zR7Im53MjJzxLRGRxRUVFmDx5snbCWzlMz2Gq0aNHa5+5CwwM5LxQAJY
tW4YtW7YgJiYGw4cPx9atWzFo0CBeJVEAOzs7TJ48GRcvXsTs2bOrTJ6GDh2KkpIS7XZdJsOl2mOyRkQW99lnn2mXkPriiy+kDqfOoqKitEtTffrpp1KHI7nhw4dj4sSJmDZtGjZs2ICwsDBkZmYiMTFR6tDISM2aNUN0dDROnjxp1P51mQyXao/JGhFZVFlZGdauXQsAWLVqlSJvf+pq0aKF9pbuunXrDK6XWJ8NHDgQAJCZmaktEwQBH3/8Mdq1a4fDhw/j8OHDaNu2LT744AO96SBIOrojPqtijlugZDwma0RkUYmJibhx4wbc3NwQEBAgdThmM378eLRs2RLXr1/Hvn37pA5HVq5cuQLg8dWaSu+99x7ee+89vPDCC+jUqRM6deqEoUOH4v3338fcuXOlCZREBEHACy+8YNS+TNasy0HqAIjItq1btw7A4wllrbEou7Wo1WpMmzYNK1aswLp16zB69GipQ5JMTk4O7t27h6KiIvz888/44IMPoFarMWLECADAnTt3sHr1aowePRrffPONdiToxo0bAQDR0dGYM2cOPD09pfoKNquoqAjp6elIS0tDXl6e9jlCJycnNG7cGD4+PvDx8YFardZO/qzr9OnTmD17tmg0KJM162KyRkQWc+HCBRw4cAB2dnaYPn261OGY3YwZMxAREYGUlBRcvHixzhOEKpXuIuXu7u7YunWrdsLTlJQUlJaWIjg4WO+9s2bNwqZNm/D9998jJCTEKvHasry8PMTHx+PAgQM4c+YMLly4oDfJrS57e3t4eXnhwoULeq9pNBp4enri6NGj2Lx5M8LDw1FaWlpvf9alwmSNiCymcpqLESNGoH379hJHY37t27fHiBEjsG/fPnz55ZdYs2aN1CFJYu3atejUqRNycnKwceNGpKamiq6i/vHHHwAML7tUudyQ7jqUZDxBEJCamopNmzYhPj4eBQUFtXp/eXm5wUSte/fu8PDwAPC/UaOjRo3Cv/71L065YWU820RkMZXPclWuA2qLKr/bd999J3Ek0unTpw8GDx6MsWPHIjExEV27dsWECRPw6NEj0X6GVqtQwgoWcpaSkgIfHx/4+flh8+bNtU7UqnPmzBn4+PggJSVFW9asWTNMnjzZbJ9BxrH5ZG3r1q2YMWMGevfuDbVaDZVKhbi4OKnDIrJ5Dx48wKVLlwAAzz//vMTRWE7ld8vMzMTDhw+lDUYG7O3tsWLFCty8eRPR0dEAgL/85S8AxKNDK1Uuem6LV14t6ebNm3j99dcxZMgQnD9/vsb9PT094evriyFDhmDIkCHw9fU16hnB8+fPY8iQIXjjjTdw69Ytc4ROJrD5ZO0f//gHvvrqK1y9ehVPPvmk1OEQ1RuVs5136NABzZs3lzgay2nRogXc3d0BQG+G9/rKz88Pffr0wZo1a1BUVITBgwejQYMG2LBhg96+sbGxUKlUGD58uASRKo8gCIiNjUWXLl2wY8cOg/vY29tj5MiRWLduHY4dO4bc3FxoNBocPnwYSUlJSEpKwuHDh6HRaDBjxgyjPvfbb79F586dERsba86vQ0ay+WRtw4YNuHLlCu7evWvw4VYisozKyTV79eolcSSW17t3bwAwekLR+uDvf/87srKyEBcXBzc3N4SFhWHHjh0IDAzErVu3cOvWLQQFBeGrr75CSEgIR4IaoaysDKGhoQgODkZeXp7e6126dMGqVatw/fp17N27FzNnzkT//v3RuHFjg8d7+PChweQrPDwcXbp00SvPy8tDcHAwQkNDObegldl8sjZ48GBeXieSwKlTpwD8L5GxZZUJaeV3JmDMmDHw9PTE6tWrUV5ejuXLl+Ojjz5CcnIyfv/9d/z+++/497//jaVLl9bbgRm1UVJSgoCAAO0E03/WrFkzxMbGIi0tDWFhYWjdurVRxzR0t8nX1xcRERFIS0tDTEwMmjZtqrdPdHQ0AgICRMtPkWXZfLJGRNKoTFzqw5W1+pqsBQYGQhAEgwm5nZ0
dNBoNNBoN7O3toVKp8N577+HGjRsYNGgQBg0ahJs3b+L999/nyMIalJaWYty4cUhISNB77e2338aFCxcwffr0Wp3HQ4cOGVy7tXIwgZ2dHWbMmIGLFy/i7bff1tsvISEB48aNQ2lpaS2+CZmKLYSILOLmzZsAUC9ub1VOSVH5nYnMafHixXqrZDg6OmL79u3a28y1UdVKBRs2bECDBg1EZW5uboiLi8O2bdvg6Ogoem3fvn1YsmRJrT6bTMN51hRGEASzDs0msoSKigrtX+2CICA/P98qn/vnz7HWZwKPvyMAFBYWIi8vz+pXinh1w3SlpaVm/1kxZ30kJycjMjJSVNawYUN899132pUgaquqlQqmTp1a5XsmTJiAp556CiNGjBCdr8jISLz00kt6EyOTeTFZU5iCggI0atRI6jCIjCbVlbVWrVpJ8rmurq6SfC6ZZv369Vi/fr3UYRh0584dTJo0SVTWoEED/PDDDxgwYIBJx3z48CGioqL0yiunUKmOn58ffvjhB7z44ovahFQQBEyaNAlnzpyp9RU+Mh5vgxIRkVUdOnQIhw4dkjoM2QsODkZWVpaobOXKlSYnakDVgwqM/aNqwIABelf6bt++jZkzZ5ocE9WMV9YUxsXFRW9WcCK5qaio0F5hyszMtNpVrvz8fO1nZWVloWHDhlb53KysLO0vu9zcXKvfBp03b55srw7JXVBQED777DOzHtMc9ZGWlqY3oOCVV16p8hamMWoaVGCsOXPmIDk5Gd9//722bM+ePUhPT4e3t7fJ8VHVmKwpjEqlstovIKK6cHJyQlFRkWQ/sw0bNrTa51YumeTs7FzlnFaWpPtQuDFWrlyJjRs3Ij09XfGjMRcsWICDBw/i559/rvV7GzRoYPafE1PqQ9enn34q2m7evDni4uJMXp6rNoMKamJnZ4e4uDh06dIFDx48EMVsaOJjqjtlt1Aikq2nnnoKgOElhmxN5fM+ld9Z7nJzcxEZGYnw8HBtorZjxw5MnDgRXl5eUKlUVT68/ujRIyxduhQvv/wymjdvbtISfsnJyRgwYABcXFzQrFkzjBs3DleuXKn2PZmZmXBycoJKpdKbfHju3Lk4c+YMEhMTaxWHXN2+fRtbt24Vlc2aNQstW7Y0+ZimDCqojpubG2bNmiUq27JlC27fvm3S8ah6Np+sbdiwAYGBgQgMDER8fLxeGf8KILKM+jT3mNLmlNu4cSPKysrwxhtvaMu+/PJL7N27F+3atUOzZs2qfO+9e/ewbNkynD9/Ht27d6/1Z3/33Xd4+eWXUVxcjIiICLz77rs4fPgwBgwYgLt371b5vnnz5sHBwfDNoNatW+O1117D6tWrax2PHMXGxoomnHV0dERISIjJx6vLoILqhISEiKbzKCkp4XJUFmLzydrRo0exefNmbN68Wbtu37Fjx7RlR48elThCIttUmbjUhyWYlJasbdq0CSNHjoSTk5O2bMuWLcjJycGPP/5Y7RXCJ598Erdu3cLVq1exatWqWn92eHg4PDw8cOzYMbzzzjv4xz/+gZSUFNy6dQsREREG37N//37s378f8+bNq/K4AQEBOHr0KC5dulTrmOTmxx9/FG1PmjSpTs991nVQQVVat26NiRMnisoOHjxYp2OSYTafrMXFxUEQhCr/1fbyPREZp3JW+/pwZa0yIVXC0lqXL1/G2bNn9ebFateunVHPrqnVaqOXM9J1//59pKenY/To0aIrMt27d8fTTz+Nb7/9Vu89paWlmDNnDubMmVNtclH5ffbu3WtSbHJRVlam9wfOqFGjTD6euQYVVEU3tpMnT3LdUAuw+WSNiKTRs2dPAI+Tg/v370scjeVkZ2drn7eq/M5y9p///AeANLEWFxcDeDwQQ5eLiwtu3ryp98zTmjVr8ODBA/zjH/+o9thNmjSBp6cnjh07Zr6AJfDbb7/pTXzet29fk45lzkEFVdGNLT8/H2lpaWY5Nv0PkzUisohmzZrBw8MDABT/C7Q
6ld/N09PT4KLXcnPhwgUAQIcOHaz+2a1atULTpk31fh6ys7ORnp4OALhx44a2/Pbt2/jwww/x4YcfGjXZsIeHh/Y4SvXTTz+Jtj09PU0eWGDuQQWGuLm5adt5Jd3vQHXHZI2ILObVV18FAJseyFP53UaMGCFxJMbJzs6Gg4ODJCuhVC4OfuDAASxcuBAZGRk4deoUAgICtA/UFxYWavevfL5t2rRpRh2/WbNmuHfvnkVit5Y//vhDtN2jRw+TjmOpQQWG6Mao+x2o7pisEZHFVM5q/t133+Hq1asSR2N+V65cwXfffQcAnMHdSMuWLcPUqVOxcuVKdOrUCb1794aDg4P2ak9lEvnTTz9hy5Yt+Oyzz4yeB04QBJPnIZOLPyerwOPbu6aw1KACQ3Rj1P0OVHecFJeILKZz58546aWXcODAAXz11VdYvny51CGZ1VdffQVBEDB48GB07txZ6nCM0qJFC5SVlSEvL0+SCXwdHR2xYcMGLF++HL///jtatWqFTp06YcKECbCzs0PHjh0BAPPnz8fAgQPRoUMH7TOBlVfNbt26hT/++AN/+ctfRMd+8OABnnjiCat+H3N799138frrr6OwsBCFhYUmzd1n6UEFukJDQzFu3Dg4OzvD2dkZbdq0scjn1GdM1ojIombNmoUDBw5gw4YNWLJkCdRqtdQhmUVxcbH2Fqju5KBy1qVLFwCPB35069ZNsjhatWqlnY6ivLwchw4dQt++fbVX1v744w9cvXrV4LN1I0eORJMmTfDw4UNR+eXLl02a+01O2rZti7Zt25r8fmsMKtD17LPP4tlnn7XIsekxJmtEZFEjR45EmzZtcOPGDezcuROTJk2SOiSz2LFjB+7evYu2bdtqn81Tgn79+gF4PMWCpZO1W7duIScnB56entUmCqtXr8atW7dEz1h99dVXeqMif/zxR0RFRWH16tXapLNSTk4OMjMz6/3taGsMKiDrY7JGRBbl4OCA2bNnY9GiRfj73/+OV155BS1atJA6rDrJzs7G/PnzATy+qlbVzPpy5OHhga5duyIlJQVTpkzRlqempiI1NRUAcPfuXeTn5+Ojjz4C8PhZJ19fX+2+0dHRePjwIW7evAkA2LdvH65fvw7g8S2xymeYFi5ciM2bN+Py5ctwd3cHAGzduhW7d++Gr68vGjVqhJSUFOzcuRPTpk3D2LFjtZ8xdOhQvdgrr6QNGjRIb067lJQUCIKA1157rS6nR9GsOaiArEs5PQwRKdbf/vY3bN26Fenp6XjnnXewbds2qUOqk9DQUGRlZcHb2xt/+9vfpA6n1qZMmYIlS5agsLBQO+fZjz/+iA8++EC03+LFiwEAS5cuFSVrq1evFg0Y2bNnD/bs2QMAmDhxYrUPxXfq1An379/Hhx9+iMLCQnTu3BkxMTGYPn16nb5TfHw8BgwYYJEH6JXCmoMKyLo4GpSILE6tVmPTpk2ws7PD9u3b8a9//UvqkEyWkJCAf/7zn7C3t0dcXJwin8GbMmUKHB0dsX37dm3Z+++/X+VKL++//77o/VeuXKly38oraMD/VpD5c1mfPn1w+PBh3L9/H4WFhfj1118xY8YMo0ZxBgYGQhAEvatqt2/fxt69exEWFmbS+bAF1h5UQNbFZI2IrKJPnz7aW4fBwcHIzs6WOKLau3fvHoKDgwE8Hq343HPPSRyRYRUVFdW+3qRJE8yfPx+rVq2qcV8lWLNmDZ555hmjboFK8X0t/ZlSDCowF1v4+bMGJmtEZDVLly6Ft7c3srKyMHXqVJSXl0sdktHKy8sxbdo03LlzB97e3li6dKnUIWnpXt2rnGC2OuHh4bhw4YLRc5jJWUREBE6cOGHUvpVLXlX682L25mJKfdSFkgcVWKM+bIHyWykRKYaTkxM2bdqEBg0aYO/evZgxYwYEQZA6rBoJgoAZM2Zg7969cHR0lN3tT91fcLqjKOl/dM+NJZIDa9aH0gcVWKM+bAGTNSKyqj59+uCf//wn7Oz
s8PXXX2PevHmyTtgEQcC8efPw9ddfw87ODv/85z9ld/tTdyLYa9euSRSJ/FWOWq1kiZHJ1qwPpQ8qsEZ92AIma0RkdWPHjtVOKPv5558jKChIlrdEK299fv755wCAr7/+GmPGjJE4Kn2Vs/5XUspVFSlkZGSItr28vMz+Gdaqj/T0dMUPKrBGfdgCJmtEJInJkydj48aN2itsY8eOldWgg3v37mHs2LHaGDdt2oTAwECpwzJI9xdcVlYWcnNzJYpGvnJzc3Hnzh1RmSWSA2vUhyAICAkJ0StXwqCCStaqD1vAZI2IJDN58mTs3LlT+wybt7c3EhISpA4LCQkJ8PHxwd69e9GgQQPEx8fLNlEDHk90qzv1he4VC9I/J3Z2dgaXs6ora9RHSUkJvL29RQNEnn/+eUUMKqhkrfqwBUzWiEhSY8eOxdGjR/H000/jzp07GDNmDN58801JrrJlZ2djwoQJGDNmjHbU57Fjx2R56/PP1Go12rdvLypT0q0wa0lOThZtt2/f3iIDRaxRH2q1GtHR0Th58iT69esHV1dX7Nq1y6yfYWnWqg9bwGSNiCTXp08fnD59GgsWLNBOnOvj44NvvvlGb2i/JRQXF+Obb76Bj4+PdvDDwoULcerUKdkNJqjKsGHDRNvx8fESRSJfuudE95yZk7Xqo0ePHjh69CiOHz+O1q1bW+QzLMWa9aF0KkHOw7CIqN755ZdfEBgYiPT0dABAy5YtMW3aNMyYMUPvaoWu/Px8NGrUCADw6NEjNGzYsNr9r169itjYWGzYsAF3794FAHh7eyMuLk4xSVqlAwcOYPDgwaKyzMxMeHh4SBSRvGRmZuo9+H/gwAG8+OKLFvk81kf1rF0fSscra0QkK8899xxOnTqFjz/+GG3atMHdu3exYsUKeHh4YOTIkUhMTMT9+/dNPv79+/eRmJiIkSNHokOHDlixYgXu3r2Ltm3b4uOPP8bp06cVl6gBjxc3b9mypajM0Pxb9VV0dLRo283NTbTeqbmxPqpn7fpQOl5ZIyLZKisrw759+7Bu3Tq9Z37c3d3Ru3dv9OrVC7169ULHjh0hCIJ2fqnMzEyoVCpoNBqcOnUKp06dwsmTJ3HlyhXRcQYPHoxZs2bh1VdfhYODg7W+mkXMnDkTMTEx2m17e3v8+uuv6Nq1q4RRSe/cuXPo0aOHaHqY4OBgfPnllxb9XNaHYVLVh5IxWSMiRbh48SK+/PJLfPfdd8jMzKzTsTw9PTFixAjMnDkTnTt3NlOE0svMzISPj4/oOT9fX18cPHjQJpaVMkVFRQX8/Pxw5MgRbZlarUZaWprFJ45lfeiTsj6UrH7+tBCR4nTu3Blr1qyBRqPBgwcPcODAAURGRiIgIACenp5wdnbWe4+zszM8PT0REBCAlStX4sCBA3jw4AE0Gg3WrFljU4ka8DgJnT9/vqgsNTUVwcHB9XLB7IqKCgQHB4sSA+DxuqjWSAxYH2JS14eS8coaEdmMvLw8uLq6Ang84Wbjxo0ljsj6CgoK4O3tjatXr4rKg4KCEBMTU2+u6FQmBuvXrxeVu7u7Iz093WBybwmsj8fkUh9KVT9+SoioXvjzL7768ktQl4uLC7766iu95+/Wr18PPz8/nDt3TqLIrOfcuXPw8/PTSwwcHBwQGxtr1cSA9SGv+lCq+tmbERHZsKFDh2Lnzp16CcKRI0fQo0cPzJs3r87P/clRZmYm5s2bhx49eujdanNwcMDOnTsxdOhQq8fF+pBXfSgRb4MSkc2o7Txrti4hIQEBAQEoKysz+HrPnj3h7++PIUOGwMvLS3sLWSlyc3ORkZGB5ORkxMfH4/Tp0wb3q0wMRo8ebeUIxVgfj8mlPpSEyRoR2Qwma/qSkpIwY8YMvSlLDHFzc4OXlxfatm0LFxcXqNVq2dxOrqioQHFxMQoKCnD9+nVkZGToLQJuiLu7O2JjY2VzBYf1Ia/6UAoma0RkM5isGVZYWIiIiAhERkZaZfkuOVCr1Qg
PD8eCBQtk90wU60Ne9aEETNaIyGYwWateZmYmVq9ejd27d2uX17I1bm5uGDNmDMLCwmQ/HQTrg4zFZI2IbAaTNeOUlZUhNTUV8fHx2L9/P65cuQKl/ipQqVRwd3fHsGHD4O/vD19fX8WtRMH6oJowWSMim8FkzTTFxcW4fPkyMjIykJGRgezsbBQVFaGoqEjq0EScnJzg5OSEFi1awMvLC15eXujQoQPUarXUoZkV64N0MVkjIpvBZI2IbJE8hpUQERERkUFM1oiIiIhkjMkaERERkYwxWSMiIiKSMSZrRERERDLGZI2IiIhIxpisEREREckYkzUiIiIiGWOyRkRERCRjTNaIiIiIZIzJGhEREZGMMVkjIiIikjEma0REREQyxmSNiIiISMaYrBERERHJGJM1IiIiIhljskZEREQkY0zWiIiIiGSMyRoRERGRjDlIHQCRNRQXF+PSpUvIyMiARqPBvXv3UFRUhOLiYqlDE1Gr1XBycsITTzyBjh07wsvLCx4eHlCr1VKHRjaM7UNeWB+ki8ka2aSysjIcPnwY8fHx2L9/P65evQpBEKQOyyQqlQrt27fHsGHD4O/vj0GDBsHBgU2XTMf2IS+sD6qJSlDqTwSRARqNBp988gl2796Nu3fvSh2ORbRs2RJjx45FWFgYPD09pQ5HVvLz89GoUSMAwKNHj9CwYUOJI5IXtg95YX2Q0QQiG5Cfny8sXrxYcHR0FADUi39qtVpYvHixkJ+fL/Xpl41Hjx5pz8+jR4+kDkc22D7khfVBtcUra6R4SUlJmD59Oq5evVrjvq1atULHjh3Rrl07uLi4wNHREXZ28hhnU1FRgZKSEhQUFODatWvQaDTIysqq8X3u7u6IjY3F0KFDrRClvPHKmj62D3m1D9aHvOpDKZiskaIlJCQgICAAZWVlBl/v1asX/P39MXjwYHh5ecHV1dXKEdZNbm4uMjIykJKSgp07d+L06dMG93NwcMDOnTsxevRoK0coL0zWxNg+HpNL+2B9PCaX+lAUaS/sEZluz549goODg97ldnt7e2Hu3LlCZmam1CGanUajEebOnSvY29vrfW8HBwdhz549UocoKd4G/R+2D3m1D9aHvOpDaZiskSLt37/fYMfn6+srnDt3TurwLO7cuXOCr6+vwQ5w//79UocnGSZrj7F9yKt91If6SE1NrfI1udWHEjFZI8XJz88X2rdvr9fwg4KChPLycqnDs5ry8nIhKChI7zy4u7sLBQUFUocnCSZrbB+V5NI+bL0+ysvLhfDwcGHatGk17ieH+lAqJmukOIsXL7bZjq+2quoAlyxZInVokmCyxvbxZ3JoH7ZcHwUFBcK4ceMEAMLXX39d4/5yqA+lYrJGipKRkSGo1Wq9Wwm20PGZqry8XBg4cKDeMHmNRiN1aFZX35M1tg99UrYPW66P27dvC3369NF+r/T0dKPex/7KNPIYA0xkpE8++US05Iq9vT3Wrl0rm+HsUrCzs8PatWthb2+vLSsuLsbq1asljIqkwPahT8r2Yav1kZaWhr59++LEiRMAgKZNm6Jz585GvZf9lWmU/RND9UpZWRl2794tKgsNDUXXrl0likg+nnnmGYSGhorK9uzZU+UUAWR72D6qJkX7sNX6SElJQf/+/UXzxPXt27dWCSj7q9pjskaKcfjwYb0lWXQbfH0WEhIi2r5z5w5SU1Mlioasje2jetZuH7ZYHxs2bMDw4cORm5srKu/Xr1+tj8X+qnaYrJFixMfHi7Z79eoFDw8PiaKRH09PT/Ts2VNUpnvOyHaxfVTP2u3DluqjoqIC4eHhCAoKMnj1y5Rkjf1V7TBZI8XYv3+/aNvf31+iSORL95zonjOyXWwfNbNm+7CV+igsLERAQABWrlxp8HWVSoW+ffuadGz2V8ZjskaKUFxcrLeW3uDBgyWKRr6GDBki2r569aroAWeyTWwfxrFW+7CV+sjKyoKfn5/es3d/5u3tjSZNmph0fPZXxmOyRop
w6dIlCDrL2Hbq1EmiaOTLy8tLtF1RUYHLly9LFA1ZC9uHcazVPmyhPnRHfFbFlFugldhfGY/JGilCRkaGaLtVq1Zo3LixRNHIl6urK9zc3ERluueObA/bh3Gs1T6UXh/Jycl6Iz4BoEmTJnqjPuuSrLG/Mh6TNVIEjUYj2u7YsaNEkcif7l+r7PxsH9uH8azRPpRcH+vXrzc44tPd3R07d+5ERUWFqLwuyRrA/spYTNZIEe7duyfabteunUSRyF/btm1F29nZ2RJFQtbC9mE8a7QPJdZH5YjP6dOno7y8XPRa37598dNPPyEnJ0dUXpvJcKvC/so4DlIHQGSMoqIi0baLi4tEkcif7rnRPXdke9g+jGeN9qG0+igoKMBbb71lcCCBv78/Nm/eDGdnZxw/flz0Wm0nwzWE/ZVxmKyRIuiOEHJ0dJQoEvlTq9WibXZ+to/tw3jWaB9Kqo/79+9j+PDhVQ4kaN26NZydnQFAL1mr6y1QgP2VsXgblBTJWmvrxcXFQaVSaf85ODigTZs2CAwMxI0bN7T73blzB82bN8eLL76od4zS0lI888wzcHd3R35+vsVjVvq6g1R3bB9Vk6J9yLlNNm3aFMHBwWjZsqXB16OioqBSqZCcnIzTp0+LXjNHsibncyMnPEtERli2bBm2bNmCmJgYDB8+HFu3bsWgQYO0fwW6ubkhMjISBw8exObNm0Xv/eSTT/Dbb78hOjoaDRs2lCJ8Ioti+1AuOzs7TJ48GRcvXsTs2bOrTJ6GDh2KkpIS7XZdJsOl2mOyRmSE4cOHY+LEiZg2bRo2bNiAsLAwZGZmIjExUbvPtGnTMGDAAISFhWkfkr18+TKWLVuGMWPGYMSIEVKFT2RRbB/K16xZM0RHR+PkyZNG7V+XyXCp9pisEZlg4MCBAIDMzExtmUqlQkxMDHJychAWFgYAmDVrFhwcHPDFF19IEieRFNg+lEt3xGdVzHELlIzHAQZEJrhy5QqAx3+N/pmPjw/CwsKwYsUKNG7cGD/88AM+//xztGnTRoIo65/S0lKD/yfrYvtQJkEQ8MILLxi1L5M162KyRmSEnJwc3Lt3D0VFRfj555/xwQcfQK1WG7x1s3jxYuzYsQNRUVHo1asXZs+eLUHE9cevv/6KjRs34vjx4zh79qy2vFWrVujWrRv69euHqVOnonv37hJGadvYPuSrqKgI6enpSEtLQ15envY5QicnJzRu3Bg+Pj7w8fGBWq3GnDlzDB7j9OnTmD17tmg0KJM162KyRmQE3UWY3d3dsXXrVr0JHYHHw/Qrn+V46aWXYG9vb5UY65uzZ88iJCQER44cMfh6SUkJTp48iZMnTyIqKgoDBw5EdHQ0unXrZuVIbR/bh3zk5eUhPj4eBw4cwJkzZ3DhwgW9SW512dvbw8vLCxcuXNB7TaPRwNPTE0ePHsXmzZsRHh6O0tLSOk+GS7XDZ9aIjLB27VokJydj165deOWVV3Dv3j29+YEqff755/jvf/+Lrl274osvvtBbeobqRhAEREREoHfv3lUmaoYcOXIEvXv3RkREhN4i21Q3bB/SEgQBhw8fRmBgIFq3bo2pU6di+/btSEtLqzFRA4Dy8nKDiVr37t3h4eEBQDxq9NNPP+WUG1bGs01khD59+mDw4MEYO3YsEhMT0bVrV0yYMAGPHj0S7Xft2jUsXboUo0aNQlJSEhwdHXmbx4wEQUBoaCgWLlxo0jNppaWlWLhwIUJDQ5mwmRHbh3RSUlLg4+MDPz8/bN68GQUFBWY79pkzZ+Dj44OUlBRtWbNmzTB58mSzfQYZx6aTtRs3bmDNmjUYOnQo/vKXv8DR0RGtW7fG2LFj8fPPP0sdHimUvb09VqxYgZs3byI6Olr0WkhICADgiy++wJNPPonly5cjKSkJ3377rRSh2pzIyEisXbu2zsdZu3YtIiMjzRAR6WL7sI6bN2/i9ddfx5AhQ3D+/Pka9/f09ISvry+GDBmCIUOGwNfXF56enjW
+7/z58xgyZAjeeOMN3Lp1yxyhkwlsOlmLiorCvHnzcOnSJQwdOhTvvvsuBgwYgL1796J///7YsWOH1CGSQvn5+aFPnz5Ys2aN9oHdhIQEJCYmYtmyZdqFm2fNmoVevXrhb3/7G3Jzc6UMWfHOnj2LJUuWmO14S5YsEQ1IIPNh+7AcQRAQGxuLLl26VPk7zN7eHiNHjsS6detw7Ngx5ObmQqPR4PDhw0hKSkJSUhIOHz4MjUaDGTNmGPW53377LTp37ozY2Fhzfh0ykk0na3369MGhQ4eg0WiwYcMGrFixArt27cLBgwdhb2+PmTNn6q3hRmSsv//978jKykJcXBzy8vLwzjvvoEePHnjnnXe0+9jZ2SEmJgZZWVl47733JIxW+UJCQsw6HUdpaan2Sg+ZH9uH+ZWVlSE0NBTBwcHIy8vTe71Lly5YtWoVrl+/jr1792LmzJno378/GjdubPB4Dx8+NJh8hYeHo0uXLnrleXl5CA4ORmhoKMrKyur+hchoNp2sjRkzBoMGDdIrHzhwIF544QU8ePAA586dkyAysgVjxoyBp6cnVq9ejUWLFuHmzZuIjY3VG93Wu3dvzJo1C+vWrcOpU6ckilbZfv3111oNJjDWkSNHcObMGbMfl9g+zK2kpAQBAQEGHwNo1qwZYmNjkZaWhrCwMLRu3dqoYz755JN6Zb6+voiIiEBaWhpiYmLQtGlTvX2io6MREBAgWn6KLMumk7XqNGjQAADg4MDZS6hqgYGBEAQBvXv31nvNzs4OGo0GGo0GUVFRKC8vx3PPPWfwOJWv9+rVy9Ih26SNGzcq8ti2ju3DOkpLSzFu3DgkJCTovfb222/jwoULmD59eq1GaB46dEh7i/rPKgcT2NnZYcaMGbh48SLefvttvf0SEhIwbtw4Tj5tJfUyWfvjjz+QkpKCJ598Es8884zU4RBRDf48GaeSjk1kDosXL8a+fftEZY6Ojti+fTvi4uLg5uZWq+NVtVLBhg0btBcyKrm5uSEuLg7btm2Do6Oj6LV9+/aZ9TlSqlq9u6xUWlqKSZMmobi4GJGRkYqbkFEQBLMOzVYK/vVmutLSUuTn50sdhslKS0stOhDg7NmzyMnJUfRVdrYP01mifZizPpKTk/VGLjds2BDfffcd/Pz8TDpmVSsVTJ06tcr3TJgwAU899RRGjBghOl+RkZF46aWX9CZGJvNSbu9kgoqKCgQGBiI1NRVBQUGYNGmS1CHVWkFBARo1aiR1GKQg69evx/r166UOQ7aKi4sNPpdD9YOc28edO3f0fk81aNAAP/zwAwYMGGDSMR8+fIioqCi9cmMmJ/bz88MPP/yAF198UZuQCoKASZMm4cyZM7W+wkfGqze3QSsqKjBlyhRs374dEydORExMjNQhERERVSk4OBhZWVmispUrV5qcqAFVDyowZs41ABgwYIDelb7bt29j5syZJsdENasXV9YqKiowefJkfPPNN3jjjTcQFxen2KUyXFxc9GYFrw/mzZsn279+5S4oKAifffaZ1GGYrLS0FK1atbLYyDO1Wo2srCxF3wZl+zCdJdqHOeojLS1Nb0DBK6+8UuUtTGPUNKjAWHPmzEFycjK+//57bdmePXuQnp4Ob29vk+Ojqim3dzLSnxO18ePHY8uWLYp7Tu3PVCoVGjZsKHUYVqf70KsxVq5ciY0bNyI9PV2xyXmlBQsW4ODBgyatvNGgQQPF/8x069YNJ0+etNixKxcWVyq2D3m1D1PqQ9enn34q2m7evDni4uKgUqlMOl5tBhXUxM7ODnFxcejSpQsePHgginnDhg0mxUfVU3YLrUHlrc9vvvkG/v7+2Lp1q6ITNTJebm4uIiMjER4erv1FtGPHDkycOBFeXl5QqVRVPpz7yy+/ICQkBD4+PmjYsCH+8pe/ICAgAL///rtRn13ZoRr6d/v2be1+hw4dqnI/lUqF5cuXa/edO3cuzpw5g8TERNNPioL169dPkceWK7YPebt9+za2bt0
qKps1axZatmxp8jFNGVRQHTc3N8yaNUtUtmXLFlEdkvnY9JW1ZcuWYfPmzWjUqBE6deqEjz76SG+fUaNG4dlnn7V+cGRRGzduRFlZGd544w1t2ZdffolTp07hueeeQ3Z2dpXvjYyMxLFjx+Dv749u3brh9u3biI6ORs+ePfHTTz+ha9euRsWwbNkydOjQQVT25wfZn376aWzZskXvfVu2bEFSUhKGDh2qLWvdujVee+01rF69GiNHjjTq823JlClTDD4Uba5j1zdsH/IWGxsruu3v6OhYp9U26jKooDohISFYtWqVNtaSkhLExsZi6dKldTouGSDYsLffflsAUO2/TZs2SR0mGWHWrFmieps1a1a1+3fr1k2YOHGiqOyPP/4QysvLBUEQBB8fH2HQoEEG33vs2DGhuLhYVPb7778LarVaePPNN2uMddOmTQIA4ZdffqlxX0M6duwoeHl56ZXv2rVLUKlUQmZmZrXvr+25UoqBAwfW2J5r+2/gwIFSfy2zYPuQV/uo62f4+vqK3j916tQ6xePk5KT3s+/r61unY1aaMmWK6LhV/dxUxVb7K3Oz6dugcXFxEASh2n+BgYFSh0lmdvnyZZw9e1Zv3p927doZ9WxO//799SZ/9PLygo+PD86fP1+rWPLy8lBeXm70/idOnIBGo8Gbb76p91rl99m7d2+tYrAV0dHRZnkWqFKDBg0MLt1j69g+5K2srEzv+cxRo0aZfDxzDSqoim5sJ0+e5LqhFmDTyRrVT//5z38AAD179jTbMQVBQFZWFp544gmj3/PCCy/A1dUVLi4uGDlyJDIyMmp8z7Zt2wDA4C+jJk2awNPTE8eOHTM+cBvSrVs3LFu2zGzHW7ZsWb1cwYTtQ95+++03vYnP+/bta9KxBDMOKqiKbmz5+flIS0szy7Hpf5iskc25cOECAOg9D1MX27Ztw40bNzB+/Pga93VxcUFgYCDWrl2LhIQEzJ8/HwcOHED//v1x7dq1Kt9XXl6OHTt2oE+fPujYsaPBfTw8PJCenm7y91C68PBwzJ49u87HCQkJQXh4uBkiUh62D3n76aefRNuenp4mDyww96ACQ9zc3ODh4SEq0/0OVHdM1sjmZGdnw8HBwWwrPVy4cAGzZ89Gv379DC5orCsgIACbNm3CW2+9hVGjRuHDDz/E/v37kZ2dLRrBpuvAgQPIysoyeNWgUrNmzXDv3j2TvoctUKlUiIqKwooVK0y6MtCgQQOsWLECX3zxhclTICgd24e8/fHHH6LtHj16mHQcSw0qMEQ3Rt3vQHXHZI2oGrdv38Zf//pXNGnSBLt27TJ56pcBAwagb9++1T4nsm3bNtjb21d7dUIQhHqbZFRSqVRYsGABTp48iYEDBxr9voEDB+LUqVNYsGBBvT+H5sL2YX6FhYWibVPnAKzrSgW1oRuj7negurPpqTuofmrRogXKysqQl5eHxo0bm3ycnJwcDB8+HA8fPsSRI0fw1FNP1Smudu3a4eLFiwZfKywsREJCAgYPHoxWrVpVeYwHDx7U6rkgW9atWzekpqbizJkz2LhxI44fP44zZ85opxFQq9Xo1q0b+vXrhylTpqB79+4SRywPbB/y9u677+L1119HYWEhCgsLTTqvlh5UoCs0NBTjxo2Ds7MznJ2d0aZNG4t8Tn3GZI1sTpcuXQA8HvXWrVs3k45RVFSEV199Fb///jtSUlLMsoTKpUuXqnz2JDExEXl5edXe4gEefycmHWLdu3fH559/DuDxSLrc3FwAgKurq6KXkLIUtg95a9u2Ldq2bWvy+60xqEDXs88+y/lKLYy3QcnmVM5Ib+ryROXl5Rg/fjyOHz+O+Pj4ame4v3XrFi5cuIDS0lJt2d27d/X2+7//+z+cOnUKL7/8ssHjbN++HS4uLhg9enSVn5WTk4PMzEz079+/Ft+mfnFwcEDz5s3RvHlzJmpVYPuwbdYYVEDWx96MbI6Hhwe6du2KlJQU0ez0qampSE1NBfD4F0Z
+fr52VQtfX1/4+voCeHwbIjExEa+++iru37+vt+zLxIkTtf9fuHAhNm/ejMuXL8Pd3R3A43moevTogd69e6NJkyY4ffo0Nm7ciHbt2mHRokV68d6/fx/ff/89xo4dW+1D3ykpKRAEAa+99pppJ4YIbB+2zJqDCsi6mKyRTZoyZQqWLFmCwsJCODs7AwB+/PFHfPDBB6L9Fi9eDABYunSp9pfRr7/+CgDYt28f9u3bp3fsP/8yMmT8+PH497//jaSkJBQUFODJJ59EUFAQli5davB5m/j4eJSWlmLChAnVHjc+Ph4DBgywyAPCVL+wfdgmaw4qICuz/qIJRLVX2yVJHj58KDRv3lzYsGGDlSK0rFu3bglOTk7Cv/71rxr35fIt9Q/bh7zahxRt8ODBgwaXVCspKbH4Z9cF+yvj8Jk1UqSKiopqX2/SpAnmz5+PVatW1bivEqxZswbPPPOMUbd4bOH7Ut2wfVRNiu9r6c8UJBhUYC628PNnDUzWSBHUarVou3J6huqEh4fjwoULRq13KHcRERE4ceKEUfsWFxeLtp2cnCwREskI24e82ocp9VEXSh5UwP7KOMpvpVQv6DZg3bXz6H90zw07P9vH9mE8a7QPa9aH0gcVsL8yDpM1UgTdiS6rW0Owvrt+/bpou0WLFhJFQtbC9mE8a7QPa9aH0gcVsL8yDpM1UgTdhZuV8lejFDIyMkTbXl5eEkVC1sL2YTxrtA9r1Ud6erpVVyqwBPZXxmGyRoqg24CzsrK0M9XT/+Tm5uLOnTuiMnZ+to/twzjWah/WqA9BEBASEqJXroRBBZXYXxmPyRopgoeHh94Czbp/kZH+ObGzs0OHDh0kioashe3DONZqH9aoj5KSEnh7e4sGiDz//POKGFRQif2V8ZiskSKo1Wq0b99eVKakS/3WkpycLNpu37693sg0sj1sH8axVvuwRn2o1WpER0fj5MmT6NevH1xdXbFr1y6zfoalsb8yHpM1Uoxhw4aJtuPj4yWKRL50z4nuOSPbxfZRM2u2D2vVR48ePXD06FEcP34crVu3tshnWAr7K+MxWSPF8Pf3F22fOnUKly5dkiga+cnMzMTp06dFZbrnjGwX20f1rN0+rFkfdnZ28Pb2tsixLYX9Ve0wWSPFGDRoEFq2bCkqMzS/UH0VHR0t2nZzc9Ou50i2j+2jetZuH6yP6rG/qh0ma6QYDg4OGDt2rKgsKioKv/32m0QRyce5c+f0fhGMGTMGDg4OEkVE1sb2UTUp2gfro2rsr2pPJQiCIHUQRMbKzMyEj4+PaIkSX19fHDx40CaWzTFFRUUF/Pz8cOTIEW2ZWq1GWlqaYibGJPNg+9AnZftgfehjf2Wa+vnTQorl6emJ+fPni8pSU1MRHBxcLxcErqioQHBwsKjjAx6v+8iOr/5h+xCTun2wPsSkrg9FE4gUJj8/X2jfvr0AQPQvKChIKC8vlzo8qykvLxeCgoL0zoO7u7tQUFAgdXgkEbaPx+TSPlgfj8mlPpSKyRop0v79+wUHBwe9hj9w4EDh7NmzUodncWfPnhUGDhyo9/0dHByE/fv3Sx0eSYztQ17tg/Uhr/pQIiZrpFh79uwx2AHa29sLc+fOFTQajdQhmp1GoxHmzp0r2NvbG+z49uzZI3WIJBNsH/JqH6wPedWH0nCAASlaQkICAgICUFZWZvD1nj17wt/fH0OGDIGXlxdcXV2tHGHd5ObmIiMjA8nJyYiPj9ebl6iSg4MDdu7cidGjR1s5QpIzto/H5NI+WB+PyaU+lITJGileUlISZsyYgStXrtS4r5ubG7y8vNC2bVu4uLhArVbLZlRWRUUFiouLUVBQgOvXryMjI0NvkWND3N3dERsbi6FDh1ohSlIatg95tQ/Wh7zqQzGkvbBHZB4FBQXCkiVLBLVarXe53Vb/qdVqYcmSJXw4l2rE9iEvrA+qLV5ZI5uSmZmJ1atXY/fu3bh7967U4ViEm5sbxowZg7CwMA53p1ph+5A
X1gcZi8ka2aSysjKkpqYiPj4e+/fvx5UrV6DUH3WVSgV3d3cMGzYM/v7+8PX15UzfVCdsH/LC+qCaMFmjeqG4uBiXL19GRkYGMjIykJ2djaKiIhQVFUkdmoiTkxOcnJzQokULeHl5wcvLCx06dIBarZY6NLJhbB/ywvogXUzWiIiIiGRMHsNKiIiIiMggJmtEREREMsZkjYiIiEjGmKwRERERyRiTNSIiIiIZY7JGREREJGNM1oiIiIhkjMkaERERkYwxWSMiIiKSMSZrRERERDLGZI2IiIhIxpisEREREckYkzUiIiIiGWOyRkRERCRjTNaIiIiIZIzJGhEREZGMMVkjIiIikjEma0REREQyxmSNiIiISMaYrBERERHJGJM1IiIiIhljskZEREQkY0zWiIiIiGSMyRoRERGRjDFZIyIiIpIxJmtEREREMsZkjYiIiEjGmKwRERERyRiTNSIiIiIZY7JGREREJGNM1oiIiIhkjMkaERERkYwxWSMiIiKSMSZrRERERDLGZI2IiIhIxpisEREREckYkzUiIiIiGWOyRkRERCRjTNaIiIiIZIzJGhEREZGMMVkjIiIikjEma0REREQyxmSNiIiISMaYrBERERHJGJM1IiIiIhljskZEREQkY0zWiIiIiGSMyRoRERGRjDFZIyIiIpIxJmtEREREMsZkjYiIiEjGmKwRERERyRiTNSIiIiIZY7JGREREJGNM1oiIiIhkjMkaERERkYwxWSMiIiKSMSZrRERERDLGZI2IiIhIxpisEREREckYkzUiIiIiGWOyRkRERCRjTNaIiIiIZIzJGhEREZGMMVkjIiIikjEma0REREQyxmSNiIiISMaYrBERERHJGJM1IiIiIhn7/wCpV660fui27gAAAABJRU5ErkJggg==", + "text/plain": [ + "
" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "backend = CircuitsPennylaneBackend()\n", + "\n", + "qc = backend.genqc_to_backend(instructions, flip_qubit_order=False)\n", + "backend.draw(qc);" + ] + }, + { + "cell_type": "markdown", + "id": "2f0718e3-8282-4fd3-b883-4f2be50e4912", + "metadata": {}, + "source": [ + "# Export -" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "1f12d3ca-ebf9-4752-9d61-2fbd26f027db", + "metadata": {}, + "outputs": [], + "source": [ + "#| hide\n", + "import nbdev; nbdev.nbdev_export()" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "python3", + "language": "python", + "name": "python3" + }, + "widgets": { + "application/vnd.jupyter.widget-state+json": { + "state": {}, + "version_major": 2, + "version_minor": 0 + } + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/src/platform/backends/circuits_qiskit.ipynb b/src/platform/backends/circuits_qiskit.ipynb new file mode 100644 index 0000000..dd9d647 --- /dev/null +++ b/src/platform/backends/circuits_qiskit.ipynb @@ -0,0 +1,832 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "id": "5fb2a609-e10c-4af6-9740-676112148336", + "metadata": {}, + "source": [ + "# Qiskit circuits backend\n", + "\n", + "> [Qiskit](https://github.com/Qiskit/qiskit) based quantum circuit backend." 
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "a7a65d39-ffb3-441b-bb84-81e0f18e0769", + "metadata": {}, + "outputs": [], + "source": [ + "#| default_exp platform.backends.circuits_qiskit" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "d8d25f86-82c1-482e-997e-cb08141fbcf5", + "metadata": {}, + "outputs": [], + "source": [ + "#| export\n", + "from genQC.imports import *\n", + "from genQC.platform.backends.base_backend import BaseBackend\n", + "from genQC.platform.circuits_instructions import CircuitInstructions\n", + "from genQC.platform.tokenizer.base_tokenizer import Vocabulary\n", + "from genQC.utils.config_loader import get_obj_from_str\n", + "\n", + "import qiskit.circuit.library as ql\n", + "import qiskit.quantum_info as qi\n", + "\n", + "from qiskit import QuantumCircuit, transpile\n", + "from qiskit.circuit.gate import Gate" + ] + }, + { + "cell_type": "markdown", + "id": "1e6ecfe9-0365-49e3-bdf3-3902c5f52fcd", + "metadata": {}, + "source": [ + "## Utils" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "e0e6cce5-01ed-4842-bb24-323776cdae99", + "metadata": {}, + "outputs": [], + "source": [ + "#| export\n", + "def get_number_of_gate_params(gate_cls: type[Gate]) -> int:\n", + " # python: gives you the number of any arguments BEFORE *args, minus the ones that have a default, -1 for self parameter of classes\n", + " return gate_cls.__init__.__code__.co_argcount - len(gate_cls.__init__.__defaults__) - 1" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "a8d28b5b-702e-4bb0-acf1-20fe45d90c3f", + "metadata": {}, + "outputs": [], + "source": [ + "assert get_number_of_gate_params(ql.HGate) == 0\n", + "assert get_number_of_gate_params(ql.CXGate) == 0\n", + "assert get_number_of_gate_params(ql.U1Gate) == 1\n", + "assert get_number_of_gate_params(ql.U2Gate) == 2\n", + "assert get_number_of_gate_params(ql.U3Gate) == 3" + ] + }, + { + "cell_type": "code", + "execution_count": 
null, + "id": "6e7415a3-7e89-4efc-9a9e-80f9d8d637fe", + "metadata": {}, + "outputs": [], + "source": [ + "#| export\n", + "def instruction_name_to_qiskit_gate(name: str) -> Gate:\n", + " match name:\n", + " case \"swap\": name = \"Swap\"\n", + " case \"cp\": name = \"CPhase\"\n", + " case _: name = name.upper()\n", + " \n", + " return get_obj_from_str(f\"qiskit.circuit.library.standard_gates.{name}Gate\")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "2486f09e-78e3-4dec-b9a3-21d22dfdcc4d", + "metadata": {}, + "outputs": [], + "source": [ + "#| export\n", + "def get_target_control_qubits(qc: QuantumCircuit, gate: Gate) -> Tuple[List[int], List[int]]:\n", + " \"\"\"Get the target and control qubits of a Qiskit `Gate` of a `QuantumCircuit`.\"\"\"\n", + " \n", + " acts_on_cnt = gate.operation.num_qubits\n", + " acts_on = [qc.find_bit(qubit).index for qubit in gate.qubits] # order: (*control_qubits, *target_qubits)\n", + "\n", + " assert acts_on_cnt == len(acts_on), \"error in: acts_on_cnt == len(acts_on)\"\n", + "\n", + " num_ctrl_qubits = gate.operation.num_ctrl_qubits if hasattr(gate.operation, \"num_ctrl_qubits\") else 0 \n", + " num_targ_qubits = acts_on_cnt - num_ctrl_qubits\n", + "\n", + " control_qubits, target_qubits = acts_on[:-num_targ_qubits], acts_on[-num_targ_qubits:] \n", + " return control_qubits, target_qubits " + ] + }, + { + "cell_type": "markdown", + "id": "7cd858b0-e512-43d0-acf2-b76e9f06e5af", + "metadata": {}, + "source": [ + "## Backend" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "31d639ab-a606-4d19-a998-60eca46dd0a2", + "metadata": {}, + "outputs": [], + "source": [ + "#| export\n", + "class CircuitsQiskitBackend(BaseBackend):\n", + "\n", + " BASIC_BACKEND_TYPE = type[QuantumCircuit]\n", + " \n", + " def backend_to_genqc(self, qc: QuantumCircuit, ignore_barriers: bool = True) -> CircuitInstructions:\n", + " \"\"\"Convert a given Qiskit `QuantumCircuit` to genQC 
`CircuitInstructions`.\"\"\"\n", + "\n", + " if ignore_barriers:\n", + " gates = []\n", + " for gate in qc.data:\n", + " if gate.operation.name != \"barrier\": \n", + " gates.append(gate)\n", + " else:\n", + " gates = qc.data\n", + " \n", + " instructions = CircuitInstructions(tensor_shape=torch.Size([qc.num_qubits, len(gates)]))\n", + " \n", + " for gate in gates: \n", + " control_qubits, target_qubits = get_target_control_qubits(qc, gate) \n", + "\n", + " #Correction to qiskit v1.3.1\n", + " #`cp` is symmetric but qiskit uses\n", + " #`cp(theta, control_qubit, target_qubit)` target and control\n", + " #We make it only target connections like `swap` gates\n", + " if gate.operation.name == \"cp\":\n", + " target_qubits.extend(control_qubits)\n", + " control_qubits = []\n", + " \n", + " instructions.add_instruction(gate.operation.name, control_qubits, target_qubits, gate.operation.params)\n", + "\n", + " return instructions\n", + "\n", + " def genqc_to_backend(self, \n", + " instructions: CircuitInstructions, \n", + " place_barriers: bool = True,\n", + " ignore_errors: bool = False,\n", + " place_error_placeholders: bool = False) -> QuantumCircuit: \n", + " \"\"\"Convert given genQC `CircuitInstructions` to a Qiskit `QuantumCircuit`.\"\"\"\n", + " \n", + " gate_classes = {name:instruction_name_to_qiskit_gate(name) for name in instructions.instruction_names_set} \n", + " qc = QuantumCircuit(instructions.num_qubits) \n", + " \n", + " for instruction in instructions.data:\n", + " gate_cls = gate_classes[instruction.name] \n", + " num_of_paramters = get_number_of_gate_params(gate_cls) \n", + "\n", + " control_qubits, target_qubits = instruction.control_nodes, instruction.target_nodes\n", + " params = instruction.params[:num_of_paramters]\n", + " \n", + " try:\n", + " qc.append(gate_cls(*params), [*control_qubits, *target_qubits], []) \n", + " except Exception as err:\n", + " if ignore_errors: continue\n", + " elif place_error_placeholders:\n", + " 
qc.append(ql.UnitaryGate(np.eye(2**instructions.num_qubits), label=\"Err\"), range(instructions.num_qubits))\n", + " # qc.append(ql.UnitaryGate(np.eye(2), label=\"Err\"), [0])\n", + " continue\n", + " raise err\n", + " \n", + " if place_barriers: qc.barrier()\n", + " \n", + " return qc \n", + "\n", + " def get_unitary(self, qc: QuantumCircuit, remove_global_phase: bool = True) -> np.ndarray:\n", + " \"\"\"Return the unitary matrix of a `QuantumCircuit`.\"\"\"\n", + " U = qi.Operator(qc).to_matrix().astype(np.complex128)\n", + " if remove_global_phase:\n", + " U *= np.exp(-1j * qc.global_phase) \n", + " return U\n", + "\n", + " def schmidt_rank_vector(self, qc: Optional[QuantumCircuit] = None, densityMatrix: Optional[qi.DensityMatrix] = None) -> List[int]: \n", + " \"\"\"Return the SRV of a `qi.DensityMatrix`.\"\"\"\n", + "\n", + " if not exists(densityMatrix):\n", + " densityMatrix = qi.DensityMatrix(qc)\n", + " \n", + " systems_cnt = len(densityMatrix.dims()) \n", + " total_trace = set(range(systems_cnt)) \n", + " rank_vector = []\n", + " \n", + " for i in range(systems_cnt): \n", + " trace = list(total_trace - {i})\n", + " red_densityMatrix = qi.partial_trace(densityMatrix, trace) \n", + " # r = np.count_nonzero(np.linalg.eigvals(red_densityMatrix) > 1e-14) # was slower during testing \n", + " r = np.linalg.matrix_rank(red_densityMatrix, hermitian=True).item() \n", + " rank_vector.append(r)\n", + " \n", + " return rank_vector\n", + "\n", + " def optimize_circuit(self, \n", + " qc: QuantumCircuit, \n", + " vocabulary: Vocabulary, \n", + " optimization_level: int = 1, \n", + " silent: bool = True) -> QuantumCircuit:\n", + " \"\"\"Use `qiskit.compiler.transpile` to optimize a circuit.\"\"\"\n", + " \n", + " if optimization_level == 0: \n", + " return qc\n", + " \n", + " while optimization_level > 0:\n", + " try:\n", + " qc_opt = transpile(qc, optimization_level=optimization_level, basis_gates=vocabulary)\n", + " return qc_opt\n", + " \n", + " except Exception as er: 
\n", + " if not silent: print(er)\n", + " pass \n", + " \n", + " optimization_level -= 1\n", + " \n", + " return qc\n", + "\n", + " def rnd_circuit(self, \n", + " num_of_qubits: int, \n", + " num_of_gates:int, \n", + " gate_pool: Union[Sequence[Gate], Sequence[str]], \n", + " rng: np.random.Generator) -> QuantumCircuit:\n", + " \"\"\"Create a random `QuantumCircuit`.\"\"\"\n", + "\n", + " qc = QuantumCircuit(num_of_qubits) \n", + " gate_indices = rng.choice(len(gate_pool), num_of_gates)\n", + "\n", + " gate_pool = list(gate_pool)\n", + " if isinstance(gate_pool[0], str):\n", + " gate_pool = [instruction_name_to_qiskit_gate(gate) for gate in gate_pool] \n", + " \n", + " for gate_index in gate_indices:\n", + " gate_cls = gate_pool[gate_index]\n", + " num_of_paramters = get_number_of_gate_params(gate_cls) \n", + " params = rng.uniform(low=0, high=4.0*np.pi, size=num_of_paramters) if num_of_paramters > 0 else [] \n", + " \n", + " gate = gate_cls(*params) \n", + " act_qubits = rng.choice(num_of_qubits, gate.num_qubits, replace=False) # order: (*act_qubits)=(*control_qubits, *target_qubits) \n", + " qc.append(gate, [*act_qubits], [])\n", + " \n", + " return qc\n", + "\n", + " def randomize_params(self, qc: QuantumCircuit, rng: np.random.Generator) -> QuantumCircuit:\n", + " \"\"\"Randomize all parameters of a `QuantumCircuit`. 
This creates a new `QuantumCircuit` and therefore deletes global phase.\"\"\"\n", + " \n", + " qc_new = QuantumCircuit(qc.num_qubits)\n", + " \n", + " for gate in qc.data:\n", + " gate_cls = instruction_name_to_qiskit_gate(gate.operation.name)\n", + " control_qubits, target_qubits = get_target_control_qubits(qc, gate)\n", + " params = rng.uniform(low=0, high=4.0*np.pi, size=len(gate.operation.params))\n", + " \n", + " qc_new.append(gate_cls(*params), [*control_qubits, *target_qubits], []) \n", + " \n", + " return qc_new\n", + "\n", + " def draw(self, qc: QuantumCircuit, **kwargs) -> None:\n", + " \"\"\"Draw the given `QuantumCircuit` using Qiskit.\"\"\" \n", + " return qc.draw(\"mpl\", **kwargs)\n", + " # plt.show()" + ] + }, + { + "cell_type": "markdown", + "id": "890c5b01-6a2c-4f10-8510-8c19077ca12a", + "metadata": {}, + "source": [ + "## Test" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "0bd1a012-1802-4b4c-9c63-d58f918cd734", + "metadata": {}, + "outputs": [], + "source": [ + "from genQC.platform.tokenizer.circuits_tokenizer import CircuitTokenizer" + ] + }, + { + "cell_type": "markdown", + "id": "78f03bed-23d1-47db-8497-5fe6943c722d", + "metadata": {}, + "source": [ + "### genqc <-> backend" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "5b8d9d74-5b66-465d-95ff-7a80111e7d29", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "CircuitInstruction(name='h', control_nodes=[], target_nodes=[0], params=[6.2831854820251465, 6.2831854820251465])\n", + "CircuitInstruction(name='h', control_nodes=[], target_nodes=[2], params=[6.2831854820251465, 6.2831854820251465])\n", + "CircuitInstruction(name='ccx', control_nodes=[0, 2], target_nodes=[1], params=[6.2831854820251465, 6.2831854820251465])\n", + "CircuitInstruction(name='h', control_nodes=[], target_nodes=[2], params=[6.2831854820251465, 6.2831854820251465])\n", + "CircuitInstruction(name='u2', control_nodes=[], 
target_nodes=[1], params=[11.9380521774292, 1.8849557638168335])\n" + ] + } + ], + "source": [ + "tensor = torch.tensor([\n", + " [3, 0, -2, 0, 0],\n", + " [0, 0, 2, 0, 1],\n", + " [0, 3, -2, 3, 0],\n", + " ], dtype=torch.int32)\n", + "\n", + "params_tensor = torch.tensor([ # ... [max_params, time]\n", + " [0, 0, 0, 0, 0.9],\n", + " [0, 0, 0, 0, -0.7]\n", + " ])\n", + "\n", + "vocabulary = {\"u2\":1, \"ccx\":2, \"h\":3}\n", + "tokenizer = CircuitTokenizer(vocabulary)\n", + "instructions = tokenizer.decode(tensor, params_tensor)\n", + "\n", + "instructions.print()" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "37a72460-592e-49ed-91ec-f51c8bd1c15f", + "metadata": {}, + "outputs": [ + { + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAzQAAADuCAYAAADvP0KjAAAAOnRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjEwLjMsIGh0dHBzOi8vbWF0cGxvdGxpYi5vcmcvZiW1igAAAAlwSFlzAAAPYQAAD2EBqD+naQAAKaVJREFUeJzt3Xt4VfWd7/HPTgK5EZCEYMI9EJBrwHIbwGpBsKSKiCBaqcWipeNRoZUh08eOZwbHIyLYegCno2O1HqZEwHpBkIpItWgRAghyEwKGNCEJuCGEJISQyz5/UBijiWTvZO9f1vq9X8/jU81aa68v/X5+W79Za6/t8fl8PgEAAACAA4WZLgAAAAAAAsVAAwAAAMCxGGgAAAAAOBYDDQAAAADHYqABAAAA4FgMNAAAAAAci4EGAAAAgGMx0AAAAABwLAYaAAAAAI7FQAMAAADAsRhoAAAAADgWAw0AAAAAx2KgAQAAAOBYDDQAAAAAHIuBBgAAAIBjMdAAAAAAcCwGGgAAAACOxUADAAAAwLEYaAAAAAA4FgMNAAAAAMdioAEAAADgWAw0AAAAAByLgQYAAACAYzHQAAAAAHAsBhoAAAAAjsVAAwAAAMCxGGgAAAAAOFaE6QKApsjKyvJrf6/Xq9dff1233367OnTo0Khjhg8fHkhpCBEyAH8yEEj/JTLQ0pEB2M72NcAVGljF6/XqxRdflNfrNV0KDCEDdqP/IAOwnRvXAAMNAAAAAMdioAEAAADgWAw0AAAAAByLgQZWiYuL08SJExUXF2e6FBhCBuxG/0EGYDs3rgGPz+fzmS4CCJS/T7gKREt+qgfIAMgAyABg+xrgCg2sUllZqby8PFVWVpouBYaQAbvRf5AB2M6Na4CBBlbJycnR1KlTlZOTY7oUGEIG7Eb/QQZgOzeuAQYaAAAAAI4VYboA1M/n80lOuhQYGSmPx2O6CgAAAFiGgaalqqxU9fSZpqtotIjVr0hRUabLAAAAgGW45QwAAACAY3GFBlbp27evtm/fbroMGEQG7Eb/QQZgOzeuAa7QAAAAAHAsBhpYJTc3V7NmzVJubq7pUmAIGbAb/QcZgO3cuAYYaGCViooK7du3TxUVFaZLgSFkwG70H2QAtnPjGmCgAQAAAOBYDDQAAAAAHIuBBgAAAIBjMdDAKsnJyVqwYIGSk5NNlwJDyIDd6D/IAGznxjXA99DAKu3atVN6errpMmAQGbAb/QcZgO3cuAa4QgOrFBcXa82aN
SouLjZdCgwhA3aj/yADsJ0b1wADDaxy4sQJLV68WCdOnDBdCgwhA3aj/yADsJ0b1wADDQAAAADHsmKg8Xq9ysjIUGpqqqKiotS1a1fNnTtX5eXluu++++TxeLR8+XLTZQJA0Ow5dEo/X/SJbv/FJk35+SY9vHCrduz/0nRZAAA0mesfCrB7926lp6erqKhIsbGx6t+/vwoKCrR06VIdPXpUp0+fliQNGTLEbKFB8qH3pCZs/UBP9U/TI7361rtP67dX6wcdk/XmyO+GuDoAwfbpQa8eWrhVf9198hvblmce0IiBiVr6y3/QyLSOBqoDAKDpXH2Fxuv1atKkSSoqKtK8efNUWFioXbt2qaioSIsWLdL69euVlZUlj8ejtLQ00+UiBGJiYjRy5EjFxMSYLgWG2JSBv+wo1HfvXV/vMHPJ9n1f6nv3vaONf80PYWXm2NR/1I8MwHZuXAOuvkIzZ84c5efn66GHHtKSJUvqbMvIyNDKlSu1Z88epaSkqG3btoaqRCh169ZNy5YtM10GDLIlA3lFZZo8d5PKK6qvuO/5yhpNfeR9fbp6ilK7ufu90Jb+o2FkALZz4xpw7RWagwcPatWqVerQoYMWLlxY7z5Dhw6VJA0ePLjOz3NycnTrrbcqLi5O7du3149//GOdOnUq6DUj+GpqalRWVqaamhrTpcAQWzLwH6sO6kzphUbvX3auWktX7g9iRS2DLf1Hw8gAbOfGNeDagSYzM1O1tbWaMWOG2rRpU+8+0dHRkuoONKWlpRo7dqzy8/OVmZmpF154QVu2bNEtt9yi2trakNQeDOdqauStrKz3L5tkZ2dr3Lhxys7ONl0KDLEhA5UXavTi64f9Pu6VtdkqO1cVhIpaDhv6j29HBmA7N64B195ytnnzZknS2LFjG9wnP//iPeNfHWheeOEFHT9+XH/5y1/UrVs3SVKXLl00evRorV27Vrfddlvwig6ixw/t1+OH3P/bVwDSroNeeYvP+33c2bIqbd1zUhNGdQ5CVQAABIdrB5rc3FxJUvfu3evdXl1drY8//lhS3YFm3bp1uu666y4PM5I0atQo9ezZU2+//XZAA82wYcNUVFTk1zHRYWE6MGSU3+dqyP3dempqp671bkv/5MMmv36fPn1UYeAK1rRp0/za/+TJix+O3rBhg3bu3NmoY6ZMmeJ3XQgdMvBN51ulSnH3BHTsXXf/RNFVB5q5ouDyJwOB9F9yXgZsQwZgOzesgaSkJO3YsSOgY1070JSXl0uSKioq6t2+atUqeb1excXFKSUl5fLPDxw4oDvuuOMb+w8YMEAHDgT2L/mioiIdP37cr2NiwsOlIQGdrl6pbdroxsSrm+8Fv6agoEDnDNyLeanPjXUpDxUVFY0+1t/eIbTIQD1iYqS4wA497S2Uypz15/UnA4H0X3JgBixDBmA729eAaweapKQkFRcXa9euXRo1qu6VjsLCQs2fP1+SlJaWJo/Hc3lbcXGxrrrqqm+8Xnx8vA4dOhRwLf6KDnPWx5s6depk5ApNbGysX/tfWrjR0dGNPrZzZ26/acnIwDfVempV6KuSPK38O9BXo6SrqhTezll/Xn8yEEj/JedlwDZkALZzwxoI5L+XL3HtQDN+/HgdPHhQixYt0oQJE9SnTx9JUlZWlu655x55vV5JoflCzUAun/nOn1f19JlBqCY4Dh8+LE9UVMjPm5WV5df+1dXVuvfeexUXF6eIiMbF/9lnnw2gMoQKGajfTx77i37/ln8f+Jw+MVWrFn8epIqCx58MBNJ/yZkZsAkZgO1sXwPOugzgh4yMDCUkJCgvL08DBgzQoEGD1Lt3b40YMUI9e/bUuHHjJH3zkc3t27fXmTNnvvF6p0+fVnx8fChKRxBFRESoffv2fi1guIstGXj4h/0VFua58o5fMefuAUGqpuWwpf9oGBmA7dy4Blw70HTp0kVbtmzRzTffrKioKB07dkzx8fF6/vnntX79eh0+fPGRpl8faPr161fvZ
2UOHDigfv36haR2BE9+fr7mzZt3+Ql3sI8tGfhO/w567tHGP1hkybwRGnNt8D5n11LY0n80jAzAdm5cA64daKSLw8m6detUWlqq0tJSbdu2TbNnz1Z5ebmOHTumsLAwDRw4sM4xt9xyiz766KM6Td62bZuOHj2qSZMmhfqPgGZWVlamLVu2qKyszHQpMMSmDPzj9H565YnrFRfb8GdpYqMj9J+PjdG8mYNCWJk5NvUf9SMDsJ0b14B7rjX5Yf/+/fL5fOrTp49iYmLqbJs9e7aWLVumyZMna8GCBTp//rwyMjI0YsQITZ482VDFgbuhQ0ddmDT9W/e50nYAzvXjW3vr9vE9tPKdo3rpjcPK2u9Vba1PrSLC9JuMkbrnllS1bdPadJkAAATM1VdoGrJ3715J37zdTJLatm2rzZs3Kzk5WXfddZfuv/9+jR49WuvWrVOYw548BgCS1CamlWZP66tP/nCrkjtES5I6xkfpwbv6M8wAABzPyis03zbQSFKvXr20bt26UJYEAAAAIABWXnK40kAD90pMTNTcuXOVmJhouhQYQgbsRv9BBmA7N64BK6/QbN682XQJMCQhIUEzZswwXQYMIgN2o/8gA7CdG9eAlVdoYK+zZ89q06ZNOnv2rOlSYAgZsBv9BxmA7dy4BhhoYJWCggI9+uijKigoMF0KDCEDdqP/IAOwnRvXAAMNAAAAAMdioAEAAADgWAw0AAAAAByLgQZWiYyM1DXXXKPIyEjTpcAQMmA3+g8yANu5cQ1Y+dhm2CslJUUrVqwwXQYMIgN2o/8gA7CdG9cAV2gAAAAAOBYDDaxy6NAhjRkzRocOHTJdCgwhA3aj/yADsJ0b1wADDazi8/lUVVUln89nuhQYQgbsRv9BBmA7N64BBhoAAAAAjsVDAVqqyEhFrH7FdBWN56InZQAAAMA5GGhaKI/HI0VFmS4DAAAAaNEYaGCVHj16KDMzU507dzZdCgwhA3aj/yADsJ0b1wADDawSFRWlXr16mS4DBpEBu9F/kAHYzo1rgIcCwCqFhYV64oknVFhYaLoUGEIG7Eb/QQZgOzeuAQYaWKWkpERr165VSUmJ6VJgCBmwG/0HGYDt3LgGGGgAAAAAOBYDDQAAAADHYqABAAAA4FgMNLBKfHy8Zs6cqfj4eNOlwBAyYDf6DzIA27lxDTDQwCphYWFq1aqVwsKIvq3IgN3oP8gAbOfGNeCePwnQCF6vVy+++KK8Xq/pUmAIGbAb/QcZgO3cuAYYaAAAAAA4FgMNAAAAAMdioAEAAADgWAw0sEpcXJwmTpyouLg406XAEDJgN/oPMgDbuXENeHw+n890EUCgsrKygn6O4cOHB/0cCBwZ8E+X8Zk6fvKcOneMUf6mH5oup1mQAZAB2M72NcAVGlilsrJSeXl5qqysNF0KDCEDdqP/IAOwnRvXAAMNrJKTk6OpU6cqJyfHdCkwhAzYjf6DDMB2blwDEaYLAAAAQOj4fD5VV7jnt/P+ioiOlMfjMV0GmhEDDQAAgEWqKyr1h14/Ml2GMTOO/rdaxUSZLgPNiFvOAAAAADgWAw0AAAAAx+KWM1ilb9++2r59u+kyYBAZsBv9BxmA7dy4BrhCAwAAAMCxGGhgldzcXM2aNUu5ubmmS4EhNmfA5/Pp0ncp2/qdyjb3HxeRAdjOjWuAW85glYqKCu3bt08VFRWmS4EhNmWg+Gyl1mzM0ba9X2rnAa/2Hy1WdfXFQabgywqlTX1dQ/t30IiBibrjphR1aO/+p/7Y1H/UjwzAdm5cAww0AOAyuz8/paUr9ytzwxc6X1nT4H57s4u1N7tYv38rW79YvE3Tb0rRw3f31/CBiSGsFgCApuGWMwBwiYrz1Zq3ZJu+c+ebevnN7G8dZr6u8kKNVqw7ohF3r9XDC7eq/FxVECsFAKD5MNAAgAt8dvi0htzxpn79//apqR+PWZ55QGnT3tCO/V82T3EArJI6/Xu6t/A1pU7/Xr3b23RJ1L2Fr+m6Z
x8MbWFwLQYaWCU5OVkLFixQcnKy6VJgiBszsO2zk7rhJ+t1OLek2V7zi/xSjb1vgz7cUdhsr9kSuLH/8A8ZgO3cuAYYaGCVdu3aKT09Xe3atTNdCgxxWwY+O3xaEx94V2dKLzT7a5edq9ItD72nrH3uuVLjtv7Df2QAtnPjGmCggVWKi4u1Zs0aFRcXmy4FhrgpA+crq3Xn/M1BGWYuKTtXpTvn/1llLvlMjZv6j8CQAdjOjWuAgQZWOXHihBYvXqwTJ06YLgWGuCkD//ofu/R5jn+3mWVl3qq89+5SVuatjT4m53ipfvlslr/ltUhu6j8CQwZgOzeuASsGGq/Xq4yMDKWmpioqKkpdu3bV3LlzVV5ervvuu08ej0fLly83XSYANNq+7NNa8so+v49L6hCjLlfHKqlDjF/HPffqQW3f655bzwAA7uH676HZvXu30tPTVVRUpNjYWPXv318FBQVaunSpjh49qtOnT0uShgwZYrZQAPDD8lcPqra2iY8z89OyzP1aMeh7IT0nAABX4uorNF6vV5MmTVJRUZHmzZunwsJC7dq1S0VFRVq0aJHWr1+vrKwseTwepaWlmS4XABqlpPSC/nvdkZCfd/W7OTp5yj3fLA3ALN/fnzEf1jpCo5f8o6Z+8pxmZK/QlI+Wqu+sdMPVwUlcPdDMmTNH+fn5euihh7RkyRLFxcVd3paRkaHBgwerurpaPXr0UNu2bQ1WilCJiYnRyJEjFRPj3+02cA83ZOCNzcdUXlEd8vNeqKrV6o05IT9vc3JD/9E0ZCD4qs9ffFBJeHRkvdsjYi7+vObv+4WFh6vi5BltvOvf9Yc+P9aHP/u1Bv98qnpMGhWagi3jxjXg2lvODh48qFWrVqlDhw5auHBhvfsMHTpUe/bs0eDBgy//LD8/X0899ZS2b9+uPXv26MKFC5d/gwDn69atm5YtW2a6DBjkhgyY/CyL0x/h7Ib+o2nIQPCV/e2kJOmq3p3r3d6udxdJUunf96uuqNSnT796efvp/ceUt3GHOo7op2Nvbw1ytfZx4xpw7RWazMxM1dbWasaMGWrTpk29+0RHR0tSnYHmyJEj+uMf/6ikpCQNHz48JLUidGpqalRWVqaamhrTpcAQN2RgxwGvsXPvNHju5uCG/qNpyEDwndr7hcqOf6mU28Yo+ur2dbaFtYpQv1np8tXWKm/jjnqP90SE6+qR/VR8MDcU5VrHjWvAtQPN5s2bJUljx45tcJ/8/HxJdQea66+/XoWFhVq7dq3Gjx8f3CIRctnZ2Ro3bpyys7NNlwJD3JCB/UfOGDv3wZwS1dTUGjt/U7mh/2gaMhB8vppaffLP/6VWcTGavPkZDf3Vj9TnR+OV9otpmrTxaSWNHqDPlr2hs0cL6j3+H568T1VlFTq65sMQV24HN64B195ylpt7carv3r17vdurq6v18ccfS6o70ISFNf+MN2zYMBUVFTX760KaNm2aX/ufPHnx8vaGDRu0c+fORh0zZcoUv+tC6NiWAZ+kc/ELGtyelXnrtz6SOalD9OX/zXvvrgb3K/Ke0/Afrv3Gz2trferavZfC1HK+aNOfDATSf6llZQDfRAb808oXpn/ViKCeI//9XXrn1n/RoAdvU+r0GxTZPk7V5yp1al+OPpj9TIO3kg3/t5lKHHqN3r3j31RbFZzPCvbp3UdVHuf+YqY+blgDSUlJ2rGj/qt2V+Lagaa8vFySVFFR/xN5Vq1aJa/Xq7i4OKWkpAS1lqKiIh0/fjyo57DVpT431qU8VFRUNPpYeteyWZmB+IY3XfqemSuJCA9r1H71KSwokHwXAjo2GPzJQCD9l1pgBlAHGfBPa0+4dHXwz3Nqz1F9MPuZRu8/4vF7lXzdIP3pjgWqPF0atLoKCgt0weee260k1oBrB5qkpCQVFxdr165dGjWq7lMyCgsLNX/+fElSWlqaPB5P0GtBcMTG+vcfZJcWbnR0dKOP7dy5/g81omWwMQMFvkr5PPU/PajIe
+5bj03qEK2I8DBV19SqyNvwI5gbfB1frTp16iiPWs7DUvzJQCD9l1peBlAXGfBPK1+Y1MIuUIz491lKvm6g/jTt31R56mxQz9UpuZPrrtC4YQ005b+XXTvQjB8/XgcPHtSiRYs0YcIE9enTR5KUlZWle+65R17vxQ+2huILNQO9fIYry8rK8mv/zz//XJmZmUpPT1ffvn0bdcyzzz4bQGUIFRszMPLutdrewNPG6rtN7Kvy3rtLXa6OVZG3Ql0nvPqt+9ZnYO8E7X09z+/jgsmfDATSf6nlZQB1kQH/VJ07rz/0+pHpMi6L7dJB/e//gWrOX9C0bc9d/vmJbZ9r04z/0+znO5x9WK1iopr9dU2yfQ24dqDJyMjQypUrlZeXpwEDBqhv3746f/68jhw5ovT0dPXo0UPvvvtunc/PwP1SU1P17rvv1vlOItjFDRkY2j+hwYEm+OfuYOS8zcUN/UfTkIGWpzzfq98n+/d5SATOjWvAtU8569Kli7Zs2aKbb75ZUVFROnbsmOLj4/X8889r/fr1Onz4sCQx0FgmIiJC7du3V0SEa2d5XIEbMjByUEdj5x4x0NkDjRv6j6YhA7CdG9eAawcaSerXr5/WrVun0tJSlZaWatu2bZo9e7bKy8t17NgxhYWFaeDAgabLRAjl5+dr3rx5lx/ZDfu4IQNTbuyu2OjQ/4sosnW4pn+/Z8jP25zc0H80DRmA7dy4Blw90DRk//798vl86t27t2Jivvl409dee02vvfaaDhw4UOef+SyM85WVlWnLli0qKyszXQoMcUMG2rZprXtuSQ35eafflKIO7Z1937kb+o+mIQOwnRvXgHuuNflh7969khq+3eyOO+6o959nzpyp3//+90GtDQAa4+G7++u/Xj+kmprQPW1szoz+ITsXAACNxUBTD5+v5TyOFADq079Xe2Xcm6aFv9sTkvPNubu/hg1IDMm5AITeiH+fpW7fH6Y2XTtq7fh/0un9xxq1rQ6PR8Meu0edxw5RWES4Tmz/XJ/88r+u+AWZETFRGvu7f1JCWk+FhYdrZd+ZDe4bmdBW1/36fym2S6LCWoXL++kRbf3nF1Rz/oLCo1pr1NM/U8Kgi98vWJp7Qh/P+23QHwMN86y85exKAw0AOMG/PnCtBvS6yq9jirznlH+i/IrfV/NVvbrG6ck5w/ysDoCT5K7fqncm/4vK8k76te2ret99oxIGpejtmzL0xnfnSj6f+t3/gyueu7a6WnuXv6mN0x+/4r6D596uszmFWnvjPL31vUcUndhOqXeOlSRdc88ERUS31ltjH9FbYx/ReW+JBj5w6xVfE85n5RWazZs3my4BhiQmJmru3LlKTOQ3zbZyUwYiW4dr1eJxuv4n63W6pLJRx1zpe2q+Li62lVYvHqfYmFaBlNjiuKn/CAwZqN+JTw4GtO2r4vt3V8GWvZevyORv/lRD5k3X/t9++/tO7YVqFX28T226XLknPp/Uqk205PEorHWEwqMjda7w1N+3+RQRHamwVhHy1dYqIiZKxQf/1qjabeLGNWDlFRrYKyEhQTNmzFBCQoLpUmCI2zIwILW9/vTb76t929bN/tpt27TSO8/dpO84/Ltnvspt/Yf/yEDwnPrsC3W7aZhatYmWJyJcKZNGq03X5v2P5j2/eU1xPZJ052cv6q69v1NJdr7yNl58aNPhFe+pqqxCd+39ne787Hdq3TZGB1/a0KzndwM3rgEGGljl7Nmz2rRpk86e5X5aW7kxA8MHJuovL9+sfj2varbX7N29rT743Q903XeSmu01WwI39h/+IQPBc2TVn3X8z7s18fXHlf764yr5okC+6ppmPUfK5DE6czhfqwf/VKuG/FRte3ZS77tvlCR1umGwPGFhWjX4fq0e8lNdKCnXtRl3Nuv53cCNa4CBBlYpKCjQo48+qoKCAtOlwBC3ZmBg73jtWjVZv7wvTWFhnoBfx+ORfnHPAO1ePUXX9nPPlZlL3Np/NB4ZCK7dz6zW2
zfN1zu3/kolh/N15nDzftdJ33u/ry9e3yJfba2qy88rd90nShpz8TsF+/xognI3bFNNZZVqq6r1xetblDSa7xv8OjeuAQYaAHCJqMgILZw7XHvW3Kaf3dHXry/fjIoM16wpfbRr1W369fx/UIyBL+4E4Gzhka3Uul2sJCkyPk6DHrpNe5978/L2KVv+r2KS4pt0jtLcE+o8dogkyRMRrk7fG6wzn//tf7bd8D8PfOoy/js6c4jP0NiAf2MBgMsM7B2v/3xsjBb9fLj+uOmYtu/7UjsPeLXvSLHOV/7P7R/f6Zegof07aPiADpo6IUXx7SINVg3ApFFPz1aXG4cquuNVmpD5L6oqq9Drox++4rbRS/5ReRt3KG/jDrWKi9HE1xfIV+uTJ8yjgy++o/z3dkqSohLaKrJ9G1Weqf/LHG99/xlFJbRVq7ho3bHzeRX9dZ+2PLzs4rb3Fuu9Hz2pihPF2v7Yyxr19GxN3vyMPOFhOrnjsA68sE7SxatDoxf/TJM/+I0kqeTIcW3NeD6o/7+hZWCgAQCXahfXWrOm9NGsKX0u/6zzjStV8GWFOneM0c5Vt5krDkCLsjXjhYC2/fWf/vPy35/3lujN639e735XjxqgAy++o5rzF+rdvvbGeQ2eY+2E+Zf/vizvpN774RP17nfhTJk++OkzDb4O3IuBBlaJjIzUNddco8hIfhNtK9sz4PEE/vkaN7C9/yADpuSu22q6BPydG9cAAw2skpKSohUrVpguAwaRAbvRf5AB2M6Na4CHAgAAAABwLAYaWOXQoUMaM2aMDh06ZLoUGEIG7Eb/QQZgOzeuAQYaWMXn86mqqko+n890KTCEDNiN/oMMwHZuXAN8hgYAAMAiEdGRmnH0v02XYUxEtHs+DI+LGGgAAAAs4vF41ComynQZQLPhljMAAAAAjsUVGlilR48eyszMVOfOnU2XAkPIgN3oP8gAbOfGNcBAA6tERUWpV69epsuAQWTAbvQfZAC2c+Ma4JYzWKWwsFBPPPGECgsLTZcCQ8iA3eg/yABs58Y1wEADq5SUlGjt2rUqKSkxXQoMIQN2o/8gA7CdG9cAAw0AAAAAx2KgAQAAAOBYDDQAAAAAHIuBBlYJCwvTtddeq7Awom8rMmA3+g8yANu5cQ24508CNEJtba0+/fRT1dbWmi4FhpABu9F/kAHYzo1rgIEGAAAAgGMx0AAAAABwLAYaAAAAAI7FQAOrxMXFaeLEiYqLizNdCgwhA3aj/yADsJ0b14DH5/P5TBcBBCorKyvo5xg+fHjQz4HAkQH/dBmfqeMnz6lzxxjlb/qh6XKaBRkAGYDtbF8DXKGBVSorK5WXl6fKykrTpcAQMmA3+g8yANu5cQ0w0MAqOTk5mjp1qnJyckyXAkPIgN3oP8gAbOfGNRBhugAA3+Tz+SSn/eYkMlIej8d0FYBr8D4AAI3DQAO0RJWVqp4+03QVfolY/YoUFWW6DMA9eB8AgEbhljMAAAAAjsVAAwAAAMCxuOUMVunbt6+2b99uugwYRAbsRv9BBmA7N64BrtAAAAAAcCwGGlglNzdXs2bNUm5urulSYAgZsBv9BxmA7dy4BhhoYJWKigrt27dPFRUVpkuBIWTAbvQfZAC2c+MaYKABAAAA4FgMNAAAAAAci4EGAAAAgGMx0MAqycnJWrBggZKTk02XAkPIgN3oP8gAbOfGNcD30MAq7dq1U3p6uukyYBAZsBv9BxmA7dy4BrhCA6sUFxdrzZo1Ki4uNl0KDCEDdqP/IAOwnRvXAAMNrHLixAktXrxYJ06cMF0KDCEDdqP/IAOwnRvXAAMNAAAAAMeyYqDxer3KyMhQamqqoqKi1LVrV82dO1fl5eW677775PF4tHz5ctNlAs3uQ+9JtX57tX599PMG92n99mrdtm1LCKtCqJWUXtDyzAMaeuebKvzynCTp5OkK/WbFPhWfrTRcHYKN9wEAbuf6hwLs3r1b6enpKioqUmxsrPr376+CggItX
bpUR48e1enTpyVJQ4YMMVsoAATBy28e1sMLt6q8orrOz6uqfXpk8Tb9atkOLXlkhP7XXf0NVQgAQNO4+gqN1+vVpEmTVFRUpHnz5qmwsFC7du1SUVGRFi1apPXr1ysrK0sej0dpaWmmy0UIxMTEaOTIkYqJiTFdCgyxKQPPvXpAs/73lm8MM19Vcb5GDz65VU+/9FkIKzPHpv6jfmQAtnPjGnD1FZo5c+YoPz9fDz30kJYsWVJnW0ZGhlauXKk9e/YoJSVFbdu2NVQlQqlbt25atmyZ6TJgkC0Z2LH/Sz28cGuj9//nZ7M0anBHfXdoUhCrMs+W/qNhZAC2c+MacO0VmoMHD2rVqlXq0KGDFi5cWO8+Q4cOlSQNHjz48s9ee+01TZ06Vd27d1dMTIz69u2rX/3qVyorKwtJ3QiumpoalZWVqaamxnQpIXWupkbeysp6/7KNLRlYnnlQPp9/xyxduT84xbQgtvS/PrwPXGRzBgDJnWvAtQNNZmamamtrNWPGDLVp06befaKjoyXVHWiWLFmi8PBwPfnkk9qwYYMeeOAB/fa3v9XEiRNVW1sbktoRPNnZ2Ro3bpyys7NNlxJSjx/ar04b36r3L9vYkIFTZ87r1T994fdxb2zOVcHJ8iBU1HLY0P+G8D5wkc0ZACR3rgHX3nK2efNmSdLYsWMb3Cc/P19S3YHm7bffVmJi4uV/vuGGG5SYmKgZM2boo48+0vXXX+93LcOGDVNRUZHfx+HKpk2b5tf+J0+elCRt2LBBO3fubNQxU6ZM8buupooOC9OBIaOa7fXu79ZTUzt1rXdb+icfNss5+vTpowoDQ79bM9AUlRHdVdl2lt/H1dT4lDbyZkVVHQlCVcHjTwYC6b/E+0BjOeF9wEkZABrLDWsgKSlJO3bsCOhY1w40ubm5kqTu3bvXu726uloff/yxpLoDzVeHmUuGDRsmSTp+/HhAtRQVFQV8LL5debl/v02uqKi4/L+NPdZE72LCw6Uhzfd6qW3a6MbEq5vvBetRUFCgcwYuX7s1A03S5iopwI8FnjpdJp111p/XnwwE0n+J94HGcsL7gJMyADSW7WvAtQPNpQZdatrXrVq1Sl6vV3FxcUpJSfnW1/rzn/8sSerXr19AtSQluftDtibFxsb6tf+lXERHRzf62M6dO/tdV1NFhznvbtBOnToZ+c2sWzPQFJURbeQN8NiE9jGKinPWn9efDATSf4n3gcZywvuAkzIANJYb1kBT/nvZtQNNUlKSiouLtWvXLo0aVfeSfWFhoebPny9JSktLk8fjafB1jh8/rscee0wTJ04M+LtqAr18hivLysrya//PP/9cmZmZSk9PV9++fRt1zLPPPhtAZU3jO39e1dNnhvy8TXH48GF5oqJCfl63ZqApKi/UqNtNr+rk6fN+HdcurrVyP3lfsTGtglRZcPiTgUD6L/E+0FhOeB9wUgaAxrJ9DTjv1z+NNH78eEnSokWLdPjw4cs/z8rK0tixY+X1Xvz95bcNKWVlZZo8ebJat26tl156Kaj1IjRSU1P17rvvKjU11XQpMMSGDES2Dtf9t1/j93H33trbccOMv2zoP74dGYDt3LgGXDvQZGRkKCEhQXl5eRowYIAGDRqk3r17a8SIEerZs6fGjRsnqe7nZ76qoqJCkyZNUk5OjjZu3Kjk5ORQlo8giYiIUPv27RUR4dqLk7gCWzLwwPR+im8X2ej927ZppTkz+gexopbBlv6jYWQAtnPjGnDtQNOlSxdt2bJFN998s6KionTs2DHFx8fr+eef1/r16y9ftalvoKmqqtK0adO0Y8cObdiwQf37u/9f8rbIz8/XvHnzLj/hDvaxJQNdkmL19rIJiou98hWXmKgIvfGb8erZxf1fMGxL/9EwMgDbuXENuHagkS5+iH/dunUqLS1VaWmptm3bptmzZ6u8vFzHjh1TWFiYBg4cWOeYS99d8/777+utt97SiBEjDFWPYCgrK9OWLVus+aLUGzp01
IVJ0/VIr4bvkb0wabreHPndEFZllk0ZGD3kan30yi26YVjDH7Qcc+3V+vDlH2jcyE4hrMwcm/p/Ce8DddmYAeCr3LgG3HOtyQ/79++Xz+dTnz59FBMTU2fbgw8+qDVr1uiXv/ylYmJi9Mknn1ze1qtXr3of6wwALVVan3h98NLN2n+kWK+szdbfisrk80ldro7VjyelavA1CaZLBACgSawcaPbu3Sup/tvNNmzYIEl66qmn9NRTT9XZ9vLLL+vee+8Nen0A0NwGpLbX049wxRkA4D4MNF9z7NixEFcDAAAAIFCu/gxNQ75toIG7JSYmau7cudw6aDEyYDf6DzIA27lxDVh5hWbz5s2mS4AhCQkJmjFjhukyYBAZsBv9BxmA7dy4Bqy8QgN7nT17Vps2bdLZs2dNlwJDyIDd6D/IAGznxjXAQAOrFBQU6NFHH1VBQYHpUmAIGbAb/QcZgO3cuAYYaAAAAAA4FgMNAAAAAMdioAEAAADgWAw0sEpkZKSuueYaRUZGmi4FhpABu9F/kAHYzo1rwMrHNsNeKSkpWrFihekyYBAZsBv9BxmA7dy4BrhCAwAAAMCxGGhglUOHDmnMmDE6dOiQ6VJgCBmwG/0HGYDt3LgGGGhgFZ/Pp6qqKvl8PtOlwBAyYDf6DzIA27lxDfAZGqAlioxUxOpXTFfhHxd9uBBoEXgfAIBGYaABWiCPxyNFRZkuA4BBvA8AQONwyxkAAAAAx+IKDazSo0cPZWZmqnPnzqZLgSFkwG70H2QAtnPjGmCggVWioqLUq1cv02XAIDJgN/oPMgDbuXENcMsZrFJYWKgnnnhChYWFpkuBIWTAbvQfZAC2c+MaYKCBVUpKSrR27VqVlJSYLgWGkAG70X+QAdjOjWuAgQYAAACAYzHQAAAAAHAsBhoAAAAAjsVAA6vEx8dr5syZio+PN10KDCEDdqP/IAOwnRvXgMfn8/lMFwEAAAAAgeAKDQAAAADHYqABAAAA4FgMNAAAAAAci4EGAAAAgGMx0AAAAABwLAYaAAAAAI7FQAMAAADAsRhoAAAAADgWAw0AAAAAx2KgAQAAAOBYDDQAAAAAHIuBBgAAAIBjMdAAAAAAcCwGGgAAAACOxUADAAAAwLH+P4jlzFtVeKsCAAAAAElFTkSuQmCC", + "text/plain": [ + "
" + ] + }, + "execution_count": null, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "backend = CircuitsQiskitBackend()\n", + "\n", + "qc = backend.genqc_to_backend(instructions)\n", + "qc.draw(\"mpl\")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "3d5ddaab-189b-48c2-8766-29fa3e6a2981", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "CircuitInstruction(name='h', control_nodes=[], target_nodes=[0], params=[])\n", + "CircuitInstruction(name='h', control_nodes=[], target_nodes=[2], params=[])\n", + "CircuitInstruction(name='ccx', control_nodes=[0, 2], target_nodes=[1], params=[])\n", + "CircuitInstruction(name='h', control_nodes=[], target_nodes=[2], params=[])\n", + "CircuitInstruction(name='u2', control_nodes=[], target_nodes=[1], params=[11.9380521774292, 1.8849557638168335])\n" + ] + } + ], + "source": [ + "dec_instructions = backend.backend_to_genqc(qc)\n", + "dec_instructions.print()" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "fde08ecb-fd42-48e7-8b4d-2357854b3e7c", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "(tensor([[ 3, 0, -2, 0, 0],\n", + " [ 0, 0, 2, 0, 1],\n", + " [ 0, 3, -2, 3, 0]], dtype=torch.int32),\n", + " tensor([[ 0.0000, 0.0000, 0.0000, 0.0000, 0.9000],\n", + " [ 0.0000, 0.0000, 0.0000, 0.0000, -0.7000]]))" + ] + }, + "execution_count": null, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "enc_tensor, enc_params_tensor = tokenizer.encode(dec_instructions)\n", + "enc_tensor, enc_params_tensor" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "d34e84ef-51ec-4692-bd8f-eb9047368892", + "metadata": {}, + "outputs": [], + "source": [ + "assert torch.allclose(tensor, enc_tensor)\n", + "assert torch.allclose(params_tensor, enc_params_tensor)" + ] + }, + { + "cell_type": "markdown", + "id": "b35c1cbe-d80d-4e3c-bafb-25b1514bb734", + "metadata": {}, 
+ "source": [ + "### Calculate unitary and optimize circuit" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "7e4b5aeb-98e6-48c8-99b9-c8c432201941", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "[[ 0.64+0.59j -0.25+0.04j -0.31-0.27j 0.12-0.02j]\n", + " [-0.18-0.18j -0.86+0.11j 0.09+0.08j 0.41-0.06j]\n", + " [-0.11+0.05j -0.11+0.4j -0.23+0.1j -0.21+0.84j]\n", + " [ 0.37-0.17j -0.03+0.12j 0.78-0.39j -0.05+0.25j]]\n" + ] + } + ], + "source": [ + "gate_pool = [\"u3\", \"cx\", \"h\"]\n", + "qc = backend.rnd_circuit(2, 10, gate_pool, np.random.default_rng())\n", + "U_initial = backend.get_unitary(qc)\n", + "\n", + "qc_opt = backend.optimize_circuit(qc, gate_pool, silent=0)\n", + "U_opt = backend.get_unitary(qc_opt, remove_global_phase=False)\n", + "\n", + "print(np.round(U_initial, 2))\n", + "\n", + "assert np.allclose(U_initial, U_opt)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "5b9d244c-9574-4a26-904c-966b3ed5f39d", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Timeit get_unitary: 524 μs ± 407 ns per loop (mean ± std. dev. 
of 7 runs, 1,000 loops each)\n" + ] + } + ], + "source": [ + "res = %timeit -o -q backend.get_unitary(qc)\n", + "print(f\"Timeit get_unitary: {str(res)}\")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "e970f669-acca-40f2-84e6-66662a163433", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Initial number of gates 10:\n" + ] + }, + { + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAA3UAAACuCAYAAACRHyRRAAAAOnRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjEwLjMsIGh0dHBzOi8vbWF0cGxvdGxpYi5vcmcvZiW1igAAAAlwSFlzAAAPYQAAD2EBqD+naQAAM55JREFUeJzt3XdYFNcaBvB3Gx0UREUEQQEVsRfsxhqj2I0tGjXRa2I0msRIeizJNUVvEhNjYnoxtqixoLFE1Nixi4KiiEgVlyJtKbs79w8SEmSBXdw26/t7nvvcyDlnzjc78+zMt3PmHIkgCAKIiIiIiIhIlKSWDoCIiIiIiIhqj0kdERERERGRiDGpIyIiIiIiEjEmdURERERERCLGpI6IiIiIiEjEmNQRERERERGJGJM6IiIiIiIiEWNSR0REREREJGJM6oiIiIiIiESMSR0REREREZGIMakjIiIiIiISMSZ1REREREREIsakjoiIiIiISMSY1BEREREREYkYkzoiIiIiIiIRY1JHREREREQkYkzqiIiIiIiIRIxJHRERERERkYgxqSMiIiIiIhIxJnVEREREREQixqSOiIiIiIhIxJjUERERERERiRiTOiIiIiIiIhFjUkdERERERCRiTOqIiIiIiIhEjEkdERERERGRiDGpIyIiIiIiEjEmdURERERERCLGpI6IiIiIiEjEmNQRERERERGJGJM6IiIiIiIiEWNSR0REREREJGJM6oiIiIiIiESMSR0REREREZGIMakjIiIiIiISMSZ1REREREREIsakjoiIiIiISMTklg6A9CcIAtSqYkuH8dCTO9pDIpFYOgyTEgQBKBbZuWZv+8eFiIj3AtbBHPcCvBaTIZjUiYhaVYxfAqZYOoyH3uT4tVA4OVg6DNMqLoZ6/DRLR2EQ+aYfAQcbPy5E9NDjvYB1MMu9AK/FZAAOvyQiIiIiIhIxJnVEREREREQixqSOiIiIiIhIxJjUERERERERiRiTOiIiIiIieiio1VoUqtRQq7WWDsWoOPslERERERHZHEEQcPxCBvYcS8bZGCXOxiiRkVVUXl6vrj06tfJEp2BPDOrujb5dGol2SQYmdUREREREZDNURWr8uOM6Vm+MRfT17CrrZeYUY9/xFOw7noL3vr2Ilk3r4NlxLTFjTAu4OCnMGPGD4/BLMprA8X0xPW0zAsf31Vnu4lMf09M2o9cnc8wbGBEREZkF7wXI0k5cvIMO47dh9rvHq03odLmacA8vfHgKbcZsxcGoVBNFaBpM6oiIiIiISNS0WgFvfHoGvabtwrVb9x5oW7dS89F/5u+Y//4J0bx7x+GXREREREQkWhqNFk+9dQQ/R9ww6nY/XReDpDsF2PBhP9gpZEbdtrHxSR0REREREYmSIAiY/e5xoyd0f/vtQCKmvnEYWq1gku0bC5M6IiIiIiISpV92xePrLddM2sfGPQn4YlOsSft4UEzqiIiIiIhIdNLuFmLe+ycMbnd6/Qgk7Z+I0+tH6N0m/KPTuJmca3Bf5mLzSZ1SqUR4eDgCAwPh4OAAX19fzJ8/HwUFB
ZgxYwYkEglWrVpl6TCJiIhMqlClxsGoVGyLvIV9x5NxJ1Nl6ZDMShAEXLyWiYjDt7Hz0G1cuJoJQbDu4VREVL2Xlp9Cdm6Jwe28PJ3g09AZXp5OercpLFJj7jLDE0hzsemJUi5cuIAhQ4YgPT0dzs7OaNWqFVJTU/Hpp58iPj4eWVlZAID27dtbNtCHzL8vot3emwnfQZ2hcHNCab4KiREncOadtdCWqi0YIRnLYWUGBp04hPdbtcVLAS111rHbuQlDGzTCtq69zRwd0cMhITkPqzbE4PttcRVufuQyCcYO8se8J0LQo31DC0ZoWsUlGvy04zpWb4rFhatZFcratfDA7PHBmDYiEA72Nn1LZHV4L2A+tnotTrlTgF/3J5i1z9+PJuNaQg5aNK1r1n71YbNP6pRKJYYPH4709HQsWLAAaWlpOHfuHNLT0/HBBx9g165dOH36NCQSCdq2bWvpcG2CuqjsZkHmaK+zXO5U9ndN0T83FbHf78FvvedjXfOp2DHwZbi38kfbeWNMHywR0UPg0Ok0dBi/DR/9dLnSr9lqjYCNexLQc2oEVvwQbaEITSsntxiDn92DWUuPVUroAODitSw8+84xDJq1B9m5xRaI0PbwXoDM5avN16DRmP9p+xebrpq9T33YbFI3b948JCcnY+7cuVixYgVcXV3Ly8LDw9GuXTuo1Wr4+/vDzc3NgpHajvzbGQCAukGNdZbXCfIBAOT9VQ8A7sUlQ63660IqkUDQCnBt1si0gRIRPQTOxigRNmcf7uXXPDRp4UdR+NLKJwEwVEmpBqNfPIDDZ9JrrHv0/B2MeH4/ior5ZOhB8V6AzMVUs13q0681Dt22yaQuNjYWGzduhKenJ9577z2ddTp16gQAaNeuXYW/JyQkYMSIEXB1dYW7uzumTp2KzMxMk8dsCzKjbyI/5S6ajuoJx4buFcqkCjmCnx4CQatF0r4zFcrazB2FyTd+xqTL38EjxA8xX0WYM2wiIps0/4OTKCzSP0lZsCIKOTb0tGptRDwOnU7Tu/7R83fw007L3CTaEt4LkDnczVIhISXPIn1n3SvGzWTL9F0dmxxAvn79emi1WkyePBkuLi466zg6OgKomNTl5eWhX79+8PDwwPr166FSqRAeHo5hw4bh2LFjkEptMgc2GkGjxclXvka/7xZiZOT/cH1dJPIS0+FQvy6ajugB95ZNcHHlFuTGp1ZoF71qG6JXbUOdoMZoNqY3VBnZFtoDMpVCjQbKYtu5WSSydpfisnDs/B2D2hQWqfHTzhuYNznERFGZjyAI+HxDjMHtVm+MxX/GtoBEIjFBVA8H3gtYL1u6Fp+NsewDlzNXlAjwta6RfjaZ1EVGRgIA+vXrV2Wd5ORkABWTuq+++gopKSn4888/0aRJEwCAj48PevTogR07dmDUqFGmC9pGJB84h90j3kSbOaMQOP4R2Lu7Ql1YjMzLCTg063+4tbPqWYPuXU9B1pVE9P5sHvY+vth8QZPJLb12BUuvXbF0GEQPjR93XK9Vux+2X7eJpO7KjWycizX8pu/itSxcistCuxb1TBDVw4P3AtbJlq7FV+Itm/Rbun9dbDKpS0xMBAD4+fnpLFer1Th27BiAikldREQEevXqVZ7QAUD37t3RrFkz7Ny5s9ZJXefOnZGeXvOY/pooBCkWIfSBt2NqmRfjcWjW/2rVVqqQwc3Kx9E3D2qOUonW0mGYlKNUipj23Y22vZlNmmGst6/OsiEnDxulj+bNm0Olte3jQqSvLOdxgH1rg9tdjLkNHx8fE0RkXkWKQMD1yVq17f/YODiWxhk5IuPhvYB1MMe9AK/FVct1eARw6q+z7PT6ETUuVeDl6Vj+/0n7J1ZZL11ZiC6TdlT6+0crV+O79/YaELF+vLy8cObMmZor6mCTSV1BQQEAQKXSvQbPxo0boVQq4erqiqZNm5b/PSYmBuPGjatUPyQkBDExhg/j+Ft6ejpSUlJq3f5vdhIZYEOzTitcneA3J
BS390ShJLcQ7sF+aPfCWKQeumjp0KqVmpaKEkFj6TBMykkmA9obb3uBLi4YUN+0J29qaioKNbZ9XIj05qsCdE8+WC2tVmOU65XFudQFXGuspVNWphLIs97PgPcC1sEc9wK8FlejQS5QRd729xp0+pDLpHrX/beCvDwUpFvX94RNJnVeXl7Izs7GuXPn0L17xV840tLSsHDhQgBA27ZtK4ybz87ORt26dSttz8PDA9euXXugeIxBIUgBW3oQIQhoNrYPuiyeBqmdHEXKXCTuPoULyzdaOrJqeTfyfiie1ImNt7c3n9QR/eWefQnya9FOgXw0aKx71kIxKZUpkAEAggDo+37cX3UbuCugcLPez4D3AtbBHPcCvBZXLc/BAblVlKUrC2ts7+XpCLlMCrVGi3Sl7odA1W3L1cUebib4rnyQnMEmk7qBAwciNjYWH3zwAQYNGoTmzZsDAE6fPo0nn3wSSqUSgPkWHa/tY9T7lRYW4ZeAKUbZljUozVdh34Sllg7DYHHX46BwcrB0GCYlFBVBPX6apcMwSFxcHCQOtn1ciPQVHZeFto//ZnC7j94ai7mT3jJBROYX+sR2nL6s1L+BRIIOLevh7MYzVj1RCu8FrIM57gV4La7a3mPJeGy27uGPuoZL3i9p/0T4NHRGulIF30EbDO7/m1VLMH7wzwa3MyXx/QSgh/DwcNSrVw9JSUkICQlBmzZtEBQUhNDQUDRr1gz9+5eNwb1/OQN3d3fk5ORU2l5WVhY8PDzMEToREdEDa9PcA707GjbMytlRjieHBZooIvN7bkJwrdpYc0JHRGU6tfJ8qPvXxSaTOh8fHxw5cgRhYWFwcHDArVu34OHhgTVr1mDXrl2Iiyt7Afr+pC44OFjnu3MxMTEIDjb84kBERGQpn77aHS5O+g/I+SS8G+q42pkwIvN6YmgABnT11rt+3y6N8ORw20lqiWyZp7sDmvnU8sXZB1Svrr3F+q6OTSZ1QFmCFhERgby8POTl5eHUqVOYNWsWCgoKcOvWLUilUrRuXXFmsGHDhuHo0aPlyx0AwKlTpxAfH4/hw4ebexeIiIhqrX3Letj9+WDUrSFRk0iAla90w8yxLcwUmXnYKWTY+vEAvRK7vl0a4bePB8DeTmaGyIjIGKZa6EeYqcMDrfKJvk2+U1edK1euQBAENG/eHE5OFafNmTVrFj777DOMHDkSS5YsQVFREcLDwxEaGoqRI0daKGIi8XrEswFKho+vtk5N5URUe707eeHi5tFYvTEW3/4WB2V2UYXyKcMC8PykEIS2qW+hCE3LzcUOu1c/ivW7b+LzjTGV3rHr1MoTcyYGY3JYAOwUTOjINtnqtfg/Y1vg3a8vQK0WzNrv7PHWOXrPZp/UVSU6OhpA5aGXAODm5obIyEg0atQIEydOxMyZM9GjRw9ERERAKsIZiIiIiJo0csH7L3RB0r4JOPJDGDzqlK114OXpiJ+X9bXZhO5vdgoZpo0MQtS6kbjy2xjU+2v/G3g44MyGkXhqVHMmdEQi5N3AGRMGNzNrn2F9fBHkV8esferroXtSV11SBwABAQGIiIgwZ0gPRGavwCNfvog6QT7QFJWgSHkPJ179Gnm3Ki927uLbAH2/WQCpVAqJXIZ711NwfOGXKLlXUG1Zdbz7tkPnN/6ZhcvBsw5Ud3Ow89HwSnXrtQtA13eehkdrf6QevojIpz7Uez/bvjAWgRP6AQASth/D+ffXV1m3/YLxaDq6F7QlpSjKysPexxcDAMJ2vQeZXdkpL5HL4N6yCbb3X4Ds2ER0eHUSmjzaGYKmbBre6FXbkLD9mN7xERFZOwd7OXp19IKjfVkCI5Na3/AhU2sV4A6Hv/ZfIbetH2sfj1oNTXEpNEUlAIBLn/2GWzuO66wbNKk/2swdDUglSD92GSde/RqCWgNIJOiyaCoa92sPrVqL4uw8HH/5S533FPcLmT0CgeP7QiKV4F58Ko698DlKcitPBy93tEfXZTPg2S4AUoUct
3+PwtllvwAAvLqHYOAvryM3PrW8/q7hb5TvU1VcfOqj18q58Gjtj/zbGdgxaKFeZVWpE9QYw/d+iLi1+xH19g811ifLWbEgFL8fTUbWvWKT9+XipMCq14y3GLyxMamzAdd+3o+UyPMAgJZPPYae/5uNPWMXVapXeCcLv498q/zLMfSdp9D+5fGIeuv7asuqk3roInb8a4HQAT+9hvRjl3XWVWVkI+rt7+HRpil8+nfQe/8adgtG01G9sKP/Amg1Ggzd8V/cPX0NyQfOVaobPHMo3Fv5YXu/l6AtVcOxft3ysl1hr5X/t19YN7RfMA7ZsYkAgCurt5cnik5eHhj15ydIPXIJxVl5esdJRERkSYef/RhZV25VW8fFtwE6hE/EzkfDobqbg/4/vIIWUwbh6g970GRwZzTo0hLbB7wMQa1B2xfGouNrT+DwMx9Vu81GfdoiaGI/RAx9DeqCIrR9YSw6vPoETr3+TaW6beaPgUQmw/b+CyCRyzDgx1fhN6w7EiNOAABy41P1Srz+rSRfhXMfrIedqxM6vjpJ7zJdJHIZeix/Fom/RxkUA1mGl6cTVr3WHU+8esigdn+vP6fPmnZ/W/5SF/g3tr4JUv5mWz9T6SEyMhKCICAsLMzSoRiFpri0PKEDgLvnrsPFV/dQGm2Jujxpk0ilkDvaly22WkOZvhwbuqNRr9aI33xYZ3lhWhaUF25AW1xq0Hb9R/TEzc2HoVYVQ1uixvX1kWg6upfOuq1nj8TZ/66FtlQNAFDdzdFZL+iJAbi+PrL83//+NVHu7ACJRGKVL8ESERE9CL9h3ZC070z59fHaT/vQdHRPAGWXfZmdHDJ7BQBA4eKIwrTMGrfp0cofd6KuQl1Q9s5m8oFzCHi8T5V1Uw6W3bcIag1S/7xYZV19leTkIyPqKtSFlZ/WVFemS/uXxuHWzhPIu5n2QDGR+Uwc0gyzx7c0qE2XSTvgO2iDXmvaAcDksAA8M86wPsztoXtSZ+tazRyK23tPV1kuVcgxbPd7cPapj+zYRByY9oFeZfoInNAPyZHnUZSZW+v4dXFp7ImMqNjyf+cnZaDpqJ6V6ilcHOFYvw58B3eB/7Cyx+NX1uysNPzEybsevLq3wpHnP63w9+AZQ9Fy+mA4edfD8QVfGH0/iIiITKnXp89DIgHunr+Bs8t+QbGO65hLY0/kJ98t/3d+8l04Ny5bcytp3xk06hmCCZe+gTpfhYL0LOwZXXnkz/0yL8Wj5fTBcKxfF6q7OWg2pjfsXJ1gV9cFJTn5ler6D++OxF0nIZXL0eSxUNi5OZeXu/p7Yfi+DyFotLi+4SCu/ah7gWlT8OwQhPqdmmPfhKVov0B8E4c8rCQSCVa93gNFJRp8v+260bf/+CB/fL+0j9X/2P/QPamzZW3mjYGrv1f52HRdtKVq7Bi0EBvbzsS9Gylo8eQgvcr0ETSxH66vO1Dr+B+URC6DVCGH3MEOu8Jew+FnPkLokulwb+VXoV7ghH5I2n+20tDK2G9347fe87F7+BtoO28M7N1dzBk+ERFRrf0++m3sGLAAOx4NR3FWHnqvnGvwNjzbBaBuiyb4tcMsbGw/C2lHotH9w1k1tks/fgWXv9iBAT+/hrBd75Unk4JaU6lu9KptKEhRYtiu9zBw7WtQnr8BQVNWLzP6JjZ1fAY7Hw1H5NMfosXUR+E/3DzvMMkc7dDt/Zk4/vKXZumPjEsqleCbxb3x9jMdIJMZJ/mSSIAFU1tjw4f9oFBYf8pk/RGSXkKeHQG/oV3xx+T/QqOq/oVioCyBu7HhoM4hD9WVVcWrewhk9nZI/df7dcaSn6KEi88/Q0pdfBugIEVZqV5JTj5K81WI3/JnWbvku8g4fQ2e7SuuYxI0oR+ur686+cyOSURheha8eoQYaQ+IiIhM6+/roqDWIObrCDTsqnva9UrXVJ/65W0Dxj2CtGOXy15JEATEbzqk97Xw2o97EfHYK9gV9hrSj19BQYoSpfmqSvU0RSWIe
ut77Bi0EHvGLEJxVi5yriUBAErzVSjNK3sdojAtCwnbjla5H8bm6ucFl8aeeGzLYjwetRqt/hOGwIn90asWyTFZhlQqwZI5HXFy7XCEBNR9oG0FNnHD4e/CsOLlrpDJxJEuiSNKqlarZ4ah6eie2Ddhqc6Zpv7m7OMJmeNfi9BKJPAb3h1ZsbdrLAOA0UdWwsnLo8ptBz3RHzc2HYSg1dZqHzzbB+LRTbqHeCTuPIFmjz8CuaM9pHZyBE3qj4RtumemvLntKBr3K5uExa6uCzw7BCI7JrG8vFGvNpDIpUg9fKlCuzrNfcr/29WvITxaN0VOXDKIiIisndzRHnZu/6y923R0L2ReTtBZN3HXSfg+2rl8IrEWUx8tv6bm3b6DRj1bQ6ooezvHZ1Dn8oQLqP5ewLFB2fZkjnZoHz4R0au366yncHEsv99w8W2AFtMG48qXO//Zxl9D3OTODvAZ2AmZl28BKJvEbPSRlTV8ErWXc/U2NrSegc2hz2Fz6HOI+XoXbmyIxNH5q0zWJ5lG55D6OLtxFL5d0hsdg+sZ1LZ1oDtWv9EDF38djd6dvEwUoWnwnTqRc2rkgdDF05F7Kx2PbV4MANCUqMtnemy/cAJUd7Jx7ad9cA/2Q8dXnwAASKQSZEYnIOrNbwGg2jKHem6wd3dB8X3j4v+mcHVCk6Fdsb3fS5XKBq59HeeXb0TmxXi4BXhj8KZFkDvaQeZgh3Fn1+DSp1tx7ce9cPGtX+WUxeknruDW9mMYebBs9q2EHceQ/MdZAGXLJHRYOAF/TFkGADi37Bf0/HgOWk4fDKBsmIfywo3ybQVN6o8bGw5WmgSm85tPwqVJAwilamg1Wpx8/Vvcu55S3UdPRERkFRzq10G/bxZCIpNCIgHyEjNw9PnPyst7rHgWSfvOIGnfGeTfzsD5FZswZMe7AMqGTl77eT8A4Or3e1A3yAcjDqyAtlQD1d0cnAhfU9ZHDfcCj254C5BKIVPIEb/5MK5+93t52b/vBVz9GuKRNS9B0GigVWsRteiH8hk7/cK6ocW0wRDUGkjkMiTuPIEbG8omNXPy8oBWx3BOoCyRHHP0M8js5VC4OmHc2TWI33IY55atq7bMsaE7Bq193eDZNsn62dvJ8PTo5nhqVBCiou9i7/EUnI1R4myMEql3C8tvA6VSYEgvX3QK9sTAbt7o1bGh1b87VxWJIBg4xSFZTGlhEX4JmFJzRSPzG9YddQK9cemTLSbro+uymUjYdhQZUVdN1oexTI5fC4WTg6XDMCmhqAjq8dMsHYZB5Jt+hMTBto8LkTH4DFyPlIxCNG7ghOQ/ap7i3daIff9t+V6gOiGzR0CVkY2bW45YpP/7meNegNdi0xAEAT4D1yP1rkq03wO68Ekd1ejvtWNMSddaNkRERGQdzHEvUJ0rX+g39TxRTWx12Sq+U0dERERERCRiTOqIiIiIiIhEjEkdERERERGRiDGpIyIiIiIiEjFOlCIickd7TI5fa+kwHnpyR3tLh2B69vaQb/rR0lEYxv4hOC5E9NDjvYB1MMu9AK/FZAAmdSIikUhsfip9sg4SiQSw8imJiYgeRrwXeHjwWkyG4PBLIiIiIiIiEWNSR0REREREJGJM6oiIiIiIiESMSR0REREREZGIMakjIiIiIiISMSZ1REREREREIsakjoiIiIiISMSY1BEREREREYkYkzoiIiIiIiIRY1JHREREREQkYkzqiIiIiIiIRIxJHRERERERkYgxqSMiIiIiIhIxJnVEREREREQixqSOiIiIiIhIxJjUERERERERiRiTOiIiIiIiIhGTWzoAsk6CIADFxZYOQ3/29pBIJJaOwuQEQYBaJaLjYqPkjtZzvvGcMA9rOuZERET3Y1JHuhUXQz1+mqWj0Jt804+Ag4OlwzA5taoYvwRMsXQYD73J8WuhcLKO843nhHlY0zEnIiK6H4dfEhERERERiRiTOiIiIiIiIhFjUkdERERERCRiTOqIiIiIiIhEj
EkdERERERGRiHH2SyIiIhslCAKS7xTgbIwSZ2MycTstH1n3ypbAyMkrwc87r6NTK0+08K8Dmcw2f+fNLyzFhauZOBujxJX4nPL9z84txic/X0anVp7oEFwPLk4KC0dKRFR7TOqIiIhsTH5hKdbtjsfqjbG4eC1LZ50ClRpT3/gTAODl6YhZY1viP2NbwMfL2ZyhmoQgCDhyNh2rN8Viyx+3oFYLleoUFmnw4vJTAAC5XIIxA/zx3IRg9OnkxTUJiUh0mNQRERHZCK1WwKr1MXjr87PIzS/Vu126UoWla87jv99cwMwxLfDhi13g5mJnwkhN51yMEv9ZchTnYjP1bqNWC9i0NwGb9iagfUsPfL2oFzqH1DdhlERExmWbYy2IyCICx/fF9LTNCBzfV2e5i099TE/bjF6fzDFvYGQ2PAcsJz4pF32f3oX5H5w0KKH7N41GwJpfr6LN2K3YfyLFyBGaVkmpBm9/fhahk3cYlNDd78LVLHSbshNvfHoGxSUaI0ZIRGQ6TOqIiIhE7uTFDHSeuB1Hzt0xyvZupxVg8LN78MXGWKNsz9QKCksxbO4+vLPmAjSaykMtDaXRCFj2zUUMfW4v8gtrlyATEZkTkzoiIiIRO335LgY9swc5eSVG3a4gAM/997jVJ3ZFxWoMf34/9p9INfq2I6PSEDZnH1RFaqNvm4jImJjUERERidSdTBXC5uwz6dOkOcuO44+T1jsUc/a7x3HwdJrJtv/n2XTMWnrUZNsnIjIGJnVEREQiJAgCnnv3OO5mFxnU7vT6EUjaPxGn14/Qsx9gxqIjyM037pNAY9h56DZ+2H7doDaG7j8ArI2Ix/aDiYaGR0RkNg9FUqdUKhEeHo7AwEA4ODjA19cX8+fPR0FBAWbMmAGJRIJVq1ZZOkwiIiK9/bovAVsP3DK4nZenE3waOsPL00nvNrfTChD+8WmD+zKlnNxiPPPOMYPb1Wb/AeCZpcfK17gjIrI2Np/UXbhwAW3atMHy5cuRnp6OVq1aobS0FJ9++ikmTJiA2NiydwXat29v2UBt1GFlBux2bsJH8VerrGO3cxNGnTpixqiIiMRNEAQs+fK8Wfv8Zus1pNwpMGuf1flmaxzS7haarb87mSp8tbnqaxkRWS9BEBAVfRfT3/wTTR7dgNS/vjuy7hXj6Ll0CMKDT7BkaTa9Tp1SqcTw4cORnp6OBQsWYNGiRXB1dQUAfPjhh3jllVcgl8shkUjQtm1bC0dL9PD495dnt/dmwndQZyjcnFCar0JixAmceWcttKWcmMCW8Rx4MH+eTUdMfI5Z+9RoBHy95RoWP9fRrP3qotUK+GKT+Sdw+fLXq1g4vQ1kMpv/TZzIZtzNUmH8woM4pOPdW1WxBr2n70LXNvWx9eMB8G7gbIEIjcOmv5XmzZuH5ORkzJ07FytWrChP6AAgPDwc7dq1g1qthr+/P9zc3CwYKZFtUBeVvXMjc7TXWS53Kvu7puifd3Niv9+D33rPx7rmU7Fj4Mtwb+WPtvPGmD5YMgmeA+bx5SbLPDH6ass1aDRai/T9b3+cTMHN5Dyz95uYmo+9x6130hgiqigzpwh9ntqlM6H7t1PRd9FjaoRZn/4bm80mdbGxsdi4cSM8PT3x3nvv6azTqVMnAEC7du3K//Z3EhgaGgp7e3tIJBKzxEtkC/JvZwAA6gY11lleJ8gHAJD3Vz0AuBeXDLXqr/dUJBIIWgGuzRqZNlAyGZ4DpicIgklne6xO2t1CxCXmWqTvf7PU/gOo8eaQiKzHf5YcxdWEe3rVTUzNx+RXD5k2IBOy2aRu/fr10Gq1mDx5MlxcXHTWcXR0BFAxqbtx4wa2bNkCLy8vdOnSxSyxPgwKNRooi4t1/o9sR2b0TeSn3EXTUT3h2NC9QplUIUfw00MgaLVI2nemQlmbuaMw+cbPmHT5O3iE+CHmqwhzhk1GxHPA9FIzCnEnU2Wx/s/GKC3WtzXEYA37T0Q1S0jOw7ZIw2atPXg6DZfis
kwUkWnZ7Dt1kZGRAIB+/fpVWSc5ORlAxaSuT58+SEsr+xVu8eLFOHbM8Jm1qLKl165g6bUrlg6DTEzQaHHyla/R77uFGBn5P1xfF4m8xHQ41K+LpiN6wL1lE1xcuQW58RUXCY5etQ3Rq7ahTlBjNBvTG6qMbAvtAT0ongOmdy4208L9KzFlWKBFYzh/1XI3XWct/PkTkX7WbL6K2sx/8sXGWHzxVk/jB2RiEsEWpnvRwdfXF8nJyTh//rzOmS3VajUaNWoEpVKJ+Ph4NGvWrFKdxYsXY8mSJQ88I07nzp2Rnp7+QNswN0epFDHtuz/wdg4rMzDoxCHMbNIMY719ddYZcvIwhjZohG1de9e6n1YXTkCltfx7HqamEKRYpA21dBg1qtcuAG3mjELDri1h7+4KdWExMi8n4NqPe3Fr54lq2/oN646W0wdj7+OLzRNsLSyRRqFUYh3nm7WeE7Z2DljTMS+w64Acl1E6y06vH1HjVP1eno6Qy6RQa7RIV1b9xC9dWYguk3ZU+rtj8QV4FPxmUMzGJABI9VhSZXlNn8GD7j8AeGctgQTWcT4QkW5K16koVgQY3E6hTkGD3K9MEFHNvLy8cObMmZor6mCzT+oKCsqmXVapdH9hb9y4EUqlEq6urmjatKlJY0lPT0dKirherHaSyYD2xtteoIsLBtRvaLwN3ic1NRWFGo3Jtm8t7CQywHQfo9FkXozHoVn/q1VbqUIGNyt/nyo1LRUlgnWcb9Z6TtjaOWBNxxweAYDutwrK12DTh1wm1bvuv6lUJRa+pskAj6pL9f0Marv/AJCaegcQrG8xdiL6l2YCoDC8WalaIrr7dsCGkzovLy9kZ2fj3Llz6N694hOntLQ0LFy4EADQtm1bk0+G4uXlZdLtm4KjVFyvW3p7ez80T+ps6cdhhasT/IaE4vaeKJTkFsI92A/tXhiL1EMXLR1atbwbeVvNUxuxnxNiOQes6ZgX2Lkgp4qydGXNM7cZ8qRKFydHBdwb654IxxwEAKmCFpDovk7V9Bk86P4DgLd3A0hgkwOdiGyGUiGgNjM32MnUqG+h77gHyRlsNqkbOHAgYmNj8cEHH2DQoEFo3rw5AOD06dN48sknoVSWvehsjkXHa/sY1ZKEoiKox0+zdBh6i4uLg8TBwdJhmFxpYRF+CZhi6TCMRxDQbGwfdFk8DVI7OYqUuUjcfQoXlm+0dGTVirseB4WTdZxvoj8nRHIOWNMxPxiViv4zf9dZVtVwwX9L2j8RPg2dka5UwXfQBoP7XzDvKSyds8rgdsbk/9hGJKbm6yyr6TN40P33aeiMpEtJBrcjIvP6bN0VzHv/pMHt3npxFN6cVfUQb2tls0ldeHg41q1bh6SkJISEhKBly5YoKirCjRs3MGTIEPj7+2Pv3r0VJkkhIvMqzVdh34Sllg6DLIjngOE6BntatP9OrSzbPwB0CvasMqkzed+t6lmkXyIyzNThQXj1kzMoLFLr3UYuk2DmmBYmjMp0xDXGzgA+Pj44cuQIwsLC4ODggFu3bsHDwwNr1qzBrl27EBcXBwBM6oiISFTquNohsImbxfrvZOGkErBsYmUNSS0R1ayOqx3mTw4xqM2sx1vWONmUtbLZpA4AgoODERERgby8POTl5eHUqVOYNWsWCgoKcOvWLUilUrRu3drSYRIRERlk+CO6ZxM2tXYtPNC4oeVveIY/0sRifQ/rY5nPnogM987cjpj4WOUZ7nUJ6+OLT8K7mTgi07HZ4ZfVuXLlCgRBQPPmzeHkVPnitHnzZgBATExMhX/7+/ujc+fO5gvUBjzi2QAlw8dXW6emciIiqujZccH4+Gfzr/05Z0KwyScX00eb5h7o1aEhjp6/Y9Z+u7Wtjw5W8KSSiPQjk0nxy/t90SqgLlb+cgWZOZWnTqnjaofZ41vinTmdIJeL93nXQ5nURUdHA6h66OW4ceN0/nvatGn44YcfTBobERFRTZr718Gg7t7YfyK15spG4uaiwBNDD
V/zyVSemxBs9qRu9vhgs/ZHRA9OKpXgrWc6YOH0Nvh1XwIOnEpDXkEpXJzk6NWhIZ4YGgBnp1qsfWBlmNTpYKPrsZMNk9rJ0WXRNDTu2x6a4hJkxSTiyNxPK9Wr36k5ur//HwCARCFHRlQsTr35HbQlagRO6IdWM4eW13Xyroc7J2NxcMbyavuWOzmg37cvo17bZpDKZFjXcppeZVVxqOeGkQc/wt2zcYh86sMa67edPwb+w/5ZtsTFryGurzuA04t/rFS3w6uT0OTRzhA0ZVPTR6/ahoTtxwAAgRP7I2RWGOoE+eDM0p8Q8/WuGvu2VqHvPI0mgzvDxbcBdgx8GVlXbpWXuTb1Qu+Vz8PewxWleYU4On8VcuKSK23Dxac+eq2cC4/W/si/nYEdgxbq1bch7VybeqHH8mdhX8cZMgc7JP1xFmeW/gzU8B3sM6Aj2odPgHuLJrj2015Evf1DeVnwjKFoPmUgIAgQBODy6m24ueWIzu08tmUJnH08UZpbNnX9jV8PI+arCL320xp88EIXHDy9A2q1ea5Z787tZFU3PuMHN8XKX67gVPRds/TXOcTTqpJaIjKMg70cTw4PwpPDgywdikkwqSOyAZ3emAIIArb2fB4A4Fi/rs56WTG3sHPIqxDUGkAiQb9vX0bL6Y8h5qsI3Nh4EDc2HiyvO/LgR7i59c8a+9aq1YhetQ0lOfl4bMsSvcuq0n35M0jafxYOHq561b+0cisurdwKoCy5HX/+a9zcqvsm/srq7Tj//noAgJOXB0b9+QlSj1xCcVYeMi/F49AzH6HN82P06teaJe46gcurt2Ho9ncrlfX48BnErd2PG5sOwS+sG3qtnIuIIa9WqleSr8K5D9bDztUJHV+dpHffhrTr8vZU3P49CrHf7obMXoFhv7+PtH7RSIk8X2273IQ0HHtxNfyHd4fCueIyAznXkrB7xJsozSuEk3c9jNi/HHfPxCEvUfcTndOLfsDtPaf13j9r0iHYE6/PaI+la6r/vIyhTycvzJnYyuT9GEImk+L7d/qgw/htKC4x7cLwdgopfninj6iHZhGRbXsov50iIyMhCALCwsIsHQrRA5M72iNoUn+c+ytZAQDV3RyddTWqkrKEDoDMTg65g53OpyKeHYLg4FkHt/fWvMaitkSN9GOXUXKvwKAyXYIm9Uf+7QxknIrVq/79mjwWisJUJTIv3dRZXpL7z2LCcmcHSCSS8veDsmMSce96CmADi9jfORmLwrSsSn93qOeGeu0CEL+lLFlP3HUSzt714OpfebHTkpx8ZERdhbrQsKVbDWknCAIUbmXvNcsc7CBVyKHKyKmxXe7NNGTHJEJQVz5WaUejUZpXdpwLUzOhysiBs7ftvgP1xqx26NLasP1LVxYi+U6BXguVA4C7mx2+W9obUqnl36W7X3CzuvjwxS4GtTF0/wHgvfmdERLobmh4RERm81A+qSOyJa7+XijJyUfbeWPQqE9baIpKcGHFJqQdjdZZ38WnPvr/8Apc/Rsi+Y9zuPrD3kp1gp7oj/jNh8sTQHNw8W2AFlMfxe+j30bTET1qtY2gJwYgbn1ktXWCZwxFy+mD4eRdD8cXfIGizNxa9SVGzo09obqTXT78FADyU5RwbuyJvFvpZo8n6u3vMeDH19By6qOwq+OMi59sQdblBKNtv1HvNrCr4wzlhRtV1un0xhR0CJ+InLhknF32C/JvZxitf3OwU8iw+/PB6Pv0LlyJz9GrjT4LlP/N1VmB31cPRoCv5ZZQqMm8ySFIUxbi/W8v6VXfkP0HgIXT2+DFJzlTNhFZt4fySR2RLZHIpXDxbYCc68mIeOwVnHrzOzyy5kU4eNbRWT8/+S52DHwZG9v+BzJ7BfyGdq1QLne0R9ORPXG9huTI2Hp+/BxOvv4tNEUltWrv7OOJhqEtqxx6+bfYb3fjt97zsXv4G2g7bwzs3V1q1R89uJbTHkPCtqPY1GEWNneZjWZjeqNRn7ZG2Xbdlk3Q6+M5O
Pzsx1CrdD81/PP5T/Fb7/nY3n8B7pyKxcCfXzNK3+bm6e6AQ9+FGfzErib13R0Q+c0QdG3bwKjbNYVl8zrjnbkdjb7dt5/pgA9e7GIVM34SEVWHSR2RyBWkKKHVaMong8i6nID82xlwD65+HSd1YRESth1DszG9K/zdf3h35FxLwj0dk2eYisLVCe7Bfui75kU8HrUanRdNhfcj7fDopkV6byNoQn/c3nMaJTn5etXPjklEYXoWvHoYtjCpmBWkKOHY0B0S2T9f/S6NPVGQorRIPC2fGowbmw4BAIoyc5Fy4JxRjked5j4Y+PNrOPrSamREXa2yXmFqZvl/X/1+D1ybNBRtku/p7oCjPw7D2890gFz+4AnI2IH+uLx1DDqH1DdCdKYnkUjw5qwOiPxmCPy9H/wY+nm74I+vhmDJnI5M6IhIFJjUEYlccVYe0o5ehnffsol/XHwbwKVJg7L3w+7j6u8FiVwGAJAq5GgyJBRZsYkV6gQ9MUDnU7rRR1bCycvDBHsAlOYVYkPIU9gc+hw2hz6HM0t+Qurhi9g3vmxyFc/2gdUneBIJAif2w/X1B6rtp05zn/L/dvVrCI/WTXXO/GirijJzkRWdgICxfQAAfmHdUJCWZfDQSycvD4w+svKB48lLzEDjfh0AlD0h9urZGjlXbwMAmgwJRa9Pnzd4m3WCGmPQ2tdxfOGXSPuz6uF4Epm0wtNsv7CuUCnvoThbvx8FrJGdQoYlczoi6pcRGNTdu1bbCAmoi00r+mPzRwPQoJ6jkSM0vX6h3ojeOgYvT2uDOq52Brd3c1HgpamtEb1lNAZ0q91nSERkCXynjsgGnAhfg54fPYfOb06BoBVwInwNCtPLJsroseJZJO07g6R9Z9CoV2sEzxgKQaOFRC5D2pFoXPp4c/l23AK84RHiXz7N/98c6rnB3t0FxVU8BRtx4H9wqOcGhasjxp1dg/Tjl3Hk+c9qLtu/HPunLIPqTna1++fiW7/aYZnefdpC0GqRdqTie4SODd0xaO3r5dPqd37zSbg0aQChVA2tRouTr39bnvwGju+LDq9Mgl1dZzR5LBQhz47AgWnvG/UdL3Pp/uEs+AzoBMcGdTFo/ZsozVdha4+yBOl4+Br0+mQO2swbg9J8FY6+8Hl5u3+fKzJHO4w5+hlk9nIoXJ0w7uwaxG85jHPL1sHJywPaKt63rK7d/cfj6PxV6PrfGWg1cyikdnIk7T2DhG1l555b00blE57cr1GvNui1ci4Uro6QSCTwC+uOk699jaR9Z9D1naehcHVC5zemAG9MAQCc+e9apB66iHrtAtBh4QT8MWUZZHYKDFz7OmR2cghaAUVZeTgw7X2jHQNL6hDsiX1rhuB64j18sSkWEX8m4Xpi1e+Oenk6om/nRpg9viV6d/IS/ZMpFycFli8IxeLZHbBudzx+3HkDZ2OUKCrWfc462MvQqZUnpg4PxBNDA+BiRcs2EBHpSyJwUTbSQSgqgnp8zWuKWQv5ph8hcXCouaLIlRYW4ZeAKWbv129Yd9QJ9MalT7aYvW8A6LpsJhK2Ha12KJ05TY5fC4WTdZxvljgnQmaPgCoju8r134yh//fhiHrre+Qnm2cNsppY0zGvjXt5JTh/NROJqfkoLtVAIZfCs64DOrWqB+8GzpYOz+TUai1ib+bg8o1sFBapodUKcHFSICTQHa2a1eVSBUQkenxSR0Q1Sow4YdH+T73+jUX7p4qufGHY7IG1oc/C86S/Oq526NulkaXDsBi5XIo2zT3QprlphpATEVkaf5oiIiIiIiISMSZ1REREREREIsakjoiIiIiISMSY1BEREREREYkYZ78knQRBAIqLLR2G/uztRT8Ntz4EQYBaJaLjYqPkjtZzvvGcMA9rOuZERET3Y1JHREREREQkYhx+SUREREREJGJM6oiIiIiIiESMSR0REREREZGIMakjIiIiIiISMSZ1REREREREIsakjoiIiIiISMSY1BEREREREYkYkzoiI
iIiIiIRY1JHREREREQkYkzqiIiIiIiIRIxJHRERERERkYgxqSMiIiIiIhIxJnVEREREREQixqSOiIiIiIhIxJjUERERERERiRiTOiIiIiIiIhFjUkdERERERCRiTOqIiIiIiIhEjEkdERERERGRiDGpIyIiIiIiEjEmdURERERERCLGpI6IiIiIiEjEmNQRERERERGJGJM6IiIiIiIiEWNSR0REREREJGL/Bz18sNzHe7jrAAAAAElFTkSuQmCC", + "text/plain": [ + "
" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "After optimization 3:\n" + ] + }, + { + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAXEAAAC5CAYAAADeSw/JAAAAOnRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjEwLjMsIGh0dHBzOi8vbWF0cGxvdGxpYi5vcmcvZiW1igAAAAlwSFlzAAAPYQAAD2EBqD+naQAAK7xJREFUeJzt3XlcFPX/B/DXLueygHIot9wqmmCeiSeKqV9vzSvPvKqvZ6Vkamn1/WlemUqWmplaKmZqhhfeV0googiIgIByqRzKch/7/v1Bbm5cC7IsA+/n48HjATOfmXnPZ+HF7GdmZ0RERGCMMSZIYk0XwBhjrOY4xBljTMA4xBljTMA4xBljTMA4xBljTMA4xBljTMA4xBljTMA4xBljTMA4xBljTMDUFuIikQhHjx5Vuf20adMwYsSIV9pmfHw8RCIRQkNDa7yOlStXon379q9UB2OM1ZVqh3hqaioWLFgAFxcX6Ovrw8LCAt27d8d3332H3NxcddRYq/r06QORSASRSAR9fX20adMGW7du1XRZNXLx4kV06NABenp6cHFxwU8//VRp+5UrVyr2/eUvqVRabvsDBw5AJBKV+ee6cuVKtG7dGlKpFCYmJvD29kZQUJBifnx8PGbMmAFHR0dIJBI4OztjxYoVKCwsVLSJioqCl5cXLCwsoK+vDycnJyxfvhxFRUWKNuHh4Rg9ejQcHBwgEonwzTfflKmxpKQEn376qdK2vvzyS7x8NwkiwmeffQYrKytIJBJ4e3sjOjpaaT0hISHo378/mjZtCjMzM8yePRvZ2dnl9kt6ejpsbW0hEonw7NkzxfRp06aV279t27ZVtJHJZFi4cCHs7e0hkUjg6emJ4OBgpfWXt56BAweWW0tBQQHat29f5uBFlddalf5l9V+1QvzBgwd4/fXXERAQgFWrVuHWrVsIDAyEj48P/P39cfbsWXXVWatmzZqFlJQUREREYOzYsZgzZw7279+v6bKqJS4uDoMHD4aXlxdCQ0OxcOFCzJw5E6dPn65wmUWLFiElJUXpq02bNhgzZkyZtvHx8Vi0aBF69uxZZl7Lli3h6+uLsLAwXL16FQ4ODnjzzTfx9OlTAMC9e/cgl8uxbds2hIeHY+PGjfj++++xdOlSxTp0dHQwZcoUBAQEICoqCt988w127NiBFStWKNrk5ubCyckJX331FSwtLcvdpzVr1uC7776Dr68vIiMjsWbNGqxduxZbtmxRtFm7di02b96M77//HkFBQZBKpRgwYADy8/MBAMnJyfD29oaLiwuCgoJw6tQphIeHY9q0aeVuc8aMGXB3dy8zfdOmTUp9++jRI5iamir178yZM3HmzBns3bsXYWFhePPNN+Ht7Y2kpCSldQ0cOFBpXRX9fvr4+MDa2rrMdFVea1X6lwkAVcOAAQPI1taWsrOzy50vl8sV3wOgI0eOKH6+c+cOeXl5kb6+PpmamtKsWbNIJpMp5k+dOpWGDx9OK1euJHNzczIyMqJ3332XCgoKFG1OnjxJ3bt3pyZNmpCpqSkNHjyYYmJiFPPj4uIIAN26davCfejduzctWLBAaZqrqyuNHz+eiIhWrFhBHh4etGfPHrK3tydjY2MaN24cZWVlqVxHQUEBzZkzhywtLUlPT49atGhBq1atUszPzMykGTNmKPbTy8uLQkNDK6y5PD4+PtS2bVulaePGjaMBAwaovI7Q0FACQJcvX1aaXlxcTJ6envTDDz8oXpfKPH/+nADQ2bNnK2yzdu1acnR0rHQ9H3zwAfXo0aPcefb29rRx48Yy0wcPHkzTp09Xmj
Zq1CiaOHEiEZX+TlpaWtK6desU8589e0Z6enq0f/9+IiLatm0bNW/enEpKShRt7ty5QwAoOjpaad1bt26l3r1707lz5wgAZWZmVrg/R44cIZFIRPHx8URElJubS1paWuTv76/UrkOHDrRs2TLFz6r0ORHRiRMnqHXr1hQeHl7l731Fr/ULFfUvq/9UPhJPT09HQEAA5syZU+Hbb5FIVO70nJwcDBgwACYmJggODsavv/6Ks2fPYu7cuUrtzp07h8jISFy8eBH79+/H4cOH8fnnnyut58MPP8SNGzdw7tw5iMVijBw5EnK5XNXdKJdEIlF6qx8bG4ujR4/C398f/v7+uHTpEr766iuV69i8eTOOHTuGgwcPIioqCr/88gscHBwUy48ZMwZPnjzByZMncfPmTXTo0AH9+vVDRkYGgH/G9i9evFhhzYGBgfD29laaNmDAAAQGBqq83z/88ANatmxZ5mj7iy++QPPmzTFjxowq11FYWIjt27ejSZMm8PDwqLDd8+fPYWpqWuH8mJgYnDp1Cr1791a5fgDw9PTEuXPncP/+fQDA7du3cfXqVQwaNAhA6TuW1NRUpb5q0qQJunbtquirgoIC6OrqQiz+589BIpEAAK5evaqYFhERgS+++AJ79uxRaluRnTt3wtvbG/b29gCA4uJilJSUQF9fX6mdRCJR2g5QOlTWvHlztGrVCu+//z7S09OV5j9+/BizZs3C3r17YWBgUGUtFb3WrAFQNe2vX79OAOjw4cNK083MzEgqlZJUKiUfHx/FdLx0JL59+3YyMTFROoI/fvw4icViSk1NJaLSow9TU1PKyclRtPnuu+/I0NBQ6QjpZU+fPiUAFBYWRkTVPxIvLi6mvXv3EgDy9fUlotIjcQMDA6Uj78WLF1PXrl0rXOe/65g3bx717dtX6Z3JC1euXCFjY2PKz89Xmu7s7Ezbtm0jIqLExERq1aoVBQUFVbhNV1dXpaN7otI+BUC5ubkVLvdCXl4emZiY0Jo1a8rUZ2NjQ0+fPiWiio8K//jjD5JKpSQSicja2pr++uuvCrcVHR1NxsbGtH379jLzunXrRnp6egSAZs+eXeFrXdGRYklJCX388cckEolIW1ubRCKRUr9cu3aNAFBycrLScmPGjKGxY8cSEdHdu3dJW1ub1q5dSwUFBZSRkUGjR48mAIp15efnk7u7O+3du5eIiC5cuFDpkXhSUhJpaWmRn59fmf3t3bs3JSUlKX7/xGIxtWzZUtFm//799Pvvv9OdO3foyJEj5ObmRp07d6bi4mIiKn13MXDgQPryyy+JqOrf+4pe65fxkbhwvfLVKX/99RdCQ0PRtm1bFBQUlNsmMjISHh4eSkfw3bt3h1wuR1RUlGKah4eH0lFFt27dkJ2djUePHgEAoqOjMWHCBDg5OcHY2FhxdPvw4cNq1bx161YYGhpCIpFg1qxZ+OCDD/D+++8r5js4OMDIyEjxs5WVFZ48eaL4uao6pk2bhtDQULRq1Qrz589HQECAYtnbt28jOzsbZmZmMDQ0VHzFxcUhNjYWAGBjY4N79+6hS5cu1dqv6jhy5AhkMhmmTp2qmCaTyTB58mTs2LED5ubmlS7/Yiz+zz//xMCBAzF27FilPnohKSkJAwcOxJgxYzBr1qwy8/38/BASEoJ9+/bh+PHjWL9+fbX24+DBg/jll1+wb98+hISEYPfu3Vi/fj12796t8jratm2L3bt3Y8OGDTAwMIClpSUcHR1hYWGhOOL+5JNP4ObmhkmTJqm0zt27d6Np06ZlTgrv3bsXRAQbGxvo6elh8+bNmDBhgtKR/fjx4zFs2DC0a9cOI0aMgL+/P4KDgxXvzLZs2QKZTIZPPvlEpVrKe61Zw6GtakMXFxeIRCKl0AUAJycnAP+8/VSnoUOHwt7eHjt27IC1tTXkcjlee+01paEQVUycOBHLli2DRCKBlZVVmbfGOjo6Sj+LRCKlIZuq6ujQoQPi4uJw8uRJnD17FmPHjo
W3tzcOHTqE7OxsWFlZlTtU0rRpU5X3wdLSEo8fP1aa9vjxYxgbG6v0Wvzwww8YMmQILCwsFNNiY2MRHx+PoUOHKqa92G9tbW1ERUXB2dkZACCVSuHi4gIXFxe88cYbcHV1xc6dO5WCJTk5GV5eXvD09MT27dvLrcPOzg4A0KZNG5SUlGD27Nn46KOPoKWlpVI/LF68GEuWLMH48eMBAO3atUNCQgJWr16NqVOnKk7YPX78GFZWVkp99fKlpG+//TbefvttPH78GFKpFCKRCF9//bXi9/v8+fMICwvDoUOHAEBx9Yu5uTmWLVumNOxHRPjxxx8xefJk6OrqKtXr7OyMS5cuIScnB1lZWbCyssK4ceMU2ymPk5MTzM3NERMTg379+uH8+fMIDAyEnp6eUrtOnTph4sSJZf6Blfdas4ZD5RA3MzND//794evri3nz5lU4Ll4eNzc3/PTTT8jJyVEsd+3aNYjFYrRq1UrR7vbt28jLy1OE0PXr12FoaAg7Ozukp6cjKioKO3bsUIzr/XscUVVNmjSBi4tLjZZVtQ5jY2OMGzcO48aNw1tvvYWBAwciIyMDHTp0QGpqKrS1tZXGyaurW7duOHHihNK0M2fOoFu3blUuGxcXhwsXLuDYsWNK01u3bo2wsDClacuXL4dMJsOmTZsUgVseuVyu9E4sKSkJXl5e6NixI3bt2qXSGLJcLkdRURHkcrnKIZ6bm1tm3VpaWop/Po6OjrC0tMS5c+cUoZ2VlYWgoCCld18vvAi6H3/8Efr6+ujfvz8A4LfffkNeXp6iXXBwMKZPn44rV64o/rG9cOnSJcTExFR6TkEqlUIqlSIzMxOnT5/G2rVrK2ybmJiI9PR0xT+hzZs343//+59ifnJyMgYMGAA/Pz907dpVadmKXmvWgFRn7CUmJoYsLCyodevWdODAAYqIiKB79+7R3r17ycLCgj788ENFW7w0Jp6Tk0NWVlY0evRoCgsLo/Pnz5OTkxNNnTpV0X7q1KlkaGhIEyZMoPDwcDp+/DhZWFjQkiVLiKh07NPMzIwmTZpE0dHRdO7cOercubPSdmp6dcrLXlyd8rKNGzeSvb29ynVs2LCB9u3bR5GRkRQVFUUzZswgS0tLKikpIblcTj169CAPDw86ffo0xcXF0bVr12jp0qUUHBxMRKqNiT948IAMDAxo8eLFFBkZSd9++y1paWnRqVOnFG22bNlCffv2LbPs8uXLydraWjHGWpl/j4lnZ2fTJ598QoGBgRQfH083btygd955h/T09Oju3buK+l1cXKhfv36UmJhIKSkpiq8Xfv75Z/Lz86OIiAiKjY0lPz8/sra2VlxVQlR6lc+tW7fo1q1bZGVlRYsWLaJbt24pXTEydepUsrGxIX9/f4qLi6PDhw+Tubm50vmZr776ipo2baoYZx4+fDg5OjpSXl6eUl/dvHmToqKiyNfXlyQSCW3atKnCfqlsTHzSpEkVnkM5deoUnTx5kh48eEABAQHk4eFBXbt2pcLCQiIikslktGjRIgoMDKS4uDg6e/YsdejQgVxdXcucR3mhst/7yl5rVfqX1X/VCnEiouTkZJo7dy45OjqSjo4OGRoaUpcuXWjdunVKJyVRw0sMP/vsMzIzMyNDQ0OaNWuW0i/umTNnyM3NjfT09Mjd3Z0uXrxY5yGuSh3bt2+n9u3bk1QqJWNjY+rXrx+FhIQols/KyqJ58+aRtbU16ejokJ2dHU2cOJEePnyotB8XLlyosE6i0iBp37496erqkpOTE+3atavMvrxcN1HpPyFbW1taunRppet+4d8hnpeXRyNHjiRra2vS1dUlKysrGjZsmNKJzV27dhGAcr9eOHDgAHXo0IEMDQ1JKpVSmzZtaNWqVUrB+qIf/v3Vu3dvpb5csGABtWjRgvT19cnJyYmWLVumdGmqXC6nTz/9lCwsLEhPT4/69etHUVFRSvs5efJkMjU1JV1dXXJ3d6c9e/ZU2i8VhfizZ89IIpGUex
KXiMjPz4+cnJxIV1eXLC0tac6cOfTs2TPF/NzcXHrzzTepWbNmpKOjQ/b29jRr1izFBQDlqej3vqrXWpX+ZfWfiIgflMwYY0LFN8BijDEB4xBnjDEB4xBnjDEB4xBnjDEB4xBnjDEB4xBnjDEB4xBnjDEB4xBnjDEB4xBnjDEB4xBnjDEB4xBnjDEB4xBnjDEB4xBnjDEB4xBnjDEB4xBnjDEB4xBnjDEB4xBnjDEB4xBnjDEB4xBnjDEB4xBnjDEB4xBnjDEB4xBnjDEB4xBnjDEB4xBnjDEB4xBnjDEB4xBnjDEB4xBnjDEB09Z0AUwziAjFeQWaLoPVIW2JHkQikabLYLWMQ7yRKs4rwC/OkzRdBqtDE2N/ho6BvqbLYLWMh1MYY0zAOMQZY0zAOMQZY0zAOMQZY0zAOMQZY41GcbEcuXnFKC6Wa7qUWsNXpzDGGiQiwqUbqTgTmISbEWm4GZmOtMx8xfxmJvro2MYcHduYYYCnLXp0sBDkJZgiIiJNF8HqXlFuPl9i2Mg0lksMc3KL8OPR+/ju4D1EPnim8nJtnZviv+Pc8M6IlpDoC+f4lodTWIPiMrYPpqUcgsvYPuXON7Rthmkph9Djmzl1WxirE5dupMD9rSOY/9X1agU4AITHPsOcVYHwGHMEf4Y+Vk+BasAhzhgTvJISOT5cdx19pp/Ag0TZK60rOiELPab64+ONf0Eur/8DFRzijDFBKyqSY7zPBWzcG15r6yQC1u4Kw+Sll+r9SVAOccaYYBER3vnsMg6diVfL+vediMXsL66iPp865BBnjAnWtl/v4ZfjsWrdxq6j0dh9LFqt23gVHOKMMUGKT5Jh8dfB1VomeP8wPDozHsH7h1VruYVrg5D0OKday9SVBh/iaWlp8PHxgYuLC/T19WFnZ4cFCxYgJycHM2bMgEgkgq+vr6bLZIxV07zVgcjOLarWMpbmBrC1kMLS3KBayz2XFWLh2uvVWqauCOdiyBoIDQ3FoEGDkJqaCqlUijZt2iA5ORmbN29GbGwsMjIyAADt27fXbKGszr08xvnG6pmw698JOsYGKMrOQ4J/IG58+TPkRcUarJBVJuZhFvwvP6rTbR4+l4CHKdloYWVYp9utSoM9Ek9LS8PQoUORmpqKjz76CCkpKQgJCUFqairWrFmD48ePIzg4GCKRCO7u7poul9WS4vxCAICWRK/c+doGpdNL/m4HAJG7TuFIzwXY13IKjnkvgkkbB7jPH6X+YlmNfX8wss63KZcTtv16r863W5UGG+Lz589HYmIi5s6di/Xr18PIyEgxz8fHBx4eHiguLoaDgwOMjY01WCmrTdkPnwAAmrralDu/iastAED2dzsAeH4/8Z+nHIlEIDnByMlKvYWyGiMi7PWP0ci29/yhme1WpkGGeGRkJPz8/GBubo7Vq1eX26Zjx44AAA8PD6XpcXFxGDZsGIyMjGBiYoIpU6YgPT1d7TWz2pEe9gDZSU/hOKI7JBYmSvPEOtpwmz4IJJfjUcANpXnt5o7AxJi9mHD3R5i2tUfEdv+6LJtVw8OUbDzJyK+6oRokPs5BalquRrZdkQY5Jr5//37I5XJMnDgRhoblj19JJBIAyiEuk8ng5eUFU1NT7N+/H3l5efDx8cGQIUNw7do1iMUN8n9eg0Ilclz/eAe8flyM4ec3IHrfecgSUqHfrCkch3nCpHUL3N70G7Jik5WWC/M9ijDfo2jiagOnUT2R9yRTQ3vAqnIjPE2j278ZkYbBvVpotIaXNcgQP3/+PADAy8urwjaJiYkAlEN8+/btSEpKwuXLl9GiRemLZGtrC09PTxw7dgwjRoxQX9Gs1iSeC8GJYcvRbs4IuIztDT0TIxTnFiD9bhwuzt6A+D8CK1z2eXQSMsIT0HPLfJx+a2XdFc1UFh77TLPbj3lWr0K8Qd7F0M7ODomJibh161a5V54UFxfDysoKaWlpiI2NhZOTE4B/Qv/ChQtK7Z2dndGnTx/s3LmzRvV06t
QJqampNVpWXXRIjBXyLpouo15yHNkDnT6djF87vKvpUmrV5+K/UCSq3x8hV8VzSX9kS3qUOy94/7BKLx+0NJdAW0uM4hI5UtPyKt1OalouOk84Vma6Ud5FGOddKGeJV2NpaYkbN25U3fBfGuSReE5O6UX5eXnlv0h+fn5IS0uDkZERHB0dFdMjIiIwZsyYMu3btm2LiIiIGteTmpqKpKSkGi+vDroiLcBC01Vono6RAewHdcHDU3+hMCsXJm728Fg4GskXb2u6tFqXnJKMQirRdBmvzlIGSCqY9fd14FXR1hKr1K48sqwsyJ7Un7/nBhnilpaWyMzMREhICLp166Y0LyUlBYsXLwYAuLu7K90EPjMzE02bNi2zPlNTU0RFRb1SPfWNDokB4R+UvToiOI3uhc4rp0Ksq438tCwknAhC6Do/TVdW66ytrBvEkXiWvh4quk9hVScdq3skXh5jI30Y6ZR/9dOrqGlONMgQ9/b2RmRkJNasWYP+/fujZcuWAIDg4GBMnjwZaWmlJ0bq6kM+NXmLpG78UIhSRdl5CBj3habLqBP3o+83iIdCHD4bj9Efnit3XnnDHy97dGY8bC2kSE3Lg13/AzXa/v5d6/GfnnY1WlYdGuTlFj4+PjAzM8OjR4/Qtm1btGvXDq6urujSpQucnJzQt29fAGUvLzQxMcGzZ8/KrC8jIwOmpqZ1UTpjrAod25hpePvmGt3+vzXIELe1tcWVK1cwePBg6OvrIz4+Hqampti2bRuOHz+O+/fvAygb4m5ubuWOfUdERMDNza1OameMVa6FlSEszCoYFFf7tqUa23ZFGmSIA6WB7O/vD5lMBplMhqCgIMyePRs5OTmIj4+HWCzGa6+9prTMkCFDcPXqVcXlhwAQFBSE2NhYDB06tK53gTFWDpFIhClDXTSy7SlDXTWy3co02BCvSHh4OIgIrq6uMDBQvhRp9uzZsLKywvDhw+Hv749Dhw5hwoQJ6NKlC4YPH66hihlj//bumNao6wfTi8UizH6rVd1uVAWNLsTDwsIAlB1KAQBjY2OcP38eVlZWGD9+PGbOnAlPT0/4+/vzpzUZq0ec7YwxrE/dfuDmrf4OsLOsX3cwBBro1SmVqSzEgdIP9vj7830zGKvvNi/phvN/pUCWU717itdEUyNdbFzcVe3bqQkOcVaGkaMlem6aBz1TIxTJcnF1gS+e3U8s0+6tv7aipKBIcVvXO1uOIP7YnwAAsa42Oq+YCps+7VFSUIiMiARcmbsZAGDT93V0WDIBIpEIIm0t3N36O2J/vVRr9fc/8CkkzZoCcjmKcvIRtPxHZNyNK9Ouy5fT0WJAJxjaNccx70XICI+vtRoA1fvRxqs9Xv94AsQ62ijJK8CfPtuQGZGg0nqqWrYha2FliK8XdcWsz6+qvMyLa7+rexOrzUvegHXzmn04SN0aXYi/uK8Kq5jn2ndx/+cziDl4EfaD30CPTXPhP2hJuW0vvbex3PDruGwSQITD3ecBQGmo/q2X73ycGr0SmZEJMLRthpFXNiHhRBCKc2rnznSXZm9AYVbpH2mLQV3Q45s5OOa9qEy7hOOBuLv1KP7z+/9qZbv/pko/6jaRoqfvApwa+Sme3U9E865u6PXtAvzu9WGV61Fl2YZuxqiWuHrrscrPwKzqOvLyzBrdCpOGaOZEqip4oJcp0TczhpmHM2J/uwwASDh+HVJrMxg5qP5pMm2JHlwn9EXIV/sV0/KePlN8TwToNik9qaxjZID8TBnkhbX3FJ0XAQ4AukYGpRssx+PrkchNyai17b5M1X40crBEQaZMcWT9JCgSUhtzmLZzrHI9VS3bGIhEIvywsgfGD3RSy/qnDHXBd8s9lT7ZXd80uiNxVjmpjTnyHmeCSv75eHZ2UhqkNuaQxZe9iVePzfMgEgFPb8Xg5qpfUJCeBSMHSxQ+y4b7/FGw6uWOkvxChK4/iJSrpUNZl977Gl47F6M4twC6TaS4MGNdrT8KrcfmebDybA
sAODNpVa2uWxWq9mPWgxTomRihWadWeHojCnZvdoKukQEM7ZojIyyu0vWk33lQ6bKNhba2GD+v7g07SynW7w6r6H92tYjFInwywx1fzOkIsbj+BjjAIc5ewcmRnyEnKQ0ibS10+HgCem6ai7OTVkGkLYahXXM8i07EzVW/wPQ1R7zp9ymO9v4ABZkyeCx8CxdmrMPj65Ew83BGv91L8HvfD1GQUdEdMUr954//g3EFT9w51n8xcpP/eXjH1flbAADOY3qj0/JJOFvLQV5VLaoqkuXi4qz16Lj0bWhL9fH0xn1kRj0CFVd9o6pXWbah0dISY+2HXTDcyx7vfHYZ0QlZNV6Xm1NT7PqiJ7q6N6/FCtWHQ5wpyUlKg8TCBCItseLoz9DGHDlJZW/E/2IaFZcgYoc/Rl3bopguLynBg9+uAAAy7sYh++ETmLi1QGFWLiQWJnh8vfQZiem3Y5Gbkg7T1xyRcvlOpbWdGLqs2vsT++sldFszG3omhijIzK728jWtRV5QpHI/pv4ZjlOjVgAoPSE87vYPiiGSql6PypZtjLq/boHbv47Enj9i8O2BCIRFq/5wj/atTTFnXBtMGuIMfT3hRCOPiTMl+elZyAiLg/PoXgAA+8FvICclo8xQirZED7rG/3xYynFkD6T/fQVIQYYMKVfvwrpP6RVAhnbNYdiiOZ5HJyEnKQ0GFiZo8vczMI0cLGFkb6F40k6PzfPQYlDN73Oua2yg9Fi2FgM7oyAzu9oB/qp1qNqPACBp3lTxvccHbyHl2l1Fu6rWU9myjZVEXxvvjmmN24dG4uruIVjx3usY0ssOVs2UP9xn3dwAQ3u3wMr3X0fg3qEI8RuBmaNbCSrAgQb6UAhWtcruYmjsbI0e38yBnokRirLzcHXht3h27yEAwHP9e3gUcAOZ9x7C64fFEGmJIRIBsoQn+OvTH5Gd+BQAYNiiObp//V/omxqB5ITbG39FwvEgAIDjiO5wnz8KJCeIxCLc2XIEcUdKLxMbcWkjrsz3Rfrt2Brtl9TWHH22fwRtfV2QnJCfnoUbX+xRXEHzov5HATfQbe1s2PbrCEnzpijIlKEoOw+HPefVSh2q9uOjgBvwXP8eLLq6QaQlxtOb9xG0bKfSydmq1lPZsi+bGPtzg7iL4auw6bcPyU/zYN1MgqRzb2u6nFrBId5I1cdb0eqZGaP3twsQMP5LrkMNOMQBW+/9SHqSC5vmBkg8O0HT5dQKHk5h9UZBela9CM76UgdjquAQZ4wxAeMQZ4wxAeMQZ4wxARPWtTSs1mhL9DAx9mdNl8HqkLZET9MlMDXgEG+kRCJRo79SgbGGgIdTGGNMwDjEGWNMwDjEGWNMwDjEGWNMwDjEGWNMwDjEGWNMwDjEGWNMwDjEGWNMwDjEGWNMwDjEGWNMwDjEGWNMwDjEGWNMwDjEGWNMwDjEGWNMwDjEGWNMwDjEGWNMwDjEGWNMwPjJPo0EEaE4r0DTZbB6RFuiB5FIpOky2CviEG8kivMK8IvzJE2XweqRibE/8yP6GgAeTmGMMQHjEGeMMQHjEGeMMQHjEGeMMQHjEGeMMQHjEGeMMQHjEGeMMQHjEGeMMQHjEGcNisvYPpiWcgguY/uUO9/QthmmpRxCj2/m1G1hjKkJhzhjjAkYhzhjjAkYhzhjjAkYhzhjjAlYowjxtLQ0+Pj4wMXFBfr6+rCzs8OCBQuQk5ODGTNmQCQSwdfXV9NlMsZYtTX4W9GGhoZi0KBBSE1NhVQqRZs2bZCcnIzNmzcjNjYWGRkZAID27dtrtlDGmFoQEf4MfYKtfpFITcsDAKSm5WHK0kv47zg3dHVvJuj7qjfoEE9LS8PQoUORmpqKjz76CCtWrICRkREAYO3atfj444+hra0NkUgEd3d3DVfL6hIRKb5/Y/VM2PXvBB1jAxRl5yHBPxA3vvwZ8qJiDVbIakPK01yM/vAcAm8/UZpeIifs9Y/BXv8Y9OpoiV/X90VzM4mGqnw1DXo4Zf
78+UhMTMTcuXOxfv16RYADgI+PDzw8PFBcXAwHBwcYGxtrsFJWW4rzCwEAWhK9cudrG5ROL/m7HQBE7jqFIz0XYF/LKTjmvQgmbRzgPn+U+otlapWalovuU/zLBPi/Xb6Zih7T/JGWmV9HldWuBhvikZGR8PPzg7m5OVavXl1um44dOwIAPDw8FNNehH6XLl2gp8ePrxKa7Ielf7BNXW3Knd/E1RYAIHv4zx/28/uJ/zy6TiQCyQlGTlbqLZSp3ZRllxCXJFOpbXRCFt759LKaK1KPBhvi+/fvh1wux8SJE2FoaFhuG4mk9O3TyyEeExOD3377DZaWlujcuXOd1MpqT3rYA2QnPYXjiO6QWJgozRPraMNt+iCQXI5HATeU5rWbOwITY/Ziwt0fYdrWHhHb/euybFbLImIzcSYwuVrL+F9+hJiHWWqqSH0a7Jj4+fPnAQBeXl4VtklMTASgHOK9evVCSkoKAGDlypW4du2aGqtktY1K5Lj+8Q54/bgYw89vQPS+85AlpEK/WVM4DvOESesWuL3pN2TFKv+Bh/keRZjvUTRxtYHTqJ7Ie5KpoT1gteG7g/dqtNz3ByOxflHXWq5GvRpsiCckJAAA7O3ty51fXFysCOiXQ1wsrv03J506dUJqamqtr7c6dEiMFeii0RrqSuK5EJwYthzt5oyAy9je0DMxQnFuAdLvxuHi7A2I/yOwwmWfRychIzwBPbfMx+m3VtZd0RrQ0rUlikRyTZehFk+MZwLadtVebsvOP3Dgm9FqqKhqlpaWuHHjRtUN/6XBhnhOTg4AIC8vr9z5fn5+SEtLg5GRERwdHdVaS2pqKpKSktS6jaroirQAC42WUKfSb8fi4uwNNVpWrKMF40YwJp6ckoxCKtF0GeohEdUo3QqLoPG/1epqsCFuaWmJzMxMhISEoFu3bkrzUlJSsHjxYgCAu7u72k9eWlpaqnX9qtAhMdAwD7peiY6RAewHdcHDU3+hMCsXJm728Fg4GskXb2u6NLWztrJusEfiT7WLUVh1szL0dOQwtyn/pLi61TQnGmyIe3t7IzIyEmvWrEH//v3RsmVLAEBwcDAmT56MtLQ0AHXzIZ+avEWqbUW5+fjFeZKmy6h/iOA0uhc6r5wKsa428tOykHAiCKHr/DRdmdrdj74PHQN9TZehFl/tvI1PNlX/72710rfxweRVaqhIfRpsiPv4+GDfvn149OgR2rZti9atWyM/Px8xMTEYNGgQHBwccPr0aaXxcNb4FGXnIWDcF5oug9Wy6SNaYsXWEBQWqf5OQ6KvhWnDXdVYlXo02EsMbW1tceXKFQwePBj6+vqIj4+Hqakptm3bhuPHj+P+/fsAwCHOWAPU3EyCd8e0rtYyc8e3gYlx+R8Sq88a7JE4ALi5ucHfv+z1vtnZ2YiPj4dYLMZrr72mgcoYY+q24aOuSEjOxrGLD6ts+1Z/B6xe0KkOqqp9DTrEKxIeHg4iQsuWLWFgYFBm/qFDhwAAERERSj87ODigUydhvtCMNTY6OmL89nU/rPwuBN8eiMQzWdlTnaZN9DBvQht8+m57aGkJc2CiUYZ4WFgYgIqHUsaMGVPuz1OnTsVPP/2k1toYY7VHW1uM/83rhKUz2+PAqQe4fDMVspwiGEl14NXZCmMHOEKiL+wYFHb1NVRViL98hzvGmPAZSLQxfWRLTB/ZUtOl1DoOcQYAMHK0RM9N86BnaoQiWS6uLvDFs/uJZdrZ9H0dHZZMgEgkgkhbC3e3/o7YXy9VOk9LTwe9v/8ATVxtUZJfiPy05whcsgOy+Nr5FGuXL6ejxYBOMLRrjmPei5ARHl+jtpXtW21QtY8raqdqP7qM80KPb+bg/Dtr8PBUcK3Vz+qnRhniL+6rwv7hufZd3P/5DGIOXoT94DfQY9Nc+A9aUqZdL9/5ODV6JTIjE2Bo2wwjr2xCwokgFOfkVziPiksQtfcMks7fAgC0fmcgum94H6dGr6iV2hOOB+Lu1qP4z+//e6
W2le1bbVC1jytrV1U/Gto2Q8uJ3nhyI6pWamb1nzBH8lmt0jczhpmHM2J/K70VZ8Lx65Bam8HIoewnyIgA3SalJ4N1jAyQnymDvLC40nklBUWK4AGApyHRMLRrVmv1P74eidyUjFduW9m+vSpV+7iydlX2o0gEzw3vI2j5zlqrm9V/jfJInCmT2pgj73EmqOSfD0ZkJ6VBamNe5q36pfe+htfOxSjOLYBuEykuzFineAJOZfNe1mbmf/DwdP17m69q/TWhah9X57X4dz+2fXcongTfQ/qdB7VSMxMGDnGmMpGWGB4L38KFGevw+HokzDyc0W/3Evze90MUPs+pcF5Bxj835m83fxSMHCzx59jPVdrmf/74vwpvRnWs/2LkJqerfd9erl+TNb7s3/3YtJUd7Ad3xcmRn9X6tlj9xiHOkJOUBomFCURaYsURoKGNOXKS0pTamb7mCImFCR5fjwRQeqfA3JR0mL7miMLnORXOS7l8BwDQ9r1hsP9PVwSM/RwleardnujE0GW1tZuVqmzfXtRfEVVqVLWPVWlXXj9adHWDoV1zjP5zCwBA0qwpuq17D5LmJojaE6BiLzAh4jFxhvz0LGSExcF5dC8AgP3gN5CTklHm7XtOUhoMLEzQ5O9Hnxk5WMLI3gJZscmVzgOANu8OgePI7ggY9wUKs3KV1ttj8zy0GKTZe51XVf+r1qhqH1fVrqJ+jNoTgIPtZ+FQl//iUJf/4mlINAIXf88B3giIiC+KbhSquouhsbM1enwzB3omRijKzsPVhd/i2b3Sjyt7rn8PjwJu4FHADTiO6A73+aNAcoJILMKdLUcQd+QqAFQ4z8DKFGNDtiMrPhXF2aX3dy8pLMbxwZ8AAEZc2ogr832Rfju2RvvWbe1s2PbrCEnzpijIlKEoOw+HPecp5r9cf2VtK9u3V60RUL2PK2pXVT++bOBvnyNih3+llxhOjP25wd7FsDHhEG8k6uutaPXMjNH72wUIGP+lpkupkBBqrAkO8YaBQ7yRqK8hzjSHQ7xh4DFxxhgTMA5xxhgTMA5xxhgTMB4TbySICMV5BZoug9Uj2hI9tT8knKkfhzhjjAkYD6cwxpiAcYgzxpiAcYgzxpiAcYgzxpiAcYgzxpiAcYgzxpiAcYgzxpiAcYgzxpiAcYgzxpiAcYgzxpiAcYgzxpiAcYgzxpiAcYgzxpiAcYgzxpiAcYgzxpiAcYgzxpiAcYgzxpiAcYgzxpiAcYgzxpiAcYgzxpiAcYgzxpiAcYgzxpiAcYgzxpiAcYgzxpiAcYgzxpiA/T9QXztrshelGwAAAABJRU5ErkJggg==", + "text/plain": [ + "
" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "print(f\"Initial number of gates {len(qc.data)}:\")\n", + "display(qc.draw(\"mpl\"))\n", + "\n", + "print(f\"After optimization {len(qc_opt.data)}:\")\n", + "display(qc_opt.draw(\"mpl\"))" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "5ee9f32e-de6e-461d-bb7c-ea730a7e6772", + "metadata": {}, + "outputs": [ + { + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAATEAAACuCAYAAABeIjpKAAAAOnRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjEwLjMsIGh0dHBzOi8vbWF0cGxvdGxpYi5vcmcvZiW1igAAAAlwSFlzAAAPYQAAD2EBqD+naQAAGxtJREFUeJzt3XtAVHX+//HnDMNV7qCiolwEFBGxn0jeC5OKVLTUykxt023bn6bblrTrr9a+229zTbuZ22bttm2ZpqtdDMsrWq55Q9QUSIQV5DbqcFHB4TLMfP9wRYnhKsxwhvfjn5z5fM75vCl5dS6f8zkqk8lkQgghFEpt7QKEEOJ2SIgJIRRNQkwIoWgSYkIIRZMQE0IomoSYEELRJMSEEIomISaEUDQJMSGEokmICSEUTUJMCKFoEmJCCEWTEBNCKJqEmBBC0STEhBCKJiEmhFA0CTEhhKJJiAkhFE1CTAihaBJiQghFkxATQiiahJgQQtEkxIQQiiYhJoRQNAkxIYSiSYgJIRRNQkwIoWgSYkIIRZMQE0IomoSYEELRJMSEEIomISaEUDQJMSGEokmICSEUTUJMCKFoEmJCCEWTEBNCKJrG2gWI9mMymTDoq6xdhuJpnB1RqVTWLkO0kISYDTHoq/i0/+PWLkPxZmWvw97FydpliBaS00khhKJJiAkhFE1CTAihaBJiQghFkwv7QnQRJpOJqupaTCZwdLBDrbaNO7ASYkLYqKsV1WzZncPBkxc5ll7MqbMlVNcYAVCpYECgB9ERvgyP6M70uEB69+hm5YrbRmUymUzWLkK0j5prlTLFoh0ofYrFT+fKeGd9Oh9/nUX5tZoWbWNnp+LB8QE8M3MQ46J7dXCF7UuuiYl2E/Lw3TxRtJmQh+822+7q350nijYz5q0Fli2si6ipMfLH944TOe1z3t2Y0eIAA6itNbF5Vw53PfkNc5Z+R+kV5UyalhATwgZk513hzllbWfZuKgbD7Z1cfZKURcSDn7P3SGE7VdexJMSEULi0rFLGzE3i+E/F7bbPokvXuP/XO9i6N7fd9tlRJMSEULBz+VeJ+9V2tDp9u++7usbIjOeT2X2ooN333Z4kxIRQqNpaI4/9bi9Fl6512BjVNUYee2Efl0raPyTbi4SYEAr15idpHPrxUqu2Obohgbxdj3J0Q0KLt7lUWsnC5QdbW57F2HyI6XQ6EhMTCQkJwcnJib59+7J48WIqKiqYN28eKpWKNWvWWLtMIVolT1vOi2uOtXo7P18X/Ht2w8/XpVXbbdpxjm/357V6PEuw6cmuJ06cID4+Hq1WS7du3Rg0aBCFhYWsXr2a7OxsSkpKABg6dKh1C+1ibp2aOGL5fPrGRWPv7kJNuZ7cpIOkvLIOY43BihV2fu9vPkNVda1Fx3z70zTix/a16JgtYbNHYjqdjsmTJ6PVannuuecoKioiNTUVrVbLihUr2LZtG0ePHkWlUjFkyBBrl2sTDJXVANg5O5pt17hc/772v/0AMv6xnS/GLmZ92By2Tnger0GBDFn0UMcXq2DVNbV8sOWM
xcfd8UMBZ3MvW3zc5thsiC1atIj8/HwWLlzIqlWrcHNzq2tLTEwkKioKg8FAYGAg7u7uVqzUdpSfvwiAZ2gfs+0eof4AXP1vP4DLmfk3V6NVqTAZTbgFK2vGuKXtOljAhWLrXGhfl5RtlXGbYpMhlpGRwcaNG/H19WX58uVm+wwbNgyAqKioet+fO3eOhIQE3Nzc8PLyYs6cORQXt9/8G1tWfOo/lBdcImjqaJx7etVrU9trCH8yHpPRSN7OlHptkQunMivrE2ae/hDviADS30+yZNmKc+SUzmpjH01r3Y0ES7DJa2IbNmzAaDQya9YsXF1dzfZxdnYG6ofY1atXiY2Nxdvbmw0bNqDX60lMTGTSpEkcOHAAtdomM7/dmGqNHHrhA2I/XMKU5Nc5uz6Zq7lanLp7EpQwCq+B/Tj59hauZNefCX5qzZecWvMlHqF9CH5oLPqLpVb6CZThWIb1QiwlTYfJZOpU7yCwyRBLTk4GIDY2ttE++fn5QP0Qe//99ykoKOD777+nX79+APj7+zNq1Ci2bt3K1KlTO65oG5G/J5VvEl4kcsFUQh6+C0cvNwzXqig+fY59T71OzteN36q/fLaAkrRcxr6ziB3TX7Zc0QqTlmW9kL9UWomutJLu3s5Wq+HnbDLEcnOvPyoREBBgtt1gMHDgwAGgfoglJSUxZsyYugADGDlyJMHBwXz99ddtDrHo6Gi0Wm2btm0Ne5OaZcR0+DjNKT6Zzb6nXm/Ttmp7O9ytfE0sLDSMGpXRqjU0pcgzEdTml805uiGhyekTfr7Odf/M2/Voo/20umsMn7nVbFvk0OFojGUtL7gF/Pz8SElJab6jGTYZYhUVFQDo9eYvfm7cuBGdToebmxtBQUF136enpzNjxowG/SMiIkhPT29zPVqtloKCjn90w0FlBz07fJh2Y+/mQkB8DOe3H6H6yjW8wgOI+s00CvedtGpdhUWFVJssO32hVdxrG72afWMeWHM0duoW9TPngrYIakratG1HsMkQ8/Pzo7S0lNTUVEaOHFmvraioiCVLlgAwZMiQeuf2paWleHp6Ntift7c3Z860/Za2n59fm7dtDXuTGjrvAURDJhPB08Yx/OW5qB00VOqukPvNYU6s3GjVsnr36t2pj8S0KgONRaxW1/QjSH6+zmjs1BhqjU0+b9nUfvx6+mBnat/Tydv5HbHJEJswYQIZGRmsWLGCuLg4wsLCADh69CizZ89Gp7t+YdRSk1zbepjcWkpbFLGmXM/OR/5o7TIayDyb2akXRZy8cCdJ35ufPd/YKeANebsexb9nN7Q6PX3jPmv12D28nSg8eaZTXdi3ydttiYmJ+Pj4kJeXR0REBJGRkYSGhhITE0NwcDDjx48HGk6v8PLyoqysrMH+SkpK8Pb2tkTpQjRr2CBfq47dmQIMbDTE/P392b9/PxMnTsTJyYmcnBy8vb1Zu3Yt27ZtIzMzE2gYYuHh4WavfaWnpxMeHm6R2oVoTkxkd+uNPdh6YzfGJk8n4XogJSU1nDRZXl5OTk4OarWawYMH12ubNGkSS5cuJT8/H3//67PLDx8+THZ2NitXrrRI3UI0Z8KI3vj5OnfIGmLNmT05xOJjNscmj8SakpaWhslkIjQ0FBeX+rein3rqKXr16sWUKVNISkpi8+bNzJw5k5iYGKZMmWKlioWoz8Hejl9OG2Dxce8f7U//vp3vEb0uF2KnTp0CGp5KAri7u5OcnEyvXr149NFHmT9/PqNGjSIpKUlm64tO5VfTB+LsZGfRMX/zeIRFx2spmz2dbExTIQbQv39/s6ehSuTo5cp9m5bVfbZzdsQtoCefRc6juqy8Xl9X/+48dGgNZRnn677bO38VV3Mv4DmwHyOWz8fZ1wOjoRbd8SwOLf1bvdUoGhM8fRyDn05AZadGf+kyB579CxUFDR+bCXkklkHzH6j77NLbhwuHMtg7r+nT+Kbq/rmBT9zPgDlxmGqNqDR2ZK7bTcbfv2n2Z+iM+vTs
xp+eiea3Kw9bZLyZ8cHcN9rfImO1loSYDasqLWdr3JK6zxFPJ+A3clCDALvBUF5Zr/8NtVXVHF76d0ozclGp1Yx7dzGRC6Zy4vVNTY7vEdKb4S/NZmvcEvQXywieNpYRf/4le2Y3fCg/a+Nesjburfs8Ze8b/Ofz71v0czZW989lb/menz7aDoC9qzNT9r3BhcMZlJw+16JxOptFjw1iy+4cDhxvGNiNuTH/q7n5ZLfq6ePMO78f2XxHK+lyIXbjucquKPSx8aS++mmrt7t67uYjUyajEd2JbLwGNr84nueAfpRknEd/sQyA/D3HGbv6GRy9XKkqNR+kAL53hOLk68H5He07v67m6s1fXI2LI2qNsv/629mp+XT5XYyek0TBxZaFUnPzyH7O0cGOz16Lxcez886bkws9XUT36AE4enQjb1fjSxprXByZ9O2fmbzzNaKenY7KzHVAjbMjYbPu4fyOo82OWZKeg09kUN2zkP2njUOlVtPNv+nb9KGPjSd783eYDC179Kcldd8QMHEEU/a9yfQjfyXtr1sVexR2Q0BvN3Z/EE/vHq1bbrolHB3s2PLGeO4e3rnXd1P2/4pEi4XOHE/Wv77DVGv+cZprF0vZdMdTVBZfwcHTlbvfe5aIpydz+t2v6vqo7TXctfa3FOw7yflvjzQ75tVzWg4mvs/Y1c+g0tiRv/sYVWXlmAyNP9KjcXYkaMpotk1a2qKfqyV13yp32yFytx3C1b87sR8mkrf7WIOlgZRmYJAn//5oEg8vSSYlrX2W6fHv2Y1Pl9/FuOjOHWAgR2JdgsbFiaCEUWR91viptLHaQGXxFQCqy8o5+1kyPe+8OcFXpbHjrrXPor9YypGXPmzx2LnbDrFt0lKS7n+BM//cicbJgSs5RY32D5w8krIzeVzOzG/R/puruzHl+ZfQHT9L3wnDWvaDdHJB/m4c/GQyry6KxsH+9n6tn3wwjNOfP6SIAAMJsS4haMooStJzuJzV+BGHk487Ks31W/ZqBw0BD4yg+L+nWio7NXe99yxVpeX88Px7Dba9d9MyfIeanwTp3MPz+j7Uaoa9+DgZH22nVt/4Xc3Qx+7h7IaGYfvg/rdx8Wv46FdTdf+cR9jNu2uOPu74jR5MSUbnf8N1S2k0an4/P4q0L6bxm8cj8HBzaPG29ho1M+ODOfDxJP7+P2Nbta21yelkFxA68x4yP93d4PuhSx5Bf6GUMx/vpMed4dyx5JG66Qfaf5/ix7e3ABA0ZTSBE0dQkpZDwq7rUx4uHD3D4aV/Q6VW4x0RQEWR+SW8R7+5AFd/X9QO9uTvTiV1+fq6tlGrniZvZ0rdctXu/XvjHRHIua8O1NuHk4/79ZsBZu6qNlU3QMKulex6/FX0F0oZNP8Bet4ZjrHaACoV6R9so+j7H1v5b7PzC+nnzpuJI/j/C4fx5d5cDv14iWPpOn7MLKFCf/MtUkPCvBk2yIfoQb5Miwuip0/nWeiwNVSmW9+fJRTNGqtY+ET1Z8DsOLNHaO0lYNJIPEJ68+NbW5rv3A5mZa/r1KtY3I4+96yn8JKePj1cyN8909rltAs5EhO3pfhkNj+c7Ng34OQmdd63TytNZ1uBoj3INTEhhKJJiAkhFE1CTAihaBJiQghFkwv7NkTj7Mis7HXWLkPxNM6O1i5BtIKEmA1RqVQ2OzVAiMbI6aQQQtEkxIQQiiYhJoRQNAkxIYSiSYgJIRRNQkwIoWgSYkIIRZMQE0IomoSYEELRJMSEEIomISaEUDQJMSGEokmICSEUTUJMCKFoEmJCCEWTEBNCKJqEmBBC0WRlVwUxmUwY9FXWLqPL0zg72uT7G5VKQkxBDPoqi7/hWzRky28IVyI5nRRCKJqEmBBC0STEhBCKJiEmhFA0CTEhhKJJiAkhFE1CTAihaBJiQghFkxAT7Sbk4bt5omgzIQ/fbbbd1b87TxRtZsxbCyxbmLBpEmJCCEWTEBNCKJqEmBBC0STEhBCK1iVCTKfTkZiY
SEhICE5OTvTt25fFixdTUVHBvHnzUKlUrFmzxtplCiHawOaX4jlx4gTx8fFotVq6devGoEGDKCwsZPXq1WRnZ1NSUgLA0KFDrVuoEB3kmt7Ahm+z+dvnZyi6dA2AC8V6lv/tJPMeDKOHj7OVK7w9Nh1iOp2OyZMno9Vqee6551i2bBlubm4AvPbaa7zwwgtoNBpUKhVDhgyxcrVdh8lkqvvziOXz6RsXjb27CzXlenKTDpLyyjqMNQYrVmg7vtiTw5N/2E/Z1ep63xtqTSxdncKyd1N56VdDefGpoYpd6NGmTycXLVpEfn4+CxcuZNWqVXUBBpCYmEhUVBQGg4HAwEDc3d2tWKltMFRe/0Wxc3Y0265xuf59beXNX6iMf2zni7GLWR82h60TnsdrUCBDFj3U8cV2AZ99m8203+5pEGC3qjEY+cNfUkl846gFK2tfNhtiGRkZbNy4EV9fX5YvX262z7BhwwCIioqq++5G6MXExODoKMsQt0b5+YsAeIb2MdvuEeoPwNX/9gO4nJl/c8ltlQqT0YRbcK+OLbQLyM67wtwXv+eWg94mrfrnKb7Yk9OhNXUUmw2xDRs2YDQamTVrFq6urmb7ODtfvxZwa4hlZWWxZcsW/Pz8GD58uEVqtRXFp/5DecElgqaOxrmnV702tb2G8CfjMRmN5O1MqdcWuXAqs7I+YebpD/GOCCD9/SRLlm2T3tv0E9U1xlZt8/anaR1UTcey2WtiycnJAMTGxjbaJz8/H6gfYuPGjaOoqAiAl19+mQMHDnRglbbFVGvk0AsfEPvhEqYkv87Z9clczdXi1N2ToIRReA3sx8m3t3Alu7DedqfWfMmpNV/iEdqH4IfGor9YaqWfwDZUVhn48MvMVm/3XYqWtKxSIkK8mu/cidhsiOXm5gIQEBBgtt1gMNQF1K0hpla3/8FpdHQ0Wq32tvdjb1KzjJh2qKjj5O9J5ZuEF4lcMJWQh+/C0csNw7Uqik+fY99Tr5Pz9cFGt718toCStFzGvrOIHdNftlzRrRQWGkaNqnVHOZZUo/alxPOZNm079t7Hcak+2c4VNc/Pz4+UlJTmO5phsyFWUVEBgF6vN9u+ceNGdDodbm5uBAUFdWgtWq2WgoKC296Pg8oOerZDQR2s+GQ2+556vU3bqu3tcO/k18QKiwqpNtVau4zGOWnAs22bll6+RmnJ7f9dtSSbDTE/Pz9KS0tJTU1l5MiR9dqKiopYsmQJAEOGDOnwi/d+fn7tsh97kxo67wFAq9m7uRAQH8P57UeovnINr/AAon4zjcJ9lj8SaI3evXp36iMxg9qdCwAmE7Ty77aXhyMuzuZvzHSk2/kdsdkQmzBhAhkZGaxYsYK4uDjCwsIAOHr0KLNnz0an0wGWmeTa1sPkn6u5Vmlb7500mQieNo7hL89F7aChUneF3G8Oc2LlRmtX1qTMs5md+r2TRqOJ8ClbyMy93Krt7DVqfjr6heImv9psiCUmJrJ+/Xry8vKIiIhg4MCBVFZWkpWVRXx8PIGBgezYsaPe9TBhWTXlenY+8kdrl2Fz1GoVv354IM+uPNyq7WbcG6S4AAMbnmLh7+/P/v37mThxIk5OTuTk5ODt7c3atWvZtm0bmZnX795IiAlb9MSUUPx7dmtxf0cHNc/PHdyBFXUcmz0SAwgPDycpqeGco/LycnJyclCr1QwerMz/cEI0xdPdkW/+ci/3/PJbLpVWNtnXXqNm/Z9juSPc10LVtS+bDrHGpKWlYTKZCAsLw8XFpUH75s2bAUhPT6/3OTAwkOjoaMsVKsRtiAzz5uC6ySx89SDbD+Sb7XPHQB9efz6G2JjeFq6u/XTJEDt16hTQ+KnkjBkzzH6eO3cuH330UYfWJkR76t/XnW//eh/ZeVf48ItM/pN/lVqjCT9fZ2Y90J+YyO6Kf7ROQswMU0sfOOsEHL1cuW/TsrrPds6OuAX05LPIeVSXlTfo7z9hGMOXzUGlVlP603n+vXgNNeX159KN
eWsBIY/Esn7AHKqvXGu2huDp4xj8dAIqOzX6S5c58OxfqCjQNejn6t+dMW8vxHtwIOXnL7I1bonZ/d33r2X4RAazfuDcZsfufXcU0f/v5h1bJ18P9JfK+PrexAZ9faL6c+crT+I9OJDC706S/IvX6tpCHr6bmFd+Uff8Z9Xlik494ba1+vd150+LbPMsQkJM4apKy+uFQcTTCfiNHGQ2wDQuTox+49dsf+gPXM4q5M4/zSPq2emkvPJJXZ9+D9yJ0dDyiZweIb0Z/tJstsYtQX+xjOBpYxnx51+yZ3bDh+6ry/WkrtiAg5sL/+d3M83ub9CvJnE19wI+kcEtGr9w30m23jKv7J6Pf4/2wGmzffUXSznyh3/gHRmE//g7GrRrf0irF2xCGWz27mRTkpOTMZlMTJw40dqltLvQx8ZzdsMes219xt9ByelzXM66/uziT//cQdDUMXXtTr4eDFn0EEeWfdTi8TwH9KMk4zz6i2UA5O85jv/4O3D0avjQfXVZOReP/IThWpX5fYX50+/+GE6980WLx7+Vc08veo0ZTPbm78y2XysqQXciC2NVTZv2LzqnLhlitqp79AAcPbqRt+uY2XbXPr6U51+q+1yedxHnnp6o7K7/NRi16mlSXvkEQ0XTd7NuVZKeg09kUN2jQv2njUOlVtPNv3uraldp7Bi16tccTFyLqbZts+FDHoklP/k4lcVX2rR9j5iBJOxayQNb/0TApJHNbyA6hS55OmmrQmeOJ+tf37UpBEIfu4eKAl2jp2KNuXpOy8HE9xm7+hlUGjvydx+jqqwck6F1NQx9bga53xzm8tkCXFsZgDeEPhrL4Rc/bNO2ebuPce7rH6jVV+MR2od7N7zEtUIdl1LPtml/wnIkxGyExsWJoIRRJMW/0Gif8gIdve+6uQy3a98e6C+UYao14jd6MH53htM3blhde0Ly6yQ/8Rolp881OXbutkPkbjsEgHN3TyIXTOVKTlGr6vcbGUG3Pr6EP3k/Kjs77N2cmX7kXb6O/x1VLTiy8hsZgZ2jQ5ufu6wquVr358tnC8hPTqXH8IESYgogIWYjgqaMoiQ9p+56lzkFe48zYvl8PEJ6czmrkIFz7+PcV9eXI9q/4O16fZ8o2szW8c/V3Z28d9MyUl/9FN2JrAb7de7hif5iGSq1mmEvPk7GR9up1Te+JLI53059qe7Prv7dSdi9is0x/7fuuwf3v82OGf/DNW2J2e1DHxtP1qa9mIxtOxV18fOu27eTrwe9Rkdy7qsf2rQvYVkSYjYidOY9ZH66u8H3Q5c8gv5CKWc+3omhopIfnvsr4//xAio7NWVn8ti/qPlX1anUarwjAqgoKjbbPvrNBbj6+6J2sCd/dyqpy9fXtY1a9TR5O1PI25mCnbMDD/37HewcNdi7uTDj2Fqyt3xH6qvrze73Bicfdxy9XKkyc8cVrq+G0e+BO/kq9rcN2iasW8rxlRspPpmNe//e3LdpGRpnB+ycHJhxbC0/rv6cM//cwcBf3E/f+4ZjqjGAWk3a+0mtPrUW1qEyKWlSVBdnrVUsfKL6M2B2HD88/57FxwYImDQSj5De/PjWFquM/3Ozstd16lUsuho5EhPNKj6ZzQ8ns602fm5S46vBCiFTLIQQiiYhJoRQNAkxIYSiSYgJIRRN7k4qiMlkuvm2bGE1Gmd5M3xnIiEmhFA0OZ0UQiiahJgQQtEkxIQQiiYhJoRQNAkxIYSiSYgJIRRNQkwIoWgSYkIIRZMQE0IomoSYEELRJMSEEIomISaEUDQJMSGEokmICSEUTUJMCKFoEmJCCEWTEBNCKJqEmBBC0STEhBCKJiEmhFA0CTEhhKJJiAkhFE1CTAihaBJiQghFkxATQija/wJ6pspZlsBxVwAAAABJRU5ErkJggg==", + "text/plain": [ + "
" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "qc_rand = backend.randomize_params(qc_opt, np.random.default_rng())\n", + "display(qc_rand.draw(\"mpl\"))" + ] + }, + { + "cell_type": "markdown", + "id": "d870cb35-35cc-402d-9b2e-50fb39bf047d", + "metadata": {}, + "source": [ + "### Schmidt-rank-vector" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "b0077ab7-12ff-45cb-8457-a37f0ae664c4", + "metadata": {}, + "outputs": [], + "source": [ + "def plot_srv_stat(num_of_qubits, min_gates, max_gates, gate_pool, samples, rng=np.random.default_rng()): \n", + " srv_list = list()\n", + " for i in range(samples):\n", + " qc = backend.rnd_circuit(num_of_qubits, rng.integers(min_gates, max_gates+1), gate_pool, rng) \n", + " qc = backend.optimize_circuit(qc, gate_pool)\n", + " srv = backend.schmidt_rank_vector(qc)\n", + " srv_list.append(srv) \n", + "\n", + " srv_unique, srv_cnt = np.unique(np.array(srv_list), axis=0, return_counts=True)\n", + " srv_unique = [f\"{s}\" for s in srv_unique]\n", + " \n", + " plt.bar(srv_unique, srv_cnt)\n", + " plt.title(\"Different SRV distribution\")\n", + " plt.show()" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "99159db5-a206-48e3-88eb-91352f25f20d", + "metadata": {}, + "outputs": [ + { + "data": { + "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAigAAAGzCAYAAAAFROyYAAAAOnRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjEwLjMsIGh0dHBzOi8vbWF0cGxvdGxpYi5vcmcvZiW1igAAAAlwSFlzAAAPYQAAD2EBqD+naQAAMttJREFUeJzt3XtYVNXCx/HfADIqOIMYgiSiRzslpllWStrxxhGNrJ6w0siwTF8Nu2hXumhZqccumt089ZR0Kiv1lJWmZWp5SjKzY69RmnZUSBqwjMFLAsJ+/+hhv2cElEFwFvT9PM9+nmbttdZeezHO/Nq3cViWZQkAAMAgQYEeAAAAwNEIKAAAwDgEFAAAYBwCCgAAMA4BBQAAGIeAAgAAjENAAQAAxiGgAAAA4xBQAACAcQgoQC098MADcjgcPmVHjhzRnXfeqbi4OAUFBemyyy6TJB04cEA33HCDYmJi5HA4dOutt578AUO7du2Sw+FQVlaWXVbd37GhDBgwQAMGDLBff/zxx3I4HFqyZMlJ2f6YMWPUsWPHk7ItoL4RUPCHlJWVJYfDYS/NmzdXbGyskpOTNW/ePO3fv79W/bz00kt69NFHNWLECL388suaPHmyJGnGjBnKysrSxIkT9corr2j06NENuTsnZOHChZo7d26t65eWlurJJ5/U2WefLZfLpYiICHXr1k3jx4/X1q1b7XpHz3FISIhOPfVUjRkzRnv27JEklZWV6ZRTTlG/fv1q3J5lWYqLi9M555xT5308Ufn5+XrggQe0efPmgI2hJiaPDTghFvAHtGDBAkuSNX36dOuVV16xXnrpJWvGjBnWkCFDLIfDYcXHx1tff/21T5uysjLrt99+8ym76qqrrFNPPbVK/71797b69u3boPtQX1JSUqz4+Pha17/44out4OBg65prrrGeeeYZa+7cudaECROs9u3bWwsWLLDrHT3HL7zwgjV27FgrODjY6ty5sz2XEyZMsBwOh7Vr165qt/fxxx9bkqzHH3/c733buXOnJclnXNX9HY9n48aNVfqpjZKSEqukpMR+vXbtWkuStXjxYr/6qevYSktLrcOHD9fbtoCTKSSA2QgIuGHDhuncc8+1X2dmZmrNmjW6+OKLdckll+i7775TixYtJEkhISEKCfH9J1NYWKiIiIgq/RYWFiohIaHexllRUaHS0lI1b9683vqsi40bN2rZsmV65JFHdM899/ise/rpp1VUVFSlzX/P8Q033KBTTjlFf/vb3/Tuu+/qyiuvVFpamubPn6/XX39dd999d5X2CxcuVFBQkEaOHFkv+1Dd37G+HTp0SC1btlRoaGiDbud4mjVrFtDtAyeCUzzAUQYNGqT7779fu3fv1quvvmqX//e1C5XXNqxdu1Y5OTn2aYzKawx27typ5cuX2+W7du2SJJWUlGjatGnq0qWLnE6n4uLidOedd6qkpMRnDA6HQ5MmTdJrr72mbt26yel0auXKlZKkPXv26Prrr1d0dLScTqe6deuml156yad95TgWLVqkRx55RO3bt1fz5s01ePBg7dixw643YMAALV++XLt377bHeqxrFn744QdJUt++fausCw4OVps2bY47vxdeeGGVvjp27KiFCxdWqVtWVqYlS5Zo4MCBio2NPWa/RUVFGjNmjNxutyIiIpSenl5tYKruGpRVq1apX79+ioiIUHh4uE4//XQ7gH388cc677zzJEnXXXedPU+V17UMGDBAZ555pjZt2qS//OUvatmypd326GtQKpWXl+uee+5RTEyMwsLCdMkllygvL8+nTseOHTVmzJgqbf+7z+ONrbprUA4ePKjbbrtNcXFxcjqdOv300/XYY4/JOuqH7Svfg0uXLtWZZ55pv9cq34dAQ+MIClCN0aNH65577tGHH36ocePGVVkfFRWlV155RY888ogOHDigmTNnSpK6du2qV155RZMnT1b79u1122232fUrKip0ySWX6NNPP9X48ePVtWtXbdmyRXP
mzNH333+vpUuX+mxjzZo1WrRokSZNmqRTTjlFHTt2VEFBgfr06WN/eURFRWnFihUaO3asiouLq1yMO2vWLAUFBen222+X1+vV7NmzlZaWpg0bNkiS7r33Xnm9Xv3444+aM2eOJCk8PLzGeYmPj5ckvfbaa+rbt2+djkRUhrXWrVtL+v2L8Oqrr9aMGTOUk5Ojbt262XVXrlypffv2KS0t7Zh9WpalSy+9VJ9++qkmTJigrl276u2331Z6evpxx5OTk6OLL75YPXr00PTp0+V0OrVjxw599tlnkn7/m06fPl1Tp07V+PHj7YB1wQUX2H388ssvGjZsmEaOHKlrrrlG0dHRx9zmI488IofDobvuukuFhYWaO3eukpKStHnzZvuIXW3UZmz/zbIsXXLJJVq7dq3Gjh2rnj176oMPPtAdd9yhPXv22O+BSp9++qneeust3XjjjWrVqpXmzZun1NRU5ebm1iqMAickwKeYgICovD5i48aNNdZxu93W2Wefbb+eNm2adfQ/mf79+1vdunWr0jY+Pt5KSUnxKXvllVesoKAg61//+pdP+fz58y1J1meffWaXSbKCgoKsnJwcn7pjx4612rVrZ/38888+5SNHjrTcbrd16NAhy7L+/1qHrl27+lwD8eSTT1qSrC1btthl/lyDUlFRYfXv39+SZEVHR1ujRo2ynnnmGWv37t1V6lbO8UcffWTt3bvXysvLs5YsWWJFRUVZTqfTysvLs+vm5ORYkqzMzMwq+9W8eXPL6/Uec1xLly61JFmzZ8+2y44cOWJdeOGFVa7POPrvOGfOHEuStXfv3hr7P9Z1HpXzMX/+/GrX9e/f335d+Xc59dRTreLiYrt80aJFliTrySeftMvi4+Ot9PT04/Z5rLGlp6f7/G0r5+nhhx/2qTdixAjL4XBYO3bssMskWaGhoT5lX3/9tSXJeuqpp6psC6hvnOIBahAeHl7ru3lqY/HixeratavOOOMM/fzzz/YyaNAgSdLatWt96vfv39/nOhbLsvTPf/5Tw4cPl2VZPn0kJyfL6/Xqq6++8unjuuuu87kOovL/sP/zn//UaR8cDoc++OADPfzww2rdurVef/11ZWRkKD4+XldddVW1p1SSkpIUFRWluLg4jRgxQmFhYXr33XfVvn17u05CQoLOPvtsvfHGG3bZwYMH9e677+riiy+Wy+U65rjef/99hYSEaOLEiXZZcHCwbrrppuPuU+U1RO+8844qKiqOW786TqdT1113Xa3rX3vttWrVqpX9esSIEWrXrp3ef//9Om2/tt5//30FBwfr5ptv9im/7bbbZFmWVqxY4VOelJSkzp0726979Oghl8tV5/cP4A8CClCDAwcO+HyJnKjt27crJydHUVFRPsuf//xnSb9fWPvfOnXq5PN67969Kioq0vPPP1+lj8ovx6P76NChg8/rytMqv/76a533w+l06t5779V3332n/Px8vf766+rTp499OupozzzzjFatWqUlS5booosu0s8//yyn01mlXlpamnbu3Kn169dLkpYuXapDhw4d9/SOJO3evVvt2rWrcnrq9NNPP27bq666Sn379tUNN9yg6OhojRw5UosWLfIrrJx66ql+XRB72mmn+bx2OBzq0qWLffqroezevVuxsbFV3tddu3a11/+3o98/0u/voRN5/wC1xTUoQDV+/PFHeb1edenSpd76rKioUPfu3fXEE09Uuz4uLs7n9dHXIlR+YV5zzTU1XlvRo0cPn9fBwcHV1rOOuiCyrtq1a6eRI0cqNTVV3bp106JFi5SVleVzbcr5559v38Vz2WWXqV+/frr66qu1bds2n0AxatQo3XnnnVq4cKEuuOACLVy4UK1bt9ZFF11UL2OtSYsWLbRu3TqtXbtWy5cv18qVK/Xmm29q0KBB+vDDD2ucw6P7qG81PUyuvLy8VmOqDw39/gGOhYACVOOVV16RJCUnJ9dbn507d9bXX3+twYMH1+lJplFRUWrVqpX
Ky8uVlJRUb+Oqj6eqNmvWTD169ND27dv1888/KyYmptp6wcHBmjlzpgYOHKinn37a57bi2NhYDRw4UIsXL9b999+vVatWacyYMbU6MhEfH6/Vq1frwIEDPqFn27ZttRp/UFCQBg8erMGDB+uJJ57QjBkzdO+992rt2rVKSkqq9yfPbt++3ee1ZVnasWOHT8Bs3bp1tafMdu/erT/96U/2a3/GFh8fr48++kj79+/3OYpS+YC9yougARNwigc4ypo1a/TQQw+pU6dOtTq9UFtXXnml9uzZoxdeeKHKut9++00HDx48Zvvg4GClpqbqn//8p7755psq6/fu3VuncYWFhcnr9daq7vbt25Wbm1ulvKioSNnZ2WrdurWioqKO2ceAAQN0/vnna+7cuTp8+LDPurS0NBUWFup//ud/VFZWVuv5v+iii3TkyBE999xzdll5ebmeeuqp47bdt29flbKePXtKkn37d1hYmCRVGxjq4h//+IfP9U1LlizRTz/9pGHDhtllnTt31ueff67S0lK7bNmyZVVuR/ZnbBdddJHKy8v19NNP+5TPmTNHDofDZ/tAoHEEBX9oK1as0NatW3XkyBEVFBRozZo1WrVqleLj4/Xuu+/W64PRRo8erUWLFmnChAlau3at+vbtq/Lycm3dulWLFi3SBx984PPQuOrMmjVLa9euVe/evTVu3DglJCRo3759+uqrr/TRRx9V+2V7PL169dKbb76pKVOm6LzzzlN4eLiGDx9ebd2vv/5aV199tYYNG6YLL7xQkZGR2rNnj15++WXl5+dr7ty5tTr9cMcdd+iKK65QVlaWJkyYYJenpqbqxhtv1DvvvKO4uDj95S9/qdU+DB8+XH379tXdd9+tXbt2KSEhQW+99Vatgtf06dO1bt06paSkKD4+XoWFhXr22WfVvn17+xH8nTt3VkREhObPn69WrVopLCxMvXv3rnKdUG1FRkaqX79+uu6661RQUKC5c+eqS5cuPre033DDDVqyZImGDh2qK6+8Uj/88INeffVVn4tW/R3b8OHDNXDgQN17773atWuXzjrrLH344Yd65513dOutt1bpGwioQN5CBARK5S2wlUtoaKgVExNj/fWvf7WefPJJn1tAK53obcaW9fujx//2t79Z3bp1s5xOp9W6dWurV69e1oMPPuhzK60kKyMjo9qxFxQUWBkZGVZcXJzVrFkzKyYmxho8eLD1/PPP23VqeqR6dY9+P3DggHX11VdbERERlqRj3nJcUFBgzZo1y+rfv7/Vrl07KyQkxGrdurU1aNAga8mSJT51j3Urd3l5udW5c2erc+fO1pEjR3zWXXHFFZYk684776xxHNX55ZdfrNGjR1sul8tyu93W6NGjrX//+9/Hvc149erV1qWXXmrFxsZaoaGhVmxsrDVq1Cjr+++/9+n/nXfesRISEqyQkBCfPmt6D1Suq+4249dff93KzMy02rZta7Vo0cJKSUmp9lbtxx9/3Dr11FMtp9Np9e3b1/ryyy+r9HmssR19m7FlWdb+/futyZMnW7GxsVazZs2s0047zXr00UetiooKn3o1vQdruv0ZqG8Oy+JqJwAAYBauQQEAAMYhoAAAAOMQUAAAgHEIKAAAwDgEFAAAYBwCCgAAME6jfFBbRUWF8vPz1apVq3p/BDUAAGgYlmVp//79io2NVVDQsY+RNMqAkp+fX+WH1QAAQOOQl5en9u3bH7NOowwolT9ylZeXJ5fLFeDRAACA2iguLlZcXJzPj1XWpFEGlMrTOi6Xi4ACAEAjU5vLM7hIFgAAGIeAAgAAjENAAQAAxiGgAAAA4xBQAACAcQgoAADAOAQUAABgHAIKAAAwDgEFAAAYh4ACAACMQ0ABAADGIaAAAADjEFAAAIBxCCgAAMA4IYEegIk63r080ENoNHbNSgn0EAAATRBHUAAAgHEIKAAAwDgEFAAAYBwCCgAAMA4BBQAAGIeAAgAAjENAAQAAxiGgAAAA4xBQAAC
AcQgoAADAOAQUAABgHAIKAAAwDgEFAAAYx++AsmfPHl1zzTVq06aNWrRooe7du+vLL7+011uWpalTp6pdu3Zq0aKFkpKStH37dp8+9u3bp7S0NLlcLkVERGjs2LE6cODAie8NAABoEvwKKL/++qv69u2rZs2aacWKFfr222/1+OOPq3Xr1nad2bNna968eZo/f742bNigsLAwJScn6/Dhw3adtLQ05eTkaNWqVVq2bJnWrVun8ePH199eAQCARs1hWZZV28p33323PvvsM/3rX/+qdr1lWYqNjdVtt92m22+/XZLk9XoVHR2trKwsjRw5Ut99950SEhK0ceNGnXvuuZKklStX6qKLLtKPP/6o2NjY446juLhYbrdbXq9XLpertsOvtY53L6/3PpuqXbNSAj0EAEAj4c/3t19HUN59912de+65uuKKK9S2bVudffbZeuGFF+z1O3fulMfjUVJSkl3mdrvVu3dvZWdnS5Kys7MVERFhhxNJSkpKUlBQkDZs2FDtdktKSlRcXOyzAACApsuvgPKf//xHzz33nE477TR98MEHmjhxom6++Wa9/PLLkiSPxyNJio6O9mkXHR1tr/N4PGrbtq3P+pCQEEVGRtp1jjZz5ky53W57iYuL82fYAACgkfEroFRUVOicc87RjBkzdPbZZ2v8+PEaN26c5s+f31DjkyRlZmbK6/XaS15eXoNuDwAABJZfAaVdu3ZKSEjwKevatatyc3MlSTExMZKkgoICnzoFBQX2upiYGBUWFvqsP3LkiPbt22fXOZrT6ZTL5fJZAABA0+VXQOnbt6+2bdvmU/b9998rPj5ektSpUyfFxMRo9erV9vri4mJt2LBBiYmJkqTExEQVFRVp06ZNdp01a9aooqJCvXv3rvOOAACApiPEn8qTJ0/WBRdcoBkzZujKK6/UF198oeeff17PP/+8JMnhcOjWW2/Vww8/rNNOO02dOnXS/fffr9jYWF122WWSfj/iMnToUPvUUFlZmSZNmqSRI0fW6g4eAADQ9PkVUM477zy9/fbbyszM1PTp09WpUyfNnTtXaWlpdp0777xTBw8e1Pjx41VUVKR+/fpp5cqVat68uV3ntdde06RJkzR48GAFBQUpNTVV8+bNq7+9AgAAjZpfz0ExBc9BMQfPQQEA1FaDPQcFAADgZCCgAAAA4xBQAACAcQgoAADAOAQUAABgHAIKAAAwDgEFAAAYh4ACAACMQ0ABAADGIaAAAADjEFAAAIBxCCgAAMA4BBQAAGAcAgoAADAOAQUAABiHgAIAAIxDQAEAAMYhoAAAAOMQUAAAgHEIKAAAwDgEFAAAYBwCCgAAMA4BBQAAGIeAAgAAjENAAQAAxiGgAAAA4xBQAACAcQgoAADAOAQUAABgHAIKAAAwDgEFAAAYh4ACAACMQ0ABAADGIaAAAADjEFAAAIBxCCgAAMA4BBQAAGAcAgoAADAOAQUAABiHgAIAAIxDQAEAAMYhoAAAAOMQUAAAgHEIKAAAwDgEFAAAYBy/AsoDDzwgh8Phs5xxxhn2+sOHDysjI0Nt2rRReHi4UlNTVVBQ4NNHbm6uUlJS1LJlS7Vt21Z33HGHjhw5Uj97AwAAmoQQfxt069ZNH3300f93EPL/XUyePFnLly/X4sWL5Xa7NWnSJF1++eX67LPPJEnl5eVKSUlRTEyM1q9fr59++knXXnutmjVrphkzZtTD7gAAgKbA74ASEhKimJiYKuVer1cvvviiFi5cqEGDBkmSFixYoK5du+rzzz9Xnz599OGHH+rbb7/VRx99pOjoaPXs2VMPPfSQ7rrrLj3wwAMKDQ2tdpslJSUqKSmxXxcXF/s7bAAA0Ij4fQ3K9u3bFRsbqz/96U9KS0tTbm6uJGnTpk0qKytTUlKSXfeMM85Qhw4dlJ2dLUnKzs5W9+7dFR0dbddJTk5WcXGxcnJyatzmzJkz5Xa77SUuLs7fYQMAgEbEr4DSu3dvZWVlaeXKlXruuee0c+d
OXXjhhdq/f788Ho9CQ0MVERHh0yY6Oloej0eS5PF4fMJJ5frKdTXJzMyU1+u1l7y8PH+GDQAAGhm/TvEMGzbM/u8ePXqod+/eio+P16JFi9SiRYt6H1wlp9Mpp9PZYP0DAACznNBtxhEREfrzn/+sHTt2KCYmRqWlpSoqKvKpU1BQYF+zEhMTU+WunsrX1V3XAgAA/phOKKAcOHBAP/zwg9q1a6devXqpWbNmWr16tb1+27Ztys3NVWJioiQpMTFRW7ZsUWFhoV1n1apVcrlcSkhIOJGhAACAJsSvUzy33367hg8frvj4eOXn52vatGkKDg7WqFGj5Ha7NXbsWE2ZMkWRkZFyuVy66aablJiYqD59+kiShgwZooSEBI0ePVqzZ8+Wx+PRfffdp4yMDE7hAAAAm18B5ccff9SoUaP0yy+/KCoqSv369dPnn3+uqKgoSdKcOXMUFBSk1NRUlZSUKDk5Wc8++6zdPjg4WMuWLdPEiROVmJiosLAwpaena/r06fW7VwAAoFFzWJZlBXoQ/iouLpbb7ZbX65XL5ar3/jvevbze+2yqds1KCfQQAACNhD/f3/wWDwAAMA4BBQAAGIeAAgAAjENAAQAAxiGgAAAA4xBQAACAcQgoAADAOAQUAABgHAIKAAAwDgEFAAAYh4ACAACMQ0ABAADGIaAAAADjEFAAAIBxCCgAAMA4BBQAAGAcAgoAADAOAQUAABiHgAIAAIxDQAEAAMYhoAAAAOMQUAAAgHEIKAAAwDgEFAAAYBwCCgAAMA4BBQAAGIeAAgAAjENAAQAAxiGgAAAA4xBQAACAcQgoAADAOAQUAABgHAIKAAAwDgEFAAAYh4ACAACMQ0ABAADGIaAAAADjEFAAAIBxCCgAAMA4BBQAAGAcAgoAADAOAQUAABiHgAIAAIxDQAEAAMY5oYAya9YsORwO3XrrrXbZ4cOHlZGRoTZt2ig8PFypqakqKCjwaZebm6uUlBS1bNlSbdu21R133KEjR46cyFAAAEATUueAsnHjRv39739Xjx49fMonT56s9957T4sXL9Ynn3yi/Px8XX755fb68vJypaSkqLS0VOvXr9fLL7+srKwsTZ06te57AQAAmpQ6BZQDBw4oLS1NL7zwglq3bm2Xe71evfjii3riiSc0aNAg9erVSwsWLND69ev1+eefS5I+/PBDffvtt3r11VfVs2dPDRs2TA899JCeeeYZlZaW1s9eAQCARq1OASUjI0MpKSlKSkryKd+0aZPKysp8ys844wx16NBB2dnZkqTs7Gx1795d0dHRdp3k5GQVFxcrJyen2u2VlJSouLjYZwEAAE1XiL8N3njjDX311VfauHFjlXUej0ehoaGKiIjwKY+OjpbH47Hr/Hc4qVxfua46M2fO1IMPPujvUAEAQCPl1xGUvLw83XLLLXrttdfUvHnzhhpTFZmZmfJ6vfaSl5d30rYNAABOPr8CyqZNm1RYWKhzzjlHISEhCgkJ0SeffKJ58+YpJCRE0dHRKi0tVVFRkU+7goICxcTESJJiYmKq3NVT+bqyztGcTqdcLpfPAgAAmi6/AsrgwYO1ZcsWbd682V7OPfdcpaWl2f/drFkzrV692m6zbds25ebmKjExUZKUmJioLVu2qLCw0K6zatUquVwuJSQk1NNuAQCAxsyva1BatWqlM88806csLCxMbdq0scvHjh2rKVOmKDIyUi6XSzfddJMSExPVp08fSdKQIUOUkJCg0aNHa/bs2fJ4PLrvvvuUkZEhp9NZT7sFAAAaM78vkj2eOXPmKCgoSKmpqSopKVFycrKeffZZe31wcLCWLVumiRMnKjExUWFhYUpPT9f06dPreygAAKCRcliWZQV6EP4qLi6W2+2W1+ttkOtROt69vN77bKp2zUoJ9BAAAI2EP9/f/BYPAAAwDgEFAAAYh4ACAACMQ0ABAADGIaAAAADjEFAAAIBxCCgAAMA4BBQAAGAcAgoAADAOAQUAABiHgAIAAIx
DQAEAAMYhoAAAAOMQUAAAgHEIKAAAwDgEFAAAYBwCCgAAMA4BBQAAGIeAAgAAjENAAQAAxiGgAAAA4xBQAACAcQgoAADAOAQUAABgHAIKAAAwDgEFAAAYh4ACAACMQ0ABAADGIaAAAADjEFAAAIBxCCgAAMA4BBQAAGAcAgoAADAOAQUAABiHgAIAAIxDQAEAAMYhoAAAAOMQUAAAgHEIKAAAwDgEFAAAYBwCCgAAMA4BBQAAGIeAAgAAjENAAQAAxiGgAAAA4/gVUJ577jn16NFDLpdLLpdLiYmJWrFihb3+8OHDysjIUJs2bRQeHq7U1FQVFBT49JGbm6uUlBS1bNlSbdu21R133KEjR47Uz94AAIAmwa+A0r59e82aNUubNm3Sl19+qUGDBunSSy9VTk6OJGny5Ml67733tHjxYn3yySfKz8/X5ZdfbrcvLy9XSkqKSktLtX79er388svKysrS1KlT63evAABAo+awLMs6kQ4iIyP16KOPasSIEYqKitLChQs1YsQISdLWrVvVtWtXZWdnq0+fPlqxYoUuvvhi5efnKzo6WpI0f/583XXXXdq7d69CQ0Nrtc3i4mK53W55vV65XK4TGX61Ot69vN77bKp2zUoJ9BAAAI2EP9/fdb4Gpby8XG+88YYOHjyoxMREbdq0SWVlZUpKSrLrnHHGGerQoYOys7MlSdnZ2erevbsdTiQpOTlZxcXF9lGY6pSUlKi4uNhnAQAATZffAWXLli0KDw+X0+nUhAkT9PbbbyshIUEej0ehoaGKiIjwqR8dHS2PxyNJ8ng8PuGkcn3luprMnDlTbrfbXuLi4vwdNgAAaET8Diinn366Nm/erA0bNmjixIlKT0/Xt99+2xBjs2VmZsrr9dpLXl5eg24PAAAEVoi/DUJDQ9WlSxdJUq9evbRx40Y9+eSTuuqqq1RaWqqioiKfoygFBQWKiYmRJMXExOiLL77w6a/yLp/KOtVxOp1yOp3+DhUAADRSJ/wclIqKCpWUlKhXr15q1qyZVq9eba/btm2bcnNzlZiYKElKTEzUli1bVFhYaNdZtWqVXC6XEhISTnQoAACgifDrCEpmZqaGDRumDh06aP/+/Vq4cKE+/vhjffDBB3K73Ro7dqymTJmiyMhIuVwu3XTTTUpMTFSfPn0kSUOGDFFCQoJGjx6t2bNny+Px6L777lNGRgZHSAAAgM2vgFJYWKhrr71WP/30k9xut3r06KEPPvhAf/3rXyVJc+bMUVBQkFJTU1VSUqLk5GQ9++yzdvvg4GAtW7ZMEydOVGJiosLCwpSenq7p06fX714BAIBG7YSfgxIIPAfFHDwHBQBQWyflOSgAAAANhYACAACMQ0ABAADGIaAAAADjEFAAAIBxCCgAAMA4BBQAAGAcAgoAADAOAQUAABiHgAIAAIxDQAEAAMYhoAAAAOMQUAAAgHEIKAAAwDgEFAAAYBwCCgAAMA4BBQAAGIeAAgAAjENAAQAAxiGgAAAA4xBQAACAcQgoAADAOAQUAABgHAIKAAAwDgEFAAAYh4ACAACMQ0ABAADGIaAAAADjEFAAAIBxCCgAAMA4BBQAAGAcAgoAADAOAQUAABiHgAIAAIxDQAEAAMYhoAAAAOMQUAAAgHEIKAAAwDgEFAAAYBwCCgAAMA4BBQAAGIeAAgAAjENAAQAAxiGgAAAA4xBQAACAcfwKKDNnztR5552nVq1aqW3btrrsssu0bds2nzqHDx9WRkaG2rRpo/DwcKWmpqqgoMCnTm5urlJSUtSyZUu1bdtWd9xxh44cOXLiewMAAJoEvwLKJ598ooyMDH3++edatWqVysrKNGTIEB08eNCuM3nyZL333ntavHixPvnkE+Xn5+vyyy+315eXlyslJUWlpaVav369Xn75ZWVlZWnq1Kn1t1cAAKBRc1iWZdW18d69e9W2bVt98skn+stf/iKv16uoqCgtXLhQI0aMkCRt3bpVXbt2VXZ2tvr06aMVK1bo4os
vVn5+vqKjoyVJ8+fP11133aW9e/cqNDT0uNstLi6W2+2W1+uVy+Wq6/Br1PHu5fXeZ1O1a1ZKoIcAAGgk/Pn+PqFrULxeryQpMjJSkrRp0yaVlZUpKSnJrnPGGWeoQ4cOys7OliRlZ2ere/fudjiRpOTkZBUXFysnJ6fa7ZSUlKi4uNhnAQAATVedA0pFRYVuvfVW9e3bV2eeeaYkyePxKDQ0VBERET51o6Oj5fF47Dr/HU4q11euq87MmTPldrvtJS4urq7DBgAAjUCdA0pGRoa++eYbvfHGG/U5nmplZmbK6/XaS15eXoNvEwAABE5IXRpNmjRJy5Yt07p169S+fXu7PCYmRqWlpSoqKvI5ilJQUKCYmBi7zhdffOHTX+VdPpV1juZ0OuV0OusyVAAA0Aj5dQTFsixNmjRJb7/9ttasWaNOnTr5rO/Vq5eaNWum1atX22Xbtm1Tbm6uEhMTJUmJiYnasmWLCgsL7TqrVq2Sy+VSQkLCiewLAABoIvw6gpKRkaGFCxfqnXfeUatWrexrRtxut1q0aCG3262xY8dqypQpioyMlMvl0k033aTExET16dNHkjRkyBAlJCRo9OjRmj17tjwej+677z5lZGRwlAQAAEjyM6A899xzkqQBAwb4lC9YsEBjxoyRJM2ZM0dBQUFKTU1VSUmJkpOT9eyzz9p1g4ODtWzZMk2cOFGJiYkKCwtTenq6pk+ffmJ7AgAAmowTeg5KoPAcFHPwHBQAQG2dtOegAAAANAQCCgAAMA4BBQAAGIeAAgAAjENAAQAAxiGgAAAA4xBQAACAcQgoAADAOAQUAABgHAIKAAAwDgEFAAAYh4ACAACMQ0ABAADGIaAAAADjEFAAAIBxCCgAAMA4BBQAAGAcAgoAADAOAQUAABiHgAIAAIxDQAEAAMYhoAAAAOMQUAAAgHEIKAAAwDgEFAAAYBwCCgAAMA4BBQAAGIeAAgAAjENAAQAAxiGgAAAA4xBQAACAcQgoAADAOAQUAABgHAIKAAAwDgEFAAAYh4ACAACMQ0ABAADGIaAAAADjEFAAAIBxCCgAAMA4BBQAAGAcAgoAADAOAQUAABiHgAIAAIxDQAEAAMbxO6CsW7dOw4cPV2xsrBwOh5YuXeqz3rIsTZ06Ve3atVOLFi2UlJSk7du3+9TZt2+f0tLS5HK5FBERobFjx+rAgQMntCMAAKDp8DugHDx4UGeddZaeeeaZatfPnj1b8+bN0/z587VhwwaFhYUpOTlZhw8ftuukpaUpJydHq1at0rJly7Ru3TqNHz++7nsBAACalBB/GwwbNkzDhg2rdp1lWZo7d67uu+8+XXrppZKkf/zjH4qOjtbSpUs1cuRIfffdd1q5cqU2btyoc889V5L01FNP6aKLLtJjjz2m2NjYKv2WlJSopKTEfl1cXOzvsAEAQCNSr9eg7Ny5Ux6PR0lJSXaZ2+1W7969lZ2dLUnKzs5WRESEHU4kKSkpSUFBQdqwYUO1/c6cOVNut9te4uLi6nPYAADAMPUaUDwejyQpOjrapzw6Otpe5/F41LZtW5/1ISEhioyMtOscLTMzU16v117y8vLqc9gAAMAwfp/iCQSn0ymn0xnoYQAAgJOkXo+gxMTESJIKCgp8ygsKCux1MTExKiws9Fl/5MgR7du3z64DAAD+2Or1CEqnTp0UExOj1atXq2fPnpJ+v6B1w4YNmjhxoiQpMTFRRUVF2rRpk3r16iVJWrNmjSoqKtS7d+/6HA4AGKnj3csDPYRGZdeslEAPAQHgd0A5cOCAduzYYb/euXOnNm/erMjISHXo0EG33nqrHn74YZ122mnq1KmT7r//fsXGxuqyyy6TJHXt2lVDhw7VuHHjNH/+fJWVlWnSpEkaOXJktXfw4I+DD+3aq88PbOa99viiBE4evwPKl19+qYEDB9qvp0yZIklKT09XVlaW7rzzTh08eFDjx49XUVGR+vXrp5UrV6p58+Z2m9dee02TJk3S4MGDFRQ
UpNTUVM2bN68edgcAgOoRxv0T6EDud0AZMGCALMuqcb3D4dD06dM1ffr0GutERkZq4cKF/m4aAAD8QfBbPAAAwDgEFAAAYBwCCgAAMA4BBQAAGIeAAgAAjENAAQAAxiGgAAAA4xBQAACAcQgoAADAOAQUAABgHAIKAAAwDgEFAAAYh4ACAACMQ0ABAADGIaAAAADjEFAAAIBxCCgAAMA4BBQAAGAcAgoAADAOAQUAABiHgAIAAIxDQAEAAMYhoAAAAOMQUAAAgHEIKAAAwDgEFAAAYBwCCgAAMA4BBQAAGIeAAgAAjENAAQAAxiGgAAAA4xBQAACAcQgoAADAOAQUAABgHAIKAAAwDgEFAAAYh4ACAACMQ0ABAADGIaAAAADjEFAAAIBxCCgAAMA4BBQAAGAcAgoAADAOAQUAABgnoAHlmWeeUceOHdW8eXP17t1bX3zxRSCHAwAADBGwgPLmm29qypQpmjZtmr766iudddZZSk5OVmFhYaCGBAAADBGwgPLEE09o3Lhxuu6665SQkKD58+erZcuWeumllwI1JAAAYIiQQGy0tLRUmzZtUmZmpl0WFBSkpKQkZWdnV6lfUlKikpIS+7XX65UkFRcXN8j4KkoONUi/TVF9/g2Y99pj3gOjvuadOfcP8x4YDfEdW9mnZVnHrRuQgPLzzz+rvLxc0dHRPuXR0dHaunVrlfozZ87Ugw8+WKU8Li6uwcaI2nHPDfQI/piY98Bg3gODeQ+Mhpz3/fv3y+12H7NOQAKKvzIzMzVlyhT7dUVFhfbt26c2bdrI4XAEcGQnR3FxseLi4pSXlyeXyxXo4fxhMO+BwbwHBvMeGH+0ebcsS/v371dsbOxx6wYkoJxyyikKDg5WQUGBT3lBQYFiYmKq1Hc6nXI6nT5lERERDTlEI7lcrj/EG9g0zHtgMO+BwbwHxh9p3o935KRSQC6SDQ0NVa9evbR69Wq7rKKiQqtXr1ZiYmIghgQAAAwSsFM8U6ZMUXp6us4991ydf/75mjt3rg4ePKjrrrsuUEMCAACGCFhAueqqq7R3715NnTpVHo9HPXv21MqVK6tcOIvfT3FNmzatymkuNCzmPTCY98Bg3gODea+Zw6rNvT4AAAAnEb/FAwAAjENAAQAAxiGgAAAA4xBQAACAcQgo9WzAgAFyOBxyOBzavHlzoIdTrcYwRn81hn2qHF9Tesgg8x4YzHtgmD7vu3btssfXs2fPQA/nhBFQGsC4ceP0008/6cwzz7TLbr75ZvXq1UtOp7PWb5znn39eAwYMkMvlksPhUFFR0XHbrFu3TsOHD1dsbKwcDoeWLl1apc5bb72lL774opZ703jUx7zv27dPN910k04//XS1aNFCHTp00M0332z/QGV1ysrKdNddd6l79+4KCwtTbGysrr32WuXn5/vU++mnnzR37ty67p6xjp73r7/+WqNGjVJcXJxatGihrl276sknnzxuP4888oguuOACtWzZslZfasz7ic/7rl27NHbsWHXq1EktWrRQ586dNW3aNJWWlh6zXW0+m5j3mtVl3mvz2RQXF6effvpJt912W/3sbIA1it/iaWxatmxZ7SP7r7/+em3YsEH/+7//W6t+Dh06pKFDh2ro0KE+v/x8LAcPHtRZZ52l66+/Xpdffnm1dSIjIxvsl6ADqT7mPT8/X/n5+XrssceUkJCg3bt3a8KECcrPz9eSJUuqbXPo0CF99dVXuv/++3XWWWfp119/1S233KJLLrlEX375pV0vJiam1o94bkyOnvdNmzapbdu2evXVVxUXF6f169dr/PjxCg4O1qRJk2rsp7S0VFdccYUSExP14osvHne7zPuJz/vWrVtVUVGhv//97+rSpYu++eYbjRs3TgcPHtRjjz1W47Zr89nEvNfvvNfmsyk4OFgxMTEKDw+v/x0PBAv1qn///tYtt9xS4/pp06ZZZ511ll99rl271pJk/frrr361k2S9/fbb1a7buXOnJcn697//7Ve
fpmqIea+0aNEiKzQ01CorK6t1my+++MKSZO3evdunfMGCBZbb7a7TOEx0vHmvdOONN1oDBw6sVZ8nMkfMuy9/5r3S7NmzrU6dOtWq7vE+m5j32vNn3ivV9Nl0Ip93JuEUD3AcXq9XLpdLISG1P+Do9Xqb3Pn3E+H1ehUZGXlStsO8/7+6zPvJ+ls1ZSdr3uvy2dSYNM29AurJzz//rIceekjjx4+vdZvDhw/rrrvu0qhRo/4wv056LOvXr9ebb76p5cuXN+h2mHdfdZn3HTt26Kmnnjrm6R0c28ma97p8NjU2HEEBalBcXKyUlBQlJCTogQceqFWbsrIyXXnllbIsS88991zDDrAR+Oabb3TppZdq2rRpGjJkSINth3n3VZd537Nnj4YOHaorrrhC48aNa+ARNk0na97r8tnUGHEEBajG/v37NXToULVq1Upvv/22mjVrdtw2lV+Su3fv1po1a/7w/xf/7bffavDgwRo/frzuu+++BtsO8+6rLvOen5+vgQMH6oILLtDzzz/fwCNsmk7WvNfls6mx4ggKcJTi4mINGTJEoaGhevfdd9W8efPjtqn8kty+fbs++ugjtWnT5iSM1Fw5OTkaOHCg0tPT9cgjjzTYdph3X3WZ9z179mjAgAHq1auXFixYoKAgvhb8dbLmvS6fTY0ZR1BOkh07dujAgQPyeDz67bff7If8JCQkKDQ0tNo2Ho9HHo9HO3bskCRt2bJFrVq1UocOHWq8mOrAgQN2fUnauXOnNm/erMjISHXo0KF+d6oR8HfeKz8ADh06pFdffVXFxcX2LdlRUVEKDg6u0qasrEwjRozQV199pWXLlqm8vFwej0fS77d01/T3baq++eYbDRo0SMnJyZoyZYo9F8HBwYqKiqqxXW5urvbt26fc3FyVl5fbf6suXbpUe9sk8+6rLvNe+SUZHx+vxx57THv37rXXVXfLfqW6fDY1VSdr3uvy2dToBfo2oqamptvQ+vfvb0mqsuzcubPGvqZNm1ZtmwULFtTYpvK2v6OX9PR0n3p/lNuM/Z33mubvWG0q57K6Ze3atT51/wi3Xdb0vo2Pjz9mX+np6bWaw0rM+y0+ZXWZ9wULFtQ4h8dS288m5r16dZl3fz6buM0Yfvn4449lWVaVpWPHjjW2eeCBB6ptM2bMmBrbDBgwoNo2WVlZ9b5PjYG/817T/B2rTceOHWtsM2DAgAbbN1PV9L7dtWvXMdtlZWX5NYfMu6+6zPuYMWNqnMO6bOtYn01N1cma97p8NjV2BJQG8Oyzzyo8PFxbtmwJ9FCqNWzYMHXr1i3Qw6h3ps97eHi4JkyYEOhh1DvmPTCY98Awed5zc3MVHh6uGTNmBHoo9cJhHS8qwy979uzRb7/9Jknq0KGDkefBG8MY/dUY9qnyfH1wcLA6deoU4NHUD+Y9MJj3wDB93o8cOWIfuXE6nYqLiwvsgE4QAQUAABiHUzwAAMA4BBQAAGAcAgoAADAOAQUAABiHgAIAAIxDQAEAAMYhoAAAAOMQUAAAgHH+D6XRL+dGUbVoAAAAAElFTkSuQmCC", + "text/plain": [ + "
" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "gate_pool = [ql.HGate, ql.CXGate]\n", + "\n", + "plot_srv_stat(num_of_qubits=3, min_gates=6, max_gates=8, gate_pool=gate_pool, samples=int(1e3), rng=np.random.default_rng())" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "72ca2b73-186b-49ae-8ece-af2ed06d37ee", + "metadata": {}, + "outputs": [], + "source": [ + "def test_srv(system_dims, init, target):\n", + " vec = qi.Statevector(init, dims=system_dims)\n", + " vec *= 1/np.sqrt(vec.trace())\n", + " srv = backend.schmidt_rank_vector(densityMatrix=qi.DensityMatrix(vec)) \n", + " assert srv == target, f\"srv: {srv}\"\n", + " print(f\"passed test, svr: {srv}\")\n", + " display(vec.draw('latex', prefix='|\\\\psi\\\\rangle = '))" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "6ed24cb5-fb38-464b-9e60-885eefb23f6a", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "passed test, svr: [1, 1]\n" + ] + }, + { + "data": { + "text/latex": [ + "$$|\\psi\\rangle = \\frac{\\sqrt{2}}{2} |00\\rangle+\\frac{\\sqrt{2}}{2} |01\\rangle$$" + ], + "text/plain": [ + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "passed test, svr: [2, 2]\n" + ] + }, + { + "data": { + "text/latex": [ + "$$|\\psi\\rangle = \\frac{\\sqrt{2}}{2} |00\\rangle+\\frac{\\sqrt{2}}{2} |11\\rangle$$" + ], + "text/plain": [ + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "passed test, svr: [2, 2, 2]\n" + ] + }, + { + "data": { + "text/latex": [ + "$$|\\psi\\rangle = \\frac{\\sqrt{2}}{2} |000\\rangle+\\frac{\\sqrt{2}}{2} |111\\rangle$$" + ], + "text/plain": [ + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "passed test, svr: [3, 3, 
3]\n" + ] + }, + { + "data": { + "text/latex": [ + "$$\\begin{align}\n", + "\n", + "|\\psi\\rangle = \n", + "\\begin{bmatrix}\n", + "\\frac{\\sqrt{3}}{3} & 0 & 0 & 0 & \\cdots & 0 & 0 & \\frac{\\sqrt{3}}{3} \\\\\n", + " \\end{bmatrix}\n", + "\\\\\n", + "\\text{dims=(3, 3, 3)}\n", + "\\end{align}$$" + ], + "text/plain": [ + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "passed test, svr: [2, 2, 4]\n" + ] + }, + { + "data": { + "text/latex": [ + "$$\\begin{align}\n", + "\n", + "|\\psi\\rangle = \n", + "\\begin{bmatrix}\n", + "\\frac{1}{2} & 0 & 0 & 0 & \\cdots & 0 & 0 & 0 \\\\\n", + " \\end{bmatrix}\n", + "\\\\\n", + "\\text{dims=(4, 4, 4)}\n", + "\\end{align}$$" + ], + "text/plain": [ + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "#---------------- |0+> = |00>+|01>\n", + "system_dims = (2,2)\n", + "init = np.zeros(np.prod(system_dims), dtype=complex)\n", + "init[0] = 1\n", + "init[1] = 1\n", + "test_srv(system_dims, init, [1, 1])\n", + "\n", + "#----------------Bell, |00>+|11>\n", + "system_dims = (2,2)\n", + "init = np.zeros(np.prod(system_dims), dtype=complex)\n", + "init[0] = 1\n", + "init[3] = 1\n", + "test_srv(system_dims, init, [2, 2])\n", + " \n", + "#----------------GHZ, |000>+|111>\n", + "system_dims = (2,2,2)\n", + "init = np.zeros(np.prod(system_dims), dtype=complex)\n", + "init[0] = 1\n", + "init[7] = 1\n", + "test_srv(system_dims, init, [2,2,2])\n", + " \n", + "#----------------Sym, |000>+|111>+|222>\n", + "system_dims = (3,3,3)\n", + "init = np.zeros(np.prod(system_dims), dtype=complex)\n", + "init[0] = 1\n", + "init[13] = 1\n", + "init[26] = 1\n", + "test_srv(system_dims, init, [3,3,3])\n", + " \n", + "#----------------Wikipedia example, |000>+|101>+|210>+|311>\n", + "system_dims = (4,4,4)\n", + "init = np.zeros(np.prod(system_dims), dtype=complex)\n", + "init[0] = 1\n", + "init[17] = 1\n", + "init[36] = 1\n", + "init[53] = 
1\n", + "test_srv(system_dims, init, [2, 2, 4])" + ] + }, + { + "cell_type": "markdown", + "id": "2f0718e3-8282-4fd3-b883-4f2be50e4912", + "metadata": {}, + "source": [ + "# Export -" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "1f12d3ca-ebf9-4752-9d61-2fbd26f027db", + "metadata": {}, + "outputs": [], + "source": [ + "#| hide\n", + "import nbdev; nbdev.nbdev_export()" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "python3", + "language": "python", + "name": "python3" + }, + "widgets": { + "application/vnd.jupyter.widget-state+json": { + "state": {}, + "version_major": 2, + "version_minor": 0 + } + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/src/platform/circuits_generation.ipynb b/src/platform/circuits_generation.ipynb new file mode 100644 index 0000000..16d5d62 --- /dev/null +++ b/src/platform/circuits_generation.ipynb @@ -0,0 +1,1092 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "id": "4bf8892a-8c4a-4d36-8fd9-c983a1bea170", + "metadata": {}, + "source": [ + "# Circuits dataset generation functions\n", + "\n", + "> Functions to create a quantum circuit dataset." 
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "8f8bab72-5b33-4d47-8805-8efa39510344", + "metadata": {}, + "outputs": [], + "source": [ + "#| default_exp platform.circuits_generation" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "c82c4d7b-e275-416e-b88d-3d527a9e3bff", + "metadata": {}, + "outputs": [], + "source": [ + "#| export\n", + "from genQC.imports import *\n", + "from genQC.platform.backends.base_backend import BaseBackend\n", + "from genQC.platform.tokenizer.circuits_tokenizer import CircuitTokenizer, Vocabulary\n", + "from genQC.dataset.dataset_helper import get_unique_elements_indices\n", + "from genQC.utils.async_fn import MemoryMappedArray, Parallel, delayed" + ] + }, + { + "cell_type": "markdown", + "id": "b87a1715-06d4-41a0-b0fd-493020065164", + "metadata": {}, + "source": [ + "## Generate random circuits" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "297cefeb-ff48-4fdb-9df6-f02b43c518cb", + "metadata": {}, + "outputs": [], + "source": [ + "#| export\n", + "class CircuitConditionType(enum.Enum): \n", + " SRV = enum.auto() \n", + " UNITARY = enum.auto() " + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "c3c07f63-1d6e-4c6f-8079-f78eadccb172", + "metadata": {}, + "outputs": [], + "source": [ + "#| export\n", + "def get_rnd_encoded_circuit(backend: BaseBackend, \n", + " tokenizer: CircuitTokenizer,\n", + " condition: CircuitConditionType,\n", + " num_of_qubits: int, \n", + " gate_pool: Optional[Sequence[str]],\n", + " min_gates: int, \n", + " max_gates: int, \n", + " rng: np.random.Generator, \n", + " optimized: bool = True,\n", + " post_randomize_params: bool = True,\n", + " return_params: bool = True) -> Tuple[Any, torch.Tensor, ...]: \n", + " \"\"\"Generate a random circuit with corresponding condition.\"\"\"\n", + "\n", + " gate_pool = default(gate_pool, tokenizer.vocabulary)\n", + "\n", + " qc = backend.rnd_circuit(num_of_qubits, rng.integers(min_gates, 
max_gates+1), gate_pool, rng) \n", + " \n", + " if optimized: \n", + " qc = backend.optimize_circuit(qc, gate_pool) \n", + " \n", + " if post_randomize_params: \n", + " qc = backend.randomize_params(qc, rng) \n", + " \n", + " match condition:\n", + " case CircuitConditionType.SRV: \n", + " condition = torch.tensor(backend.schmidt_rank_vector(qc))\n", + " \n", + " case CircuitConditionType.UNITARY: \n", + " U = backend.get_unitary(qc) \n", + " U_r, U_i = torch.from_numpy(np.real(U)), torch.from_numpy(np.imag(U))\n", + " condition = torch.stack([U_r, U_i], dim=0)\n", + " \n", + " case _: raise NotImplementedError(f\"Not implemented given condition: {condition}\")\n", + " \n", + " instructions = backend.backend_to_genqc(qc)\n", + " enc_tuple = tokenizer.encode(instructions, max_gates, return_params_tensor=return_params) # qc_tensor, params_tensor\n", + "\n", + " return qc, condition, *enc_tuple" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "d8271eeb-3c07-4a94-8cc1-3a44b9cb37d3", + "metadata": {}, + "outputs": [], + "source": [ + "#| export\n", + "def get_rnd_encoded_circuits(backend: BaseBackend, \n", + " tokenizer: CircuitTokenizer,\n", + " condition: CircuitConditionType, \n", + " samples: int, \n", + " num_of_qubits: int, \n", + " min_gates: int, \n", + " max_gates: int, \n", + " min_sub_gate_pool_cnt: int = 1,\n", + " max_sub_gate_pool_cnt: Optional[int] = None,\n", + " fixed_sub_gate_pool: Optional[Sequence[str]] = None,\n", + " max_num_params: Optional[int] = None,\n", + " filter_unique: bool = True,\n", + " optimized: bool = True, \n", + " post_randomize_params: bool = True,\n", + " return_params: bool = True,\n", + " silent: bool = False) -> Tuple[torch.Tensor, ...]:\n", + " \"\"\"\n", + " Generate ´samples´ number of random circuits with corresponding condition. 
\n", + " Creates prompts for conditioning.\n", + " \"\"\"\n", + "\n", + " if condition not in [CircuitConditionType.SRV, CircuitConditionType.UNITARY]:\n", + " raise NotImplementedError(f\"Not implemented {condition}\")\n", + " \n", + " sub_gate_pool = fixed_sub_gate_pool\n", + " gate_pool = list(tokenizer.vocabulary)\n", + "\n", + " rng = np.random.default_rng() \n", + " n = len(gate_pool) + 1\n", + " c_range = np.arange(n-1)\n", + "\n", + " if exists(max_sub_gate_pool_cnt):\n", + " max_sub_gate_pool_cnt = max(min_sub_gate_pool_cnt, min(max_sub_gate_pool_cnt+1, n))\n", + " else: \n", + " max_sub_gate_pool_cnt = n\n", + "\n", + " #------------------\n", + " # Generate single circuits sequentially\n", + " \n", + " x = []\n", + " y = [] \n", + " \n", + " if condition is CircuitConditionType.UNITARY: \n", + " u = []\n", + " \n", + " if return_params: \n", + " p = [] # Note: params is of different size -> keep list\n", + "\n", + " for i in tqdm(range(samples), disable=silent):\n", + "\n", + " if not exists(fixed_sub_gate_pool): \n", + " sub_gate_pool_cnt = rng.integers(min_sub_gate_pool_cnt, max_sub_gate_pool_cnt) \n", + " sub_gate_pool_ind = rng.choice(c_range, size=sub_gate_pool_cnt, replace=False) \n", + "\n", + " #NOTE: with this we have always the same ordering of the prompt gates!!\n", + " sub_gate_pool_ind = np.sort(sub_gate_pool_ind) \n", + " \n", + " sub_gate_pool = [gate_pool[ind] for ind in sub_gate_pool_ind]\n", + "\n", + " val = get_rnd_encoded_circuit(backend=backend,\n", + " tokenizer=tokenizer,\n", + " condition=condition,\n", + " num_of_qubits=num_of_qubits,\n", + " gate_pool=sub_gate_pool, \n", + " min_gates=min_gates, \n", + " max_gates=max_gates,\n", + " rng=rng,\n", + " optimized=optimized,\n", + " post_randomize_params=post_randomize_params,\n", + " return_params=return_params)\n", + " \n", + " if return_params:\n", + " _, cond, qc_tensor, params_tensor = val \n", + " p.append(params_tensor)\n", + " else:\n", + " _, cond, qc_tensor = val\n", + 
"\n", + " x.append(qc_tensor)\n", + " \n", + " match condition:\n", + " case CircuitConditionType.SRV: \n", + " label = f\"Generate SRV: {cond.tolist()}\" \n", + " \n", + " case CircuitConditionType.UNITARY: \n", + " label = f\"Compile using: {[str(gate) for gate in sub_gate_pool]}\" \n", + " u.append(cond)\n", + " \n", + " case _: raise NotImplementedError(f\"Not implemented given condition: {condition}\")\n", + " \n", + " y.append(label)\n", + " \n", + " #------------------\n", + " # Make tensors unique and combine tensors and arrays\n", + "\n", + " x = torch.stack(x, dim=0)\n", + " y = np.array(y)\n", + "\n", + " if condition is CircuitConditionType.UNITARY: \n", + " u = torch.stack(u, dim=0) \n", + " \n", + " if filter_unique:\n", + " tensor_unique, tensor_indices = get_unique_elements_indices(x)\n", + " \n", + " x = x[tensor_indices]\n", + " y = y[tensor_indices] \n", + " if return_params: p = [p[i] for i in tensor_indices.tolist()] \n", + " if condition is CircuitConditionType.UNITARY: u = u[tensor_indices]\n", + "\n", + " if not silent: \n", + " print(f\"[INFO]: Generated unique circuits: {tensor_unique.shape[0]}.\")\n", + " \n", + " if not exists(max_num_params):\n", + " p_max_para = max(pi.shape[0] for pi in p)\n", + " p_min_value = min(pi.min() if pi.numel()>0 else 0 for pi in p)\n", + " p_max_value = max(pi.max() if pi.numel()>0 else 0 for pi in p)\n", + " if not silent: print(f\"[INFO]: No max_num_params provided, infered {p_max_para=}, {p_min_value=} and {p_max_value=}.\")\n", + " else:\n", + " if not silent: print(f\"[INFO]: Using provided {max_num_params=}.\")\n", + " p_max_para = max_num_params\n", + " \n", + " p_t = torch.zeros((len(p), p_max_para, max_gates))\n", + " for i,pi in enumerate(p):\n", + " p_t[i, :pi.shape[0], :pi.shape[1]] = pi\n", + " p = p_t\n", + "\n", + " if return_params: \n", + " return x, y, u, p\n", + " return x, y, u" + ] + }, + { + "cell_type": "markdown", + "id": "bd859420-2f74-448a-b0c5-327521a70366", + "metadata": {}, + 
"source": [ + "## Dataset generation" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "9abc01cb-136e-4d87-bb91-beae08e0bb33", + "metadata": {}, + "outputs": [], + "source": [ + "#| export\n", + "def generate_circuit_dataset(backend: BaseBackend, \n", + " tokenizer: CircuitTokenizer,\n", + " condition: CircuitConditionType, \n", + " total_samples: int, \n", + " num_of_qubits: int, \n", + " min_gates: int, \n", + " max_gates: int, \n", + " batch_samples: int = 128, \n", + " n_jobs: int = 1,\n", + " unitary_dtype: torch.dtype = torch.float16,\n", + " min_sub_gate_pool_cnt: int = 1,\n", + " max_sub_gate_pool_cnt: Optional[int] = None,\n", + " fixed_sub_gate_pool: Optional[Sequence[str]] = None,\n", + " max_num_params: Optional[int] = None,\n", + " filter_unique: bool = True,\n", + " optimized: bool = True, \n", + " post_randomize_params: bool = True,\n", + " return_params: bool = True) -> Tuple[torch.Tensor, ...]:\n", + " \"\"\"\n", + " Generates ´samples´ number of random circuits with corresponding condition. \n", + " Supports large scale dataset with large unitaries. 
Uses memory mapping and parallelization.\n", + "\n", + " - ´unitary_dtype´ only relevant for ´condition=CircuitConditionType.UNITARY´ \n", + " \"\"\"\n", + "\n", + " if condition not in [CircuitConditionType.UNITARY]:\n", + " raise NotImplementedError(f\"Not implemented {condition=}\")\n", + "\n", + " if not return_params:\n", + " raise NotImplementedError(f\"Not implemented {return_params=}\")\n", + "\n", + " total_samples = int(total_samples)\n", + " batch_samples = min(int(batch_samples), total_samples)\n", + " njobs = max(min(n_jobs, total_samples//batch_samples), 1)\n", + "\n", + " #------------------\n", + " # Check data sizes\n", + "\n", + " gen_data = functools.partial(get_rnd_encoded_circuits, \n", + " backend=backend, \n", + " tokenizer=tokenizer,\n", + " condition=condition,\n", + " samples=batch_samples, \n", + " num_of_qubits=num_of_qubits, \n", + " min_gates=min_gates, \n", + " max_gates=max_gates,\n", + " min_sub_gate_pool_cnt=min_sub_gate_pool_cnt,\n", + " max_sub_gate_pool_cnt=max_sub_gate_pool_cnt,\n", + " fixed_sub_gate_pool=fixed_sub_gate_pool,\n", + " max_num_params=max_num_params,\n", + " filter_unique=filter_unique,\n", + " optimized=optimized,\n", + " post_randomize_params=post_randomize_params,\n", + " return_params=return_params,\n", + " silent=True)\n", + " \n", + " x, y, u, p = gen_data()\n", + " x_global = torch.zeros((total_samples, *x.shape[1:]), dtype=x.dtype) \n", + " y_global = np.empty(total_samples, dtype=y.dtype) \n", + " u_global = torch.zeros((total_samples, 2, u.shape[-2], u.shape[-1]), dtype=unitary_dtype) \n", + " p_global = torch.zeros((total_samples, *p.shape[1:]), dtype=p.dtype) \n", + "\n", + " #------------------\n", + " # Run memory mapped parallel generation\n", + "\n", + " def _f(idx, x_map, y_map, u_map, p_map):\n", + " x, y, u, p = gen_data()\n", + " \n", + " off = x.shape[0]\n", + " idx *= batch_samples\n", + " \n", + " x_map[idx:idx+off] = x\n", + " y_map[idx:idx+off] = y\n", + " u_map[idx:idx+off] = u\n", + " 
p_map[idx:idx+off] = p\n", + " \n", + "\n", + " def _scope():\n", + " x_map = MemoryMappedArray(x_global)\n", + " y_map = MemoryMappedArray(y_global, type=\"numpy\")\n", + " u_map = MemoryMappedArray(u_global)\n", + " p_map = MemoryMappedArray(p_global)\n", + " \n", + " with Parallel(n_jobs=n_jobs) as parallel:\n", + " loop_set = range(int(np.floor(total_samples/batch_samples)))\n", + " _ = parallel(delayed(_f)(idx, x_map.obj_memmap, y_map.obj_memmap, u_map.obj_memmap, p_map.obj_memmap) for idx in loop_set)\n", + "\n", + " return x_map.get_obj(), y_map.get_obj(), u_map.get_obj(), p_map.get_obj()\n", + " \n", + " (x_global, x_file), (y_global, y_file), (u_global, u_file), (p_global, p_file) = _scope()\n", + " \n", + " MemoryMappedArray.clean([x_file, y_file, u_file, p_file])\n", + "\n", + " #------------------\n", + " # Collect results and remove the holes\n", + " \n", + " x_global_nonzero = torch.logical_not((x_global==0).all(-1).all(-1)).nonzero(as_tuple=True)\n", + " print(f\"[INFO]: Generated {x_global_nonzero[0].shape[0]} valid circuits.\")\n", + "\n", + " # complex indexing makes copy not view\n", + " x_global = x_global[x_global_nonzero]#.contiguous().clone()\n", + " u_global = u_global[x_global_nonzero]#.contiguous().clone()\n", + " p_global = p_global[x_global_nonzero]#.contiguous().clone()\n", + " y_global = y_global[x_global_nonzero] #np.ascontiguousarray(y_global[x_global_nonzero])\n", + "\n", + " if filter_unique:\n", + " tensor_unique, tensor_indices = get_unique_elements_indices(x_global)\n", + " \n", + " x_global = x_global[tensor_indices]\n", + " y_global = y_global[tensor_indices]\n", + " u_global = u_global[tensor_indices]\n", + " p_global = p_global[tensor_indices]\n", + "\n", + " print(f\"[INFO]: After filtering unique circuits: {x_global.shape[0]}.\")\n", + " \n", + " return x_global, y_global, u_global, p_global" + ] + }, + { + "cell_type": "markdown", + "id": "013ddf9d-aba8-4018-b048-8d66229df040", + "metadata": {}, + "source": [ + "## Test" 
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "d97c4a90-7232-4776-83ce-e7dcce160466", + "metadata": {}, + "outputs": [], + "source": [ + "from genQC.platform.simulation import Simulator, CircuitBackendType" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "d201f855-f536-41c0-91df-722cfac281e1", + "metadata": {}, + "outputs": [], + "source": [ + "simulator = Simulator(CircuitBackendType.QISKIT)\n", + "tokenizer = CircuitTokenizer({\"rx\":1, \"ccx\":2, \"u\":3, \"cp\": 4})" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "d72b8c97-86a6-4853-b08d-4b018e47067b", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "['rx', 'ccx', 'u', 'cp']" + ] + }, + "execution_count": null, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "list(tokenizer.vocabulary)" + ] + }, + { + "cell_type": "markdown", + "id": "a57ad0b3-099c-488b-973e-c06d1cceefbf", + "metadata": {}, + "source": [ + "### Random circuits with condition" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "bcbf1a82-f9ab-4fd1-b5bd-2cb5a4e5207e", + "metadata": {}, + "outputs": [ + { + "data": { + "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAzQAAADuCAYAAADvP0KjAAAAOnRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjEwLjMsIGh0dHBzOi8vbWF0cGxvdGxpYi5vcmcvZiW1igAAAAlwSFlzAAAPYQAAD2EBqD+naQAAOJdJREFUeJzt3XlclWX+//HXOYAsiisYKgqKioIC5jZmmWtpLm2apW1O0zY5NpNJU1O/ar7fKS1rJnMqK63mWxllllsupVm4Y2qioBaKAoKKuLDD4dy/PxgpEpWDBw734f18PHwk931d9/05wYXnfa77vm6LYRgGIiIiIiIiJmR1dQEiIiIiIiI1pUAjIiIiIiKmpUAjIiIiIiKmpUAjIiIiIiKmpUAjIiIiIiKmpUAjIiIiIiKmpUAjIiIiIiKmpUAjIiIiIiKmpUAjIiIiIiKmpUAjIiIiIiKmpUAjIiIiIiKmpUAjIiIiIiKmpUAjIiIiIiKmpUAjIiIiIiKmpUAjIiIiIiKmpUAjIiIiIiKmpUAjIiIiIiKmpUAjIiIiIiKmpUAjIiIiIiKmpUAjIiIiIiKmpUAjIiIiIiKmpUAjIiIiIiKmpUAjIiIiIiKmpUAjIiIiIiKmpUAjIiIiIiKmpUAjIiIiIiKmpUAjIiIiIiKmpUAjIiIiIiKmpUAjIiIiIiKmpUAjIiIiIiKmpUAjIiIiIiKmpUAjIiIiIiKmpUAjIiIiIiKmpUAjIiIiIiKmpUAjIiIiIiKmpUAjIiIiIiKmpUAjIiIiIiKm5enqAkRE6iPDMLAVFru6DId4+npjsVhcXYaIiEidUqAREamCrbCYj8LudHUZDpmc8iFefj6uLkNERKRO6ZIzERERERExLQUaERERERExLQUaERERERExLQUaERERERExLQUaERERERExLQUaERERERExLQUaERERERExLT2HRkTESYIGRDJy8fOVtpXmF3L2YCYpi74nef5XGGV2F1UnIiLinhRoRESc7ODieNLX7QCLBd/A5nSecC39nr+XZl3asXnGPFeXJyIi4lYUaEREnOxk4iEOfh5f8fX+91dzc/xrdJ00jB0zF1J88qwLqxMREXEvuodGRKSW2QqLObHjJyxWK01DrnB1OSIiIm5FgUZEpA74h5YHmeLTeS6uRERExL3okjMRESfz9G2Ed0v/intowu++jlY9O3Fix0+cPZjp6vJERETcSoMINNnZ2bz00kssXryY9PR0AgMDueWWW3jhhReYNm0aCxYs4PXXX2fq1KmuLrVW5BWU8vFXKaxPyCSvwEYTP08G923DpBvCaOLn5eryGoSUs7D0CBwtBMOAAB+4IRh6tgCLxdXVibP1ir2dXrG3V9qWumILW59810UViYi4VpkBG47B+kw4XQLeHtDZH24MgUAfV1cnZuf2gWbXrl2MGjWKrKwsGjduTEREBEePHmXOnDmkpKSQk5MDQExMjGsLrQU2m51n/v0Db8QlczavtNK+hSsPMuPVbfxxYnf+55HeeHrq6sPakHIWZiXCjpPn71uUCl2bwvQe0DugzkuTWrT//9aQumwzVi9PWnTrQI9HbqJxm1aUFZdUtLE28mTsmpc59EU8u19bXLH96n89gk9gc76Z/A9XlC4i4nTLj8Bb+yGrsPL2b4B3DsDQNvBET2ju7ZLyxA249bvY7Oxsxo4dS1ZWFtOnTyczM5MdO3aQlZXFrFmzWLFiBQkJCVgsFqKiolxdrlOVltq55S9rmTl/93lh5pyzeaXMnL+bW/6yltJSPRvD2faegvs2VB1mzjlwFh7ZXP6JlbiPswezyIxPJGPdTva8sYS198wkICaMAbMerGhjL7GxYdrr9Jx2Cy0iQgDoMLIvwSP6sPGxN1xVuoiIU80/AM/tOj/MnFNmwNdH4fcbILuoTksTN+LWgWbatGmkp6czdepUZs+ejb+/f8W+2NhYoqOjsdlshIaG0rRpUxdW6nwzXt3Gsu+OVKvtsu+OMOP
VbbVcUcOSUwx/2Qp5tku3tRnw1A/ws1bydVsntu8nZdH3dLxpIIF9wiu2n9x9kL1vLuWaOX/Cr01LBrz8EFufepfCY6dcWK2IiHN8cxTe3Fe9tkfy4bFtYDdqtyZxT24baJKTk4mLiyMgIIAXX3yxyja9e/cGIDo6utL2Q4cOMW7cOPz9/WnRogV33303J09e5GP2euZETiFvfprsUJ+3PttH9il9NOIsXxyGnJJLtzunxA4fpdRePeJ6P/5zEXZbGb1mTKy8/V+fYy8rY9zXL5O1cQ+Hlmx0UYUiIs5jGOWzM45IOg1bTtRKOeLm3DbQLFy4ELvdzuTJk2nSpEmVbXx9fYHKgSY3N5chQ4aQnp7OwoULefvtt4mPj2fMmDHY7ea4LGvBlwcocfASsuKSMhZ84eBvHqmSzQ6LDzveb00GnHEgBIm55KZmcWjJRtoOiqJ1/+4V2w1bGScS9uPTqhk/x33rwgpFRJznxxz4qQZXHnx2yPm1iPtz20Czbt06AIYMGXLBNunp6UDlQPP222+TkZHBl19+yZgxY5gwYQIff/wxW7ZsYenSpbVbtJOs2phRo34rN6Y7uZKGKTUPjl3gWuGLKbZf/H4bMb/dr5XPxvx6lqZ1/+50njiE5Plf0e/vU/DwaeTCCkVEnGNzDWdaNh8vn90RcYTFMNzzx6Z9+/akp6ezc+fOKlcws9lstGnThuzsbFJSUujUqRPwSwD69tvKn5SGhYUxePBg5s+f73Atffr0ISsry/EXUUPHmz5IqWdbh/t52Y7S+uy8WqioYfEK60ur6V/UqO+Z//yFwi2fObkiqQkvw8qz9n61eg5PPx/GrZ1N0rzl7PtgNaO++DvZP6aQ8Oz7NTre89ZtlFrMMZMsIu7Nf8LfaTzk9zXqm/VoZyjVZfANTVBQENu3b69RX7ddtjk/Px+AwsKqPyqPi4sjOzsbf39/OnbsWLE9KSmJCRMmnNc+MjKSpKSkGtWSlZVFRkbNZk1qxDu/Rt/Z0uK8uq3TTfl6t6ZVDftmZ6ZzWt+DeqGRxQOuqN1z9H3ubvKOHGff+6sA2PDoXMZ9M5sjK7dybItj98EBHM08SolR5uwyRUQc1i7nOI1r0M+w28k4fFDTNOIQtw00QUFBnDp1ih07djBgwIBK+zIzM5kxYwYAUVFRWH71ZMNTp07RvHnz847XsmVL9u/fX+Na6tJpjxPk08Xhfk2s2TRr164WKmpYLEYu9sKzWH0dWznPsNtpmpdBY30P6gUvwwq1ONnRbmgvOo4byJJh0yu25R4+xg//+IiB/3yEpUOnYyssduiYbdu01QyNiNQL3idrttJN6eEfadfW8atMxPwu5/2y2waa4cOHk5yczKxZsxgxYgRdu3YFICEhgbvuuovs7Gygbh6oWdPps5o6kHqG8HGLHO6345tX6RLSrBYqanheToQ4B29sHBhkZc6Pm2qnIHFYaUERH4XdWWvHz1i3k4+73XPe9n3vr6qYsXHUgZ8O4OWnR26LiOvZ7DD2Gzjh4JVj/7i5F2Mf1T294hi3XRQgNjaWVq1akZaWRmRkJD179qRLly7069ePTp06MXToUOD8JZtbtGjB6dOnzzteTk4OLVu2rIvSL1vX0GaMHtTeoT5jBrVXmHGiCaHgablks0ru6FQrpYiIiNQ5TytM7Hjpdr/WyhtG6CIFqQG3DTTBwcHEx8czevRofHx8SE1NpWXLlsybN48VK1Zw4ED5EsW/DTTdu3ev8l6ZpKQkunfvft72+ur9/xlE12oGlK4hzXjvfwbVckUNS6g/PBMD1c00D4TDgNa1WZGIiEjduqszDKnmVUS+HvBqP/DxqN2axD25baCB8nCyfPlycnNzyc3NZevWrTzwwAPk5+eTmpqK1WqlR48elfqMGTOGDRs2VCzpDLB161ZSUlIYO3ZsXb+EGgto4UP8+6MZ3LfNRdsN7tuG+PdHE9BCl6k42+j28GIf8Pe6cBtvK/wlEu7vWnd1iYiI1AU
PS/m/g7eGXPwNZ1s/eHsgRLaos9LEzbjtss0Xs3XrVn73u98RHh7Ovn37Ku07e/YsPXv2JCAggOeff56ioiJiY2MJDAxk8+bNWK3my4AJe07w5qfJrE/I4vDRXOwG+Pl4sH7BaPr2CHR1eW6vyAZrjsKSI+UPGjvnsUgY0x6a6rEj9VJt30NTGyanfKh7aESkXsosKH/o9LeZ5c9rO+ef/eCqK8rDj0hNme/duRMkJiYC519uBtC0aVPWrVtHmzZtuP322/nDH/7AVVddxfLly00ZZgD69ghkwd8HcXDlbbQJ9AOgRVNvhZk64uMJ4zrA/Kt/GXBWYFKYwoyIiDQMbfzgke6waGjlfwuvCVKYkcvntqucXczFAg2UP0Rz+fLldVmSiJhcv//5PR2u70OT9q1ZOvxxcvamXnT7eSwW+j57N+2GxGC32Sk+lcumx98iN7XuHsorIiJiRuaccrhMlwo0IiKOOrxiM1/d+DR5acertf23Olzfh9Z9u7Fk2OMsHTadzA2JXPnkpNosWURExC00yBmadevWuboEEXEzx7YkO7T9twwDPBp54uHthc1WhlcTXwoyTzqzRBEREbfUIAONiEh9k7ZmO20GRjJx97vY8grJz8ph1c3PurosERGReq9BXnImIlLfBESH0Ty8A5/1eoC4mAfIjE9kwEsPuLosERGRek+BRkSkHgibcC2ZG/dQcrYADIOUT9cTdFWkq8sSERGp9xRoRETqgdwjx2gzsAdWr/IrgYNH9OH0/jQXVyUiIlL/6R4aEREnGPDSAwQP641v6+aMWPg0pXmFLL7qTxfcDnDV7IdIW7OdtDXb2ffeKpp3CWbc2tnYS8soPHGazbHzXPyqRERE6j8FGhERJ9gc+7ZD2wE2Pf5Wxd/tJbZKX4uIiEj16JIzERERERExLQUaERERERExLQUaERERERExLQUaERERERExLQUaERERERExLa1yJiJSBU9fbyanfOjqMhzi6evt6hJERETqnAKNiEgVLBYLXn4+ri5DRERELkGXnInUU6GhoYSHhxMTE0NERAT//ve/L9h27ty5zJw5E4D33nuPmJiYij8BAQHccsstVfabM2cOPXr0oGfPnkRFRfHhh+fPSBQWFhIREUFMTEzFtt27dzNq1KjLe4EiIiIiTqAZGpF6LC4ujpiYGA4fPkxUVBTXXHMNUVFRldoUFhby6quvkpiYCMCUKVOYMmVKxf4ePXowefLkKo8fGRnJxo0badasGWlpafTq1YsBAwYQFhZW0eaJJ55g4MCBJCQkVGyLiorC29ubdevWMXToUGe+ZBERERGHaIZGxARCQkIIDw/nwIED5+1btGgRAwcOpHHjxuft27p1K8ePH2fcuHFVHnfYsGE0a9YMgPbt2xMUFERaWlrF/m+++YaMjIwqA9Edd9zBvHnzavqSRERERJxCgUbEBBITE9m3bx/R0dHn7Vu/fj39+/evst/8+fO566678PLyuuQ5vvnmG06dOkXfvn0BOH36NLGxsbz55ptVth8wYABr16514FWIiIiIOJ8uOROpxyZOnIivry9+fn4sWLCALl26nNcmPT2dkSNHnrc9Pz+fTz75hC1btlzyPImJiUyZMoW4uLiKmZ6pU6fy1FNP0bp1a5KSks7rExQUxMmTJykqKsLHRzfPi4iIiGso0IjUY+fuobkYPz8/ioqKztv+2WefERkZSURExEX7JyUlMWbMGBYsWMDVV19dsX3Dhg1s2LCBxx9/nKKiInJycggPD2f//v0AFBUV4eHhQaNGjRx/YSIiIiJOokAjYnJRUVEVIePX5s+fz3333XfRvsnJydxwww28/fbbjBgxotK+1NTUir+vX7+eP//5z+zatatS3x49emC16spVERERcR29ExExufHjx7N69epK2/bv38+uXbuYOHHiee1jYmI4evQoANOmTePMmTM88cQTFcs8//ZYF7Jq1SrGjx9/+S9ARERE5DJohkaknvr1DMnF9Oz
Zk9atW5OQkFBxQ394eDi5ublVtv/1LMvXX39drXMMHjy4Ur+SkhI+//xz1q1bV63+IiIiIrVFMzQibmDOnDkcO3aszs536NAhZs6cSUBAQJ2dU0RERKQqmqERcQNhYWGVHoZZ28LDwwkPD6+z84mIiIhciGZoRERERETEtBRoRERERETEtBRoRERERETEtBRoRERERETEtBRoRERERETEtBRoRERERETEtLRss4iYhmEY2AqLXV1Gg+fp643FYnF1GSIiIoACjYiYiK2wmI/C7nR1GQ3e5JQP8fLzcXUZIiIigC45ExERERERE1OgERERERER01KgERERERER01KgERERERER01KgERERERER01KgERERERER01KgERERERER01KgERERERER01KgERGpRZ1vG8y9mYvofNvgKvc3CQ7k3sxFXP2vR+q2MBERETehQCMiIiIiIqalQCMiIiIiIqalQCMiIiIiIqalQCMiIiIiIqbVIAJNdnY2sbGxdO7cGR8fH9q3b8+jjz5Kfn4+9913HxaLhblz57q6zFp1+mwx32zJoLDYBoDdbri4ooYn5Syc+79uAIa+BSINSqENEk7A+kzYchxOFbu6IhER9+Dp6gJq265duxg1ahRZWVk0btyYiIgIjh49ypw5c0hJSSEnJweAmJgY1xZaSxIP5DDn4718/NVBCopsFduzsgu5/7l4pk2KpGfXli6s0L3Z7LAqAxalwp5Tv2w3gNu+hVtD4cYO4Ov2I1Gk4UrLg7hDsDwN8n75NYyXFYa3gYmdoEcL19UnImJ2bj1Dk52dzdixY8nKymL69OlkZmayY8cOsrKymDVrFitWrCAhIQGLxUJUVJSry3W6T1am0OeOJby7+EClMAPlb6jfXXyAPncs4ZOVKa4p0M0V2mD6NnhuZ+Uwc86hPJi9B+7fCCeL6r4+qV8MTdm5pS3HYfJ38MmhymEGoNQOKzNgSnx54BERkZpx60Azbdo00tPTmTp1KrNnz8bf379iX2xsLNHR0dhsNkJDQ2natKkLK3W+Fd8fYfKT31FSar9ou5JSO5Of/I4V3x+po8oahjIDnvoBNh6/dNt9Z2DaViiwXbqtmI+tqAQAD1/vKvd7+pVvL/tvO3Efu3PgsW1QUHbxdgbwciIs169hEZEacdtAk5ycTFxcHAEBAbz44otVtunduzcA0dHRFdvOBaB+/frh7e2NxWKpk3qdqazMzsP/u6na98nY7QZ//McmysouHn6k+r7LhPhj1W+//wx8pk9o3VLekfJU27xLuyr3N+sSDEDukWqkXzENw4CXEqHEgV+rL+8pn9kVERHHuG2gWbhwIXa7ncmTJ9OkSZMq2/j6+gKVA83PP//M559/TlBQEH379q2TWp1txfdppGXlO9TnSGY+X8Wn11JFDc+iVMf7LD4MWqvB/ZxMPEhexgk63jQQ3ysq3yhh9fKk++9HYdjtpK3Z7qIKpTbsPV0+++qIfFv5PXciIuIYtw0069atA2DIkCEXbJOeXv4G/teBZtCgQWRmZrJ06VKGDx9eu0XWkveX/lSzfktq1k8qyyqEbdmO98sogJ0nnV+PuJZRZmfLE+/g5e/Hjeteofff7qTrncOJ+st4xq55iaCrItn9+hecTTnq6lLFiZan1azfMl12JiLiMLddW+nw4cMAhISEVLnfZrOxceNGoHKgsVqdn/H69OlDVlaW0497IcebPgCeVV/ecjHLVm8gOPieWqioYfHqeCWtZiytUd+JD0yjaNtiJ1fkPrwMK8/Sz9VlOCx97Q6+Gvc0PR+5ic63XYt3C39sBcWc3HOI9Q+8Quqyza4u0SFdu3Sl1KJLVC+m+cPv4dNzhMP9dqZkEHxH/1qoSKT+uGLuYSxWD8rsZQQHV/0+TRqeoKAgtm+v2dUKbhto8vPLL7kqLCyscn9cXBzZ2dn4+/vTsWPHWq0lKyuLjIw6vI7Ap6RG39nSkpK6rdNNNW4cTKsa9j2Vc5IcfQ8uqJH
FA65wdRU1c/LHFNY/8Iqry3CKo5lHKTEucad7A+dTWIhPDfrZbDb9Hha3d8W5VR0NQz/v4hRuG2iCgoI4deoUO3bsYMCAAZX2ZWZmMmPGDACioqJq/cb/oKCgWj3+b530LKQmqwD7eBbSqp3jMztSmdWr/I2eYRgO/2w1pQhffQ8uyMuwgiYGXK5tm7aaobkEr8KcGvWznD1GO/0OEHd37t9Gi0U/71Lhct4vu22gGT58OMnJycyaNYsRI0bQtWtXABISErjrrrvIzi6/yaEuHqhZ0+mzmlr+3RHG/ulrh/stevvPjB70ai1U1PA8shm2nnAszLTzg23ffo7VfAvr1ZnSgiI+CrvT1WU0eAd+OoCXX03mHxqOpNNw9/eO9/v7rf246TEt0CLurd/S8s+mPKweFfczi1wOt10UIDY2llatWpGWlkZkZCQ9e/akS5cu9OvXj06dOjF06FCg8v0z7mLU1cGEtK16ZbcLCW3bhJEDg2upooZnQqjjfcaHojAj4iYimpf/cUQTTxipD6tFRBzmtoEmODiY+Ph4Ro8ejY+PD6mpqbRs2ZJ58+axYsUKDhw4ALhnoPHwsDLvmYF4eFTv3bGHh4W3nhmIh4fb/jjUuWuCYLADM6cRzcsDjYi4jyd6grcDv1afiAIft71uQkSk9rj1O9ju3buzfPlycnNzyc3NZevWrTzwwAPk5+eTmpqK1WqlR48eri6zVlw/MJhPZg3Bu9HFv8Xejax8MmsI12t2xqk8LPC/veHaaoSayObwWn/w1RsZEbcS2QL+2R8aX2JsW4GnomCUfg2LiNRIg3wLtXfvXgzDoGvXrvj5+Z23f9GiRQAkJSVV+jo0NJQ+ffrUXaGXafx1HYns3IK5C5P4z7KfyCv45RHUFgs8fFt3pt4RQfdOzV1XpBvz8YCX+sLXGeUP2tz1m3uEO/vD+I4wpn15WxFxP/0CYeFg+OwQLD0CZ0or7x/bHiZ2hG7NXVGdiIh7sBiG0eCeTf7uu+9y//33c9tttxEXF3fe/gutTHXPPffw/vvv13J1tSM3v4QdySe55S9ryTlTTJsAX46um+TqshqU1DzIKoAyAwJ9oEvTXxZ6kerRogD1w+SUD7UoQA0UlUHyaXhgIxiABUgY5+KiRFzg3KIAVmCbxoA4QYOcoUlMTAQufP+MO2Y8/8aNuLZPG3y9y6cCrLr7vM6FNin/I7Vj/LY3KCsupayoBIDdr39B6tJNVbZt3q0Dv/vHffgENgNgx8yFHPlqK50nDiHiDzdUtPNr24pjW5L59r6XL3n+yIfH0fm2wVisFs6kHGXjn/9NydmC89pFPXoLoWN+WUq+ScgV/PTxWhKe+4AmwYFc/dpUWvYIJe/IcZaOmFG9F2+x0PfZu2k3JAa7zU7xqVw2Pf4WuannP9A3ZvptdJtyPQWZ5VOGpw6kE//IawB4t2rK1a/+kcbBgVi9PMje+TObn3i74v+pXB4fD+jVqjzInAs0IiJy+RRoRMRtfPfQP8nZm3rRNh6+jRj2/hPET3ud49v2YbFaadSiPGn+HPctP8d9W9H2xm9f5eDiS6+922ZQFF1uH8LyG57Ell9E1J9vpddfJ7H1qXfPa7v7tcXsfm0xANZGnty28x0OLo4HoCSvkB2zFtLI348r/3pHdV82Ha7vQ+u+3Vgy7HEMWxlRf76VK5+cxHcPVr0M+8HF8Wz7f++ftz360Vs4eyiTtffMxGK1MvzDJ+k8cQj7P1hd7VpERETqmlsvCnAh69atwzAMRo8e7epSRKSOdbr5Gk78cIDj2/YBYNjtFJ88e167gF5d8AloxpHVl36OVMuIUI5t24ctv/yRtulrdxA2ftAl+3UY2Y+Co9mc3H0QgJLTeRzftg9bQbEjLwnDAI9Gnnh4ewHg1cSXgsyTDh3j3HG8mviCxYK1kScevt41Oo6IiEhdapAzNCLinq6e8ycsFjix82d+eOGjKoNK867BlJWUMuw/T9K4TUtyko+Q8PwH57X
tMmkoKYu+w7CVXfK8J3en0O3e6/ENbE7hidN0uuUaGvn70ah5E0pO512wX5dJwziwcJ3jL/Q30tZsp83ASCbufhdbXiH5WTmsuvnZC7YPGTOAoKt6UHwqlx//uYisTXsB+PGfixjy7uNM3P0unj6NOPhFPGlr6vbBwCIiIo5qkDM0IuJ+Vt78/1g6bDpLr4ulOCeXa16bWmU7i4cHba+JYnPsPJaOmEFB1kkGzLy/UhtPX2863jiQn6oZNrI27WXPm0sZ9n9PMnrFixXh6GJhqHFwAFf061ZxudnlCIgOo3l4Bz7r9QBxMQ+QGZ/IgJceqLLt/v+sYVG/P7J0+OPsfOkTrp33GI2DAwDoeONATh9I59Po+4mLuZ+mndrSZdKwy65PRESkNinQiIhbyM/IBspDRNI7y7mif/cLtsvctJeCrPKb4g8u+p7AK7tWahM6dgCn96dx5kB6tc+//4PVLB/5BCtGP0nWpr3kZ2RTmld4wfZdJg7lyKqEi87gVFfYhGvJ3LinfBECwyDl0/UEXRVZZdvCE6crgtbxhP3k7DlEQHQYAN3uvZ6Di+Mx7HZs+UUcXr6FoIHu+awuERFxHwo0ImJ6nr7eNGr6yzOlOt58NSf3HKqybeqyTQTEhJXfKwK0G3YlOUmpldp0mTSsytmZm+Nfwy+oZZXH9W3dHChfdCAm9nYS31hy4YItFjrfPoSfFq69yKuqzC+oJTfHv1blvtwjx2gzsAdWr/KriINH9OH0/rSqj9Pml/r9OwbRMjKUU8lHyo9z+BjthsSUl+jpQdvB0Zzed6TaNYqIiLiC7qEREdPzCWzGkHdnYPGwYrFA7uHjbPjT6xX7r5r9EGlrtpO2Zjv5GdnsnrOYG5b9A8NuUJCVw6YZb1W0bRrWlpaRoRxasrHyOVo1xbtFE4ovMKNy3SfPgNWKh5cnKYu+Y9+ClRX7hn/4FDtfjuPkjykAtB0UhWG3kxmfWOkYHr6NuGXD63h4e+Ll78eEH+aR8vl37HjhY/yCWmK/wCVs+95bRfMuwYxbOxt7aRmFJ06zOXZexf5xX7/M13e+QOGxU1z510m0iuqEYSvDbrez5cl3OXswE4Btz7zHgJce4MZ1r2DxsHJ8+wGS3l5enW+BiIiIyzTIB2s2ZMHDF5JxvIB2rf1I/6b6y8KK1AeufLBmyJgBNOvclt3/+twl5498eByFx09x8PPLv+fmcunBmpdHDxWUhk5jQJxNMzQiItVwePlml55/75tLXXp+ERGR+kr30IiIiIiIiGkp0IiIiIiIiGkp0IiIiIiIiGkp0IiIiIiIiGlpUQARMQ1PX28mp3zo6jIaPE9fb1eXICIiUkGBRkRMw2KxaLlgERERqUSXnImIiNRDoaGhhIeHExMTQ0REBP/+978v2Hbu3LnMnDkTgNTUVAYPHkyzZs2IiYmp1O5i+37rp59+YsSIEURHRxMZGUlcXFyl873wwgs1fm0i1VXTcQAwf/58unTpQlhYGPfffz+lpaVV9isoKOCOO+6gc+fOdO3alUWLFlXs+9vf/kbPnj2JiYkhJiaGTz75pFLfN954g+7du9OzZ0+io6MpKioC4KmnnqJbt25ER0fTp08fVq9eXalOjR8nM6RBaTfsY4Oe7xrthn3s6lJERBqkvksMo/eS8v9eTEhIiLFz507DMAwjNTXVaNq0qfHjjz+e166goMDo2LGjkZeXZxiGYZw8edKIj483li9fbkRHR1dqe7F9v3XVVVcZ77zzjmEYhnH8+HGjQ4cORnp6umEYhlFcXGx06tTJOH369KVfsMhvVHcMGEbNx8HBgweNNm3aGJmZmYbdbjfGjh1rzJ07t8pzPP/888Y999xT0S8wMNDIzs42DMMwTp06VdEuPT3d8Pf3N06cOGEYhmF8+eWXxlVXXVUxDo4fP27YbDbDMAzjq6++MgoKCgzDMIxdu3YZTZs2rahN48f5NEMjIiJSz4WEhBAeHs6BAwfO27do0SIGDhxI48a
NAWjZsiVXX311xde/drF9v/Xjjz9yww03ABAYGEh0dHTFLE2jRo247rrr+Pjjjy/nZYk4xJFxsGjRIsaNG0dQUBAWi4WHHnqIhQsXVnncuLg4HnroIQA6duzI4MGD+eKLLwBo3rx5Rbu8vDwMw8ButwPw8ssv8+yzz9KsWTOgfJx4eHgAMGrUKHx9fQHo2bMnhmFw4sQJQOOnNijQiIiI1HOJiYns27eP6Ojo8/atX7+e/v37O/2cvXv35sMPyxfhOHjwIJs2bSI1NbVi/4ABA1i7dq3TzytyIY6MgyNHjhASElLxdWhoKEeOHKnyuJdqO2fOHMLDw7nyyit5++23ad26NQBJSUls376dgQMH0qdPH+bMmVPl8d977z06depU6RwaP86lRQFERETqqYkTJ+Lr64ufnx8LFiygS5cu57VJT09n5MiRTj/3Bx98wPTp04mJiSEkJIRhw4bh6fnL24agoCDS09Odfl6R33LlOACYNm0a06ZN48cff+TOO+/kuuuuo1WrVthsNg4dOsT333/PqVOnuPbaa+nUqRNjxoyp6Lt27Vqef/55vv76aywWS8V2jR/nUqARERGpp+Li4i55876fn1/FjcjOFBoayueff17x9ciRI7nuuusqvi4qKqq4pEakNtVkHHTo0IGUlJSKr1NTU+nQoUOVfTt06MDhw4dp06ZNRdtf/6yfEx0dTbt27Vi/fj233norHTp04I477sDDw4OAgABuuOEGtmzZUhFovvvuO6ZMmcKyZcsIDw+vdCyNH+fSJWciIiImFhUVxf79+51+3GPHjlXcK7B69WqSkpKYNGlSxf7k5OQqL/0RcYXfjoNbb72VpUuXkpWVhWEYvPXWW9x+++1V9p0wYQJvvfUWAIcOHWL9+vXcdNNNQPllZeekpKSwc+dOIiIiAJg0aRKrVq0CoLCwkPXr11eMie+//5677rqLJUuWVDlONH6cS4FGRETExMaPH19pSdiCggKCg4OZMGECSUlJBAcH8+STT15y39GjRyt9Cr5s2TK6du1KeHg4M2fO5Kuvvqr0ifKqVasYP3583bxIkUv47Tjo1KkTzz//PAMHDqRz584EBgby4IMPAuf/rM+YMYPCwkLCwsK4/vrrmTt3LgEBAQDExsYSGRlJTEwMEydOZO7cuXTv3h2Axx57jGPHjhEREUGfPn0YNWoUEyZMAOC+++6juLiYKVOmVCz5nJiYWHFOjR/nshiGYbi6CKk7wcMXknG8gHat/Uj/5g5XlyMi0uD0Wwp2yj9R3DbOOcccPXo0zz33HH379nXOAS8hKSmJBx98kPj4+Do5n7iX2hgDUPfjoKY0fpxPMzQiIiImN2fOHI4dO1Zn50tLS2PevHl1dj6R6qjrcVBTGj/Op0UBRERETC4sLIywsLA6O9/1119fZ+cSqa66Hgc1pfHjfJqhERERERER01KgERERERER01KgERERERER01KgERERERER01KgERERERER01KgERERERER09KyzSIiIlLvGIaBrbDY1WU4xNPXG4vF4uoyxE1oDDhw3jo/o4iIiMgl2AqL+SjsTleX4ZDJKR/i5efj6jLETWgMVJ8uORMREREREdNSoBEREREREdNSoBEREREREdNSoBEREREREdNSoBEREREREdPSKmduzDAMDqbn8kNSNj8kZZN5opCcM+XL/53JK+HT1QfpHRFAp2B/LTMpIiIiIqakQOOGTp4u4r0vf+Ktz5JJScutsk1egY2JM74FIKy9Pw9N6M6Um7rQqrmWmxQRERER89AlZ26kuKSMZ+b+QPCIT5jx6rYLhpnfSknLZcar2wge8QlPv76d4pKyWq5URERERMQ5NEPjJnYkZXPP09+z5+dTNT5GUXEZ/3jnR5Z8e4T3/3cQvSMCnFihiIhI7QsaEMnIxc9X2laaX8jZg5mkLPqe5PlfYZTZXVSdSO1riGNAgcYNLPn2MBNnfOu0mZU9P59i4N3LiXt5CDcOCXHKMUV
EROrSwcXxpK/bARYLvoHN6TzhWvo9fy/NurRj84x5ri5PpNY1pDGgS85Mbtn6I9z62FqnXyZWXFLGrY+tZfl3R5x6XBERkbpwMvEQBz+P5+Ci79n75lJWjH6K/Ixsuk4ahnerpq4uT6TWNaQxoEBjYnt/PsVtM9ZRVmbUyvHLygwmPL6OpJSaX8YmIiJSH9gKizmx4ycsVitNQ65wdTkidc6dx4ACjUnZbHbufeZ7ioodm5lJWDiOtK9vJ2HhuGq1Lyou495nvsdmc69rLUVEpOHxDy1/E1d8Os/FlYi4hruOAd1DY1Kv/CeR7XuzHe4XFOBH8BWNHeqTsCebV/6TyBO/j3b4fCIiIq7g6dsI75b+FfcPhN99Ha16duLEjp84ezDT1eWJ1LqGNAYaRKDJzs7mpZdeYvHixaSnpxMYGMgtt9zCCy+8wLRp01iwYAGvv/46U6dOdXWp1VJYZGPWgt11es6X3ktk2qRIfH0axI+MiIhT7T8Di1Jhy3E4N99tB7adgL4BoGcbO1+v2NvpFXt7pW2pK7aw9cl3XVRRw5aeD5+nwreZlcfAuqMwKAg8dc2Q0zWkMeD270537drFqFGjyMrKonHjxkRERHD06FHmzJlDSkoKOTk5AMTExLi2UAfErT7IqbMldXrOnDPFfLr6EPfc2KVOzysiYmZ5pfD0DthwrOr9f9wMXZrCy30h2LHJc7mE/f+3htRlm7F6edKiWwd6PHITjdu0oqz4l38/rY08GbvmZQ59Ec/u1xZXbL/6X4/gE9icbyb/wxWlu5VSO8zaDUuOQFV3/MZuhyt8YVYf6NGizstzaw1pDLh1Hs7Ozmbs2LFkZWUxffp0MjMz2bFjB1lZWcyaNYsVK1aQkJCAxWIhKirK1eVW21uf7nPJed/8NNkl5xURMaN8Gzy06cJh5pyfzsLvN0BGft3U1VCcPZhFZnwiGet2sueNJay9ZyYBMWEMmPVgRRt7iY0N016n57RbaBFR/piCDiP7EjyiDxsfe8NVpbsNmx1iE+DLC4SZc44Vlo+VxJw6K61BaEhjwK0DzbRp00hPT2fq1KnMnj0bf3//in2xsbFER0djs9kIDQ2laVNzLF+XV1DKtj0nXHLuhL3Z5BeUuuTcIiJmMzsR9p2pXtucYnhiOxi1s2ilACe27ydl0fd0vGkggX3CK7af3H2QvW8u5Zo5f8KvTUsGvPwQW596l8JjWuHzcv3nZ4i/RKA/p6gMpm+DIlvt1tSQufMYcNtAk5ycTFxcHAEBAbz44otVtunduzcA0dG/3Oy+aNEibr31VkJCQvDz86Nbt2787W9/Iy+vfqwGsWvfSZf9g2e3G+zar49PREQuJacYVmU41mffGdilX7G16sd/LsJuK6PXjImVt//rc+xlZYz7+mWyNu7h0JKNLqrQfdjs8Okhx/rklMCao7VTj5Rz1zHgtoFm4cKF2O12Jk+eTJMmTaps4+vrC1QONLNnz8bDw4MXXniBlStX8vDDD/Pmm28ycuRI7HbXL128c99Jl57/hyTHV1YTEWlolh4pv3fAUZ+lOr0U+ZXc1CwOLdlI20FRtO7fvWK7YSvjRMJ+fFo14+e4b11YoftYnwXZxY73W5Tq9FLkV9x1DLjtogDr1q0DYMiQIRdsk56eDlQONMuWLSMwMLDi62uvvZbAwEAmT57Mhg0bGDRokMO19OnTh6ysLIf7VeWs7xDwHVzlvoSF4wgK8Lto/6AA34r/pn19+wXbZWUX0PeOpedtf/q5mbz0hPl+0EVE6lLzP8zD58rRDvf7auch/nPjNbVQkfl4GVaepZ/Tj7v7tc/peNNAes2YyOrxzwHQun93Ok8cQvL8r+j39yksHTGDsiLHF9/p2qUrpRbXf/hZHzS58UmaXP+Iw/325tgJbh+i6y9peGMgKCiI7du316iv2waaw4cPAxASElLlfpvNxsaN5dNpvw40vw4z5/Tp0weAjAwHrx/4r6ysrBr3Pc8VBeBb9S5HnjH
j6WF1+Hk0ALm5BeQec9JrERFxU752Cz416Gf39Hbevxcm18jiATV4mHnW5r2832b8Bfef+SmD/wT/crmNp58PV//rEX74x0fs+2A1o774O1c+OYmEZ993+NxHM49SYjj2wGt31b60jKqvj7k4i9XK0eMnMUoKnV6T2WgMVJ/bBpr8/PLlYgoLqx4QcXFxZGdn4+/vT8eOHS96rG+/LZ+R6N69+0XbXUhQUFCN+lXlrK8vuRfYl5VdcOlaAnzx9LBiK7OTlX3hXxYXOpa/vy9NPdtVp1QRkQarkVGzpfWtxXm0a6ffsVD+6TR1MNnR97m7yTtynH3vrwJgw6NzGffNbI6s3MqxLY6t7tm2TVvN0PyXn7Vm/x8MWwltA1s6uRpzamhj4HLeL1sMwz3n9CIiIkhOTmbu3Lk88kjlKc/MzEx69+5NZmYmAwcOZMOGDRc8TkZGBr169aJ3796sXLmytsu+pAVfHOC+Z+Nr3D/t69sJvqIx6cfyaT/iE8fP//drmHJT1xqfX0SkIViRBs/udLzfnWHw50jn12NGpQVFfBR2Z62eo93QXlz7xp9ZMmw6+Rm/3CPa7d6RRDw4hqVDp2MrrP6NIJNTPsTLryZzc+5nd075cuSOujYIXnH+VVampDFQfW67KMDw4cMBmDVrFgcOHKjYnpCQwJAhQ8jOLv+mXeyBmnl5edx44400atSIBQsW1Gq91dU7opWLzx/g0vOLiJjB8LbQrJHj/W6t+ippqSUZ63bycbd7Kr2RA9j3/ioWD5jq0Bs5qaxnC+hagydiTAh1eilyEe4yBtw20MTGxtKqVSvS0tKIjIykZ8+edOnShX79+tGpUyeGDh0KVL5/5tcKCwsZO3Yshw4dYs2aNbRp06Yuy7+giE4t8G7k4ZJz+3h7ENGpuUvOLSJiJt4ecHeYY31GtoP2NbnpQKQesljgPgcv6IhsDv3Ov5VZ5JLcNtAEBwcTHx/P6NGj8fHxITU1lZYtWzJv3jxWrFhRMWtTVaApLS1l/PjxbN++nZUrVxIREVHX5V+Ql5eVUVcHu+TcIwcG4+nptj8yIiJOdXdnuLmaMy5XtoKnY2q1HJE6N6wt/Kmatx+HNoFX+4HVUrs1iXty20UBoPwm/uXLl5+3PS8vj9TUVKxWKz169Ki079yza9auXctXX31Fv37170LOP07szpfrDtf5eR+5vWaLIoiINEQWCzwVBSGN4f9S4GQVV274ecCNIeVv+lw0+S5Sq+7pAlf4wpv7IKOK9YY8LTCiLTzes2aXaYqAmweaC9m7dy+GYdC1a1f8/Co/t+WRRx7hs88+469//St+fn5s2bKlYl9YWFiVyzrXtWH929IlpCk/HT5bZ+fsGtKMof3a1tn5RETcgcUCd3aGiZ3g20zYfBzybODrAT1awA3B0MTL1VWK1K6RwXBdO9hyonwcnCkBbyuENYWx7aGV1lGQy9QgA01iYiJQ9eVm51YymzlzJjNnzqy077333uPee++t9fouxWq18Mr0/oyb9nWdnfOVx/th1TywiEiNeFnL39BdpxWZpYGyWuCq1uV/RJxNgeY3UlNT67iamhk7uAN3jenM/y3/udbPdffYzoy5tkOtn0dERORyjN/2BmXFpRVPON/9+hekLt1UqU1g764MmHk/ABYvT45vS2br0wuwl9guuk/EDNoNiaHXE3dg9fKkrLCYTbHzOJVU+TYFTz8fhsx/nFZRnbB6ePBxt3sq9jVp35rB707HarVi8fTgzE8ZbJrxFiVn8uv6pThEgcbE/vXE74jfkUXq0bxq9zn3wMzqPIQTILRtE/71xO9qVJ+IiEhd++6hf5KzN/WC+3OSUlk26q8YtjKwWBgy/3G63TuSpLeXX3SfSH3XqFljrpn7KKtufobTB9Jp3b87g/79KEuGPFapnd1mI3Hul5SczmPk589X2ldwLIeVNz5T8aFAv/+ZQszjt7Htmffq7HXURIMMNOvWrXN1CU7Rspk3X789ikFTVpB
5onoBpe8dS6t9/DaBfnzzzihaNPWuaYkiIiL1SllhScXfPRp54unTCP77jPGL7ROp7/xDgyg+lcvpA+kAHN+aTON2AbTs2ZGcxEMV7ewlNrI27qFJ8Pn3hf96NtJiteLp642toKj2i79MWoPX5Dp3aMqGD0YT1t7fqccNa+//3+PW4KlYIiIiLnL1nD9x47pXuOqVh/FuVfW/YU2CAxn3zWxu37uAkrMF7Ht/dbX2idRnZw9m4t3Cn8A+4QC0v64Pjfz9aNLesRuXrF6ejPv6ZW7fu4Cmndqw8+VPa6Ncp1KgcQOdgpuy89ObePi2bk453sO3dWPnpzfRKVhhRkREzGPlzf+PpcOms/S6WIpzcrnmtalVtstLP8HS4Y8TF3U/Ht5ehNzQv1r7ROqz0twC1t8/m95PTWLM6lm0vTaaU/vTyi+hdIC91MbSETOIi/oDZ37OIPyuEbVUsfMo0LgJ/8aNeOPpgax9ZxQDomu2hMjvogJZ+84o3nh6IP6NtRi8iIiYS35GNgCGrYykd5ZzRf+LPz/NVlDEoS830umWaxzaJ1JfZW3ay6pbnmX59U+Q8PwH+F3RouISNEfZS238/Mm3hI0f5OQqna9B3kPjzob2b8um/m3ZkZTNm58m8/WWoxy+yKIBIW2bMOJ3bXn4tu5cGRFQh5WKiIg4j6evN1YvD0rOlt9T2vHmqzm559B57fxDg8hLP4FhK8Pq5UmHUf3IST58yX0iZuDbujmFx08DEP2X8WRu3ENuala1+zcODqDo5Nny+8ksFkLGDiAn+UgtVes8CjRu6sqIAN55rvxTpexTRfyQlE1WdiHFpWV4e3kQFOBL74gAAlroaVYiImJ+PoHNGPLuDCweViwWyD18nA1/eh2Aq2Y/RNqa7aSt2U6bq3vQ/b4bMMrsWDw9yIxPZPc/FwFcdJ+IGfSKvZ0r+nfH4mHlxA8H2PTYGwDEzJhI4bFT7P/PGgDGrX0Fn1ZN8fL3ZcIP88jatIf4P71Oi+4hXPnXSQBYrBZOJh5i29PzXfZ6qstiGFq+Q0REROqX0oIiPgq709VlOGRyyod4+emDQnEOjYHq0z00IiIiIiJiWgo0IiIiIiJiWgo0IiIiIiJiWgo0IiIiIiJiWloUQEREROodwzCwFRa7ugyHePp6Y7FYXF2GuAmNgepToBEREREREdPSJWciIiIiImJaCjQiIiIiImJaCjQiIiIiImJaCjQiIiIiImJaCjQiIiIiImJaCjQiIiIiImJaCjQiIiIiImJaCjQiIiIiImJaCjQiIiIiImJaCjQiIiIiImJaCjQiIiIiImJaCjQiIiIiImJaCjQiIiIiImJaCjQiIiIiImJaCjQiIiIiImJaCjQiIiIiImJaCjQiIiIiImJaCjQiIiIiImJaCjQiIiIiImJaCjQiIiIiImJaCjQiIiIiImJaCjQiIiIiImJaCjQiIiIiImJaCjQiIiIiImJaCjQiIiIiImJa/x+eZPdQYjFKxAAAAABJRU5ErkJggg==", + "text/plain": [ + "
" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "data": { + "text/plain": [ + "tensor([[-2, 4, 1, 0, 0, 0, 4, 0],\n", + " [-2, 4, 0, 3, 4, 0, 0, 0],\n", + " [ 2, 0, 0, 0, 4, 1, 4, 1]], dtype=torch.int32)" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "data": { + "text/plain": [ + "tensor([[ 0.0000, 0.1530, 0.8770, -0.0982, 0.8894, -0.1517, -0.9942, 0.4529],\n", + " [ 0.0000, 0.0000, 0.0000, 0.5461, 0.0000, 0.0000, 0.0000, 0.0000],\n", + " [ 0.0000, 0.0000, 0.0000, 0.3656, 0.0000, 0.0000, 0.0000, 0.0000]])" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "data": { + "text/plain": [ + "tensor([[[-0.5160, 0.0000, 0.1092, 0.0922, 0.0000, -0.2913, -0.1705, -0.0078],\n", + " [ 0.0059, -0.5158, 0.0487, 0.0356, -0.2911, -0.0280, 0.0643, 0.1626],\n", + " [-0.1575, 0.0191, -0.4388, -0.2793, 0.0899, -0.0853, 0.6793, 0.0832],\n", + " [ 0.0209, -0.1561, -0.1054, 0.1762, -0.0866, 0.0817, -0.1028, -0.4679],\n", + " [ 0.0000, -0.2913, -0.1705, -0.0078, -0.5160, 0.0000, 0.1092, 0.0922],\n", + " [-0.2913, 0.0022, 0.0614, 0.1631, 0.0017, -0.5160, 0.0496, 0.0260],\n", + " [-0.0652, -0.0889, 0.3769, -0.0596, -0.1511, -0.0264, -0.1630, -0.2675],\n", + " [-0.0890, -0.0645, -0.2472, -0.4959, -0.0259, -0.1516, -0.1986, -0.2816]],\n", + "\n", + " [[ 0.0000, -0.2100, -0.1229, 0.0109, 0.7160, 0.0000, -0.1515, 0.0664],\n", + " [-0.2098, -0.0145, 0.0458, -0.2253, -0.0114, 0.7155, -0.0669, 0.0238],\n", + " [-0.0470, -0.0641, 0.2716, 0.0827, 0.2096, 0.0366, 0.2261, -0.1928],\n", + " [-0.0635, -0.0514, -0.1815, 0.6940, 0.0332, 0.2130, 0.2726, -0.2176],\n", + " [ 0.7160, 0.0000, -0.1515, 0.0664, 0.0000, -0.2100, -0.1229, 0.0109],\n", + " [ 0.0009, 0.7160, -0.0695, 0.0206, -0.2099, -0.0042, 0.0448, -0.2266],\n", + " [ 0.2186, -0.0265, 0.6088, -0.2013, 0.0648, -0.0615, 0.4896, -0.1154],\n", + " [-0.0263, 0.2188, 0.1541, 0.1425, -0.0617, 0.0636, -0.0679, 0.6583]]], dtype=torch.float64)" + ] + }, + "metadata": {}, + "output_type": 
"display_data" + } + ], + "source": [ + "qc, condition, qc_tensor, params_tensor = get_rnd_encoded_circuit(backend=simulator.backend, \n", + " tokenizer=tokenizer,\n", + " condition=CircuitConditionType.UNITARY, \n", + " gate_pool=tokenizer.vocabulary, \n", + " num_of_qubits=3, \n", + " min_gates=4, \n", + " max_gates=8,\n", + " rng=np.random.default_rng())\n", + "display(qc.draw(\"mpl\"), qc_tensor, params_tensor, condition)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "6fda9ebd-8b03-45cd-9974-0b22b07153b8", + "metadata": {}, + "outputs": [ + { + "data": { + "application/vnd.jupyter.widget-view+json": { + "model_id": "d085aec59ed94682ae4de97bd206b958", + "version_major": 2, + "version_minor": 0 + }, + "text/plain": [ + " 0%| | 0/128 [00:00" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "data = params.flatten()\n", + "ind = data.nonzero(as_tuple=True)\n", + "\n", + "plt.hist(data[ind], bins=20)\n", + "plt.show()" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "a9d6013c-eddc-4629-9219-da0e43cee9e5", + "metadata": {}, + "outputs": [], + "source": [ + "def test_unitary(tensor, param, should_U, rtol, atol):\n", + " instructions = tokenizer.decode(tensor, param)\n", + " qc = simulator.genqc_to_backend(instructions)\n", + " is_U = simulator.backend.get_unitary(qc)\n", + " is_U = torch.stack([torch.from_numpy(np.real(is_U)), torch.from_numpy(np.imag(is_U))])\n", + " \n", + " assert torch.allclose(is_U, should_U, rtol=rtol, atol=atol)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "1be4d78b-54dd-4178-b15a-71d2245c15bf", + "metadata": {}, + "outputs": [ + { + "data": { + "application/vnd.jupyter.widget-view+json": { + "model_id": "d833eeb8d58d45a9a5a4623f91de0d52", + "version_major": 2, + "version_minor": 0 + }, + "text/plain": [ + " 0%| | 0/125 [00:00 Classes for quantum circuit instructions." 
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "8f8bab72-5b33-4d47-8805-8efa39510344", + "metadata": {}, + "outputs": [], + "source": [ + "#| default_exp platform.circuits_instructions" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "c82c4d7b-e275-416e-b88d-3d527a9e3bff", + "metadata": {}, + "outputs": [], + "source": [ + "#| export\n", + "from genQC.imports import *" + ] + }, + { + "cell_type": "markdown", + "id": "a0aef250-6b8a-4b34-b9a9-9745048d8ea5", + "metadata": {}, + "source": [ + "## Circuit instructions" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "904acc48-5087-4efb-bd49-717f2397c867", + "metadata": {}, + "outputs": [], + "source": [ + "#| export\n", + "@dataclass\n", + "class CircuitInstruction():\n", + " \"\"\"Basic quantum circuit instruction.\"\"\"\n", + " name: str\n", + " control_nodes: Sequence[int]\n", + " target_nodes: Sequence[int]\n", + " params: Sequence[float]" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "94b09554-53de-444d-a928-4a57a6bbc6d4", + "metadata": {}, + "outputs": [], + "source": [ + "#| export\n", + "class CircuitInstructions():\n", + " \"\"\"Basic quantum circuit instruction handler.\"\"\"\n", + " \n", + " def __init__(self, tensor_shape: torch.Size) -> None:\n", + " assert len(tensor_shape) == 2 # ... 
[qubits, time]\n", + " self.tensor_shape = tensor_shape \n", + " self._instructions = []\n", + " self.instruction_names_set = set()\n", + " \n", + " def add_instruction(self, \n", + " name: str, \n", + " control_nodes: Sequence[int], \n", + " target_nodes: Sequence[int], \n", + " params: Sequence[float]) -> None:\n", + " self.instruction_names_set.add(name)\n", + " self._instructions.append(CircuitInstruction(name, control_nodes, target_nodes, params))\n", + "\n", + " @property\n", + " def data(self) -> List[CircuitInstruction]: return self._instructions\n", + "\n", + " @property\n", + " def length(self) -> int: return len(self._instructions)\n", + "\n", + " @property\n", + " def num_qubits(self) -> int: return self.tensor_shape[0]\n", + "\n", + " @property\n", + " def max_gates(self) -> int: return self.tensor_shape[1]\n", + " \n", + " def __repr__(self) -> str: return str(self._instructions)\n", + "\n", + " def print(self) -> None:\n", + " for instruction in self.data: \n", + " print(instruction) " + ] + }, + { + "cell_type": "markdown", + "id": "13cf0ddc-2efe-426f-816a-bc6b1d324ae0", + "metadata": {}, + "source": [ + "# Export -" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "99039570-261c-4196-a4bc-6e476c743c1e", + "metadata": {}, + "outputs": [], + "source": [ + "#| hide\n", + "import nbdev; nbdev.nbdev_export()" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "python3", + "language": "python", + "name": "python3" + }, + "widgets": { + "application/vnd.jupyter.widget-state+json": { + "state": {}, + "version_major": 2, + "version_minor": 0 + } + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/src/platform/qcircuit_dataset_construction.ipynb b/src/platform/qcircuit_dataset_construction.ipynb deleted file mode 100644 index 7a995cc..0000000 --- a/src/platform/qcircuit_dataset_construction.ipynb +++ /dev/null @@ -1,1522 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "id": 
"21de19e5-7294-4e4b-9b83-43c0603dba29", - "metadata": {}, - "source": [ - "# Quantum circuit dataset construction" - ] - }, - { - "cell_type": "markdown", - "id": "4da1f40c-5c09-4284-a045-d48a98c0ff54", - "metadata": {}, - "source": [ - "Functions to construct a dataset. Here we define the tokenization (encoding and decoding)." - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "a0d899fa-9bb4-431f-94b7-b963419acc96", - "metadata": {}, - "outputs": [], - "source": [ - "#| default_exp platform.qcircuit_dataset_construction" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "e81b26cc-5347-4259-a254-87ff004c11d6", - "metadata": {}, - "outputs": [], - "source": [ - "#| export\n", - "from genQC.imports import *\n", - "from genQC.platform.simulation.qcircuit_sim import *\n", - "import genQC.dataset.dataset_helper as dahe\n", - "\n", - "import qiskit.quantum_info as qi\n", - "from qiskit import QuantumCircuit\n", - "from qiskit.circuit.gate import Gate\n", - "import qiskit.circuit.library as ql" - ] - }, - { - "cell_type": "markdown", - "id": "c2308c49-a291-46e4-bc9d-0d59b73d46dd", - "metadata": {}, - "source": [ - "## Tokenizer: encoding and decoding" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "f6eafb80-4229-4632-8692-8170a9fb3a48", - "metadata": {}, - "outputs": [], - "source": [ - "#| export\n", - "def get_target_control_qubits(qc: QuantumCircuit, gate: Gate):\n", - " acts_on_cnt = gate.operation.num_qubits\n", - " acts_on = [qc.find_bit(qubit).index for qubit in gate.qubits] # order: (*control_qubits, *target_qubits)\n", - "\n", - " assert acts_on_cnt == len(acts_on), \"error in: acts_on_cnt == len(acts_on)\"\n", - "\n", - " num_ctrl_qubits = gate.operation.num_ctrl_qubits if hasattr(gate.operation, \"num_ctrl_qubits\") else 0 \n", - " num_targ_qubits = acts_on_cnt - num_ctrl_qubits\n", - "\n", - " control_qubits, target_qubits = acts_on[:-num_targ_qubits], acts_on[-num_targ_qubits:] \n", - " \n", - " 
return control_qubits, target_qubits " - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "06cb9a08-8c90-41f7-9b32-76025a2a0776", - "metadata": {}, - "outputs": [], - "source": [ - "#| export\n", - "def encode_circuit(qc: QuantumCircuit, num_of_qubits, gate_classes: dict, max_gates: int, sign_labels={\"control_qubits\":-1, \"target_qubits\":+1}, return_params=False):\n", - " # circuit tensor\n", - " # [qbits, time] .. in +- gate_number \n", - " # 0 for empty\n", - " \n", - " tensor = torch.zeros((num_of_qubits, max_gates), dtype=torch.int32) \n", - " params = []\n", - " \n", - " for t, gate in enumerate(qc.data):\n", - " params.append(gate.operation.params)\n", - " \n", - " gate_id = gate_classes[gate.operation.name] #for new tensor just use this as the abs(T) and then assign the sign dep on the c/t\n", - " \n", - " control_qubits, target_qubits = get_target_control_qubits(qc, gate) \n", - " \n", - " for bit in control_qubits:\n", - " tensor[bit, t] = gate_id * sign_labels[\"control_qubits\"]\n", - " \n", - " for bit in target_qubits:\n", - " tensor[bit, t] = gate_id * sign_labels[\"target_qubits\"]\n", - "\n", - " if return_params: \n", - " num_of_max_params = max(len(para) for para in params)\n", - " params_tensor = torch.zeros((num_of_max_params, max_gates), dtype=torch.float32)\n", - " \n", - " for t, para in enumerate(params):\n", - " params_tensor[:len(para), t] = torch.tensor(para)\n", - " \n", - " return tensor, params_tensor\n", - " \n", - " return tensor" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "89a0ed1f-f5e8-4e9a-aae2-3b06babf84cb", - "metadata": {}, - "outputs": [], - "source": [ - "#| export\n", - "def decode_circuit(enc_tensor: torch.Tensor, gate_pool: list[Gate], place_barrier=True, sign_labels={\"control_qubits\":-1, \"target_qubits\":+1}, params_tensor=None): \n", - " # should have dim 2, [bits, sequence]\n", - " #minus ... control_qubits\n", - " #plus ... 
target_qubits\n", - "\n", - " assert enc_tensor.ndim == 2, f\"{enc_tensor.shape=}\"\n", - " num_of_qubits, time = enc_tensor.shape\n", - " \n", - " gate_qiskit_classes = {(i+1):x for i,x in enumerate(gate_pool)}\n", - " \n", - " qc = QuantumCircuit(num_of_qubits) \n", - " \n", - " for t in range(time): \n", - " enc_time_slice = enc_tensor[:, t] # only contains all bits at time t \n", - " \n", - " for gate_index,gate_qiskit_class in gate_qiskit_classes.items(): \n", - " target_qubits = (enc_time_slice == (sign_labels[\"target_qubits\"] *gate_index)).nonzero()\n", - " control_qubits = (enc_time_slice == (sign_labels[\"control_qubits\"]*gate_index)).nonzero()\n", - " \n", - " if target_qubits.nelement() > 0:\n", - " num_of_paramters = get_number_of_gate_params(gate_qiskit_class) \n", - " if exists(params_tensor) and num_of_paramters > 0 : params = params_tensor[:num_of_paramters, t].tolist()\n", - " else: params = [0] * num_of_paramters\n", - " \n", - " qc.append(gate_qiskit_class(*params), [*control_qubits.tolist(), *target_qubits.tolist()], []) \n", - " if place_barrier: qc.barrier()\n", - " break #break on first hit, per def only one gate allowed per t\n", - " \n", - " elif control_qubits.nelement() > 0: #no target but control means error\n", - " raise RuntimeError(\"control_qubits.nelement() > 0\")\n", - " #else we are fine with tensor that have time steps with no action!\n", - "\n", - " return qc" - ] - }, - { - "cell_type": "markdown", - "id": "7289bef7-cb71-44e8-af61-7c759a39566b", - "metadata": {}, - "source": [ - "## Dataset generation" - ] - }, - { - "cell_type": "markdown", - "id": "02849cca-6bad-4096-9901-0c75c172afd2", - "metadata": {}, - "source": [ - "### Totally random SRV circuits" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "4c5a67ab-086f-43ea-81a1-2c685582e393", - "metadata": {}, - "outputs": [], - "source": [ - "#| export\n", - "def get_rnd_encoded_circuit(num_of_qubits, min_gates, max_gates, gate_pool, gate_classes, rng, 
optimized=True, return_params=False): \n", - " qc = rnd_circuit(num_of_qubits, rng.integers(min_gates, max_gates+1), gate_pool, rng) \n", - " if optimized: qc = optimize_circuit(qc, gate_pool) \n", - " svr = schmidt_rank_vector(qi.DensityMatrix(qc))\n", - " \n", - " if return_params:\n", - " qc_tensor, params_tensor = encode_circuit(qc, num_of_qubits, gate_classes, max_gates, return_params=return_params) \n", - " return qc, qc_tensor, svr, params_tensor\n", - " \n", - " qc_tensor = encode_circuit(qc, num_of_qubits, gate_classes, max_gates, return_params=return_params) \n", - " return qc, qc_tensor, svr " - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "a73c45d8-7a90-46e8-b7f1-dac57b7d3216", - "metadata": {}, - "outputs": [], - "source": [ - "#| export\n", - "def get_rnd_encoded_circuits(samples, num_of_qubits=3, min_gates=3, max_gates=10, gate_pool=[ql.HGate, ql.CXGate], optimized=True, silent=False, return_params=False):\n", - " gate_classes = gate_pool_to_gate_classes(gate_pool) #{x().name:(i+1) for i,x in enumerate(gate_pool)}\n", - " \n", - " rng = np.random.default_rng()\n", - " \n", - " data = []\n", - " label = []\n", - " params = []\n", - " \n", - " for i in tqdm(range(samples), disable=silent): \n", - " if return_params:\n", - " qc, qc_tensor, svr, params_tensor = get_rnd_encoded_circuit(num_of_qubits, min_gates, max_gates, gate_pool, gate_classes, rng, optimized, return_params=return_params) \n", - " params.append(params_tensor)\n", - " \n", - " else:\n", - " qc, qc_tensor, svr = get_rnd_encoded_circuit(num_of_qubits, min_gates, max_gates, gate_pool, gate_classes, rng, optimized, return_params=return_params) \n", - " \n", - " data.append(qc_tensor)\n", - " label.append(svr)\n", - " \n", - " if return_params: return data, label, params\n", - " return data, label" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "4749e144-c6de-4fe2-bb72-e0923bfd4d27", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - 
"output_type": "stream", - "text": [ - "Encode:\n" - ] - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "0ba61b257e6345b9823ea4b9852edd47", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - " 0%| | 0/1 [00:00" - ] - }, - "metadata": {}, - "output_type": "display_data" - } - ], - "source": [ - "gate_pool=[ql.HGate, ql.CXGate, ql.CU3Gate, ql.CRXGate]\n", - "\n", - "print(\"Encode:\")\n", - "enc_t, y, params = get_rnd_encoded_circuits(samples=1, num_of_qubits=3, min_gates=6, max_gates=6, gate_pool=gate_pool, optimized=True, return_params=True)\n", - "\n", - "for enc_i, y_i, params_i in zip(enc_t, y, params):\n", - " print(f\"{enc_i=}\")\n", - " print(f\"{y_i=}\")\n", - " print(f\"{params_i=}\")\n", - "\n", - "print(\"Decode:\")\n", - "qc = decode_circuit(enc_t[0], gate_pool=gate_pool, params_tensor=params[0])\n", - "display(qc.draw(\"mpl\"))" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "65fa2e53-1d1c-4429-a12f-910f8de9f89f", - "metadata": {}, - "outputs": [], - "source": [ - "#| export\n", - "def gen_qc_dataset(samples, num_of_qubits, min_gates, max_gates, gate_pool, optimized, silent=False):\n", - " tensor, srv = get_rnd_encoded_circuits(samples, num_of_qubits, min_gates, max_gates, gate_pool, optimized, silent)\n", - "\n", - " # make sure we have unique circuits\n", - " tensor = torch.stack(tensor, dim=0)\n", - " tensor_unique, tensor_indices = dahe.get_unique_elements_indices(tensor)\n", - " \n", - " if not silent: print(f\"Generated unique circuits: {tensor_unique.shape[0]}\")\n", - " \n", - " #--------------------------\n", - " #select uniques only\n", - " \n", - " x = tensor[tensor_indices]\n", - " y = torch.Tensor(srv).type(torch.int32)[tensor_indices] #leave as tensor, treat as 2D condition, combine cond into one large (each cat)\n", - " \n", - " return x,y" - ] - }, - { - "cell_type": "markdown", - "id": "3da76639-b8b1-4623-9bd9-9953bfe37ee1", - "metadata": {}, - "source": [ - "### 
Specific random SRV circuit" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "36d252ce-81e2-4833-b070-b290947c8dea", - "metadata": {}, - "outputs": [], - "source": [ - "#| export\n", - "def get_specific_rnd_srv_circuit(srv, requested_length, gate_pool, max_i=2000, silent=True, fix_length_after_optimizing=True, requested_length_tolerance=0):\n", - " rng = np.random.default_rng()\n", - " \n", - " num_of_qubits = len(srv) \n", - " is_srv = None\n", - "\n", - " if requested_length < sum(srv)-num_of_qubits: return None #not possible to generate this srv, to few gates\n", - " \n", - " i = 0\n", - " while is_srv != srv: # brute-force sample a SRV\n", - " qc = rnd_circuit(num_of_qubits, requested_length, gate_pool, rng) \n", - " qc = optimize_circuit(qc, gate_pool) \n", - "\n", - " if i > max_i: \n", - " if not silent: print(f\"Max i reached: {srv=} {requested_length=} {requested_length_tolerance=} {max_i=}\")\n", - " return None #raise RuntimeError(\"max i reached\") \n", - " i += 1\n", - "\n", - " #---------------\n", - " if fix_length_after_optimizing and len(qc.data) < requested_length-requested_length_tolerance:\n", - " continue\n", - " \n", - " is_srv = schmidt_rank_vector(qi.DensityMatrix(qc)) \n", - " \n", - " return qc" - ] - }, - { - "cell_type": "markdown", - "id": "cc38416d-ee86-4892-96c2-9b729286d835", - "metadata": {}, - "source": [ - "### Unitary dataset" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "83aee5e2-3e75-4b3f-8ad4-690911d97b23", - "metadata": {}, - "outputs": [], - "source": [ - "#| export\n", - "def gen_compilation_rndGates_dataset(samples, num_of_qubits, min_gates, max_gates, gate_pool, min_sub_gate_pool_cnt=1, silent=False):\n", - " '''Samples rnd circuit with a rnd subset of gates and return qc with gate label and unitary'''\n", - "\n", - " gate_classes = {x().name:(i+1) for i,x in enumerate(gate_pool)} #+1 for empty! global gate classes so we fix the indices! 
1...H 2...CX so on\n", - "\n", - " #-------------------------------\n", - " \n", - " rng = np.random.default_rng()\n", - " n = len(gate_pool) + 1\n", - " c_range = np.arange(n-1)\n", - " \n", - " tensor = []\n", - " label = []\n", - " U = []\n", - " \n", - " for i in tqdm(range(samples), disable=silent):\n", - " sub_gate_pool_cnt = rng.integers(min_sub_gate_pool_cnt, n) \n", - " sub_gate_pool_ind = rng.choice(c_range, size=sub_gate_pool_cnt, replace=False) \n", - " sub_gate_pool = [gate_pool[ind] for ind in sub_gate_pool_ind] # pick random subeset of gates\n", - " \n", - " qc, qc_tensor, svr = get_rnd_encoded_circuit(num_of_qubits, min_gates, max_gates, sub_gate_pool, gate_classes, rng, optimized=True)\n", - " \n", - " tensor.append(qc_tensor)\n", - " label.append(f\"Compile using: {[x().name for x in sub_gate_pool]}\")\n", - " U.append(qi.Operator(qc).to_matrix())\n", - " \n", - " #-------------------------------\n", - " \n", - " # make sure we have unique circuits\n", - " tensor = torch.stack(tensor, dim=0)\n", - " tensor_unique, tensor_indices = dahe.get_unique_elements_indices(tensor)\n", - " \n", - " if not silent: print(f\"generated unique circuits: {tensor_unique.shape[0]}\")\n", - " \n", - " #--------------------------\n", - " #select uniques only\n", - " \n", - " x = tensor[tensor_indices]\n", - " y = [label[i] for i in tensor_indices.tolist()]\n", - " U = [ U[i] for i in tensor_indices.tolist()]\n", - " \n", - " return x,y,U " - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "ef6c86d9-83f4-4d10-b8be-59093b5aac5d", - "metadata": {}, - "outputs": [ - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "bbf2a9e343de4bdfadf2536aed805228", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - " 0%| | 0/1 [00:00>> Compile using: ['x'] <<<\n", - "\n", - "tensor([[0, 0, 0, 0],\n", - " [4, 0, 0, 0],\n", - " [0, 4, 4, 4]], dtype=torch.int32)\n", - "\n", - "[[0.+0.j 0.+0.j 0.+0.j 0.+0.j 0.+0.j 0.+0.j 1.+0.j 
0.+0.j]\n", - " [0.+0.j 0.+0.j 0.+0.j 0.+0.j 0.+0.j 0.+0.j 0.+0.j 1.+0.j]\n", - " [0.+0.j 0.+0.j 0.+0.j 0.+0.j 1.+0.j 0.+0.j 0.+0.j 0.+0.j]\n", - " [0.+0.j 0.+0.j 0.+0.j 0.+0.j 0.+0.j 1.+0.j 0.+0.j 0.+0.j]\n", - " [0.+0.j 0.+0.j 1.+0.j 0.+0.j 0.+0.j 0.+0.j 0.+0.j 0.+0.j]\n", - " [0.+0.j 0.+0.j 0.+0.j 1.+0.j 0.+0.j 0.+0.j 0.+0.j 0.+0.j]\n", - " [1.+0.j 0.+0.j 0.+0.j 0.+0.j 0.+0.j 0.+0.j 0.+0.j 0.+0.j]\n", - " [0.+0.j 1.+0.j 0.+0.j 0.+0.j 0.+0.j 0.+0.j 0.+0.j 0.+0.j]]\n", - "\n", - "Decoded:\n" - ] - }, - { - "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAAAnMAAADuCAYAAABS14G6AAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjguMSwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/SrBM8AAAACXBIWXMAAA9hAAAPYQGoP6dpAAAc+UlEQVR4nO3df3CU9b328eveEFwTIxJjCYZA+JE0IRDo8GMO0tGB0TNUDHjqLyyinLH6PBWOjlJCZ+qfTG2OqH0UpgfrcOg54+HQguOJCTC2hZki0zrhoJRDEnmiCSFxd0oawoQEMNm9nz8c8jQlYnZJ9pv7+32/ZjoZd/fefOjn0rm4d/dez/d9XwAAAAikkOkBAAAAkDzKHAAAQIBR5gAAAAKMMgcAABBglDkAAIAAo8wBAAAEGGUOAAAgwChzAAAAAUaZAwAACDDKHAAAQIBR5gAAAAKMMgcAABBglDkAAIAAo8wBAAAEGGUOAAAgwChzAAAAAUaZAwAACDDKHAAAQIBR5gAAAAKMMgcAABBglDkAAIAAo8wBAAAEGGUOAAAgwChzAAAAAUaZAwAACDDKHAAAQIBR5gAAAAJsjOkBgOvR0NCQ0OP//Oc/61e/+pUefvhhfeMb3xjSMcXFxcmMhhQhA0gkA8nsXyIDo53rGeDMHJxy9uxZbdu2TWfPnjU9CgwhA25j/7AxA5Q5AACAAKPMAQAABBhlDgAAIMAoc3DKuHHjVF5ernHjxpkeBYaQAbexf9iYAc/3fd/0EECyEv0kYzJG8yeYQAZABkAGODMHp1y+fFmnT5/W5cuXTY8CQ8iA29g/bMwAZQ5OaWxs1LJly9TY2Gh6FBhCBtzG/mFjBihzAAAAAUaZAwAACDDKHAAAQIBR5gAAAAKMS5Mg0Fz/ODrIAMgAyABn5gAAAAKMMgenNDU1adWqVWpqajI9CgwhA25j/7AxA5Q5OKWnp0fHjx9XT0+P6VFgCBlwG/uHjRmgzAEAAAQYZQ4AACDAKHMAAAABRpmDU/Ly8lRZWam8vDzTo8AQMuA29g8bM8B15hBorl9bCGQAZABkgDNzcEpHR4fefvttdXR0mB4FhpABt7F/2JgByhycEolEtHnzZkUiEdOjwBAy4Db2DxszQJkDAAAIMCfKXHt7uyoqKjRjxgyFw2Hl5+frueeeU3d3t5588kl5nqetW7eaHhMAACBhY0wPMNI+/vhjfec731E0GlVmZqZmzpypzz//XK+//ro+/fTT/tfM586da3ZQAACAJFh9Zq69vV3l5eWKRqPasGGDIpGIjh07pmg0qsrKStXU1Ki2tlae56msrMz0uEiBzMxMLV68WJmZmaZHgSFkwG3sHzZmwOpLk3zve9/Trl27tH79er3xxhtX3T937lwdP35cU6dO1WeffWZgQlwv1z+ODjIAMgAyYO2Zufr6eu3evVs5OTl66aWXBn3MvHnzJElz5
swZcHtTU5NWrFihrKwsjR8/Xo8//rj+8pe/jPjMGHmxWEwXLlxQLBYzPQoMIQNuY/+wMQPWlrldu3YpHo9r9erVuummmwZ9zI033ihpYJnr6urSkiVL1Nraql27dunNN9/U4cOHdd999ykej6dkdoychoYGLViwICV/i8PoRAbcxv5hYwas/QDEwYMHJUlLliz5yse0trZKGljm3nzzTbW1ten3v/+9Jk+eLEmaNGmS7rjjDlVVVen+++8fuaEBAAASZG2ZO336tCRpypQpg97f19enI0eOSBpY5qqrq/Xtb3+7v8hJ0qJFizRt2jS99957SZW5+fPnKxqNJnwcvt7atWsTevyVi0Tu27dPH3300ZCO2blzZ4JTIZXIABLJQDL7l8jAaGdDBnJzc3X06NGkjrW2zHV3d0uSLl68OOj9u3fvVnt7u7KysjR16tT+2+vq6vTQQw9d9fjS0lLV1dUlNUs0GlVbW1tSx+Laenp6Enr8pUuX+n8O9Vh2N7qRASSSgWT2L5GB0c71DFhb5nJzc3Xu3DkdO3ZMixYtGnBfJBLRxo0bJUllZWXyPK//vnPnzumWW2656vmys7P1ySefJD0LRkZGRkZCjw+Hw/0/h3psXl5ewnMhdcgAEslAMvuXyMBoZ0MGrqcrWHtpkmeffVZvvPGG8vPz9dvf/lZFRUWSpNraWq1Zs0afffaZent7tW7dugHf/jB27FhVVFRo8+bNA55v7dq1+sMf/pB0ocPISPQNrL29verq6lJWVpbS09OHdMxo/jg6yAASy0Ay+5fIwGjnegas/TRrRUWFbr31Vp05c0alpaWaPXu2CgsLtXDhQk2bNk1Lly6VdPVlScaPH6/Ozs6rnq+jo0PZ2dmpGB0jKD09XdnZ2Qn9Cwy7kAG3sX/YmAFry9ykSZN0+PBhLV++XOFwWM3NzcrOztb27dtVU1OjU6dOSbq6zJWUlAz63ri6ujqVlJSkZHaMnJaWFj3zzDNqaWkxPQoMIQNuY/+wMQPWljnpy2JWXV2trq4udXV16cMPP9TTTz+t7u5uNTc3KxQKadasWQOOue+++/TBBx/0X7ZEkj788EN9+umnKi8vT/UfAcOsq6tLhw4dUldXl+lRYAgZcBv7h40ZsLrMfZWTJ0/K930VFhZe9ebHp59+WhMnTtTKlStVXV2tPXv26NFHH9XChQu1cuVKQxMDAAAMzskyd+LECUlXv8QqSTfffLMOHjyoiRMnatWqVfr+97+vO+64Q9XV1QqFnPy/CwAAjGLWXprkWq5V5iRp+vTpqq6uTuVIAAAASXHyVNPXlTnYa8KECdq0aZMmTJhgehQYQgbcxv5hYwasvc4c3JCKL0oezdcWAhkAGQAZcPLMHNx1/vx5HThwQOfPnzc9CgwhA25j/7AxA5Q5OKW1tVXPP//8gEvPwC1kwG3sHzZmgDIHAAAQYJQ5AACAAKPMAQAABBhlDk4Jh8MqKSlROBw2PQoMIQNuY/+wMQNcmgSB5vrH0UEGQAZABjgzBwAAEGCUOTilrq5OZWVlqqurMz0KDCEDbmP/sDEDlDk4xfd99fb2incXuIsMuI39w8YMUOYAAAACjDIHAAAQYJQ5AACAABtjegAglaZPn66qqirl5+ebHgWGkAG3sX/YmAHKHJwSDodVWFhoegwYRAbcxv5hYwZ4mRVOaWtr04svvqi2tjbTo8AQMuA29g8bM0CZg1M6Ozu1d+9edXZ2mh4FhpABt7F/2JgByhwAAECAUeYAAAACjDIHAAAQYJQ5OCUnJ0dPPfWUcnJyTI8CQ8iA29g/bMwAZQ5O8TxPY8eOled5pkeBIWTAbewfNmaAMgennD17Vtu2bdPZs2dNjwJDyIDb2D9szABlDgAAIMAocwAAAAFGmQMAAAgwyhycMm7cOJWXl2vcuHGmR4EhZMBt7B82ZsDzfd83PQSQrIaGhhH/HcXFxSP+O5A8MgAyANczwJk5OOXy5cs6ffq0Ll++bHoUGEIG3
Mb+YWMGKHNwSmNjo5YtW6bGxkbTo8AQMuA29g8bMzDG9AAYnO/76onHTI8xZBmhNKsuwAgAQFBQ5kapnnhM4w/+xvQYQ3Zu6T3KTCNOAACkGi+zAgAABBhlDgAAIMB4XQxOKS0tVX19vekxYBAZcBv7h40Z4MwcAABAgFHm4JSmpiatWrVKTU1NpkeBIWTAbewfNmaAMgen9PT06Pjx4+rp6TE9CgwhA25j/7AxA5Q5AACAAKPMAQAABBhlDgAAIMAoc3BKXl6eKisrlZeXZ3oUGEIG3Mb+YWMGuM4cnHLLLbdoxYoVpseAQWTAbewfNmaAM3NwSkdHh95++211dHSYHgWGkAG3sX/YmAHKHJwSiUS0efNmRSIR06PAEDLgNvYPGzPgRJlrb29XRUWFZsyYoXA4rPz8fD333HPq7u7Wk08+Kc/ztHXrVtNjAgAAJMz6Mvfxxx9r9uzZevnllxWNRjVz5kz19vbq9ddf1yOPPNL//Wxz5841O+gIie3Yqd6/v1fxA+9fdZ/v++r74Sb1Ll8hv6k59cMBAIDrZnWZa29vV3l5uaLRqDZs2KBIJKJjx44pGo2qsrJSNTU1qq2tled5KisrMz3uiAitWS0VTFFs+y/kn20fcF/8nXfl/+mEQmsekze1wMyAAADgulhd5p599lm1trZq/fr12rJli7Kysvrvq6io0Jw5c9TX16eCggLdfPPNBicdOV56usZs3CBduqTYqz/rv90/06r4zn+TV/xNhR56wNyAKZaZmanFixcrMzPT9CgwhAy4jf3DxgxYW+bq6+u1e/du5eTk6KWXXhr0MfPmzZMkzZkzp/+2K+Vv4cKFuuGGG+R5XkrmHUle4QyFVj0s/7+PKV6zX34sptg/b5F8X2kbN8hLSzM9YsoUFBTorbfeUkFBgelRYAgZcBv7h40ZsLbM7dq1S/F4XKtXr9ZNN9006GNuvPFGSQPLXGNjo/bu3avc3FwtWLAgJbOmQmj1o9K0aYr94i3Ft/2L/E9OKbT2cXn5k0yPllKxWEwXLlxQLBYzPQoMIQNuY/+wMQPWlrmDBw9KkpYsWfKVj2ltbZU0sMzdeeedikQiqqqq0t133z2yQ6aQN2aMxmx8QfqiV/HqGnmzShX67v2mx0q5hoYGLViwQA0NDaZHgSFkwG3sHzZmwNpvgDh9+rQkacqUKYPe39fXpyNHjkgaWOZCoeHvt/Pnz1c0Gk3oGH/sWGn7tuEdJDNTSk+X+vrkLZgvbxj/rEWFRfK++GLYnm+o1q5dm9Djr1xXaN++ffroo4+GdMzOnTsTnAqpRAaQSAaS2b9EBkY7GzKQm5uro0ePJnWstWWuu7tbknTx4sVB79+9e7fa29uVlZWlqVOnjugs0WhUbW1tiR0UvkHpwziD7/uKvfKa1NcrTc5X/D/+U6G77pR3+8Rhef7PI59Lly4Py3MloqenJ6HHX7p0qf/nUI9NeHdIKTKARDKQzP4lMjDauZ4Ba8tcbm6uzp07p2PHjmnRokUD7otEItq4caMkqaysbMQ/5JCbm5vwMf7YsTo7jDPE362Sf/xPCv3jEwot+jv1rfsnxV55TWlbKoflz3/7xNuNnJnLyMhI6PHhcLj/51CPtenLmG1EBpBIBpLZv0QGRjsbMpBMV7jC2jJ39913q76+XpWVlbrnnntUVFQkSaqtrdWaNWvU3v7lNddScbHgZE6bdsf6NP7gb4bl9/ttbYrv2Cnvm0UKPfygvLQ0hR5brfi//lLxd6uU9g8rr/t3nPq/p5SZlvo4Jfqeh5MnT2rHjh269957VVpaOqRjNm/enMxoSBEygEQykMz+JTIw2rmeAWs/AFFRUaFbb71VZ86cUWlpqWbPnq3CwkItXLhQ06ZN09KlSyUNfL+cjfx4XLGXX5XicaVtfKH/MiShhx+UV1So+I6d8j+35/vpvk5RUZGOHDnSX+7hHjLgNvYPGzNgbZmbNGmSDh8+rOXLlyscDqu5uVnZ2dnavn27a
mpqdOrUKUn2l7n4nnfk19Ur9MRj8iZP7r/dS0tT2g9fkOIxxV55Tb7vG5wyddLT05Wdna309OF8RyKChAy4jf3DxgxYW+YkqaSkRNXV1erq6lJXV5c+/PBDPf300+ru7lZzc7NCoZBmzZpleswR47e0KP7Lf5dXUqzQA9+96n6vYIpCj62Wf+J/FH+3ysCEqdfS0qJnnnlGLS0tpkeBIWTAbewfNmbA6jL3VU6ePCnf91VYWDjomx/37NmjPXv2qK6ubsA/J/uRYVO8yZOVXvNfGvN/Xv3Kb3lIe/QRpb+/b1jeNxcEXV1dOnTokLq6ukyPAkPIgNvYP2zMgLUfgLiWEydOSPrql1gfeuihQf/5iSee4FpDAABgVKHMDcKV948BAIDgc/Jl1q8rcwAAAEHh5Jm5K9/bCvdMmDBBmzZt0oQJE0yPAkPIgNvYP2zMgOfzmuKoNJwXDU6Fc0vvCcRFg5NRXFw84r8DySMDIANwPQNOvswKd50/f14HDhzQ+fPnTY8CQ8iA29g/bMwAZQ5OaW1t1fPPP6/W1lbTo8AQMuA29g8bM0CZAwAACDDKHAAAQIBR5gAAAAKMMgenhMNhlZSUKBwOmx4FhpABt7F/2JgBLk0ySnFpkqFx/ePoIAMgAyADnJkDAAAIMMocnFJXV6eysjLV1dWZHgWGkAG3sX/YmAHKHJzi+756e3vFuwvcRQbcxv5hYwac/G7WIMgIpenc0ntMjzFkGaE00yMAAOAkytwo5XmekQ8UAACAYOFlVgAAgADj1A+cMn36dFVVVSk/P9/0KDCEDLiN/cPGDFDm4JRwOKzCwkLTY8AgMuA29g8bM8DLrHBKW1ubXnzxRbW1tZkeBYaQAbexf9iYAcocnNLZ2am9e/eqs7PT9CgwhAy4jf3DxgxQ5gAAAAKMMgcAABBglDkAAIAAo8zBKaFQSAsWLFAoRPRdRQbcxv5hYwbs+ZMAQxCPx1VbW6t4PG56FBhCBtzG/mFjBihzAAAAAUaZAwAACDDKHAAAQIBR5uCUcePGqby8XOPGjTM9CgwhA25j/7AxA57v+77pIYBkNTQ0jPjvKC4uHvHfgeSRAZABuJ4BzszBKZcvX9bp06d1+fJl06PAEDLgNvYPGzNAmYNTGhsbtWzZMjU2NpoeBYaQAbexf9iYgTGmBwBwNd/31ROPmR4jIRmhNHmeZ3oMa5ABkAEMFWUOGIV64jGNP/gb02Mk5NzSe5SZxn9ShgsZABnAUPEyKwAAQIBR5gAAAAKMc6FwSmlpqerr602PAYPIgNvYP2zMAGfmAAAAAowyB6c0NTVp1apVampqMj0KDCEDbmP/sDEDlDk4paenR8ePH1dPT4/pUWAIGXAb+4eNGaDMAQAABBhlDgAAIMAocwAAAAFGmYNT8vLyVFlZqby8PNOjwBAy4Db2DxszwHXm4JRbbrlFK1asMD0GDCIDbmP/sDEDnJmDUzo6OvT222+ro6PD9CgwhAy4jf3DxgxQ5uCUSCSizZs3KxKJmB4FhpABt7F/2JgByhwAAECAOVHm2tvbVVFRoRkzZigcDis/P1/PPfecuru79eSTT8rzPG3dutX0mMCIiO3Yqd6/v1fxA+9fdZ/v++r74Sb1Ll8hv6k59cNhxLF/kAH7WV/mPv74Y82ePVsvv/yyotGoZs6cqd7eXr3++ut65JFH+r9sd+7cuWYHBUZIaM1qqWCKYtt/If9s+4D74u+8K/9PJxRa85i8qQVmBsSIYv8gA/azusy1t7ervLxc0WhUGzZsUCQS0bFjxxSNRlVZWamamhrV1tbK8zyVlZWZHhcpkJmZqcWLFyszM9P0KCnjpadrzMYN0qVLir36s/7b/TOtiu/8N3nF31TooQfMDZhirmWA/Q/k2v4lMvC3bMyA1WXu2WefVWtrq9avX68tW7YoKyur/76KigrNmTNHfX19Kigo0M0332xwUqRKQUGB3nrrLRUUFJgeJaW8whkKrXpY/n8fU7xmv/xYT
LF/3iL5vtI2bpCXlmZ6xJRxMQPs//9zcf8SGfhrNmbA2jJXX1+v3bt3KycnRy+99NKgj5k3b54kac6cOf237dmzRw888ICmTJmijIwMFRcX68c//rEuXLiQkrkxsmKxmC5cuKBYLGZ6lJQLrX5UmjZNsV+8pfi2f5H/ySmF1j4uL3+S6dFSytUMsP8vubp/iQxcYWMGrC1zu3btUjwe1+rVq3XTTTcN+pgbb7xR0sAyt2XLFqWlpeknP/mJ9u/frx/84Af6+c9/rmXLlikej6dkdoychoYGLViwQA0NDaZHSTlvzBiN2fiC9EWv4tU18maVKvTd+02PlXKuZoD9f8nV/Utk4AobM2DtN0AcPHhQkrRkyZKvfExra6ukgWXuvffe02233db/z3fddZduu+02rV69Wh988IHuvPPOhGeZP3++otFowsfh661duzahx1+5rtC+ffv00UcfDemYnTt3JjjV9fPHjpW2bxv+J87MlNLTpb4+eQvmywsN39/nigqL5H3xxbA931CRgQSM4P6lYGQgmf1LZGCoyEDycnNzdfTo0aSOtbbMnT59WpI0ZcqUQe/v6+vTkSNHJA0sc39d5K6YP3++JKmtrS2pWaLRaNLH4tp6enoSevylS5f6fw71WCO7C9+g9GF+St/3FXvlNamvV5qcr/h//KdCd90p7/aJw/L8n0c+ly5dHpbnSgQZGJqR3r8UjAwks3+JDAwVGTDD2jLX3d0tSbp48eKg9+/evVvt7e3KysrS1KlTr/lchw4dkiSVlJQkNUtubm5Sx+HrZWRkJPT4cDjc/3Oox5r4MmZ/7FidHebnjL9bJf/4nxT6xycUWvR36lv3T4q98prStlTK87zrfv7bJ95u5G/kZGBoRnr/UjAykMz+JTIwVGQgedfTFTzf9/1hnGXUmDlzpurr67V161atW7duwH2RSETz5s1TJBLR4sWL9cEHH3zl87S1telb3/qW5s2bp/3794/02EhQou95OHnypB588EHt2bNHpaWlQzqmuLg4mdGuS3esT+MP/mbYns9va1Pf/14vr2CK0n72iry0NMV27Vb8X3+p0A/+l9L+YeV1/45zS+9RZlrq/35IBr5eKvYvBSMDyexfIgNDRQbMsPYDEHfffbckqbKyUqdOneq/vba2VkuWLFF7+5cXTrzWxYIvXLiglStXauzYsdqxY8eIzovUKCoq0pEjR1RUVGR6lJTx43HFXn5ViseVtvGF/ksQhB5+UF5RoeI7dsr/3J7vKPw6rmWA/Q/k2v4lMvC3bMyAtWWuoqJCt956q86cOaPS0lLNnj1bhYWFWrhwoaZNm6alS5dKGvh+ub928eJFlZeXq6mpSe+//74mThy+9xTAnPT0dGVnZys9fbjfkTZ6xfe8I7+uXqEnHpM3eXL/7V5amtJ++IIUjyn2ymuy9CT9VVzLAPsfyLX9S2Tgb9mYAWvL3KRJk3T48GEtX75c4XBYzc3Nys7O1vbt21VTU9N/tm6wMtfb26sHH3xQR48e1f79+zVz5sxUj48R0tLSomeeeUYtLS2mR0kJv6VF8V/+u7ySYoUe+O5V93sFUxR6bLX8E/+j+LtVBiZMPZcywP6v5tL+JTIwGBszYO0HIKQvP7BQXV191e0XLlxQc3OzQqGQZs2aNeC+K9em+93vfqd9+/Zp4cKFqRoXKdDV1aVDhw5d9T5KW3mTJyu95r+u+Zi0Rx9R2qOPpGgi81zKAPu/mkv7l8jAYGzMgNVl7qucPHlSvu+rqKjoqk+yrFu3Tr/+9a/1ox/9SBkZGfrjH//Yf9/06dMHvXQJAACAKda+zHotJ06ckDT4S6xXPrH605/+VIsWLRrwv5qampTOCQAA8HWcPDN3rTLX3Nyc4mkAAACSx5k5OGXChAnatGmTJkyYYHoUGEIG3Mb+YWMGnDwzd+V7W+GenJychL/LE3YhA25j/7AxA06emYO7zp8/rwMHDuj8+fOmR4EhZMBt7B82ZoAyB6e0t
rbq+eefV2trq+lRYAgZcBv7h40ZoMwBAAAEGGUOAAAgwChzAAAAAUaZg1PC4bBKSkoUDodNjwJDyIDb2D9szICTlyaBu6ZPn6533nnH9BgwiAy4jf3DxgxwZg4AACDAKHNwSl1dncrKylRXV2d6FBhCBtzG/mFjBihzcIrv++rt7ZXv+6ZHgSFkwG3sHzZmgPfMAaNQRihN55beY3qMhGSE0kyPYBUyADKAoaLMAaOQ53nKTONfT5eRAZABDBUvswIAAAQYlR9OmT59uqqqqpSfn296FBhCBtzG/mFjBihzcEo4HFZhYaHpMWAQGXAb+4eNGeBlVjilra1NL774otra2kyPAkPIgNvYP2zMAGUOTuns7NTevXvV2dlpehQYQgbcxv5hYwYocwAAAAFGmQMAAAgwyhwAAECAUebglJycHD311FPKyckxPQoMIQNuY/+wMQOeb9OXkwEAADiGM3MAAAABRpkDAAAIMMocAABAgFHmAAAAAowyBwAAEGCUOQAAgACjzAEAAAQYZQ4AACDAKHMAAAABRpkDAAAIMMocAABAgFHmAAAAAowyBwAAEGCUOQAAgACjzAEAAATY/wO1dj13UgyHLQAAAABJRU5ErkJggg==", - "text/plain": [ - "
" - ] - }, - "metadata": {}, - "output_type": "display_data" - } - ], - "source": [ - "gate_pool=[ql.HGate, ql.CXGate, ql.ZGate, ql.XGate, ql.CCXGate]\n", - "\n", - "enc_t, y, U = gen_compilation_rndGates_dataset(samples=1, num_of_qubits=3, min_gates=3, max_gates=4, gate_pool=gate_pool)\n", - "\n", - "np.set_printoptions(edgeitems=30, linewidth=100000, formatter=dict(float=lambda x: \"%.3g\" % x))\n", - "\n", - "print(f\"\\ny Label >>> {y[0]} <<<\")\n", - "print(f\"\\n{enc_t[0]}\")\n", - "print(f\"\\n{U[0]}\")\n", - "print(\"\\nDecoded:\")\n", - "qc = decode_circuit(enc_t[0], gate_pool=gate_pool)\n", - "display(qc.draw(\"mpl\"))" - ] - }, - { - "cell_type": "markdown", - "id": "2854cb4a-2a02-400f-a8af-efd21efa39fe", - "metadata": {}, - "source": [ - "### Graph states dataset" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "79e18993-1742-4c81-9957-f50bd4f3afb9", - "metadata": {}, - "outputs": [], - "source": [ - "#place all h on all bist then only cz" - ] - }, - { - "cell_type": "markdown", - "id": "b603ed4a-dbf3-48ad-a5f2-f162bb989665", - "metadata": {}, - "source": [ - "# Export -" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "8246b9cc-fd8b-4a1e-92bc-9fcf0a8dd082", - "metadata": {}, - "outputs": [], - "source": [ - "#| hide\n", - "import nbdev; nbdev.nbdev_export()" - ] - } - ], - "metadata": { - "kernelspec": { - "display_name": "python3", - "language": "python", - "name": "python3" - }, - "widgets": { - "application/vnd.jupyter.widget-state+json": { - "state": { - "0015549f1fa849569e28890bc177c2e3": { - "model_module": "@jupyter-widgets/controls", - "model_module_version": "2.0.0", - "model_name": "FloatProgressModel", - "state": { - "bar_style": "success", - "layout": "IPY_MODEL_909f981cd976432695b6b1e75e7afb56", - "max": 1, - "style": "IPY_MODEL_0ecf8fdd93a34babb273867999ae570d", - "value": 1 - } - }, - "02e0621cfdb94913bee8927c354b374a": { - "model_module": "@jupyter-widgets/controls", - 
"model_module_version": "2.0.0", - "model_name": "HTMLModel", - "state": { - "layout": "IPY_MODEL_2c549a4c524f47edb7033299693dd0a6", - "style": "IPY_MODEL_57de0096a47b452fb5c59a99578e8f9e", - "value": "100%" - } - }, - "057884b79c0a470598c5007fddd0a680": { - "model_module": "@jupyter-widgets/controls", - "model_module_version": "2.0.0", - "model_name": "ProgressStyleModel", - "state": { - "description_width": "" - } - }, - "083b64b1c2ab4f29859b2c9503f5f38a": { - "model_module": "@jupyter-widgets/controls", - "model_module_version": "2.0.0", - "model_name": "FloatProgressModel", - "state": { - "bar_style": "success", - "layout": "IPY_MODEL_7964f48701a94ccb8db7bdb015b8343a", - "max": 1, - "style": "IPY_MODEL_057884b79c0a470598c5007fddd0a680", - "value": 1 - } - }, - "086d9e84542c4d5d8f5b7721ccebcedb": { - "model_module": "@jupyter-widgets/base", - "model_module_version": "2.0.0", - "model_name": "LayoutModel", - "state": {} - }, - "0942138c6b1f44c8bac865e85538a029": { - "model_module": "@jupyter-widgets/controls", - "model_module_version": "2.0.0", - "model_name": "FloatProgressModel", - "state": { - "bar_style": "success", - "layout": "IPY_MODEL_36f626e49a5b40728c3e9b2299b99cd4", - "max": 1, - "style": "IPY_MODEL_ddcb32006ed84d25bd0633d17e93809e", - "value": 1 - } - }, - "0a634fc58cd548a79002eca6469619f6": { - "model_module": "@jupyter-widgets/base", - "model_module_version": "2.0.0", - "model_name": "LayoutModel", - "state": {} - }, - "0b884ca055cd46708f03f5746251ab85": { - "model_module": "@jupyter-widgets/controls", - "model_module_version": "2.0.0", - "model_name": "HTMLModel", - "state": { - "layout": "IPY_MODEL_a97bf04e73614aafb9ffd52d96768011", - "style": "IPY_MODEL_69cad521b0174f1fb6eb1c9c3c49b98a", - "value": "100%" - } - }, - "0ce0a15148234afdb17365de147be471": { - "model_module": "@jupyter-widgets/controls", - "model_module_version": "2.0.0", - "model_name": "HTMLStyleModel", - "state": { - "description_width": "", - "font_size": null, - "text_color": 
null - } - }, - "0da6a9a13677471790f54a77cd9195e5": { - "model_module": "@jupyter-widgets/base", - "model_module_version": "2.0.0", - "model_name": "LayoutModel", - "state": {} - }, - "0ecf8fdd93a34babb273867999ae570d": { - "model_module": "@jupyter-widgets/controls", - "model_module_version": "2.0.0", - "model_name": "ProgressStyleModel", - "state": { - "description_width": "" - } - }, - "10a380bdc1474e77834e4dc86e44faaf": { - "model_module": "@jupyter-widgets/controls", - "model_module_version": "2.0.0", - "model_name": "ProgressStyleModel", - "state": { - "description_width": "" - } - }, - "161308930d6b4ba7b1d8dabf600c2c83": { - "model_module": "@jupyter-widgets/controls", - "model_module_version": "2.0.0", - "model_name": "ProgressStyleModel", - "state": { - "description_width": "" - } - }, - "17d25484393f4db58e36597d195be4c9": { - "model_module": "@jupyter-widgets/base", - "model_module_version": "2.0.0", - "model_name": "LayoutModel", - "state": {} - }, - "1870bcd884cb46d1873db1f975b69350": { - "model_module": "@jupyter-widgets/controls", - "model_module_version": "2.0.0", - "model_name": "HTMLStyleModel", - "state": { - "description_width": "", - "font_size": null, - "text_color": null - } - }, - "19f4bdbfa7634955a7452063fc4bbb99": { - "model_module": "@jupyter-widgets/controls", - "model_module_version": "2.0.0", - "model_name": "HBoxModel", - "state": { - "children": [ - "IPY_MODEL_50e9f5e7b0ba4cf48ac9a3ffc0618d98", - "IPY_MODEL_aa34cd7bb7674094b605c5d0fbecd00d", - "IPY_MODEL_7aa7aef8cb114828a81bb2145952afbc" - ], - "layout": "IPY_MODEL_92b91f59e2924edf8688edd70946b7e2" - } - }, - "1b1b08e7b2b84e059f336d5d181ecdc7": { - "model_module": "@jupyter-widgets/controls", - "model_module_version": "2.0.0", - "model_name": "HTMLStyleModel", - "state": { - "description_width": "", - "font_size": null, - "text_color": null - } - }, - "1c4502af5e1245b5914b00377decac44": { - "model_module": "@jupyter-widgets/controls", - "model_module_version": "2.0.0", - "model_name": 
"FloatProgressModel", - "state": { - "bar_style": "success", - "layout": "IPY_MODEL_fb2ecab4c8e6407d858c00230eb49ef4", - "max": 1, - "style": "IPY_MODEL_161308930d6b4ba7b1d8dabf600c2c83", - "value": 1 - } - }, - "1d53f4b12c5e414aa2e559af80386b95": { - "model_module": "@jupyter-widgets/controls", - "model_module_version": "2.0.0", - "model_name": "ProgressStyleModel", - "state": { - "description_width": "" - } - }, - "1d88ec425fd44e02b643fce3b0293b10": { - "model_module": "@jupyter-widgets/base", - "model_module_version": "2.0.0", - "model_name": "LayoutModel", - "state": {} - }, - "24bcb8a50e9f48dabd88a26f54b33029": { - "model_module": "@jupyter-widgets/controls", - "model_module_version": "2.0.0", - "model_name": "HTMLStyleModel", - "state": { - "description_width": "", - "font_size": null, - "text_color": null - } - }, - "2c549a4c524f47edb7033299693dd0a6": { - "model_module": "@jupyter-widgets/base", - "model_module_version": "2.0.0", - "model_name": "LayoutModel", - "state": {} - }, - "2d22e6269fe0445586c72aeb7dc2f17c": { - "model_module": "@jupyter-widgets/controls", - "model_module_version": "2.0.0", - "model_name": "HTMLModel", - "state": { - "layout": "IPY_MODEL_faaf66d9ee994ff2b95eb8bf7c2e35df", - "style": "IPY_MODEL_ff518e4efafd4408aace4660827155ed", - "value": " 1/1 [00:00<00:00, 104.59it/s]" - } - }, - "31b3ecd6d12a4144b384f94dbe6a6364": { - "model_module": "@jupyter-widgets/base", - "model_module_version": "2.0.0", - "model_name": "LayoutModel", - "state": {} - }, - "36f626e49a5b40728c3e9b2299b99cd4": { - "model_module": "@jupyter-widgets/base", - "model_module_version": "2.0.0", - "model_name": "LayoutModel", - "state": {} - }, - "3a5fe39a49a4414c9466c0d6734e4270": { - "model_module": "@jupyter-widgets/base", - "model_module_version": "2.0.0", - "model_name": "LayoutModel", - "state": {} - }, - "3abf4f2b2ecb490e9f756f410cd1e60f": { - "model_module": "@jupyter-widgets/controls", - "model_module_version": "2.0.0", - "model_name": "ProgressStyleModel", - 
"state": { - "description_width": "" - } - }, - "3acb5b6a709947d98765e9704cc807b7": { - "model_module": "@jupyter-widgets/base", - "model_module_version": "2.0.0", - "model_name": "LayoutModel", - "state": {} - }, - "3c1ab4844df74358a65626f9369cdab4": { - "model_module": "@jupyter-widgets/controls", - "model_module_version": "2.0.0", - "model_name": "HTMLModel", - "state": { - "layout": "IPY_MODEL_31b3ecd6d12a4144b384f94dbe6a6364", - "style": "IPY_MODEL_1b1b08e7b2b84e059f336d5d181ecdc7", - "value": "100%" - } - }, - "4154f0b5aba24fbc8474f8b8cc658879": { - "model_module": "@jupyter-widgets/controls", - "model_module_version": "2.0.0", - "model_name": "HTMLStyleModel", - "state": { - "description_width": "", - "font_size": null, - "text_color": null - } - }, - "48e66bdd7e764b77b98fb616aa65ece3": { - "model_module": "@jupyter-widgets/base", - "model_module_version": "2.0.0", - "model_name": "LayoutModel", - "state": {} - }, - "48fcadc59030414ab70241bd46bbfa29": { - "model_module": "@jupyter-widgets/controls", - "model_module_version": "2.0.0", - "model_name": "HTMLModel", - "state": { - "layout": "IPY_MODEL_af90e2901f4941cfb82d22985633a270", - "style": "IPY_MODEL_24bcb8a50e9f48dabd88a26f54b33029", - "value": "100%" - } - }, - "49d968f3e1dd4a4688f2dfd2053e3a08": { - "model_module": "@jupyter-widgets/controls", - "model_module_version": "2.0.0", - "model_name": "HTMLStyleModel", - "state": { - "description_width": "", - "font_size": null, - "text_color": null - } - }, - "49f76da5c06f4fb5a2fd0e7c8a097e26": { - "model_module": "@jupyter-widgets/base", - "model_module_version": "2.0.0", - "model_name": "LayoutModel", - "state": {} - }, - "4b5e309320234b6a8b8734984726602b": { - "model_module": "@jupyter-widgets/controls", - "model_module_version": "2.0.0", - "model_name": "HTMLModel", - "state": { - "layout": "IPY_MODEL_17d25484393f4db58e36597d195be4c9", - "style": "IPY_MODEL_0ce0a15148234afdb17365de147be471", - "value": "100%" - } - }, - "4e0930b9b2c34ab39dfe576b0a8b8b7d": 
{ - "model_module": "@jupyter-widgets/base", - "model_module_version": "2.0.0", - "model_name": "LayoutModel", - "state": {} - }, - "5022f3d151934b32a09f389c66584dcd": { - "model_module": "@jupyter-widgets/base", - "model_module_version": "2.0.0", - "model_name": "LayoutModel", - "state": {} - }, - "50e9f5e7b0ba4cf48ac9a3ffc0618d98": { - "model_module": "@jupyter-widgets/controls", - "model_module_version": "2.0.0", - "model_name": "HTMLModel", - "state": { - "layout": "IPY_MODEL_95dbb892b4d047d0b8f9bf2d37ffddbd", - "style": "IPY_MODEL_6c89c8b64a444a82b81f821d56878413", - "value": "100%" - } - }, - "51754be353fe4f16b70f65c057eaece8": { - "model_module": "@jupyter-widgets/base", - "model_module_version": "2.0.0", - "model_name": "LayoutModel", - "state": {} - }, - "51b6a2e7bd1e44dda1b39419a92e7b44": { - "model_module": "@jupyter-widgets/controls", - "model_module_version": "2.0.0", - "model_name": "HTMLModel", - "state": { - "layout": "IPY_MODEL_0a634fc58cd548a79002eca6469619f6", - "style": "IPY_MODEL_55884c776021451e8e44669760ce548e", - "value": "100%" - } - }, - "52956696e0c244d2a0a901dd4d17945b": { - "model_module": "@jupyter-widgets/controls", - "model_module_version": "2.0.0", - "model_name": "ProgressStyleModel", - "state": { - "description_width": "" - } - }, - "55884c776021451e8e44669760ce548e": { - "model_module": "@jupyter-widgets/controls", - "model_module_version": "2.0.0", - "model_name": "HTMLStyleModel", - "state": { - "description_width": "", - "font_size": null, - "text_color": null - } - }, - "57476ec739fc4880ad51794b1679f8f0": { - "model_module": "@jupyter-widgets/controls", - "model_module_version": "2.0.0", - "model_name": "HTMLModel", - "state": { - "layout": "IPY_MODEL_e8c6e1cdcd444c70bb2684053f4ab950", - "style": "IPY_MODEL_4154f0b5aba24fbc8474f8b8cc658879", - "value": "100%" - } - }, - "57de0096a47b452fb5c59a99578e8f9e": { - "model_module": "@jupyter-widgets/controls", - "model_module_version": "2.0.0", - "model_name": "HTMLStyleModel", - 
"state": { - "description_width": "", - "font_size": null, - "text_color": null - } - }, - "5825b646c1884c1eb45849e5d6e5e301": { - "model_module": "@jupyter-widgets/controls", - "model_module_version": "2.0.0", - "model_name": "HBoxModel", - "state": { - "children": [ - "IPY_MODEL_4b5e309320234b6a8b8734984726602b", - "IPY_MODEL_c02ddf83104442ee9b0246e5ffa0dcd6", - "IPY_MODEL_e17d61a465a24e25bbba157f9dad8b3a" - ], - "layout": "IPY_MODEL_3acb5b6a709947d98765e9704cc807b7" - } - }, - "5fbe1ba797f841e5a1825f54b829b65f": { - "model_module": "@jupyter-widgets/base", - "model_module_version": "2.0.0", - "model_name": "LayoutModel", - "state": {} - }, - "64e73ddcb6564b579e5b900baa61c735": { - "model_module": "@jupyter-widgets/controls", - "model_module_version": "2.0.0", - "model_name": "ProgressStyleModel", - "state": { - "description_width": "" - } - }, - "660e7e49d129453991a24d79a041d53d": { - "model_module": "@jupyter-widgets/base", - "model_module_version": "2.0.0", - "model_name": "LayoutModel", - "state": {} - }, - "69075f17b5d44639b01678598fc12664": { - "model_module": "@jupyter-widgets/controls", - "model_module_version": "2.0.0", - "model_name": "HTMLStyleModel", - "state": { - "description_width": "", - "font_size": null, - "text_color": null - } - }, - "69cad521b0174f1fb6eb1c9c3c49b98a": { - "model_module": "@jupyter-widgets/controls", - "model_module_version": "2.0.0", - "model_name": "HTMLStyleModel", - "state": { - "description_width": "", - "font_size": null, - "text_color": null - } - }, - "6aa4bb7fdd604eea82bc1aa93f540841": { - "model_module": "@jupyter-widgets/base", - "model_module_version": "2.0.0", - "model_name": "LayoutModel", - "state": {} - }, - "6c89c8b64a444a82b81f821d56878413": { - "model_module": "@jupyter-widgets/controls", - "model_module_version": "2.0.0", - "model_name": "HTMLStyleModel", - "state": { - "description_width": "", - "font_size": null, - "text_color": null - } - }, - "7194a10b49b14a0aaf2b8632b9bf06a3": { - "model_module": 
"@jupyter-widgets/controls", - "model_module_version": "2.0.0", - "model_name": "HTMLModel", - "state": { - "layout": "IPY_MODEL_c2ae687f635746d38d6303188c07f867", - "style": "IPY_MODEL_8c4e83b4ff88437eb1c3e2271e566d8f", - "value": " 1/1 [00:00<00:00, 59.86it/s]" - } - }, - "732b682f733a4220a62ccd3ed8cc3cfc": { - "model_module": "@jupyter-widgets/controls", - "model_module_version": "2.0.0", - "model_name": "FloatProgressModel", - "state": { - "bar_style": "success", - "layout": "IPY_MODEL_660e7e49d129453991a24d79a041d53d", - "max": 1, - "style": "IPY_MODEL_dc7977c2674a4788b6f6f5ae5e394b9b", - "value": 1 - } - }, - "775464ed67134938ab83bd1c4aebd1b8": { - "model_module": "@jupyter-widgets/controls", - "model_module_version": "2.0.0", - "model_name": "FloatProgressModel", - "state": { - "bar_style": "success", - "layout": "IPY_MODEL_1d88ec425fd44e02b643fce3b0293b10", - "max": 1, - "style": "IPY_MODEL_10a380bdc1474e77834e4dc86e44faaf", - "value": 1 - } - }, - "7964f48701a94ccb8db7bdb015b8343a": { - "model_module": "@jupyter-widgets/base", - "model_module_version": "2.0.0", - "model_name": "LayoutModel", - "state": {} - }, - "7aa7aef8cb114828a81bb2145952afbc": { - "model_module": "@jupyter-widgets/controls", - "model_module_version": "2.0.0", - "model_name": "HTMLModel", - "state": { - "layout": "IPY_MODEL_b0e81ff4e05d44f889d84609f1cb1563", - "style": "IPY_MODEL_7f2ba4d35e5d4029ac5cf3dcbfcde713", - "value": " 1/1 [00:00<00:00, 49.41it/s]" - } - }, - "7f2ba4d35e5d4029ac5cf3dcbfcde713": { - "model_module": "@jupyter-widgets/controls", - "model_module_version": "2.0.0", - "model_name": "HTMLStyleModel", - "state": { - "description_width": "", - "font_size": null, - "text_color": null - } - }, - "8054f5b9dfbe4134a1bbdea5ea28080c": { - "model_module": "@jupyter-widgets/base", - "model_module_version": "2.0.0", - "model_name": "LayoutModel", - "state": {} - }, - "84de673445574607a4442781e3544a3c": { - "model_module": "@jupyter-widgets/controls", - "model_module_version": 
"2.0.0", - "model_name": "FloatProgressModel", - "state": { - "bar_style": "success", - "layout": "IPY_MODEL_49f76da5c06f4fb5a2fd0e7c8a097e26", - "max": 1, - "style": "IPY_MODEL_52956696e0c244d2a0a901dd4d17945b", - "value": 1 - } - }, - "852f404918c849d69b7406562c808576": { - "model_module": "@jupyter-widgets/controls", - "model_module_version": "2.0.0", - "model_name": "HTMLModel", - "state": { - "layout": "IPY_MODEL_a26e4cff52c24acd8b17d2cf2422929a", - "style": "IPY_MODEL_a58234ea9e0e4b59a6c1d0b36f9747b1", - "value": " 1/1 [00:00<00:00, 157.49it/s]" - } - }, - "88e6282ae60d473e9a92b2518e778e4a": { - "model_module": "@jupyter-widgets/base", - "model_module_version": "2.0.0", - "model_name": "LayoutModel", - "state": {} - }, - "8c4e83b4ff88437eb1c3e2271e566d8f": { - "model_module": "@jupyter-widgets/controls", - "model_module_version": "2.0.0", - "model_name": "HTMLStyleModel", - "state": { - "description_width": "", - "font_size": null, - "text_color": null - } - }, - "8cee0a7f821e424cbff7a265c167a4ad": { - "model_module": "@jupyter-widgets/controls", - "model_module_version": "2.0.0", - "model_name": "HTMLStyleModel", - "state": { - "description_width": "", - "font_size": null, - "text_color": null - } - }, - "8e10ab511eb744ec85dd26904ceb49f3": { - "model_module": "@jupyter-widgets/base", - "model_module_version": "2.0.0", - "model_name": "LayoutModel", - "state": {} - }, - "909f981cd976432695b6b1e75e7afb56": { - "model_module": "@jupyter-widgets/base", - "model_module_version": "2.0.0", - "model_name": "LayoutModel", - "state": {} - }, - "92b91f59e2924edf8688edd70946b7e2": { - "model_module": "@jupyter-widgets/base", - "model_module_version": "2.0.0", - "model_name": "LayoutModel", - "state": {} - }, - "95dbb892b4d047d0b8f9bf2d37ffddbd": { - "model_module": "@jupyter-widgets/base", - "model_module_version": "2.0.0", - "model_name": "LayoutModel", - "state": {} - }, - "961d1370247542699ae950e20261bb8e": { - "model_module": "@jupyter-widgets/controls", - 
"model_module_version": "2.0.0", - "model_name": "HTMLStyleModel", - "state": { - "description_width": "", - "font_size": null, - "text_color": null - } - }, - "98665333d2dc4d60b5be86289ccad240": { - "model_module": "@jupyter-widgets/controls", - "model_module_version": "2.0.0", - "model_name": "HBoxModel", - "state": { - "children": [ - "IPY_MODEL_0b884ca055cd46708f03f5746251ab85", - "IPY_MODEL_083b64b1c2ab4f29859b2c9503f5f38a", - "IPY_MODEL_2d22e6269fe0445586c72aeb7dc2f17c" - ], - "layout": "IPY_MODEL_c203cfc03f5a4507aa7a1f0e0af2428e" - } - }, - "9fde706be5b24d74aec14968d7ebfb1b": { - "model_module": "@jupyter-widgets/controls", - "model_module_version": "2.0.0", - "model_name": "HBoxModel", - "state": { - "children": [ - "IPY_MODEL_cd9096c25cb44835868c4e6024649769", - "IPY_MODEL_0942138c6b1f44c8bac865e85538a029", - "IPY_MODEL_cf9232dca9274d71a04a7f1334d747d4" - ], - "layout": "IPY_MODEL_ad7d4fe382d442e1977b1b8eb2839039" - } - }, - "a12e5f36c744406b90a69a42b010d2ba": { - "model_module": "@jupyter-widgets/controls", - "model_module_version": "2.0.0", - "model_name": "HBoxModel", - "state": { - "children": [ - "IPY_MODEL_02e0621cfdb94913bee8927c354b374a", - "IPY_MODEL_0015549f1fa849569e28890bc177c2e3", - "IPY_MODEL_a38ca3a1be27436d9f99bd0acf85231d" - ], - "layout": "IPY_MODEL_5022f3d151934b32a09f389c66584dcd" - } - }, - "a26e4cff52c24acd8b17d2cf2422929a": { - "model_module": "@jupyter-widgets/base", - "model_module_version": "2.0.0", - "model_name": "LayoutModel", - "state": {} - }, - "a38ca3a1be27436d9f99bd0acf85231d": { - "model_module": "@jupyter-widgets/controls", - "model_module_version": "2.0.0", - "model_name": "HTMLModel", - "state": { - "layout": "IPY_MODEL_ae22b315d0ed4bc0a66d3315ca621852", - "style": "IPY_MODEL_c84083de33bd4559b10c4ea607f7b8a6", - "value": " 1/1 [00:00<00:00, 72.05it/s]" - } - }, - "a58234ea9e0e4b59a6c1d0b36f9747b1": { - "model_module": "@jupyter-widgets/controls", - "model_module_version": "2.0.0", - "model_name": "HTMLStyleModel", - 
"state": { - "description_width": "", - "font_size": null, - "text_color": null - } - }, - "a64dc6587a824941aff845dcc0029486": { - "model_module": "@jupyter-widgets/controls", - "model_module_version": "2.0.0", - "model_name": "FloatProgressModel", - "state": { - "bar_style": "success", - "layout": "IPY_MODEL_48e66bdd7e764b77b98fb616aa65ece3", - "max": 1, - "style": "IPY_MODEL_1d53f4b12c5e414aa2e559af80386b95", - "value": 1 - } - }, - "a7ce671c09364c8ea98bb321426e486f": { - "model_module": "@jupyter-widgets/controls", - "model_module_version": "2.0.0", - "model_name": "HTMLModel", - "state": { - "layout": "IPY_MODEL_4e0930b9b2c34ab39dfe576b0a8b8b7d", - "style": "IPY_MODEL_961d1370247542699ae950e20261bb8e", - "value": " 1/1 [00:00<00:00, 69.40it/s]" - } - }, - "a8a1245c0969445c8784845c182feda0": { - "model_module": "@jupyter-widgets/controls", - "model_module_version": "2.0.0", - "model_name": "HTMLStyleModel", - "state": { - "description_width": "", - "font_size": null, - "text_color": null - } - }, - "a97bf04e73614aafb9ffd52d96768011": { - "model_module": "@jupyter-widgets/base", - "model_module_version": "2.0.0", - "model_name": "LayoutModel", - "state": {} - }, - "aa34cd7bb7674094b605c5d0fbecd00d": { - "model_module": "@jupyter-widgets/controls", - "model_module_version": "2.0.0", - "model_name": "FloatProgressModel", - "state": { - "bar_style": "success", - "layout": "IPY_MODEL_8e10ab511eb744ec85dd26904ceb49f3", - "max": 1, - "style": "IPY_MODEL_64e73ddcb6564b579e5b900baa61c735", - "value": 1 - } - }, - "ad7d4fe382d442e1977b1b8eb2839039": { - "model_module": "@jupyter-widgets/base", - "model_module_version": "2.0.0", - "model_name": "LayoutModel", - "state": {} - }, - "ae22b315d0ed4bc0a66d3315ca621852": { - "model_module": "@jupyter-widgets/base", - "model_module_version": "2.0.0", - "model_name": "LayoutModel", - "state": {} - }, - "af6fb457f0cd4fa191fa021ed1373e3c": { - "model_module": "@jupyter-widgets/controls", - "model_module_version": "2.0.0", - 
"model_name": "HTMLModel", - "state": { - "layout": "IPY_MODEL_6aa4bb7fdd604eea82bc1aa93f540841", - "style": "IPY_MODEL_bdaa750ca3684382aca31776f99ae320", - "value": "100%" - } - }, - "af826b782a20456cbcc071184a9662ab": { - "model_module": "@jupyter-widgets/base", - "model_module_version": "2.0.0", - "model_name": "LayoutModel", - "state": {} - }, - "af90e2901f4941cfb82d22985633a270": { - "model_module": "@jupyter-widgets/base", - "model_module_version": "2.0.0", - "model_name": "LayoutModel", - "state": {} - }, - "b0752f4168534b64b236c39535fe4601": { - "model_module": "@jupyter-widgets/controls", - "model_module_version": "2.0.0", - "model_name": "HBoxModel", - "state": { - "children": [ - "IPY_MODEL_3c1ab4844df74358a65626f9369cdab4", - "IPY_MODEL_775464ed67134938ab83bd1c4aebd1b8", - "IPY_MODEL_a7ce671c09364c8ea98bb321426e486f" - ], - "layout": "IPY_MODEL_ef0de7e1028241db9a7758c0fb94d130" - } - }, - "b0e81ff4e05d44f889d84609f1cb1563": { - "model_module": "@jupyter-widgets/base", - "model_module_version": "2.0.0", - "model_name": "LayoutModel", - "state": {} - }, - "b42c67c0bf924b2eb0804d1dfb90c0dd": { - "model_module": "@jupyter-widgets/controls", - "model_module_version": "2.0.0", - "model_name": "HTMLModel", - "state": { - "layout": "IPY_MODEL_51754be353fe4f16b70f65c057eaece8", - "style": "IPY_MODEL_a8a1245c0969445c8784845c182feda0", - "value": " 1/1 [00:00<00:00, 165.40it/s]" - } - }, - "bac50950383e49998fd6c1b73a6285dd": { - "model_module": "@jupyter-widgets/controls", - "model_module_version": "2.0.0", - "model_name": "HBoxModel", - "state": { - "children": [ - "IPY_MODEL_57476ec739fc4880ad51794b1679f8f0", - "IPY_MODEL_732b682f733a4220a62ccd3ed8cc3cfc", - "IPY_MODEL_b42c67c0bf924b2eb0804d1dfb90c0dd" - ], - "layout": "IPY_MODEL_3a5fe39a49a4414c9466c0d6734e4270" - } - }, - "bd0cc5cfdcf3448d991de44a55cc0ed2": { - "model_module": "@jupyter-widgets/base", - "model_module_version": "2.0.0", - "model_name": "LayoutModel", - "state": {} - }, - 
"bdaa750ca3684382aca31776f99ae320": { - "model_module": "@jupyter-widgets/controls", - "model_module_version": "2.0.0", - "model_name": "HTMLStyleModel", - "state": { - "description_width": "", - "font_size": null, - "text_color": null - } - }, - "c02ddf83104442ee9b0246e5ffa0dcd6": { - "model_module": "@jupyter-widgets/controls", - "model_module_version": "2.0.0", - "model_name": "FloatProgressModel", - "state": { - "bar_style": "success", - "layout": "IPY_MODEL_88e6282ae60d473e9a92b2518e778e4a", - "max": 1, - "style": "IPY_MODEL_3abf4f2b2ecb490e9f756f410cd1e60f", - "value": 1 - } - }, - "c203cfc03f5a4507aa7a1f0e0af2428e": { - "model_module": "@jupyter-widgets/base", - "model_module_version": "2.0.0", - "model_name": "LayoutModel", - "state": {} - }, - "c20989ccfb18473cab160d5839676691": { - "model_module": "@jupyter-widgets/controls", - "model_module_version": "2.0.0", - "model_name": "HTMLModel", - "state": { - "layout": "IPY_MODEL_af826b782a20456cbcc071184a9662ab", - "style": "IPY_MODEL_1870bcd884cb46d1873db1f975b69350", - "value": " 1/1 [00:00<00:00, 133.59it/s]" - } - }, - "c2ae687f635746d38d6303188c07f867": { - "model_module": "@jupyter-widgets/base", - "model_module_version": "2.0.0", - "model_name": "LayoutModel", - "state": {} - }, - "c84083de33bd4559b10c4ea607f7b8a6": { - "model_module": "@jupyter-widgets/controls", - "model_module_version": "2.0.0", - "model_name": "HTMLStyleModel", - "state": { - "description_width": "", - "font_size": null, - "text_color": null - } - }, - "cd9096c25cb44835868c4e6024649769": { - "model_module": "@jupyter-widgets/controls", - "model_module_version": "2.0.0", - "model_name": "HTMLModel", - "state": { - "layout": "IPY_MODEL_f4f56f1e8c424c3b802cac1e1dbf4eed", - "style": "IPY_MODEL_8cee0a7f821e424cbff7a265c167a4ad", - "value": "100%" - } - }, - "cf9232dca9274d71a04a7f1334d747d4": { - "model_module": "@jupyter-widgets/controls", - "model_module_version": "2.0.0", - "model_name": "HTMLModel", - "state": { - "layout": 
"IPY_MODEL_0da6a9a13677471790f54a77cd9195e5", - "style": "IPY_MODEL_69075f17b5d44639b01678598fc12664", - "value": " 1/1 [00:00<00:00, 129.91it/s]" - } - }, - "d1344d6bf51e43ff9f714ddf98a34dfe": { - "model_module": "@jupyter-widgets/controls", - "model_module_version": "2.0.0", - "model_name": "HBoxModel", - "state": { - "children": [ - "IPY_MODEL_48fcadc59030414ab70241bd46bbfa29", - "IPY_MODEL_1c4502af5e1245b5914b00377decac44", - "IPY_MODEL_852f404918c849d69b7406562c808576" - ], - "layout": "IPY_MODEL_5fbe1ba797f841e5a1825f54b829b65f" - } - }, - "dc7977c2674a4788b6f6f5ae5e394b9b": { - "model_module": "@jupyter-widgets/controls", - "model_module_version": "2.0.0", - "model_name": "ProgressStyleModel", - "state": { - "description_width": "" - } - }, - "ddcb32006ed84d25bd0633d17e93809e": { - "model_module": "@jupyter-widgets/controls", - "model_module_version": "2.0.0", - "model_name": "ProgressStyleModel", - "state": { - "description_width": "" - } - }, - "df9e83fa4e154886acd51fb14f4320f7": { - "model_module": "@jupyter-widgets/controls", - "model_module_version": "2.0.0", - "model_name": "HBoxModel", - "state": { - "children": [ - "IPY_MODEL_51b6a2e7bd1e44dda1b39419a92e7b44", - "IPY_MODEL_84de673445574607a4442781e3544a3c", - "IPY_MODEL_7194a10b49b14a0aaf2b8632b9bf06a3" - ], - "layout": "IPY_MODEL_8054f5b9dfbe4134a1bbdea5ea28080c" - } - }, - "e17d61a465a24e25bbba157f9dad8b3a": { - "model_module": "@jupyter-widgets/controls", - "model_module_version": "2.0.0", - "model_name": "HTMLModel", - "state": { - "layout": "IPY_MODEL_086d9e84542c4d5d8f5b7721ccebcedb", - "style": "IPY_MODEL_49d968f3e1dd4a4688f2dfd2053e3a08", - "value": " 1/1 [00:00<00:00, 153.08it/s]" - } - }, - "e8c6e1cdcd444c70bb2684053f4ab950": { - "model_module": "@jupyter-widgets/base", - "model_module_version": "2.0.0", - "model_name": "LayoutModel", - "state": {} - }, - "ef0de7e1028241db9a7758c0fb94d130": { - "model_module": "@jupyter-widgets/base", - "model_module_version": "2.0.0", - "model_name": 
"LayoutModel", - "state": {} - }, - "f303fdf2e7e549d994d5d0aa9f4fdc7e": { - "model_module": "@jupyter-widgets/controls", - "model_module_version": "2.0.0", - "model_name": "HBoxModel", - "state": { - "children": [ - "IPY_MODEL_af6fb457f0cd4fa191fa021ed1373e3c", - "IPY_MODEL_a64dc6587a824941aff845dcc0029486", - "IPY_MODEL_c20989ccfb18473cab160d5839676691" - ], - "layout": "IPY_MODEL_bd0cc5cfdcf3448d991de44a55cc0ed2" - } - }, - "f4f56f1e8c424c3b802cac1e1dbf4eed": { - "model_module": "@jupyter-widgets/base", - "model_module_version": "2.0.0", - "model_name": "LayoutModel", - "state": {} - }, - "faaf66d9ee994ff2b95eb8bf7c2e35df": { - "model_module": "@jupyter-widgets/base", - "model_module_version": "2.0.0", - "model_name": "LayoutModel", - "state": {} - }, - "fb2ecab4c8e6407d858c00230eb49ef4": { - "model_module": "@jupyter-widgets/base", - "model_module_version": "2.0.0", - "model_name": "LayoutModel", - "state": {} - }, - "ff518e4efafd4408aace4660827155ed": { - "model_module": "@jupyter-widgets/controls", - "model_module_version": "2.0.0", - "model_name": "HTMLStyleModel", - "state": { - "description_width": "", - "font_size": null, - "text_color": null - } - } - }, - "version_major": 2, - "version_minor": 0 - } - } - }, - "nbformat": 4, - "nbformat_minor": 5 -} diff --git a/src/platform/qcircuit_evaluation.ipynb b/src/platform/qcircuit_evaluation.ipynb deleted file mode 100644 index f9f2e81..0000000 --- a/src/platform/qcircuit_evaluation.ipynb +++ /dev/null @@ -1,162 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "id": "a8980c24-d62e-462b-ba89-3195cfdcc374", - "metadata": {}, - "source": [ - "# Quantum circuit evaluation" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "a8832bdd-f61c-44e1-8619-a9cb352ba768", - "metadata": {}, - "outputs": [], - "source": [ - "#| default_exp platform.qcircuit_evaluation" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "06272f6f-b4e3-4504-a90a-feebbf6ad821", - "metadata": {}, - 
"outputs": [], - "source": [ - "#| export\n", - "from genQC.imports import *\n", - "from genQC.platform.qcircuit_dataset_construction import *\n", - "from genQC.platform.simulation.qcircuit_sim import schmidt_rank_vector, optimize_circuit\n", - "\n", - "import qiskit.quantum_info as qi\n", - "from qiskit import QuantumCircuit" - ] - }, - { - "cell_type": "markdown", - "id": "c19c4594-4749-4e91-bfc5-27cf95508de0", - "metadata": {}, - "source": [ - "## Gate count" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "5df02715-8767-43ff-871d-8075871f1edf", - "metadata": {}, - "outputs": [], - "source": [ - "#| export\n", - "def sort_into_bins(x, y, y_uniques):\n", - " \n", - " x_binned = []\n", - " y_binned = []\n", - " \n", - " for y_unique in y_uniques:\n", - " \n", - " comp = torch.all(y==y_unique, dim=-1)\n", - " indices = comp.nonzero().squeeze()\n", - " \n", - " x_binned.append(x[indices])\n", - " y_binned.append(y[indices])\n", - " \n", - " y_bins = [y[0] for y in y_binned]\n", - " \n", - " return x_binned, y_binned, y_bins" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "e2357103-f41c-4caa-ae05-0f90bc26ebcf", - "metadata": {}, - "outputs": [], - "source": [ - "#| export\n", - "def extract_gate_number(qc: QuantumCircuit, gate_pool, max_gates): \n", - " gate_classes = {\"empty\":0} | {x().name:i+1 for i,x in enumerate(gate_pool)}\n", - " \n", - " gate_cnt = np.zeros(len(gate_classes), dtype=int) \n", - " \n", - " if hasattr(qc, \"data\"): \n", - " for t, gate in enumerate(qc.data): \n", - " gate_id = gate_classes[gate.operation.name] \n", - " gate_cnt[gate_id] += 1\n", - " \n", - " gate_cnt[0] = max_gates - sum(gate_cnt[1:])\n", - " \n", - " return gate_cnt, gate_classes" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "882cd1bd-2c14-4739-9bd4-cb5a42f450d2", - "metadata": {}, - "outputs": [], - "source": [ - "#| export\n", - "def get_gate_stat_from_tensors(tensors, gate_pool):\n", - " for i,tensor 
in tqdm(enumerate(tensors), total=tensors.shape[0]): \n", - " qc = decode_circuit(tensor, gate_pool)\n", - " \n", - " t_gate_cnts, gate_dict = extract_gate_number(qc, gate_pool, max_gates=tensor.shape[1])\n", - " \n", - " if i > 0: gate_cnts = np.vstack([gate_cnts, t_gate_cnts])\n", - " else: gate_cnts = t_gate_cnts\n", - "\n", - " return gate_cnts, gate_dict" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "0b67ae98-5d20-4e5c-bad9-498a078b6b4c", - "metadata": {}, - "outputs": [], - "source": [ - "#| export\n", - "def get_gate_stat_from_circuits(qcs: list, gate_pool, max_gates):\n", - " for i,qc in tqdm(enumerate(qcs), total=len(qcs)):\n", - " \n", - " t_gate_cnts, gate_dict = extract_gate_number(qc, gate_pool, max_gates)\n", - " \n", - " if i > 0: gate_cnts = np.vstack([gate_cnts, t_gate_cnts])\n", - " else: gate_cnts = t_gate_cnts\n", - "\n", - " return gate_cnts, gate_dict" - ] - }, - { - "cell_type": "markdown", - "id": "f41f26a8-ac40-4e91-8c0e-1ef07a0fd4f4", - "metadata": {}, - "source": [ - "# Export -" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "a0474216-8e0c-4ba7-9a37-571ac7d8e82c", - "metadata": {}, - "outputs": [], - "source": [ - "#| hide\n", - "import nbdev; nbdev.nbdev_export()" - ] - } - ], - "metadata": { - "kernelspec": { - "display_name": "python3", - "language": "python", - "name": "python3" - } - }, - "nbformat": 4, - "nbformat_minor": 5 -} diff --git a/src/platform/qcircuit_metrics.ipynb b/src/platform/qcircuit_metrics.ipynb deleted file mode 100644 index 1f76c8a..0000000 --- a/src/platform/qcircuit_metrics.ipynb +++ /dev/null @@ -1,123 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "id": "a8980c24-d62e-462b-ba89-3195cfdcc374", - "metadata": {}, - "source": [ - "# Quantum circuit metrics" - ] - }, - { - "cell_type": "markdown", - "id": "b1beda93-a604-4ba9-919b-39e59f53580f", - "metadata": {}, - "source": [ - "Norms for unitary compilation." 
- ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "a8832bdd-f61c-44e1-8619-a9cb352ba768", - "metadata": {}, - "outputs": [], - "source": [ - "#| default_exp platform.qcircuit_metrics" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "06272f6f-b4e3-4504-a90a-feebbf6ad821", - "metadata": {}, - "outputs": [], - "source": [ - "#| export\n", - "from genQC.imports import *" - ] - }, - { - "cell_type": "markdown", - "id": "c19c4594-4749-4e91-bfc5-27cf95508de0", - "metadata": {}, - "source": [ - "## Unitary distances" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "1b178cbc-d116-49c9-862d-16f646daa39b", - "metadata": {}, - "outputs": [], - "source": [ - "#| export\n", - "class Unitary_FrobeniusNorm:\n", - " #defined in https://arxiv.org/pdf/2106.05649.pdf\n", - " \n", - " @staticmethod\n", - " def distance(approx_U: torch.tensor, target_U: torch.tensor):\n", - " d = 0.5 * torch.linalg.matrix_norm((approx_U-target_U), ord=\"fro\")**2\n", - " return d\n", - " \n", - " @staticmethod\n", - " def name():\n", - " return \"Frobenius-Norm\"" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "9984b83d-7f70-4dfd-8ab5-f32ad132e2a2", - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "tensor(1.5000)" - ] - }, - "execution_count": null, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "a = torch.tensor([[1,2], [2, 1]]).float()\n", - "b = torch.tensor([[2,3], [2, 2]]).float()\n", - "\n", - "g = Unitary_FrobeniusNorm\n", - "g.distance(a,b)" - ] - }, - { - "cell_type": "markdown", - "id": "f41f26a8-ac40-4e91-8c0e-1ef07a0fd4f4", - "metadata": {}, - "source": [ - "# Export -" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "a0474216-8e0c-4ba7-9a37-571ac7d8e82c", - "metadata": {}, - "outputs": [], - "source": [ - "#| hide\n", - "import nbdev; nbdev.nbdev_export()" - ] - } - ], - "metadata": { - "kernelspec": { - "display_name": 
"python3", - "language": "python", - "name": "python3" - } - }, - "nbformat": 4, - "nbformat_minor": 5 -} diff --git a/src/platform/qcircuit_util.ipynb b/src/platform/qcircuit_util.ipynb deleted file mode 100644 index 49b09f1..0000000 --- a/src/platform/qcircuit_util.ipynb +++ /dev/null @@ -1,140 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "id": "a8980c24-d62e-462b-ba89-3195cfdcc374", - "metadata": {}, - "source": [ - "# Quantum circuit utils" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "a8832bdd-f61c-44e1-8619-a9cb352ba768", - "metadata": {}, - "outputs": [], - "source": [ - "#| default_exp platform.qcircuit_util" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "06272f6f-b4e3-4504-a90a-feebbf6ad821", - "metadata": {}, - "outputs": [], - "source": [ - "#| export\n", - "from genQC.imports import *" - ] - }, - { - "cell_type": "markdown", - "id": "5ba7e853-c591-4943-b023-35034b7169a5", - "metadata": {}, - "source": [ - "## SRV" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "03461e06-4205-4d4a-9c46-8c637d2d8fd3", - "metadata": {}, - "outputs": [], - "source": [ - "#| export\n", - "def get_element_matching_indices(a, b):\n", - " \"\"\"Compares (2d) `a` with `b`. 
Returns the indices of `b`, where a element of `a` matches with `b`.\"\"\"\n", - " # Expand dimensions of a to match the shape of b for element-wise comparison\n", - " expanded_a = a.unsqueeze(0).expand(b.shape[0], *a.shape) # [b0, a0, a1]\n", - " expanded_b = b.unsqueeze(1) # [b0, 1, b1]\n", - " \n", - " # Compare all vector entries of a with all vectors of b\n", - " matches = torch.all(expanded_a == expanded_b, dim=-1)\n", - "\n", - " matching_indices = torch.nonzero(torch.any(matches, dim=1)).squeeze()\n", - " \n", - " if matching_indices.dim() == 0: matching_indices = torch.tensor([matching_indices])\n", - "\n", - " return matching_indices" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "8dfeb984-9bd2-4000-8cb5-cde7768cb9e4", - "metadata": {}, - "outputs": [], - "source": [ - "#| export\n", - "def get_entanglement_bins(num_of_qubits):\n", - " \"\"\"Returns all SRV sorted in entangle bins which correspond to a number of entangled qubits.\"\"\"\n", - " dist_srvs = [x for x in itertools.product(*([[1,2]]*num_of_qubits))]\n", - " dist_srvs = np.array(dist_srvs, dtype=int)[np.sum(dist_srvs, axis=1)!=num_of_qubits+1].tolist()\n", - " dist_srvs = sorted(dist_srvs, key=lambda x: sum(x))\n", - " dist_srvs = np.array(dist_srvs)\n", - " \n", - " entangle = [1] + [scipy.special.comb(num_of_qubits, i, exact=True) for i in range(2, num_of_qubits)]\n", - " \n", - " entanglement_bins = np.split(dist_srvs, np.cumsum(entangle))\n", - " \n", - " ent_bits = [f\"{sum(n[0])-num_of_qubits} qubit entangled\" for n in entanglement_bins]\n", - " \n", - " return [x.tolist() for x in entanglement_bins], ent_bits" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "c5d72d74-3619-41da-ac6e-c6219688a44b", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "0 qubit entangled : [[1, 1, 1, 1, 1]]\n", - "2 qubit entangled : [[1, 1, 1, 2, 2], [1, 1, 2, 1, 2], [1, 1, 2, 2, 1], [1, 2, 1, 1, 2], [1, 2, 1, 2, 
1], [1, 2, 2, 1, 1], [2, 1, 1, 1, 2], [2, 1, 1, 2, 1], [2, 1, 2, 1, 1], [2, 2, 1, 1, 1]]\n", - "3 qubit entangled : [[1, 1, 2, 2, 2], [1, 2, 1, 2, 2], [1, 2, 2, 1, 2], [1, 2, 2, 2, 1], [2, 1, 1, 2, 2], [2, 1, 2, 1, 2], [2, 1, 2, 2, 1], [2, 2, 1, 1, 2], [2, 2, 1, 2, 1], [2, 2, 2, 1, 1]]\n", - "4 qubit entangled : [[1, 2, 2, 2, 2], [2, 1, 2, 2, 2], [2, 2, 1, 2, 2], [2, 2, 2, 1, 2], [2, 2, 2, 2, 1]]\n", - "5 qubit entangled : [[2, 2, 2, 2, 2]]\n" - ] - } - ], - "source": [ - "for srvs,label in zip(*get_entanglement_bins(5)):\n", - " print(label, \":\", srvs)" - ] - }, - { - "cell_type": "markdown", - "id": "f41f26a8-ac40-4e91-8c0e-1ef07a0fd4f4", - "metadata": {}, - "source": [ - "# Export -" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "a0474216-8e0c-4ba7-9a37-571ac7d8e82c", - "metadata": {}, - "outputs": [], - "source": [ - "#| hide\n", - "import nbdev; nbdev.nbdev_export()" - ] - } - ], - "metadata": { - "kernelspec": { - "display_name": "python3", - "language": "python", - "name": "python3" - } - }, - "nbformat": 4, - "nbformat_minor": 5 -} diff --git a/src/platform/simulation.ipynb b/src/platform/simulation.ipynb new file mode 100644 index 0000000..0397cdd --- /dev/null +++ b/src/platform/simulation.ipynb @@ -0,0 +1,156 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "id": "a8980c24-d62e-462b-ba89-3195cfdcc374", + "metadata": {}, + "source": [ + "# Simulation backend\n", + "\n", + "> Class to load and run corresponding backends." 
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "a8832bdd-f61c-44e1-8619-a9cb352ba768", + "metadata": {}, + "outputs": [], + "source": [ + "#| default_exp platform.simulation" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "06272f6f-b4e3-4504-a90a-feebbf6ad821", + "metadata": {}, + "outputs": [], + "source": [ + "#| export\n", + "from genQC.imports import *\n", + "from genQC.platform.backends.base_backend import BaseBackend" + ] + }, + { + "cell_type": "markdown", + "id": "27afa544-b435-4ad2-9897-d731ccfc100d", + "metadata": {}, + "source": [ + "## Backend types" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "3e0f28ff-2843-4269-8bb7-2f967928bad1", + "metadata": {}, + "outputs": [], + "source": [ + "#| export\n", + "class CircuitBackendType(enum.Enum): \n", + " QISKIT = enum.auto()\n", + " CUDAQ = enum.auto() \n", + " PENNYLANE = enum.auto() \n", + "\n", + "GenericBackendType = Union[CircuitBackendType]" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "961ce363-003b-49d4-95f5-83ebeab1ec2c", + "metadata": {}, + "outputs": [], + "source": [ + "#| export\n", + "class TensorEncodingType(enum.Enum): \n", + " CIRCUIT = enum.auto()\n", + "\n", + "def is_circuit_type(backend_type): return backend_type in CircuitBackendType" + ] + }, + { + "cell_type": "markdown", + "id": "1bf989ee-ba6d-45a8-ac7e-c71947e85b87", + "metadata": {}, + "source": [ + "## Simulator" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "bb0eca29-bf36-4696-9162-f118c1b971de", + "metadata": {}, + "outputs": [], + "source": [ + "#| export\n", + "class Simulator():\n", + " \"\"\"Basic class for handling backend types.\"\"\"\n", + " \n", + " def __init__(self, backend: GenericBackendType, *args, **kwargs) -> BaseBackend: \n", + " match backend: \n", + " case CircuitBackendType.QISKIT: \n", + " from genQC.platform.backends.circuits_qiskit import CircuitsQiskitBackend \n", + " backend = 
CircuitsQiskitBackend(*args, **kwargs)\n", + " \n", + " case CircuitBackendType.CUDAQ:\n", + " from genQC.platform.backends.circuits_cudaq import CircuitsCudaqBackend \n", + " backend = CircuitsCudaqBackend(*args, **kwargs)\n", + "\n", + " case CircuitBackendType.PENNYLANE:\n", + " from genQC.platform.backends.circuits_pennylane import CircuitsPennylaneBackend \n", + " backend = CircuitsPennylaneBackend(*args, **kwargs)\n", + "\n", + " case _:\n", + " raise NotImplementedError(f\"Not implemented given backend: {backend}\")\n", + " \n", + " self.backend = backend\n", + "\n", + " \n", + " def backend_to_genqc(self, *args, **kwargs):\n", + " return self.backend.backend_to_genqc(*args, **kwargs)\n", + "\n", + " \n", + " def genqc_to_backend(self, *args, **kwargs):\n", + " return self.backend.genqc_to_backend(*args, **kwargs)" + ] + }, + { + "cell_type": "markdown", + "id": "f41f26a8-ac40-4e91-8c0e-1ef07a0fd4f4", + "metadata": {}, + "source": [ + "# Export -" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "a0474216-8e0c-4ba7-9a37-571ac7d8e82c", + "metadata": {}, + "outputs": [], + "source": [ + "#| hide\n", + "import nbdev; nbdev.nbdev_export()" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "python3", + "language": "python", + "name": "python3" + }, + "widgets": { + "application/vnd.jupyter.widget-state+json": { + "state": {}, + "version_major": 2, + "version_minor": 0 + } + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/src/platform/simulation/qcircuit_sim.ipynb b/src/platform/simulation/qcircuit_sim.ipynb deleted file mode 100644 index 2f5e73b..0000000 --- a/src/platform/simulation/qcircuit_sim.ipynb +++ /dev/null @@ -1,508 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "id": "355f5771-96ee-4eb7-a96a-7a5e65f4c9b3", - "metadata": {}, - "source": [ - "# Qiskit: quantum circuit simulation" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "b760a97f-1fe6-4f5e-ab5b-944b0dc5281e", - 
"metadata": {}, - "outputs": [], - "source": [ - "#| default_exp platform.simulation.qcircuit_sim" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "2e6b78f1-4a7c-42aa-bdc5-88c3d79d13fd", - "metadata": {}, - "outputs": [], - "source": [ - "#| export\n", - "from genQC.imports import *\n", - "from genQC.config_loader import *\n", - "\n", - "import qiskit.quantum_info as qi\n", - "from qiskit import QuantumCircuit, transpile\n", - "from qiskit.circuit.gate import Gate\n", - "import qiskit.circuit.library as ql" - ] - }, - { - "cell_type": "markdown", - "id": "c3f361d8-9fed-4c98-9dbb-6b0be5395158", - "metadata": {}, - "source": [ - "## Circuit" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "62a66436-76b4-4f7b-8577-32b47e936325", - "metadata": {}, - "outputs": [], - "source": [ - "#| export\n", - "def get_number_of_gate_params(gate_cls):\n", - " return gate_cls.__init__.__code__.co_argcount - len(gate_cls.__init__.__defaults__) - 1 # python: gives you the number of any arguments BEFORE *args, minus ones that have a default, -1 for self parameter of classes" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "35351940-c10d-4d6b-9bb9-9c8990143a37", - "metadata": {}, - "outputs": [], - "source": [ - "#| export\n", - "def gate_pool_to_gate_classes(gate_pool: list[Gate]): \n", - " \"\"\"Creates a vocabulary from a gate pool.\"\"\"\n", - " classes = {}\n", - " \n", - " for i,cls in enumerate(gate_pool):\n", - " num_of_paramters = get_number_of_gate_params(cls)\n", - " name = cls(*[0]*num_of_paramters).name\n", - " classes[name] = (i+1)\n", - " \n", - " return classes" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "84e71331-15b4-4877-a331-e67ebb44ba3a", - "metadata": {}, - "outputs": [], - "source": [ - "#| export\n", - "def instruction_name_to_qiskit_gate(name: str) -> Gate:\n", - " match name:\n", - " case \"swap\": name = \"Swap\"\n", - " case \"cp\": name = \"CPhase\"\n", - " case 
_: name = name.upper()\n", - " \n", - " return get_obj_from_str(f\"qiskit.circuit.library.standard_gates.{name}Gate\")" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "aa7679d4-769f-48f4-a261-b55c646c5a2d", - "metadata": {}, - "outputs": [], - "source": [ - "#| export\n", - "def schmidt_rank_vector(densityMatrix: qi.DensityMatrix): \n", - " \"\"\"Return the SRV of a `qi.DensityMatrix`.\"\"\"\n", - " systems_cnt = len(densityMatrix.dims()) \n", - " total_trace = set(range(systems_cnt)) \n", - " rank_vector = []\n", - " \n", - " for i in range(systems_cnt): \n", - " trace = list(total_trace - {i})\n", - " red_densityMatrix = qi.partial_trace(densityMatrix, trace) \n", - " # r = np.count_nonzero(np.linalg.eigvals(red_densityMatrix) > 1e-14) # was slower during testing \n", - " r = np.linalg.matrix_rank(red_densityMatrix, hermitian=True).item() \n", - " rank_vector.append(r)\n", - " \n", - " return rank_vector" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "39dc10a2-340b-419a-8093-baae068575c6", - "metadata": {}, - "outputs": [], - "source": [ - "#| export\n", - "def rnd_circuit(num_of_qubits, num_of_gates, gate_pool: list[Gate], rng):\n", - " \"\"\"Create a random circuit.\"\"\"\n", - " qc = QuantumCircuit(num_of_qubits) \n", - " gate_indices = rng.choice(len(gate_pool), num_of_gates)\n", - " \n", - " for gate_index in gate_indices:\n", - " gate_qiskit_class = gate_pool[gate_index]\n", - " \n", - " num_of_paramters = get_number_of_gate_params(gate_qiskit_class)\n", - " params = rng.uniform(low=0, high=2*np.pi, size=num_of_paramters) if num_of_paramters > 0 else [] # random between 0 and 2pi\n", - " \n", - " gate = gate_qiskit_class(*params) \n", - " act_qubits = rng.choice(num_of_qubits, gate.num_qubits, replace=False) # order: (*act_qubits)=(*control_qubits, *target_qubits) \n", - " qc.append(gate, [*act_qubits], [])\n", - " \n", - " return qc" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": 
"780554ad-8226-43d2-b79d-3e173c274c3b", - "metadata": {}, - "outputs": [], - "source": [ - "#| export\n", - "def optimize_circuit(qc: QuantumCircuit, gate_pool: list[Gate], optimization_level=2):\n", - " \"\"\"Use qiskit.compiler.transpile to optimize a circuit.\"\"\"\n", - " basis_gates = gate_pool_to_gate_classes(gate_pool).keys()\n", - " \n", - " while optimization_level > 0:\n", - " try:\n", - " qc_opt = transpile(qc, optimization_level=optimization_level, basis_gates=basis_gates) #target=target\n", - " return qc_opt\n", - " except Exception as er: pass\n", - " \n", - " optimization_level -= 1\n", - "\n", - " return qc" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "2b376adf-c31f-4b05-b5be-5f2630e1edea", - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "'Rand: svr=[2, 2, 2] num_gates=8'" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAAAjMAAADuCAYAAADMbYYaAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjkuMiwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8hTgPZAAAACXBIWXMAAA9hAAAPYQGoP6dpAAAsdUlEQVR4nO3deVxU9f4/8NcMw76ILAoIirIoLqCpGG7lQmmut3KL0soyu5reX15oT+1mafq95pKlN00rJQs1FdcMUzQXFE1FFgVRWUYcQGUZlpk5vz98RJKgzDAzhzO8no+HDx+c8/mc8wbmDK/5nHM+RyYIggAiIiIiiZKLXQARERFRYzDMEBERkaQxzBAREZGkMcwQERGRpDHMEBERkaQxzBAREZGkMcwQERGRpDHMEBERkaQxzBAREZGkMcwQERGRpDHMEBERkaQxzBAREZGkMcwQERGRpDHMEBERkaQxzBAREZGkMcwQERGRpDHMEBERkaQxzBAREZGkMcwQERGRpDHMEBERkaQxzBAREZGkMcwQERGRpDHMEBERkaQxzBAREZGkMcwQERGRpDHMEBERkaQxzBAREZGkMcwQERGRpDHMEBERkaQxzBAREZGkMcwQERGRpDHMEBERkaQxzBAREZGkMcwQERGRpDHMEBERkaQxzBAREZGkMcwQERGRpCnELoCI7icIAjTqSrHL0IvC3hYymUzsMoioGWKYIWqCNOpKbAx4Xuwy9BKV+T2sHezELoOImiGeZiIiIiJJY5ghIiIiSWOYISIiIkljmCEiIiJJ4wXAREQWrqpaiwuXipGSWYzScg0EQYCTgzU6B7iiW5AbbG2sxC6RTKykrApnUgtx+XoJ1JUaWMllaOlii7CObghu1wJyubTvRGSYISKyQCVlVfg+PhMbdlzCmbRCVFXr6mxnrZAjrKMbXhgZiMmjAuHqYmvmSslUritLsSYuHXG/XEF69m0IQt3tnBysERHmiVef6YSxg9rB2lp6J21kglDft0dEYqkur+Ct2WSQO6VVmLsqGV9vzUBpebVefR3sFHhpbBA+
ntmToUbCsnLuIPr/kvDzwavQ6fT7E+/t6YA5k7tidlQXKBTSCTUMM0RNkKFhxiuiC4ZtnV97W2Vq3MnKR2bcYaSu3Q1BW/cn9MZimBHfgeO5mDo3Edfyyxq1HZ9WDvjf3P54aoCfkSojc9DpBKzanIq3liahvELTqG2Fd/XE+o8HIqSDq3GKMzHpxC4iarCsrYk4PHMZDr+xHGf/7yfIFVYIn/8iHl34qtilkQkIgoD5XyYjctreRgcZAMgrKMeIGfvx1tKT4OddaVBXaDB29gG88emxRgcZADh54SZ6jP8ZcfuvGKE602OYIbJAheevIGtLIrLiDiPlyx3YNeJdlOWqEPzcENi6u4hdHhnZO8tOYd6XZ4y+3c++OY9ZC48z0DRxFZUajJy5HzsPXTPqdiurtJgQcxCxuzONul1TYJghagY06krcTL4EmVwOl3atxS6HjGjFphQsWnfOZNtfGXsRC9eabvvUOIIg4IV3DyHhZL5Jtq/TCZj8/iEcPJlnku0bC8MMUTPh7H83xFTeKhW5EjKWtCu3EP3fJL36JMWOxvVfJiIpdnSD+3y46jTOphXqWx6ZwffxlxH3S7ZeffR9DWg0Al76MBElZVUGVGgeDDNEFkhhbwNbN2fYurvAtVNb9PnkFbh364CbyZdwJ8s0n+DIvLRaHV764DAqq7R69fPycIBva0d4eTg0uI9GI+DFDw6jqlq/fZFp5d8sx6yFx/XuZ8hr4Gpeqd7B2ZyaRZhRqVSIiYlBYGAg7Ozs4Ofnh9mzZ6OsrAxTp06FTCbDypUrxS6TTEQQBCiPXcTvMavx64uLcHDqYpz6+Dvczmzaw6aN0SNmIialfINJF9Zh7MH/IuSlYcjedRwJLy4SuzQykrhfsnH83E2z7e+P9CJ8H9/0r51oThb87yxulZhvtGT1T2lIv3LLbPvTh8VPmnf27FkMHz4cSqUSjo6O6Ny5M/Ly8rB8+XJkZmaiqKgIANC9e3dxCyWTyD14FknzN+BW+vX71l34Yjt8Bobi0YWvwqW9twjVmU76d/uRvfMY5NYKtOzUFl1njIWjtzu0lX+98cltFBi1fzGubEvEuWVba5b3/3wG7DxdcSBqgRilUwOt2pxq9n1+8cNFvDQ2CDKZtGeLtQQlZVX4dudls+/3q5/SsDTmUbPv92EsemRGpVJh1KhRUCqVmDNnDvLz85GcnAylUolFixZh165dSEpKgkwmQ2hoqNjlkpFlbTuCAy98UmeQ+VPe4XPYNfJdFKdeNWNlpncnS4n8xPPITTiDC6u249cpC+HRPQARi16raaOr0uDIrBXoNutptOzcDgDQdlhv+Eb2wtE3V4lVOjXAhUtFOHxaafb9JqcW4uR5840GUf027spESZl+kyIawzfbL6FMz8kYzcGiw8ysWbOQk5ODmTNnYsmSJXB2dq5ZFxMTg7CwMGg0Gvj7+8PFhberWpKbZy7hyOwVDZogrrKoBL88/wmq7jR+fo6m6uapdGTGHUb7sf3g2atjzfLCc1lI+XIHBix/Aw7ebohYPB0n3v0a6hvFIlZLD7Mrsf6Absn7pr/EHxbn93C7pAq//1Egyr4fxGLDTGpqKjZv3gwPDw98+umndbbp2bMnACAsLKzW8itXrmD06NFwdnZGy5YtMXnyZBQW8kp+Kbmwajt0elysWJ5XiMyfDpmwIvH9sTQOOo0WPaIn1F7++RbotFqM/mUxlEcv4Mr2oyJVSA11+qJ470enL6pE2zf9RczfQ1N8DVhsmImNjYVOp0NUVBScnJzqbGNvbw+gdpgpKSnBoEGDkJOTg9jYWKxZswaJiYkYOXIkdDrTTANPxlWuLMK1PSf17pe2fp9FTw5Wkq3Ele1H4TMwFK36hNQsFzRa3ExKh517C1zefFDECqmhxP1DVmjRx4kU5BWUQalSi7Z/hhkzSkhIAAAMGjSo3jY5OTkAaoeZNWvWIDc3Fz///DNGjhyJcePGYdOmTTh+/Dh27Nhh2qLJKPIO
nzPo+UO3L+eiNMeyrwc4t+zuKMy9ozOt+oQgcMIgpK7djfCPXoKVnY2IFVJDXFOKN1fQjUI1NBqGGTEZ45EVjXFd2fROyVvsgyb9/PyQk5ODM2fO1Hmnkkajgbe3N1QqFTIzM9GhQwcAf4Wfgwdrf0INCAjA448/jrVr1+pdS69evaBUmv9ivebqUV1rjBTaG9R3pfwclLJyI1ekP2tBjrm6cJPvR+Fgh9G/LsHF1fFI27APw7d9BNUfmUiau17vbc2Xn0S1jKOXpiZAhjy3efWuT4od/cD5Q7w87KGwkkOj1T3w071SVY7ek+r+AOddtAByNN0J1CxdpaIdVC4v17nuYb9/oPGvAWtNPlrd+Uq/ohvAy8sLp06dMqivxd6aXVZ2Nzmq1XX/ojZv3gyVSgVnZ2e0b//XH76LFy9i3Lhx97Xv0qULLl68aFAtSqUSubm5BvUl/RXYK4AWhoWZ6zfyUKAV/1OHjcwKMMNTB3rPm4zSawVIW78XAHBk9kqMPrAE1/acwI3j+t36m5efhyqBk6qZRUstILOqc9WfE6I9jMJK3qB2dcnPuw4IjX+YIRnIwQ6o556Vhv7+AcNfA9VVFU3ub5rFhhkvLy8UFxcjOTkZERERtdbl5+cjOjoaABAaGlprzoTi4mK4urretz03Nzekp6cbXAuZT6lgDegAAQJkaPh8GKWogl3rlmgjczVdcQ1kLcgBEw9ytBncA+1H98P2IXNqlpVcvYHTCzai39IZ2DF4DjTqygZvz8fbhyMzZpIvlEMnc65znVL14JFFfT6V10UmVMDbp7UeRxYZm0bugBv1rHvY7x9o/GvAVlENjzZtGlKqXhrzt9Jiw8zQoUORmpqKRYsWITIyEsHBwQCApKQkvPDCC1Cp7l7AZI7J8gwdNiPD7R7zPgpOpunVp++sibj6TtO4Lqq6vAIbA5436T5yE85gU6cp9y1PW7+3ZqRGHxmXMmDtYGeM0ughRszYh92JOXWuq+/U0J+u/zIRvq0doVSp4Rf5g977HtjbH7+tq3vfZB46nYCW/b/DndL753t52O8faPxrIHrmePxnZtOaTdxiLwCOiYmBu7s7rl+/ji5duqBbt24ICgpCeHg4OnTogMGDBwO4/7bsli1b4tatW/dtr6ioCG5ubuYonYyg86sj9WpvZWuN4OcjTVQNkXH17OzRLPdNd8nlMjwSIuZrwF20fdfHYsOMr68vEhMTMWLECNjZ2SE7Oxtubm5YvXo1du3ahYyMDAD3h5mQkJA6r425ePEiQkJC7ltOTZP/yEfRZfqoBrWVyWUY+MVsOPl5mrgqIuMY1Fu8x2+IuW/6i1i/B2uFHP26m+GCPj1ZbJgB7gaT+Ph4lJSUoKSkBCdOnMC0adNQVlaG7OxsyOVydO3atVafkSNH4siRIzW3bQPAiRMnkJmZiVGjGvbHkZqGXh9ORo+3JkFuU//ZVFs3Fwze8DbajWh6zxohqs/jvb3R0b+F2ffb1tsRw/v7mn2/dL+p/wiGlZX5r1x6Zqg/PN3szb7fh7HoMFOflJQUCIKAoKAgODjUvoVt2rRp8Pb2xpgxYxAfH4+4uDhMmjQJ4eHhGDNmjEgVkyFkMhnC/vUMxievQa8PXkDLLv6A/O7BL7dRYMCKWRh3+iv4De0pbqFEepLJZPjnBPOPFE8fFwIrq2b5Z6PJadPaEWMHtTP7fmdMbJpnKJrlq/L8+fMA7j/FBAAuLi5ISEiAt7c3Jk6ciFdeeQV9+/ZFfHw85PJm+eOSPDt3F3T95xiMObAEDq1b1iwLeHYgFJwgjiTqpbFB8PMy7NZqQ7R2t8dr4zqZbX/0cO+9GmbW0ZnB4d7o16PpnWICLPhupgd5UJgB7k6QFx8fb86SiBol/D8vo+2TveDk1wo7hv4bRSnZ97Vx8vVE/2Uz4dbVH6XXCrAjMrpmXeCEQej8ylM1Xzv4uOPG8VQcnLrYHOWTAZwdbfD1vP54cvo+s+zvqw/6wq2FrVn2RQ3T
I8QD70wNw8drzpp8X04O1lg7f0CtqUyakmY51PCwMEMkNVd3HcPuMe+j9Hr9T7OtKlUjeVEsDv9z2X3rLm8+iB2R0TX/1AW3kLX1sClLJiN4oq+v3qMlSlU5cm6UNWg+kj9FjQjA2MH+elZH5vDBa90R1lG/O20NeQ0smRMO/zZ1z23UFDTLkZk/n9tEZCkaMltv1a1SFJxMg1dElwe28+gRBDuPFri2j/MjScHytx/F1bxS7D3asLlfGjIPyb0e6+WF/83tb0hpZAY21lbYtfIJ9J8Sj+y8hj2zS9/XwJzJXTHt2Y6GlGc2zXJkhojqF/TcYGTGHYKg4aMJpMDG2gpblw7ByIF+Rt92ZIQP4lc+AXu7Zvm5VzLatHbEb+ueQlC7ep5x0AhvvRyKxXPCm+zppT8xzBBRDYW9LdqP6YdLsRy9lBJ7OwW2fT4UH8/sCWtF49/Wraxk+OC17ohf+QScHKyNUCGZWjsfZxz/fjSeHxlglO25tbDFpoWPY+G/ejf5IAMwzBDRPfxHReBW+nXczuB09VKjUMjx3rTuOP3DGDwaavgEkI+EuOPkxtH4aEZP2FjX/TBLaprcWtjiu08ex8/LhjbqTrdnI/2Rsu1pTHrKOMHIHDh2SEQ1gp4bwlEZiesW7IbfvxuFk+dvYtXmVGzedwWVVQ8+ZWitkGPcE+3xzwkh6Nu9lSQ+iVP9xgxqhxED/BB/+BpWbU7FL8fyHtqnpYsNXh4bjOnjQxDY1vinq0yNYYbIAkR8Ng2+Q3rCvpUrImPfR3WpGlv7voG+S6bj+v5TuL7/FKzsbfD0kRWwslXA2tkB406vRuaWQ0j+ZBMAwCXAB25d/HFl+1GRvxtqLJlMhj6hrdAntBVWf9gP5zKKcfqiChcuF+Ob7RlQV2jhaK/Aov/XGz1DPBDW0Y3XxVgYhUKOsYP9MXawP27dqURyaiFOpahw6dptbNyVCXXl3dfAF+/2Rc/O7ujU3hUKI5yiFItMEARB7CKIzOXHR6ahPL8IDt5uGJ+8Ruxy6mWOp2YbW1Tm93xqtgT4Do1FbkE52rRyQM6BSWKXQyKwxNeAdGMYERERERhmiIiISOIYZoiIiEjSGGaIiIhI0hhmiIiISNJ4Lx5RE6Swt0VU5vdG3+7Wvm9AfaMY9q1b4unfVxh12wp7PlGZiMTBMEPUBMlkMpPc5iyTy2r+523URGQpeJqJiIiIJI1hhoiIiCSNYYaIiIgkjWGGiIiIJI1hhoiIiCSNYYaIiIgkjWGGiIiIJI1hhoiIiCSNYYaIiIgkjWGGiIiIJI1hhoiIiCSNYYaIiIgkjWGGiIiIJI1hhoiIiCSNYYaIiIgkjWGGiIiIJI1hhoiIiCRNIXYBVDdBEIDKSrHLaDhbW8hkMqNtThAEaNTG//4FnVDzf3V5hVG3rbA37s+guZPcMQAY/TggooZhmGmqKiuhGT9F7CoaTPHjBsDOzmjb06grsTHgeaNt7+/UN4qNvv2ozO9h7WC8n0GzJ7FjADD+cUBEDcPTTERERCRpDDNEREQkaQwzREREJGkMM0RERCRpDDNEREQkaQwzREREJGkMM0RERCRpDDNEREQkaZw0jyyGV0QXDNs6v9ay6jI17mTlIzPuMFLX7oag1YlUHRERmQrDDFmcrK2JyElIBmQy2Hu6InDcYwif/yJaBLXBsejVYpdHRERGxjBDFqfw/BVkbUms+Tp9/T78I3EZgp8bguSFsagsvCNidUREZGy8ZoYsnkZdiZvJlyCTy+HSrrXY5RARkZExzFCz4Ox/N8RU3ioVuRIiIjK2ZhFmVCoVYmJiEBgYCDs7O/j5+WH27NkoKyvD1KlTIZPJsHLlSrHLJCNR2NvA1s0Ztu4ucO3UFn0+eQXu3TrgZvIl3MnKF7s80ZTm3IS2ogoAoK2qhq5aI3JFROalVt1G7m9ncXXPSeQfvYDq8gqxSzIrjUaHo2duQF1599jXWNANERZ/zczZs2cxfPhwKJVKODo6onPn
zsjLy8Py5cuRmZmJoqIiAED37t3FLdREDqkKEHnsNyzsHIo3AzrV2cZm5494qpU3fu4zwMzVmUaPmInoETOx1rLsXcdx4p2vRapIXDkJZ5D2zR7k/HoGEAQAQGVhCX7q/TqCo4ai04tPwt7TVdwiTag5HgNU283kDKR+vRvZ8cegq9bWLLd2cUDguMcR8spTcPH3ErFC01IVV+DLH1OxJi4dOTfKapbfKKzAk9P3YsaEEIx6vC1kMpmIVTaORYcZlUqFUaNGQalUYs6cOZg7dy6cnZ0BAJ999hneeustKBQKyGQyhIaGilwtGUv6d/uRvfMY5NYKtOzUFl1njIWjtzu0lVU1beQ2CozavxhXtiXi3LKtNcv7fz4Ddp6uOBC1QIzSjUoQBCTN24CLa+LrXK++UYw//vsTLm36FUO/fxduXfzNWyCRGaSu3Y0TH3xTE+TvVX2nHKlrd+PSDwkY9PW/0ebx7uYv0MRSs25h2Ot7cS2/rM71+3/Pxf7fczF9XCesfDcCVlbSPGEjzaobaNasWcjJycHMmTOxZMmSmiADADExMQgLC4NGo4G/vz9cXFxErJSM6U6WEvmJ55GbcAYXVm3Hr1MWwqN7ACIWvVbTRlelwZFZK9Bt1tNo2bkdAKDtsN7wjeyFo2+uEqt0ozqz6Id6g8y9ypVF2D/xPyi5dsMMVRGZz6UfEnDi/XV1Bpl7acoqkPDiIhSczjBTZeaRoyxD5LQ99QaZe331UxrmLDlphqpMw2LDTGpqKjZv3gwPDw98+umndbbp2bMnACAsLKxm2Z/hJzw8HLa2tpIedqO7bp5KR2bcYbQf2w+evTrWLC88l4WUL3dgwPI34ODthojF03Hi3a+hvlEsYrXGcSdbiXPLtjS4fYXqNpIXxpqwIiLzqi5T4+QH3zS4vbayGifeW2vCisxv3pfJyC0ob3D7ZRtTcD6jyIQVmY7FhpnY2FjodDpERUXBycmpzjb29vYAaoeZy5cvY8uWLfDy8kLv3r3NUqs5lGu1UFVW1vmvOfhjaRx0Gi16RE+ovfzzLdBptRj9y2Ioj17Ale1HRarQuNK/3a93n6vxx6C+ecv4xTQRzf0YaG6ytiSiulStV5/CPzKhOnvZRBWZV/GdSmzak6l3vy9/TDVBNaZnsdfMJCQkAAAGDRpUb5ucnBwAtcPMwIEDkZ9/946XefPm4ehRy/jj9lF6Cj5KTxG7DNGUZCtxZftRBDwzEK36hKDgxN0DVtBocTMpHR6hAbi8+aDIVRrP5c2/6d1HV61F1rYj6DJtpPELagKa+zHQ3Fz+8TeD+3l0DzRuMSKI238F6grtwxv+zXfxmVjxjvSunbHYMHP16lUAQLt27epcr9FoaoLKvWFGLjf+L7BXr15QKpV69bGXy3Gxe4TRanilbQc84+NX57rhxw81evvBwcFQ64x3m5+1IMdchBttewBwbtkWtB/bDz2iJ2Dfs/MAAK36hCBwwiCkrt2N8I9ewo7I6Jrbl/UVHBSMapn4tzoqBBnm6foY1HfpvE+w96PpRq7IMFI7BgDjHwemkO/6JiBvgXxlPnx9fcUux2SitT3QArZ699v2zSY8++08o9djbnfsBwH2j+vdr7S8Gr7+HWEl6DeqZQxeXl44deqUQX0tNsyUld294EmtrvsXsnnzZqhUKjg7O6N9+/YmrUWpVCI3N1evPg5WVkB349UQ6OSEIZ6mm/02Ly8P5Vr9PwXUx0ZmBehZrvJYCtZ7P1vv+tuXcvGt71+nmRQOduj/+QycXrARaRv2Yfi2j/DIO88hae56g2rOy89DlWC8n4GhFJADBt5lWlpaitwS/V6rpiK1YwAw/nFgEs5aQA7otFq935ekROsZCljp309dUYHcWxbwc2lVAtgb1lWZnw9oH37RcFNisWHGy8sLxcXFSE5ORkRE7U93+fn5iI6OBgCEhoaa/CJfLy/9/7LYm2CEyJR8fHyMPjIDE3/A7T1vMkqvFSBt/V4AwJHZKzH6
wBJc23MCN47rf97Yx9unSYzMAECpthpOsNa7n9bJBm1c2pigIv1J7RgAjH8cmEK+lRV0AORWVvBu0zR+16ZQqtXCzYB+lfZytHGU/s+lzFaHWwb0kwkV8PZqCRlcjVzRwxnyt/JPFhtmhg4ditTUVCxatAiRkZEIDg4GACQlJeGFF16ASqUCYJ7J8gwZNhMqKqAZP8UE1ZhGRkYGZHZ2RttedXkFNgY8b7Tt/V2bwT3QfnQ/bB8yp2ZZydUbOL1gI/otnYEdg+dAo9bvwtCMSxmwdjDez6AxTn38HS58sV2vPnIbBb49/SvsPVqYqCr9SO0YAIx/HJiC79BY5BaUw9vLGzkXcsQux2Qyvv8Fv0ev1rvfgj0bsDoswAQVmdetO5VoM/QHlFfoN9P3Pyf1wMp3r5uoKtOR3kefBoqJiYG7uzuuX7+OLl26oFu3bggKCkJ4eDg6dOiAwYMHA6h9vQw1H7kJZ7Cp0xSU5apqLU9bvxdbI2bqHWSamo4vPAHoOeLoPzKiyQQZosZq//QAWDvrd57Fo0cgPCwgyACAq4stokbo/728Pj7EBNWYnsWGGV9fXyQmJmLEiBGws7NDdnY23NzcsHr1auzatQsZGXcnR2KYIUvk3K41wt6s//qhv7Nv5YpH3n7OhBURmZe1gx0eXfBKg9tb2dvi0U8a3l4K5r3eA35ejg1uP2dyV3QJbGnCikzHYk8zAUBISAji4++fAbW0tBTZ2dmQy+Xo2rWrCJURmV73OeOhrazGhZU/P7Cdg487Ije+Byc/T/MURmQmAeMeQ3V5BU68+zUEXf2zAFs722PwuhiLuCX7Xj6tHHFgzXAMe30fruSWPLDtG891xmdvGvcOUnOy6DBTn5SUFAiCgODgYDg4ONy3Pi4uDgBw8eLFWl/7+/ujV69e5ivUCB7zaIWqUeMf2OZh60maZDIZer33PHwGhiJt3V5c359U6w3dwccdHZ+PRMcXImFnwaeXeAw0b52mPAmP7oFIXbcHV7Yfha6yumadbUsnBE4cjE4vDYOzXysRqzSdYP8WOPXDGPxvSxq+/DENV/NKa9bJZMCIAX6YMbEznuzXRtIz3jfLMHP+/HkA9Z9iGjduXJ1fT5kyBevXrzdpbUTG5jMgFD4DQlGWX4jbl3OhrayGbUtneIQFQK4w4N5VIonxCAvAgGUz0XvuFGwbMAuVRSWwdXfGuKSvoLDXfy4aqXFrYYu3Xg7Dv6d0w+mLhbhZrIatjRWC27VAW++6Z8iXGoaZOggPeSgZNT3h/3kZbZ/sBSe/Vtgx9N8oSsmus51rp7Z4dMFU2HneHYlIXhiLa7tPwKtfV/R8LwrWjnYQBCDnwGmcXrDxoQ+okxJHb3c4eruLXQaRaOzcnGFle3fKAisb62YRZO5lZSVHeDfLPJ3MMEMW4equY7iw6mc8tf3jettY2dtgyPq3kDhrBQpOpkEml8Om5d1PJVW3y3Bo+lKUXiuAla01nvjxQwSOe8zgKdGJiMh8mmWY+fO5TWQ5GjLJXYd/DMDN0xkoOJkGABB0OlQW3gEAFF24UtNOW1mNogvZcLLQc+hERJbGYm/NJvo712BfaKuqMeTbdzD6l8Xov/wN2Lq73NfO3tMV/iMfxfUDp0WokoiI9MUwQ82GzMoKPgNCcSxmNXZERqNcWYiIha/WamPtZI8h376N86u2o/CPTJEqJSIifTDMULNRlqtC/u8pKFcWAQCy4g7D85HgmvUKRztEbnof1/Yl4eLq++cnIiKipolhhpqN7J2/w6N7AKyd7k5x3mbIIyi6mA3g7hO0Ize9j9yDZ3Du8y0iVklERPpqlhcAk+WJ+GwafIf0hH0rV0TGvo/qUjW29n0DfZdMx/X9p3B9/ymU5apwbvlWPLVzAQSdgHJlEX6P/goA0PnVp+DZIxDWDrZo91QfAEB2/DGcW7ZVzG+LiIgagGGGLMKxmDV1Lv/931/V+jor7jCy4g7f1+7csq0MLkREEsXTTERERCRp
DDNEREQkaQwzREREJGkMM0RERCRpDDNEREQkabybqamytYXixw1iV9FwtsZ9+qzC3hZRmd8bdZum1tyewGtyUjsGAKMfB0TUMAwzTZRMJgPs7MQuQzQymQzWDs33+yceA0TUcDzNRERERJLGMENERESSxjBDREREksYwQ0RERJLGMENERESSxjBDREREksYwQ0RERJLGMENERESSxjBDREREksYwQ0RERJLGMENERESSxjBDREREksYwQ0RERJLGMENERESSxjBDREREksYwQ0RERJLGMENERESSphC7AKqbIAhAZaXYZTScrS1kMpnYVRCRhRAEARq18d8DBZ1Q8391eYVRt62w5/ugWBhmmqrKSmjGTxG7igZT/LgBsLMTuwwishAadSU2Bjxvsu2rbxQbfftRmd/D2oHvg2LgaSYiIiKSNIYZIiIikjSGGSIiIpI0hhkiIiKSNIYZIiIikjSGGSIiIpI0hhkiIiKSNIYZIiIikjROmkdERBbDK6ILhm2dX2tZdZkad7LykRl3GKlrd0PQ6kSqjkyFYYaIiCxO1tZE5CQkAzIZ7D1dETjuMYTPfxEtgtrgWPRqscsjI2OYISIii1N4/gqytiTWfJ2+fh/+kbgMwc8NQfLCWFQW3hGxOjI2XjNDREQWT6OuxM3kS5DJ5XBp11rscsjIGGaIiKhZcPa/G2Iqb5WKXAkZG08zERGRxVHY28DWzbnmmpmOk5+Ae7cOuJl8CXey8sUuj4ysWYzMqFQqxMTEIDAwEHZ2dvDz88Ps2bNRVlaGqVOnQiaTYeXKlWKXaRKHVAWw2fkj/puZVm8bm50/YuyJxHrXE5G03S6pwsrYi+g54Wfk3ywHABQUqbH0uwsovlMpcnWm0SNmIialfINJF9Zh7MH/IuSlYcjedRwJLy4SuzQyAYsfmTl79iyGDx8OpVIJR0dHdO7cGXl5eVi+fDkyMzNRVFQEAOjevbu4hRIRmcA3P2fgjU+PoUytqbW8WiPgzcUn8N6KU1jyZjj+ObGzSBWaRvp3+5G98xjk1gq07NQWXWeMhaO3O7SVVTVt5DYKjNq/GFe2JeLcsq01y/t/PgN2nq44ELVAjNLJABY9MqNSqTBq1CgolUrMmTMH+fn5SE5OhlKpxKJFi7Br1y4kJSVBJpMhNDRU7HKJiIzqix8u4uUPE+8LMvdSV2gx45Nj+GzdOTNWZnp3spTITzyP3IQzuLBqO36dshAe3QMQsei1mja6Kg2OzFqBbrOeRsvO7QAAbYf1hm9kLxx9c5VYpZMBLDrMzJo1Czk5OZg5cyaWLFkCZ2fnmnUxMTEICwuDRqOBv78/XFxcRKyUiMi4TqXcxBufHmtw+7c+T0LiaaUJKxLXzVPpyIw7jPZj+8GzV8ea5YXnspDy5Q4MWP4GHLzdELF4Ok68+zXUN4pFrJb0ZbFhJjU1FZs3b4aHhwc+/fTTOtv07NkTABAWFlazLC4uDs888wzatWsHBwcHdOrUCe+99x5KS6V99Xu5VgtVZWWd/4jI8qyMTYUg6Ndn+aYU0xTTRPyxNA46jRY9oifUXv75Fui0Woz+ZTGURy/gyvajIlVIhrLYa2ZiY2Oh0+kQFRUFJyenOtvY29sDqB1mlixZgrZt2+KTTz6Br68vzp49i/nz5+PQoUM4fPgw5HJp5r+P0lPwUbplv1ER0V2Ftyrww94svfttS7iKvIIy+LRyNEFV4ivJVuLK9qMIeGYgWvUJQcGJVACAoNHiZlI6PEIDcHnzQZGrJENYbJhJSEgAAAwaNKjeNjk5OQBqh5mdO3fC09Oz5uvHHnsMnp6eiIqKwpEjRzBw4EC9a+nVqxeUSv2Gb+3lclzsHqH3vurzStsOeMbHr851w48favT2g4ODodbxeSdETUGloh0qXV7Wu59WKyC0zwjYVV82QVX6sRbkmItwo2/33LItaD+2H3pET8C+Z+cBAFr1CUHghEFIXbsb4R+9hB2R0dBWVD14Q3UIDgpGtYzv
g4by8vLCqVOnDOprsWHm6tWrAIB27drVuV6j0eDo0btDifeGmXuDzJ969eoFAMjNzTWoFqVSqXdfBysroLtBu6tToJMThniabtbLvLw8lGu1Jts+EenByRUw8DLAwqJS4I5h73XGZCOzAgx4y1IeS8F672frXX/7Ui6+9f3rNJPCwQ79P5+B0ws2Im3DPgzf9hEeeec5JM1dr/e+8/LzUCXwfVAMFhtmysrKAABqtbrO9Zs3b4ZKpYKzszPat2//wG0dPHh32DEkJMSgWry8vPTuYy+x01k+Pj4cmSFqIioVTlAZ2Ne9pQPsnNsYtR5DWAtywAxvKb3nTUbptQKkrd8LADgyeyVGH1iCa3tO4MbxVL225ePtw5GZRjDkb+WfLDbMeHl5obi4GMnJyYiIqH26Jj8/H9HR0QCA0NBQyGSyereTm5uLDz74AMOGDTN4LhpDhs2Eigpoxk8xaH9iyMjIgMzOTuwyiAhAZZUWbZ/4AQVFFXr1a+Fsg6vHf4Wjg7WJKmu46vIKbAx43qT7aDO4B9qP7oftQ+bULCu5egOnF2xEv6UzsGPwHGjUDb9JIuNSBqwd+D4oBml9/NfD0KFDAQCLFi1CRkZGzfKkpCQMGjQIKtXdzy0PCiilpaUYM2YMbGxssG7dOpPWS0RkLLY2Vnjl6Y4Pb/g3L44OahJBxlxyE85gU6cpKMutPY6Vtn4vtkbM1CvIkLgsNszExMTA3d0d169fR5cuXdCtWzcEBQUhPDwcHTp0wODBgwHUvl7mXmq1GqNGjcKVK1ewf/9+eHt7m7N8IqJGeX18CNxa2Da4vYuTNWZFWdYswNR8WGyY8fX1RWJiIkaMGAE7OztkZ2fDzc0Nq1evxq5du2pGa+oKM9XV1Xj22Wdx6tQp7NmzB5078wAnImnx9XLEzhWRcHZ8+EiLg50C25YORQdfTh5K0iQTBH2nVZK+0tJSuLi4QCaToaSkBA4ODjXrdDodJk6ciB07dmD37t01IzjmJrVrZhQ/buA1M0RN0LmMIsxaeAyHTtU9PUS/Hq3xeUwf9Opy/52cYjLHNTPGFpX5Pa+ZEYnFXgD8ICkpKRAEAcHBwbWCDADMmDEDP/30E95++204ODjg+PHjNesCAgLqvHWbiKipCg12w2/rRiDlcjE27LiEa8pSCALg29oRk0cFIqyju9glEjVaswwz58+fB1D3KaY9e/YAABYuXIiFCxfWWvfNN9/gxRdfNHl9RETG1iWwJT570/iT0BE1BQwzf5OdnW3maoiIyFgCJwxC/89nIOGlRbi2N+m+9V1eH43A8Y9DJpfhdmYejv7rC1TdKYdrp7YYuOKNmnY2LRxh7WSP2M4vmbN8MhDDDBERWQQnX08ERw1Fwan0Otd7DwxF0MRBiH/qHWjKKhD6r2fQ4+3ncOLdr3Er7Rp2REbXtO2zYCr0flInicZi72Z6kISEBAiCgBEjRohdChERGYNMhr7/9zpOvL8WuipNnU3cOvvjxsk0aMruTiaY82syAp69/3l7VrbW6PD0AFyKTTBpyWQ8zTLMEBGRZeny2igUJKWh8Fz9TwsvPJcJnwGhsPd0BQB0eHoAbJwdYOPqVKtd26f6oOTaDRSlZJuwYjKmZnmaiYiILIdrRz+0G9EHe/7x4QPbKX9PwYUvd2DId+9A0Opwbc8JAICgqf1wyKBJQ3BpE0dlpIRhhoiIJK11nxA4+bXCM7+vAADYe7oiYvF02LdqifRv99dqm75hH9I37AMAeD4ShLJcFapL/3ogsZNfK3j2DMJvryw23zdAjcYwQ0REkpb+7f5aoWXYlvm4+L/4Ou9msm/lCnXBLVjZ26B7zEScX7W91vqgSYNxbc9JVN0pN3ndZDwMM0REZLG6R0+A+kZxTdh54ocPALkcVtYKZMYdQtq6PX81lskQOP5xJM5aIVK1ZKhm+TgDKeDjDIioOePjDEgfvJuJiIiIJI1hhoiIiCSNYYaIiIgkjWGGiIiIJI0XADdRgiAA
lZVil9FwtraQyWRiV0FEFkIQBGjUEnoPBKCw5/ugWBhmiIiISNJ4momIiIgkjWGGiIiIJI1hhoiIiCSNYYaIiIgkjWGGiIiIJI1hhoiIiCSNYYaIiIgkjWGGiIiIJI1hhoiIiCSNYYaIiIgkjWGGiIiIJI1hhoiIiCSNYYaIiIgkjWGGiIiIJI1hhoiIiCSNYYaIiIgkjWGGiIiIJI1hhoiIiCSNYYaIiIgkjWGGiIiIJI1hhoiIiCSNYYaIiIgkjWGGiIiIJI1hhoiIiCSNYYaIiIgk7f8DP108C7pGMGwAAAAASUVORK5CYII=", - "text/plain": [ - "
" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "data": { - "text/plain": [ - "'Opti: svr=[2, 2, 2] num_gates=8'" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAAAjMAAADuCAYAAADMbYYaAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjkuMiwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8hTgPZAAAACXBIWXMAAA9hAAAPYQGoP6dpAAAsdUlEQVR4nO3deVxU9f4/8NcMw76ILAoIirIoLqCpGG7lQmmut3KL0soyu5reX15oT+1mafq95pKlN00rJQs1FdcMUzQXFE1FFgVRWUYcQGUZlpk5vz98RJKgzDAzhzO8no+HDx+c8/mc8wbmDK/5nHM+RyYIggAiIiIiiZKLXQARERFRYzDMEBERkaQxzBAREZGkMcwQERGRpDHMEBERkaQxzBAREZGkMcwQERGRpDHMEBERkaQxzBAREZGkMcwQERGRpDHMEBERkaQxzBAREZGkMcwQERGRpDHMEBERkaQxzBAREZGkMcwQERGRpDHMEBERkaQxzBAREZGkMcwQERGRpDHMEBERkaQxzBAREZGkMcwQERGRpDHMEBERkaQxzBAREZGkMcwQERGRpDHMEBERkaQxzBAREZGkMcwQERGRpDHMEBERkaQxzBAREZGkMcwQERGRpDHMEBERkaQxzBAREZGkMcwQERGRpDHMEBERkaQxzBAREZGkMcwQERGRpCnELoCI7icIAjTqSrHL0IvC3hYymUzsMoioGWKYIWqCNOpKbAx4Xuwy9BKV+T2sHezELoOImiGeZiIiIiJJY5ghIiIiSWOYISIiIkljmCEiIiJJ4wXAREQWrqpaiwuXipGSWYzScg0EQYCTgzU6B7iiW5AbbG2sxC6RTKykrApnUgtx+XoJ1JUaWMllaOlii7CObghu1wJyubTvRGSYISKyQCVlVfg+PhMbdlzCmbRCVFXr6mxnrZAjrKMbXhgZiMmjAuHqYmvmSslUritLsSYuHXG/XEF69m0IQt3tnBysERHmiVef6YSxg9rB2lp6J21kglDft0dEYqkur+Ct2WSQO6VVmLsqGV9vzUBpebVefR3sFHhpbBA+ntmToUbCsnLuIPr/kvDzwavQ6fT7E+/t6YA5k7tidlQXKBTSCTUMM0RNkKFhxiuiC4ZtnV97W2Vq3MnKR2bcYaSu3Q1BW/cn9MZimBHfgeO5mDo3Edfyyxq1HZ9WDvjf3P54aoCfkSojc9DpBKzanIq3liahvELTqG2Fd/XE+o8HIqSDq3GKMzHpxC4iarCsrYk4PHMZDr+xHGf/7yfIFVYIn/8iHl34qtilkQkIgoD5XyYjctreRgcZAMgrKMeIGfvx1tKT4OddaVBXaDB29gG88emxRgcZADh54SZ6jP8ZcfuvGKE602OYIbJAheevIGtLIrLiDiPlyx3YNeJdlOWqEPzcENi6u4hdHhnZO8tOYd6XZ4y+3c++OY9ZC48z0DRxFZUajJy5HzsPXTPqdiurtJgQcxCxuzONul1TYJghagY06krcTL4EmVwOl3atxS6HjGjFphQsWnfOZNtfGXsRC9eabvvUOIIg4IV3DyHhZL5Jtq/TCZj8/iEcPJlnku0bC8MMUTPh7H83xFTeKhW5EjKWtCu3EP3fJL36JMWOxvVfJiIpdnSD+3y46jTOphXqWx6ZwffxlxH3S7ZeffR9DWg0Al76MBElZVUGVGgeDDNEFkhhbwNbN2fYurvAtVNb9PnkFbh364CbyZdwJ8s0n+DIvLRaHV764DAqq7R69fPycIBva0d4eTg0uI9GI+DFDw
6jqlq/fZFp5d8sx6yFx/XuZ8hr4Gpeqd7B2ZyaRZhRqVSIiYlBYGAg7Ozs4Ofnh9mzZ6OsrAxTp06FTCbDypUrxS6TTEQQBCiPXcTvMavx64uLcHDqYpz6+Dvczmzaw6aN0SNmIialfINJF9Zh7MH/IuSlYcjedRwJLy4SuzQykrhfsnH83E2z7e+P9CJ8H9/0r51oThb87yxulZhvtGT1T2lIv3LLbPvTh8VPmnf27FkMHz4cSqUSjo6O6Ny5M/Ly8rB8+XJkZmaiqKgIANC9e3dxCyWTyD14FknzN+BW+vX71l34Yjt8Bobi0YWvwqW9twjVmU76d/uRvfMY5NYKtOzUFl1njIWjtzu0lX+98cltFBi1fzGubEvEuWVba5b3/3wG7DxdcSBqgRilUwOt2pxq9n1+8cNFvDQ2CDKZtGeLtQQlZVX4dudls+/3q5/SsDTmUbPv92EsemRGpVJh1KhRUCqVmDNnDvLz85GcnAylUolFixZh165dSEpKgkwmQ2hoqNjlkpFlbTuCAy98UmeQ+VPe4XPYNfJdFKdeNWNlpncnS4n8xPPITTiDC6u249cpC+HRPQARi16raaOr0uDIrBXoNutptOzcDgDQdlhv+Eb2wtE3V4lVOjXAhUtFOHxaafb9JqcW4uR5840GUf027spESZl+kyIawzfbL6FMz8kYzcGiw8ysWbOQk5ODmTNnYsmSJXB2dq5ZFxMTg7CwMGg0Gvj7+8PFhberWpKbZy7hyOwVDZogrrKoBL88/wmq7jR+fo6m6uapdGTGHUb7sf3g2atjzfLCc1lI+XIHBix/Aw7ebohYPB0n3v0a6hvFIlZLD7Mrsf6Absn7pr/EHxbn93C7pAq//1Egyr4fxGLDTGpqKjZv3gwPDw98+umndbbp2bMnACAsLKzW8itXrmD06NFwdnZGy5YtMXnyZBQW8kp+Kbmwajt0elysWJ5XiMyfDpmwIvH9sTQOOo0WPaIn1F7++RbotFqM/mUxlEcv4Mr2oyJVSA11+qJ470enL6pE2zf9RczfQ1N8DVhsmImNjYVOp0NUVBScnJzqbGNvbw+gdpgpKSnBoEGDkJOTg9jYWKxZswaJiYkYOXIkdDrTTANPxlWuLMK1PSf17pe2fp9FTw5Wkq3Ele1H4TMwFK36hNQsFzRa3ExKh517C1zefFDECqmhxP1DVmjRx4kU5BWUQalSi7Z/hhkzSkhIAAAMGjSo3jY5OTkAaoeZNWvWIDc3Fz///DNGjhyJcePGYdOmTTh+/Dh27Nhh2qLJKPIOnzPo+UO3L+eiNMeyrwc4t+zuKMy9ozOt+oQgcMIgpK7djfCPXoKVnY2IFVJDXFOKN1fQjUI1NBqGGTEZ45EVjXFd2fROyVvsgyb9/PyQk5ODM2fO1Hmnkkajgbe3N1QqFTIzM9GhQwcAf4Wfgwdrf0INCAjA448/jrVr1+pdS69evaBUmv9ivebqUV1rjBTaG9R3pfwclLJyI1ekP2tBjrm6cJPvR+Fgh9G/LsHF1fFI27APw7d9BNUfmUiau17vbc2Xn0S1jKOXpiZAhjy3efWuT4od/cD5Q7w87KGwkkOj1T3w071SVY7ek+r+AOddtAByNN0J1CxdpaIdVC4v17nuYb9/oPGvAWtNPlrd+Uq/ohvAy8sLp06dMqivxd6aXVZ2Nzmq1XX/ojZv3gyVSgVnZ2e0b//XH76LFy9i3Lhx97Xv0qULLl68aFAtSqUSubm5BvUl/RXYK4AWhoWZ6zfyUKAV/1OHjcwKMMNTB3rPm4zSawVIW78XAHBk9kqMPrAE1/acwI3j+t36m5efhyqBk6qZRUstILOqc9WfE6I9jMJK3qB2dcnPuw4IjX+YIRnIwQ6o556Vhv7+AcNfA9VVFU3ub5rFhhkvLy8UFxcjOTkZERERtdbl5+cjOjoaABAaGlprzoTi4mK4urretz03Nzekp6cbXAuZT6lgDegAAQJkaPh8GKWogl3rlmgjczVdcQ
1kLcgBEw9ytBncA+1H98P2IXNqlpVcvYHTCzai39IZ2DF4DjTqygZvz8fbhyMzZpIvlEMnc65znVL14JFFfT6V10UmVMDbp7UeRxYZm0bugBv1rHvY7x9o/GvAVlENjzZtGlKqXhrzt9Jiw8zQoUORmpqKRYsWITIyEsHBwQCApKQkvPDCC1Cp7l7AZI7J8gwdNiPD7R7zPgpOpunVp++sibj6TtO4Lqq6vAIbA5436T5yE85gU6cp9y1PW7+3ZqRGHxmXMmDtYGeM0ughRszYh92JOXWuq+/U0J+u/zIRvq0doVSp4Rf5g977HtjbH7+tq3vfZB46nYCW/b/DndL753t52O8faPxrIHrmePxnZtOaTdxiLwCOiYmBu7s7rl+/ji5duqBbt24ICgpCeHg4OnTogMGDBwO4/7bsli1b4tatW/dtr6ioCG5ubuYonYyg86sj9WpvZWuN4OcjTVQNkXH17OzRLPdNd8nlMjwSIuZrwF20fdfHYsOMr68vEhMTMWLECNjZ2SE7Oxtubm5YvXo1du3ahYyMDAD3h5mQkJA6r425ePEiQkJC7ltOTZP/yEfRZfqoBrWVyWUY+MVsOPl5mrgqIuMY1Fu8x2+IuW/6i1i/B2uFHP26m+GCPj1ZbJgB7gaT+Ph4lJSUoKSkBCdOnMC0adNQVlaG7OxsyOVydO3atVafkSNH4siRIzW3bQPAiRMnkJmZiVGjGvbHkZqGXh9ORo+3JkFuU//ZVFs3Fwze8DbajWh6zxohqs/jvb3R0b+F2ffb1tsRw/v7mn2/dL+p/wiGlZX5r1x6Zqg/PN3szb7fh7HoMFOflJQUCIKAoKAgODjUvoVt2rRp8Pb2xpgxYxAfH4+4uDhMmjQJ4eHhGDNmjEgVkyFkMhnC/vUMxievQa8PXkDLLv6A/O7BL7dRYMCKWRh3+iv4De0pbqFEepLJZPjnBPOPFE8fFwIrq2b5Z6PJadPaEWMHtTP7fmdMbJpnKJrlq/L8+fMA7j/FBAAuLi5ISEiAt7c3Jk6ciFdeeQV9+/ZFfHw85PJm+eOSPDt3F3T95xiMObAEDq1b1iwLeHYgFJwgjiTqpbFB8PMy7NZqQ7R2t8dr4zqZbX/0cO+9GmbW0ZnB4d7o16PpnWICLPhupgd5UJgB7k6QFx8fb86SiBol/D8vo+2TveDk1wo7hv4bRSnZ97Vx8vVE/2Uz4dbVH6XXCrAjMrpmXeCEQej8ylM1Xzv4uOPG8VQcnLrYHOWTAZwdbfD1vP54cvo+s+zvqw/6wq2FrVn2RQ3TI8QD70wNw8drzpp8X04O1lg7f0CtqUyakmY51PCwMEMkNVd3HcPuMe+j9Hr9T7OtKlUjeVEsDv9z2X3rLm8+iB2R0TX/1AW3kLX1sClLJiN4oq+v3qMlSlU5cm6UNWg+kj9FjQjA2MH+elZH5vDBa90R1lG/O20NeQ0smRMO/zZ1z23UFDTLkZk/n9tEZCkaMltv1a1SFJxMg1dElwe28+gRBDuPFri2j/MjScHytx/F1bxS7D3asLlfGjIPyb0e6+WF/83tb0hpZAY21lbYtfIJ9J8Sj+y8hj2zS9/XwJzJXTHt2Y6GlGc2zXJkhojqF/TcYGTGHYKg4aMJpMDG2gpblw7ByIF+Rt92ZIQP4lc+AXu7Zvm5VzLatHbEb+ueQlC7ep5x0AhvvRyKxXPCm+zppT8xzBBRDYW9LdqP6YdLsRy9lBJ7OwW2fT4UH8/sCWtF49/Wraxk+OC17ohf+QScHKyNUCGZWjsfZxz/fjSeHxlglO25tbDFpoWPY+G/ejf5IAMwzBDRPfxHReBW+nXczuB09VKjUMjx3rTuOP3DGDwaavgEkI+EuOPkxtH4aEZP2FjX/TBLaprcWtjiu08ex8/LhjbqTrdnI/2Rsu1pTHrKOMHIHDh2SEQ1gp4bwlEZiesW7IbfvxuFk+dvYtXmVGzedwWVVQ8+ZWitkGPcE+3xzwkh6Nu9lSQ+iVP9xgxqhx
ED/BB/+BpWbU7FL8fyHtqnpYsNXh4bjOnjQxDY1vinq0yNYYbIAkR8Ng2+Q3rCvpUrImPfR3WpGlv7voG+S6bj+v5TuL7/FKzsbfD0kRWwslXA2tkB406vRuaWQ0j+ZBMAwCXAB25d/HFl+1GRvxtqLJlMhj6hrdAntBVWf9gP5zKKcfqiChcuF+Ob7RlQV2jhaK/Aov/XGz1DPBDW0Y3XxVgYhUKOsYP9MXawP27dqURyaiFOpahw6dptbNyVCXXl3dfAF+/2Rc/O7ujU3hUKI5yiFItMEARB7CKIzOXHR6ahPL8IDt5uGJ+8Ruxy6mWOp2YbW1Tm93xqtgT4Do1FbkE52rRyQM6BSWKXQyKwxNeAdGMYERERERhmiIiISOIYZoiIiEjSGGaIiIhI0hhmiIiISNJ4Lx5RE6Swt0VU5vdG3+7Wvm9AfaMY9q1b4unfVxh12wp7PlGZiMTBMEPUBMlkMpPc5iyTy2r+523URGQpeJqJiIiIJI1hhoiIiCSNYYaIiIgkjWGGiIiIJI1hhoiIiCSNYYaIiIgkjWGGiIiIJI1hhoiIiCSNYYaIiIgkjWGGiIiIJI1hhoiIiCSNYYaIiIgkjWGGiIiIJI1hhoiIiCSNYYaIiIgkjWGGiIiIJI1hhoiIiCRNIXYBVDdBEIDKSrHLaDhbW8hkMqNtThAEaNTG//4FnVDzf3V5hVG3rbA37s+guZPcMQAY/TggooZhmGmqKiuhGT9F7CoaTPHjBsDOzmjb06grsTHgeaNt7+/UN4qNvv2ozO9h7WC8n0GzJ7FjADD+cUBEDcPTTERERCRpDDNEREQkaQwzREREJGkMM0RERCRpDDNEREQkaQwzREREJGkMM0RERCRpDDNEREQkaZw0jyyGV0QXDNs6v9ay6jI17mTlIzPuMFLX7oag1YlUHRERmQrDDFmcrK2JyElIBmQy2Hu6InDcYwif/yJaBLXBsejVYpdHRERGxjBDFqfw/BVkbUms+Tp9/T78I3EZgp8bguSFsagsvCNidUREZGy8ZoYsnkZdiZvJlyCTy+HSrrXY5RARkZExzFCz4Ox/N8RU3ioVuRIiIjK2ZhFmVCoVYmJiEBgYCDs7O/j5+WH27NkoKyvD1KlTIZPJsHLlSrHLJCNR2NvA1s0Ztu4ucO3UFn0+eQXu3TrgZvIl3MnKF7s80ZTm3IS2ogoAoK2qhq5aI3JFROalVt1G7m9ncXXPSeQfvYDq8gqxSzIrjUaHo2duQF1599jXWNANERZ/zczZs2cxfPhwKJVKODo6onPnzsjLy8Py5cuRmZmJoqIiAED37t3FLdREDqkKEHnsNyzsHIo3AzrV2cZm5494qpU3fu4zwMzVmUaPmInoETOx1rLsXcdx4p2vRapIXDkJZ5D2zR7k/HoGEAQAQGVhCX7q/TqCo4ai04tPwt7TVdwiTag5HgNU283kDKR+vRvZ8cegq9bWLLd2cUDguMcR8spTcPH3ErFC01IVV+DLH1OxJi4dOTfKapbfKKzAk9P3YsaEEIx6vC1kMpmIVTaORYcZlUqFUaNGQalUYs6cOZg7dy6cnZ0BAJ999hneeustKBQKyGQyhIaGilwtGUv6d/uRvfMY5NYKtOzUFl1njIWjtzu0lVU1beQ2CozavxhXtiXi3LKtNcv7fz4Ddp6uOBC1QIzSjUoQBCTN24CLa+LrXK++UYw//vsTLm36FUO/fxduXfzNWyCRGaSu3Y0TH3xTE+TvVX2nHKlrd+PSDwkY9PW/0ebx7uYv0MRSs25h2Ot7cS2/rM71+3/Pxf7fczF9XCesfDcCVlbSPGEjzaobaNasWcjJycHMmTOxZMmSmiADADExMQgLC4NGo4G/vz9cXFxErJSM6U6WEvmJ55GbcAYXVm3Hr1MWwqN7ACIWvVbTRlelwZFZK9Bt1tNo2bkdAKDtsN7wjeyFo2+uEqt0ozqz6Id6g8y9ypVF2D/xPyi5dsMMVRGZz6UfEnDi/XV1Bpl7ac
oqkPDiIhSczjBTZeaRoyxD5LQ99QaZe331UxrmLDlphqpMw2LDTGpqKjZv3gwPDw98+umndbbp2bMnACAsLKxm2Z/hJzw8HLa2tpIedqO7bp5KR2bcYbQf2w+evTrWLC88l4WUL3dgwPI34ODthojF03Hi3a+hvlEsYrXGcSdbiXPLtjS4fYXqNpIXxpqwIiLzqi5T4+QH3zS4vbayGifeW2vCisxv3pfJyC0ob3D7ZRtTcD6jyIQVmY7FhpnY2FjodDpERUXBycmpzjb29vYAaoeZy5cvY8uWLfDy8kLv3r3NUqs5lGu1UFVW1vmvOfhjaRx0Gi16RE+ovfzzLdBptRj9y2Ioj17Ale1HRarQuNK/3a93n6vxx6C+ecv4xTQRzf0YaG6ytiSiulStV5/CPzKhOnvZRBWZV/GdSmzak6l3vy9/TDVBNaZnsdfMJCQkAAAGDRpUb5ucnBwAtcPMwIEDkZ9/946XefPm4ehRy/jj9lF6Cj5KTxG7DNGUZCtxZftRBDwzEK36hKDgxN0DVtBocTMpHR6hAbi8+aDIVRrP5c2/6d1HV61F1rYj6DJtpPELagKa+zHQ3Fz+8TeD+3l0DzRuMSKI238F6grtwxv+zXfxmVjxjvSunbHYMHP16lUAQLt27epcr9FoaoLKvWFGLjf+L7BXr15QKpV69bGXy3Gxe4TRanilbQc84+NX57rhxw81evvBwcFQ64x3m5+1IMdchBttewBwbtkWtB/bDz2iJ2Dfs/MAAK36hCBwwiCkrt2N8I9ewo7I6Jrbl/UVHBSMapn4tzoqBBnm6foY1HfpvE+w96PpRq7IMFI7BgDjHwemkO/6JiBvgXxlPnx9fcUux2SitT3QArZ699v2zSY8++08o9djbnfsBwH2j+vdr7S8Gr7+HWEl6DeqZQxeXl44deqUQX0tNsyUld294EmtrvsXsnnzZqhUKjg7O6N9+/YmrUWpVCI3N1evPg5WVkB349UQ6OSEIZ6mm/02Ly8P5Vr9PwXUx0ZmBehZrvJYCtZ7P1vv+tuXcvGt71+nmRQOduj/+QycXrARaRv2Yfi2j/DIO88hae56g2rOy89DlWC8n4GhFJADBt5lWlpaitwS/V6rpiK1YwAw/nFgEs5aQA7otFq935ekROsZCljp309dUYHcWxbwc2lVAtgb1lWZnw9oH37RcFNisWHGy8sLxcXFSE5ORkRE7U93+fn5iI6OBgCEhoaa/CJfLy/9/7LYm2CEyJR8fHyMPjIDE3/A7T1vMkqvFSBt/V4AwJHZKzH6wBJc23MCN47rf97Yx9unSYzMAECpthpOsNa7n9bJBm1c2pigIv1J7RgAjH8cmEK+lRV0AORWVvBu0zR+16ZQqtXCzYB+lfZytHGU/s+lzFaHWwb0kwkV8PZqCRlcjVzRwxnyt/JPFhtmhg4ditTUVCxatAiRkZEIDg4GACQlJeGFF16ASqUCYJ7J8gwZNhMqKqAZP8UE1ZhGRkYGZHZ2RttedXkFNgY8b7Tt/V2bwT3QfnQ/bB8yp2ZZydUbOL1gI/otnYEdg+dAo9bvwtCMSxmwdjDez6AxTn38HS58sV2vPnIbBb49/SvsPVqYqCr9SO0YAIx/HJiC79BY5BaUw9vLGzkXcsQux2Qyvv8Fv0ev1rvfgj0bsDoswAQVmdetO5VoM/QHlFfoN9P3Pyf1wMp3r5uoKtOR3kefBoqJiYG7uzuuX7+OLl26oFu3bggKCkJ4eDg6dOiAwYMHA6h9vQw1H7kJZ7Cp0xSU5apqLU9bvxdbI2bqHWSamo4vPAHoOeLoPzKiyQQZosZq//QAWDvrd57Fo0cgPCwgyACAq4stokbo/728Pj7EBNWYnsWGGV9fXyQmJmLEiBGws7NDdnY23NzcsHr1auzatQsZGXcnR2KYIUvk3K41wt6s//qhv7Nv5YpH3n7OhBURmZe1gx0eXfBKg9tb2dvi0U8a3l4K5r3eA35ejg1uP2dyV3QJbGnCik
zHYk8zAUBISAji4++fAbW0tBTZ2dmQy+Xo2rWrCJURmV73OeOhrazGhZU/P7Cdg487Ije+Byc/T/MURmQmAeMeQ3V5BU68+zUEXf2zAFs722PwuhiLuCX7Xj6tHHFgzXAMe30fruSWPLDtG891xmdvGvcOUnOy6DBTn5SUFAiCgODgYDg4ONy3Pi4uDgBw8eLFWl/7+/ujV69e5ivUCB7zaIWqUeMf2OZh60maZDIZer33PHwGhiJt3V5c359U6w3dwccdHZ+PRMcXImFnwaeXeAw0b52mPAmP7oFIXbcHV7Yfha6yumadbUsnBE4cjE4vDYOzXysRqzSdYP8WOPXDGPxvSxq+/DENV/NKa9bJZMCIAX6YMbEznuzXRtIz3jfLMHP+/HkA9Z9iGjduXJ1fT5kyBevXrzdpbUTG5jMgFD4DQlGWX4jbl3OhrayGbUtneIQFQK4w4N5VIonxCAvAgGUz0XvuFGwbMAuVRSWwdXfGuKSvoLDXfy4aqXFrYYu3Xg7Dv6d0w+mLhbhZrIatjRWC27VAW++6Z8iXGoaZOggPeSgZNT3h/3kZbZ/sBSe/Vtgx9N8oSsmus51rp7Z4dMFU2HneHYlIXhiLa7tPwKtfV/R8LwrWjnYQBCDnwGmcXrDxoQ+okxJHb3c4eruLXQaRaOzcnGFle3fKAisb62YRZO5lZSVHeDfLPJ3MMEMW4equY7iw6mc8tf3jettY2dtgyPq3kDhrBQpOpkEml8Om5d1PJVW3y3Bo+lKUXiuAla01nvjxQwSOe8zgKdGJiMh8mmWY+fO5TWQ5GjLJXYd/DMDN0xkoOJkGABB0OlQW3gEAFF24UtNOW1mNogvZcLLQc+hERJbGYm/NJvo712BfaKuqMeTbdzD6l8Xov/wN2Lq73NfO3tMV/iMfxfUDp0WokoiI9MUwQ82GzMoKPgNCcSxmNXZERqNcWYiIha/WamPtZI8h376N86u2o/CPTJEqJSIifTDMULNRlqtC/u8pKFcWAQCy4g7D85HgmvUKRztEbnof1/Yl4eLq++cnIiKipolhhpqN7J2/w6N7AKyd7k5x3mbIIyi6mA3g7hO0Ize9j9yDZ3Du8y0iVklERPpqlhcAk+WJ+GwafIf0hH0rV0TGvo/qUjW29n0DfZdMx/X9p3B9/ymU5apwbvlWPLVzAQSdgHJlEX6P/goA0PnVp+DZIxDWDrZo91QfAEB2/DGcW7ZVzG+LiIgagGGGLMKxmDV1Lv/931/V+jor7jCy4g7f1+7csq0MLkREEsXTTERERCRpDDNEREQkaQwzREREJGkMM0RERCRpDDNEREQkabybqamytYXixw1iV9FwtsZ9+qzC3hZRmd8bdZum1tyewGtyUjsGAKMfB0TUMAwzTZRMJgPs7MQuQzQymQzWDs33+yceA0TUcDzNRERERJLGMENERESSxjBDREREksYwQ0RERJLGMENERESSxjBDREREksYwQ0RERJLGMENERESSxjBDREREksYwQ0RERJLGMENERESSxjBDREREksYwQ0RERJLGMENERESSxjBDREREksYwQ0RERJLGMENERESSphC7AKqbIAhAZaXYZTScrS1kMpnYVRCRhRAEARq18d8DBZ1Q8391eYVRt62w5/ugWBhmmqrKSmjGTxG7igZT/LgBsLMTuwwishAadSU2Bjxvsu2rbxQbfftRmd/D2oHvg2LgaSYiIiKSNIYZIiIikjSGGSIiIpI0hhkiIiKSNIYZIiIikjSGGSIiIpI0hhkiIiKSNIYZIiIikjROmkdERBbDK6ILhm2dX2tZdZkad7LykRl3GKlrd0PQ6kSqjkyFYYaIiCxO1tZE5CQkAzIZ7D1dETjuMYTPfxEtgtrgWPRqscsjI2OYISIii1N4/gqytiTWfJ2+fh/+kbgMwc8NQfLCWFQW3hGxOjI2XjNDREQWT6OuxM3kS5DJ5XBp11rscsjIGGaIiKhZcPa/G2Iqb5WKXAkZG08zER
GRxVHY28DWzbnmmpmOk5+Ae7cOuJl8CXey8sUuj4ysWYzMqFQqxMTEIDAwEHZ2dvDz88Ps2bNRVlaGqVOnQiaTYeXKlWKXaRKHVAWw2fkj/puZVm8bm50/YuyJxHrXE5G03S6pwsrYi+g54Wfk3ywHABQUqbH0uwsovlMpcnWm0SNmIialfINJF9Zh7MH/IuSlYcjedRwJLy4SuzQyAYsfmTl79iyGDx8OpVIJR0dHdO7cGXl5eVi+fDkyMzNRVFQEAOjevbu4hRIRmcA3P2fgjU+PoUytqbW8WiPgzcUn8N6KU1jyZjj+ObGzSBWaRvp3+5G98xjk1gq07NQWXWeMhaO3O7SVVTVt5DYKjNq/GFe2JeLcsq01y/t/PgN2nq44ELVAjNLJABY9MqNSqTBq1CgolUrMmTMH+fn5SE5OhlKpxKJFi7Br1y4kJSVBJpMhNDRU7HKJiIzqix8u4uUPE+8LMvdSV2gx45Nj+GzdOTNWZnp3spTITzyP3IQzuLBqO36dshAe3QMQsei1mja6Kg2OzFqBbrOeRsvO7QAAbYf1hm9kLxx9c5VYpZMBLDrMzJo1Czk5OZg5cyaWLFkCZ2fnmnUxMTEICwuDRqOBv78/XFxcRKyUiMi4TqXcxBufHmtw+7c+T0LiaaUJKxLXzVPpyIw7jPZj+8GzV8ea5YXnspDy5Q4MWP4GHLzdELF4Ok68+zXUN4pFrJb0ZbFhJjU1FZs3b4aHhwc+/fTTOtv07NkTABAWFlazLC4uDs888wzatWsHBwcHdOrUCe+99x5KS6V99Xu5VgtVZWWd/4jI8qyMTYUg6Ndn+aYU0xTTRPyxNA46jRY9oifUXv75Fui0Woz+ZTGURy/gyvajIlVIhrLYa2ZiY2Oh0+kQFRUFJyenOtvY29sDqB1mlixZgrZt2+KTTz6Br68vzp49i/nz5+PQoUM4fPgw5HJp5r+P0lPwUbplv1ER0V2Ftyrww94svfttS7iKvIIy+LRyNEFV4ivJVuLK9qMIeGYgWvUJQcGJVACAoNHiZlI6PEIDcHnzQZGrJENYbJhJSEgAAAwaNKjeNjk5OQBqh5mdO3fC09Oz5uvHHnsMnp6eiIqKwpEjRzBw4EC9a+nVqxeUSv2Gb+3lclzsHqH3vurzStsOeMbHr851w48favT2g4ODodbxeSdETUGloh0qXV7Wu59WKyC0zwjYVV82QVX6sRbkmItwo2/33LItaD+2H3pET8C+Z+cBAFr1CUHghEFIXbsb4R+9hB2R0dBWVD14Q3UIDgpGtYzvg4by8vLCqVOnDOprsWHm6tWrAIB27drVuV6j0eDo0btDifeGmXuDzJ969eoFAMjNzTWoFqVSqXdfBysroLtBu6tToJMThniabtbLvLw8lGu1Jts+EenByRUw8DLAwqJS4I5h73XGZCOzAgx4y1IeS8F672frXX/7Ui6+9f3rNJPCwQ79P5+B0ws2Im3DPgzf9hEeeec5JM1dr/e+8/LzUCXwfVAMFhtmysrKAABqtbrO9Zs3b4ZKpYKzszPat2//wG0dPHh32DEkJMSgWry8vPTuYy+x01k+Pj4cmSFqIioVTlAZ2Ne9pQPsnNsYtR5DWAtywAxvKb3nTUbptQKkrd8LADgyeyVGH1iCa3tO4MbxVL225ePtw5GZRjDkb+WfLDbMeHl5obi4GMnJyYiIqH26Jj8/H9HR0QCA0NBQyGSyereTm5uLDz74AMOGDTN4LhpDhs2Eigpoxk8xaH9iyMjIgMzOTuwyiAhAZZUWbZ/4AQVFFXr1a+Fsg6vHf4Wjg7WJKmu46vIKbAx43qT7aDO4B9qP7oftQ+bULCu5egOnF2xEv6UzsGPwHGjUDb9JIuNSBqwd+D4oBml9/NfD0KFDAQCLFi1CRkZGzfKkpCQMGjQIKtXdzy0PCiilpaUYM2YMbGxssG7dOpPWS0RkLLY2Vnjl6Y4Pb/g3L44OahJBxlxyE85gU6cpKMutPY6Vtn4vtk
bM1CvIkLgsNszExMTA3d0d169fR5cuXdCtWzcEBQUhPDwcHTp0wODBgwHUvl7mXmq1GqNGjcKVK1ewf/9+eHt7m7N8IqJGeX18CNxa2Da4vYuTNWZFWdYswNR8WGyY8fX1RWJiIkaMGAE7OztkZ2fDzc0Nq1evxq5du2pGa+oKM9XV1Xj22Wdx6tQp7NmzB5078wAnImnx9XLEzhWRcHZ8+EiLg50C25YORQdfTh5K0iQTBH2nVZK+0tJSuLi4QCaToaSkBA4ODjXrdDodJk6ciB07dmD37t01IzjmJrVrZhQ/buA1M0RN0LmMIsxaeAyHTtU9PUS/Hq3xeUwf9Opy/52cYjLHNTPGFpX5Pa+ZEYnFXgD8ICkpKRAEAcHBwbWCDADMmDEDP/30E95++204ODjg+PHjNesCAgLqvHWbiKipCg12w2/rRiDlcjE27LiEa8pSCALg29oRk0cFIqyju9glEjVaswwz58+fB1D3KaY9e/YAABYuXIiFCxfWWvfNN9/gxRdfNHl9RETG1iWwJT570/iT0BE1BQwzf5OdnW3maoiIyFgCJwxC/89nIOGlRbi2N+m+9V1eH43A8Y9DJpfhdmYejv7rC1TdKYdrp7YYuOKNmnY2LRxh7WSP2M4vmbN8MhDDDBERWQQnX08ERw1Fwan0Otd7DwxF0MRBiH/qHWjKKhD6r2fQ4+3ncOLdr3Er7Rp2REbXtO2zYCr0flInicZi72Z6kISEBAiCgBEjRohdChERGYNMhr7/9zpOvL8WuipNnU3cOvvjxsk0aMruTiaY82syAp69/3l7VrbW6PD0AFyKTTBpyWQ8zTLMEBGRZeny2igUJKWh8Fz9TwsvPJcJnwGhsPd0BQB0eHoAbJwdYOPqVKtd26f6oOTaDRSlZJuwYjKmZnmaiYiILIdrRz+0G9EHe/7x4QPbKX9PwYUvd2DId+9A0Opwbc8JAICgqf1wyKBJQ3BpE0dlpIRhhoiIJK11nxA4+bXCM7+vAADYe7oiYvF02LdqifRv99dqm75hH9I37AMAeD4ShLJcFapL/3ogsZNfK3j2DMJvryw23zdAjcYwQ0REkpb+7f5aoWXYlvm4+L/4Ou9msm/lCnXBLVjZ26B7zEScX7W91vqgSYNxbc9JVN0pN3ndZDwMM0REZLG6R0+A+kZxTdh54ocPALkcVtYKZMYdQtq6PX81lskQOP5xJM5aIVK1ZKhm+TgDKeDjDIioOePjDEgfvJuJiIiIJI1hhoiIiCSNYYaIiIgkjWGGiIiIJI0XADdRgiAAlZVil9FwtraQyWRiV0FEFkIQBGjUEnoPBKCw5/ugWBhmiIiISNJ4momIiIgkjWGGiIiIJI1hhoiIiCSNYYaIiIgkjWGGiIiIJI1hhoiIiCSNYYaIiIgkjWGGiIiIJI1hhoiIiCSNYYaIiIgkjWGGiIiIJI1hhoiIiCSNYYaIiIgkjWGGiIiIJI1hhoiIiCSNYYaIiIgkjWGGiIiIJI1hhoiIiCSNYYaIiIgkjWGGiIiIJI1hhoiIiCSNYYaIiIgkjWGGiIiIJI1hhoiIiCSNYYaIiIgk7f8DP108C7pGMGwAAAAASUVORK5CYII=", - "text/plain": [ - "
" - ] - }, - "metadata": {}, - "output_type": "display_data" - } - ], - "source": [ - "gs = [ql.HGate, ql.CXGate, ql.CRXGate]\n", - "qc = rnd_circuit(num_of_qubits=3, num_of_gates=8, gate_pool=gs, rng=np.random.default_rng())\n", - "svr = schmidt_rank_vector(qi.DensityMatrix(qc))\n", - "display(f\"Rand: svr={svr} num_gates={len(qc.data)}\", qc.draw('mpl'))\n", - "\n", - "qc = optimize_circuit(qc, gs)\n", - "svr = schmidt_rank_vector(qi.DensityMatrix(qc))\n", - "display(f\"Opti: svr={svr} num_gates={len(qc.data)}\", qc.draw('mpl'))" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "112d1b45-1abb-41b2-90df-bf71e8021ffa", - "metadata": {}, - "outputs": [], - "source": [ - "#| export\n", - "def plot_svr_stat(num_of_qubits, min_gates, max_gates, gs, samples, sort=False, opt=True, rng=np.random.default_rng()): \n", - " svr_list = list()\n", - " for i in range(samples):\n", - " qc = rnd_circuit(num_of_qubits, rng.integers(min_gates, max_gates+1), gs, rng) \n", - " if opt: qc = optimize_circuit(qc, gs)\n", - " svr = schmidt_rank_vector(qi.DensityMatrix(qc))\n", - " if sort: svr = sorted(svr)\n", - " svr_list.append(svr) \n", - " df = pd.DataFrame(data={\"svr\":svr_list}) \n", - " cnts = df['svr'].value_counts(normalize=True)\n", - " for n,v in zip(cnts.index, cnts.values): print(f\"{n}: {v*100:.1f}%\") \n", - " df['svr'].value_counts().plot(kind='bar') " - ] - }, - { - "cell_type": "markdown", - "id": "39b0de7a-9704-4f3c-b74c-f2ed26254bc0", - "metadata": {}, - "source": [ - "SRV distrubtion for random sampling" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "bc66db51-7335-47ef-aad2-dd7a19266212", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "[1, 1, 1]: 60.2%\n", - "[1, 2, 2]: 11.1%\n", - "[2, 2, 1]: 10.1%\n", - "[2, 2, 2]: 9.9%\n", - "[2, 1, 2]: 8.7%\n" - ] - }, - { - "data": { - "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAigAAAHaCAYAAAAqv7IKAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjkuMiwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8hTgPZAAAACXBIWXMAAA9hAAAPYQGoP6dpAAAosElEQVR4nO3dfXSU5Z2H8e8MIUN4mUmDZIZogLjaQgR8AQ0jLNqYTYrRlRK1YFaj5WilgS1kRYxFKLE1lF0LSytgaRU5JUvXKlaxYGNA2EpACEuFIEFtalLDTHAxGUDI6+wfHMaOBGWAzNzJXJ9z5hzz3M8kv/E2yeW8xeL3+/0CAAAwiDXSAwAAAHwRgQIAAIxDoAAAAOMQKAAAwDgECgAAMA6BAgAAjEOgAAAA4xAoAADAODGRHuB8tLe3q66uTv369ZPFYon0OAAA4Bz4/X4dPXpUSUlJslq//D6SLhkodXV1Sk5OjvQYAADgPNTW1uqyyy770nO6ZKD069dP0qkbaLfbIzwNAAA4Fz6fT8nJyYHf41+mSwbK6Yd17HY7gQIAQBdzLk/P4EmyAADAOAQKAAAwDoECAACMQ6AAAADjECgAAMA4BAoAADAOgQIAAIxDoAAAAOMQKAAAwDgECgAAMA6BAgAAjBNyoHz88cf6l3/5F/Xv319xcXEaMWKEdu3aFVj3+/2aN2+eBg4cqLi4OGVkZOj9998P+hxHjhxRbm6u7Ha74uPjNXXqVB07duzCbw0AAOgWQgqUTz/9VGPHjlXPnj21YcMG7d+/X08//bS+9rWvBc5ZtGiRli5dqhUrVmjHjh3q06ePsrKydPLkycA5ubm5qqysVGlpqdavX6+tW7fqoYceuni3CgAAdGkWv9/vP9eTH3vsMb399tv6n//5nw7X/X6/kpKS9G//9m965JFHJEmNjY1yOp1atWqVJk+erPfee0+pqanauXOnRo8eLUnauHGjbr31Vv3tb39TUlLSV87h8/nkcDjU2NjIXzMGAKCLCOX3d0j3oLz66qsaPXq07rrrLiUmJuraa6/VypUrA+vV1dXyeDzKyMgIHHM4HEpLS1N5ebkkqby8XPHx8YE4kaSMjAxZrVbt2LEjlHEAAEA3FRPKyX/5y1+0fPlyFRQU6PHHH9fOnTv1r//6r4qNjVVeXp48Ho8kyel0Bl3P6XQG1jwejxITE4OHiIlRQkJC4JwvampqUlNTU+Bjn88Xytjnbchjr4fl63Smvy7MjvQIAACELKRAaW9v1+jRo/XUU09Jkq699lrt27dPK1asUF5eXqcMKEnFxcVasGBBp31+AABglpAe4hk4cKBSU1ODjg0bNkw1NTWSJJfLJUnyer1B53i93sCay+VSfX190Hpra6uOHDkSOOeLCgsL1djYGLjU1taGMjYAAOhiQgqUsWPHqqqqKujYwYMHNXjwYElSSkqKXC6XysrKAus+n087duyQ2+2WJLndbjU0NKiioiJwzqZNm9Te3q60tLQOv67NZpPdbg+6AACA7iukh3hmzZqlG2+8UU899ZTuvvtuvfPOO/rlL3+pX/7yl5Iki8WimTNn6sc//rGuvPJKpaSk6IknnlBSUpImTpwo6dQ9Lt/61rf04IMPasWKFWppadH06dM1efLkc3oFDwAA6P5CCpTrr79e69atU2FhoYqKipSSkqIlS5YoNzc3cM6jjz6q48eP66GHHlJDQ4PGjRunjRs3qlevXoFz1qxZo+nTp+uWW26R1WpVTk6Oli5devFuFQAA6NJCeh8UU4TrfVB4FQ8AABdPp70PCgAAQDgQKAAAwDgECgAAMA6BAgAAjEOgAAAA4xAoAADAOAQKAAAwDoECAACMQ6AAAADjECgAAMA4BAoAADAOgQIAAIxDoAAAAOMQKAAAwDgECgAAMA6BAgAAjEOgAAAA4xAoAADAOAQKAAAwDoECAACMQ6AAAADjECgAAMA4BAoAADAOgQIAAIxDoAAAAOMQKAAAwDgECgAAMA6
BAgAAjEOgAAAA4xAoAADAOAQKAAAwDoECAACMQ6AAAADjECgAAMA4BAoAADAOgQIAAIxDoAAAAOMQKAAAwDgECgAAMA6BAgAAjEOgAAAA4xAoAADAOAQKAAAwDoECAACMQ6AAAADjECgAAMA4BAoAADAOgQIAAIwTUqD86Ec/ksViCboMHTo0sH7y5Enl5+erf//+6tu3r3JycuT1eoM+R01NjbKzs9W7d28lJiZq9uzZam1tvTi3BgAAdAsxoV7hqquu0ptvvvn5J4j5/FPMmjVLr7/+ul588UU5HA5Nnz5dkyZN0ttvvy1JamtrU3Z2tlwul7Zt26ZDhw7pvvvuU8+ePfXUU09dhJsDAAC6g5ADJSYmRi6X64zjjY2N+vWvf62SkhKlp6dLkp5//nkNGzZM27dv15gxY/THP/5R+/fv15tvvimn06lrrrlGTz75pObMmaMf/ehHio2NvfBbBAAAuryQn4Py/vvvKykpSZdffrlyc3NVU1MjSaqoqFBLS4syMjIC5w4dOlSDBg1SeXm5JKm8vFwjRoyQ0+kMnJOVlSWfz6fKysqzfs2mpib5fL6gCwAA6L5CCpS0tDStWrVKGzdu1PLly1VdXa1//Md/1NGjR+XxeBQbG6v4+Pig6zidTnk8HkmSx+MJipPT66fXzqa4uFgOhyNwSU5ODmVsAADQxYT0EM+ECRMC/zxy5EilpaVp8ODB+u///m/FxcVd9OFOKywsVEFBQeBjn89HpAAA0I1d0MuM4+Pj9fWvf10ffPCBXC6Xmpub1dDQEHSO1+sNPGfF5XKd8aqe0x939LyW02w2m+x2e9AFAAB0XxcUKMeOHdOHH36ogQMHatSoUerZs6fKysoC61VVVaqpqZHb7ZYkud1u7d27V/X19YFzSktLZbfblZqaeiGjAACAbiSkh3geeeQR3X777Ro8eLDq6uo0f/589ejRQ1OmTJHD4dDUqVNVUFCghIQE2e12zZgxQ263W2PGjJEkZWZmKjU1Vffee68WLVokj8ejuXPnKj8/XzabrVNuIAAA6HpCCpS//e1vmjJliv7v//5PAwYM0Lhx47R9+3YNGDBAkrR48WJZrVbl5OSoqalJWVlZWrZsWeD6PXr00Pr16zVt2jS53W716dNHeXl5Kioquri3CgAAdGkWv9/vj/QQofL5fHI4HGpsbOzU56MMeez1Tvvc4fLXhdmRHgEAAEmh/f7mb/EAAADjECgAAMA4BAoAADAOgQIAAIxDoAAAAOMQKAAAwDgECgAAMA6BAgAAjEOgAAAA4xAoAADAOAQKAAAwDoECAACMQ6AAAADjECgAAMA4BAoAADAOgQIAAIxDoAAAAOMQKAAAwDgECgAAMA6BAgAAjEOgAAAA4xAoAADAOAQKAAAwDoECAACMQ6AAAADjECgAAMA4BAoAADAOgQIAAIxDoAAAAOMQKAAAwDgECgAAMA6BAgAAjEOgAAAA4xAoAADAOAQKAAAwDoECAACMQ6AAAADjECgAAMA4BAoAADAOgQIAAIxDoAAAAOMQKAAAwDgECgAAMA6BAgAAjEOgAAAA4xAoAADAOAQKAAAwzgUFysKFC2WxWDRz5szAsZMnTyo/P1/9+/dX3759lZOTI6/XG3S9mpoaZWdnq3fv3kpMTNTs2bPV2tp6IaMAAIBu5LwDZefOnXr22Wc1cuTIoOOzZs3Sa6+9phdffFFbtmxRXV2dJk2aFFhva2tTdna2mpubtW3bNr3wwgtatWqV5s2bd/63AgAAdCvnFSjHjh1Tbm6uVq5cqa997WuB442Njfr1r3+tn/3sZ0pPT9eoUaP0/PPPa9u2bdq+fbsk6Y9//KP279+v3/zmN7rmmms0YcIEPfnkk3rmmWfU3Nx8cW4VAADo0s4rUPLz85Wdna2MjIyg4xUVFWppaQk6PnToUA0aNEjl5eWSpPLyco0YMUJOpzNwTlZWlnw+nyorK89nHAAA0M3EhHqFtWvXavfu3dq5c+cZax6
PR7GxsYqPjw867nQ65fF4Auf8fZycXj+91pGmpiY1NTUFPvb5fKGODQAAupCQ7kGpra3VD37wA61Zs0a9evXqrJnOUFxcLIfDEbgkJyeH7WsDAIDwCylQKioqVF9fr+uuu04xMTGKiYnRli1btHTpUsXExMjpdKq5uVkNDQ1B1/N6vXK5XJIkl8t1xqt6Tn98+pwvKiwsVGNjY+BSW1sbytgAAKCLCSlQbrnlFu3du1d79uwJXEaPHq3c3NzAP/fs2VNlZWWB61RVVammpkZut1uS5Ha7tXfvXtXX1wfOKS0tld1uV2pqaodf12azyW63B10AAED3FdJzUPr166fhw4cHHevTp4/69+8fOD516lQVFBQoISFBdrtdM2bMkNvt1pgxYyRJmZmZSk1N1b333qtFixbJ4/Fo7ty5ys/Pl81mu0g3CwAAdGUhP0n2qyxevFhWq1U5OTlqampSVlaWli1bFljv0aOH1q9fr2nTpsntdqtPnz7Ky8tTUVHRxR4FAAB0URa/3++P9BCh8vl8cjgcamxs7NSHe4Y89nqnfe5w+evC7EiPAACApNB+f/O3eAAAgHEIFAAAYBwCBQAAGIdAAQAAxiFQAACAcQgUAABgHAIFAAAYh0ABAADGIVAAAIBxCBQAAGAcAgUAABiHQAEAAMYhUAAAgHEIFAAAYBwCBQAAGIdAAQAAxiFQAACAcQgUAABgHAIFAAAYh0ABAADGIVAAAIBxCBQAAGAcAgUAABiHQAEAAMYhUAAAgHEIFAAAYBwCBQAAGIdAAQAAxiFQAACAcQgUAABgHAIFAAAYh0ABAADGIVAAAIBxCBQAAGAcAgUAABiHQAEAAMYhUAAAgHEIFAAAYBwCBQAAGIdAAQAAxiFQAACAcQgUAABgHAIFAAAYh0ABAADGIVAAAIBxCBQAAGAcAgUAABiHQAEAAMYJKVCWL1+ukSNHym63y263y+12a8OGDYH1kydPKj8/X/3791ffvn2Vk5Mjr9cb9DlqamqUnZ2t3r17KzExUbNnz1Zra+vFuTUAAKBbCClQLrvsMi1cuFAVFRXatWuX0tPTdccdd6iyslKSNGvWLL322mt68cUXtWXLFtXV1WnSpEmB67e1tSk7O1vNzc3atm2bXnjhBa1atUrz5s27uLcKAAB0aRa/3++/kE+QkJCgf//3f9edd96pAQMGqKSkRHfeeack6cCBAxo2bJjKy8s1ZswYbdiwQbfddpvq6urkdDolSStWrNCcOXN0+PBhxcbGntPX9Pl8cjgcamxslN1uv5Dxv9SQx17vtM8dLn9dmB3pEQAAkBTa7+/zfg5KW1ub1q5dq+PHj8vtdquiokItLS3KyMgInDN06FANGjRI5eXlkqTy8nKNGDEiECeSlJWVJZ/PF7gXpiNNTU3y+XxBFwAA0H2FHCh79+5V3759ZbPZ9PDDD2vdunVKTU2Vx+NRbGys4uPjg853Op3yeDySJI/HExQnp9dPr51NcXGxHA5H4JKcnBzq2AAAoAsJOVC+8Y1vaM+ePdqxY4emTZumvLw87d+/vzNmCygsLFRjY2PgUltb26lfDwAARFZMqFeIjY3VFVdcIUkaNWqUdu7cqf/8z//Ud77zHTU3N6uhoSHoXhSv1yuXyyVJcrlceuedd4I+3+lX+Zw+pyM2m002my3UUQEAQBd1we+D0t7erqamJo0aNUo9e/ZUWVlZYK2qqko1NTVyu92SJLfbrb1796q+vj5wTmlpqex2u1JTUy90FAAA0E2EdA9KYWGhJkyYoEGDBuno0aMqKSnRW2+9pTfeeEMOh0NTp05VQUGBEhISZLfbNWPGDLndbo0ZM0aSlJmZqdTUVN17771atGiRPB6P5s6dq/z8fO4hAQAAASEFSn19ve677z4dOnRIDodDI0eO1BtvvKF/+qd/kiQtXrxYVqtVOTk5ampqUlZWlpYtWxa4fo8ePbR+/XpNmzZNbrdbffr0UV5enoqKii7urQIAAF3aBb8
PSiTwPijnjvdBAQCYIizvgwIAANBZCBQAAGAcAgUAABiHQAEAAMYhUAAAgHEIFAAAYBwCBQAAGIdAAQAAxiFQAACAcQgUAABgHAIFAAAYh0ABAADGIVAAAIBxCBQAAGAcAgUAABiHQAEAAMYhUAAAgHEIFAAAYBwCBQAAGIdAAQAAxiFQAACAcQgUAABgHAIFAAAYh0ABAADGIVAAAIBxCBQAAGAcAgUAABiHQAEAAMYhUAAAgHEIFAAAYBwCBQAAGIdAAQAAxiFQAACAcQgUAABgHAIFAAAYh0ABAADGIVAAAIBxCBQAAGAcAgUAABiHQAEAAMYhUAAAgHEIFAAAYBwCBQAAGIdAAQAAxiFQAACAcQgUAABgHAIFAAAYJ6RAKS4u1vXXX69+/fopMTFREydOVFVVVdA5J0+eVH5+vvr376++ffsqJydHXq836JyamhplZ2erd+/eSkxM1OzZs9Xa2nrhtwYAAHQLIQXKli1blJ+fr+3bt6u0tFQtLS3KzMzU8ePHA+fMmjVLr732ml588UVt2bJFdXV1mjRpUmC9ra1N2dnZam5u1rZt2/TCCy9o1apVmjdv3sW7VQAAoEuz+P1+//le+fDhw0pMTNSWLVs0fvx4NTY2asCAASopKdGdd94pSTpw4ICGDRum8vJyjRkzRhs2bNBtt92muro6OZ1OSdKKFSs0Z84cHT58WLGxsV/5dX0+nxwOhxobG2W32893/K805LHXO+1zh8tfF2ZHegQAACSF9vv7gp6D0tjYKElKSEiQJFVUVKilpUUZGRmBc4YOHapBgwapvLxcklReXq4RI0YE4kSSsrKy5PP5VFlZeSHjAACAbiLmfK/Y3t6umTNnauzYsRo+fLgkyePxKDY2VvHx8UHnOp1OeTyewDl/Hyen10+vdaSpqUlNTU2Bj30+3/mODQAAuoDzvgclPz9f+/bt09q1ay/mPB0qLi6Ww+EIXJKTkzv9awIAgMg5r0CZPn261q9fr82bN+uyyy4LHHe5XGpublZDQ0PQ+V6vVy6XK3DOF1/Vc/rj0+d8UWFhoRobGwOX2tra8xkbAAB0ESEFit/v1/Tp07Vu3Tpt2rRJKSkpQeujRo1Sz549VVZWFjhWVVWlmpoaud1uSZLb7dbevXtVX18fOKe0tFR2u12pqakdfl2bzSa73R50AQAA3VdIz0HJz89XSUmJfv/736tfv36B54w4HA7FxcXJ4XBo6tSpKigoUEJCgux2u2bMmCG3260xY8ZIkjIzM5Wamqp7771XixYtksfj0dy5c5Wfny+bzXbxbyEAAOhyQgqU5cuXS5JuvvnmoOPPP/+87r//fknS4sWLZbValZOTo6amJmVlZWnZsmWBc3v06KH169dr2rRpcrvd6tOnj/Ly8lRUVHRhtwQAAHQbF/Q+KJHC+6CcO94HBQBgirC9DwoAAEBnIFAAAIBxCBQAAGAcAgUAABiHQAEAAMYhUAAAgHEIFAAAYBwCBQAAGIdAAQAAxiFQAACAcQgUAABgHAIFAAAYh0ABAADGIVAAAIBxCBQAAGAcAgUAABiHQAEAAMYhUAAAgHEIFAAAYBwCBQAAGIdAAQAAxiFQAACAcQgUAABgHAIFAAAYh0ABAADGIVAAAIBxCBQAAGAcAgUAABiHQAEAAMYhUAAAgHEIFAAAYBwCBQAAGIdAAQAAxiFQAACAcQgUAABgHAIFAAAYh0ABAADGIVAAAIBxCBQAAGAcAgUAABiHQAEAAMYhUAAAgHEIFAAAYBwCBQAAGIdAAQAAxiFQAACAcWIiPQBwLoY89nqkR7go/rowO9IjAECXwD0oAADAOCEHytatW3X77bcrKSlJFotFr7zyStC63+/XvHnzNHDgQMXFxSkjI0Pvv/9+0DlHjhxRbm6u7Ha74uPjNXXqVB07duyCbggAAOg+Qg6U48eP6+qrr9YzzzzT4fqiRYu0dOlSrVixQjt27FCfPn2UlZWlkydPBs7Jzc1VZWWlSkt
LtX79em3dulUPPfTQ+d8KAADQrYT8HJQJEyZowoQJHa75/X4tWbJEc+fO1R133CFJWr16tZxOp1555RVNnjxZ7733njZu3KidO3dq9OjRkqSf//znuvXWW/Uf//EfSkpKuoCbAwAAuoOL+hyU6upqeTweZWRkBI45HA6lpaWpvLxcklReXq74+PhAnEhSRkaGrFarduzY0eHnbWpqks/nC7oAAIDu66K+isfj8UiSnE5n0HGn0xlY83g8SkxMDB4iJkYJCQmBc76ouLhYCxYsuJijAjhPvKIKQDh0iZcZFxYWqqCgIPCxz+dTcnJyBCcCgMgjFtGdXdSHeFwulyTJ6/UGHfd6vYE1l8ul+vr6oPXW1lYdOXIkcM4X2Ww22e32oAsAAOi+LmqgpKSkyOVyqaysLHDM5/Npx44dcrvdkiS3262GhgZVVFQEztm0aZPa29uVlpZ2MccBAABdVMgP8Rw7dkwffPBB4OPq6mrt2bNHCQkJGjRokGbOnKkf//jHuvLKK5WSkqInnnhCSUlJmjhxoiRp2LBh+ta3vqUHH3xQK1asUEtLi6ZPn67JkyfzCh4AACDpPAJl165d+uY3vxn4+PRzQ/Ly8rRq1So9+uijOn78uB566CE1NDRo3Lhx2rhxo3r16hW4zpo1azR9+nTdcsstslqtysnJ0dKlSy/CzQEAAN1ByIFy8803y+/3n3XdYrGoqKhIRUVFZz0nISFBJSUloX5pAAAQJfhbPAAAwDhd4mXGAACYrju87Nukl3xzDwoAADAOgQIAAIxDoAAAAOMQKAAAwDgECgAAMA6BAgAAjEOgAAAA4xAoAADAOAQKAAAwDoECAACMQ6AAAADjECgAAMA4BAoAADAOgQIAAIxDoAAAAOMQKAAAwDgECgAAMA6BAgAAjEOgAAAA4xAoAADAOAQKAAAwDoECAACMQ6AAAADjECgAAMA4BAoAADAOgQIAAIxDoAAAAOMQKAAAwDgECgAAMA6BAgAAjEOgAAAA4xAoAADAOAQKAAAwDoECAACMQ6AAAADjECgAAMA4BAoAADAOgQIAAIxDoAAAAOMQKAAAwDgECgAAMA6BAgAAjEOgAAAA4xAoAADAOAQKAAAwTkQD5ZlnntGQIUPUq1cvpaWl6Z133onkOAAAwBARC5Tf/va3Kigo0Pz587V7925dffXVysrKUn19faRGAgAAhohYoPzsZz/Tgw8+qAceeECpqalasWKFevfureeeey5SIwEAAENEJFCam5tVUVGhjIyMzwexWpWRkaHy8vJIjAQAAAwSE4kv+sknn6itrU1OpzPouNPp1IEDB844v6mpSU1NTYGPGxsbJUk+n69T52xv+qxTP384dPa/o3DpDnshdY/9YC/MwV6YpTvsR2fvxenP7/f7v/LciARKqIqLi7VgwYIzjicnJ0dgmq7FsSTSE+DvsR/mYC/MwV6YI1x7cfToUTkcji89JyKBcskll6hHjx7yer1Bx71er1wu1xnnFxYWqqCgIPBxe3u7jhw5ov79+8tisXT6vJ3F5/MpOTlZtbW1stvtkR4nqrEX5mAvzMFemKO77IXf79fRo0eVlJT0ledGJFBiY2M1atQolZWVaeLEiZJORUdZWZmmT59+xvk2m002my3oWHx8fBgmDQ+73d6l/4PrTtgLc7AX5mAvzNEd9uKr7jk5LWIP8RQUFCgvL0+jR4/WDTfcoCVLluj48eN64IEHIjUSAAAwRMQC5Tvf+Y4OHz6sefPmyePx6JprrtHGjRvPeOIsAACIPhF9kuz06dM7fEgnWthsNs2fP/+Mh68QfuyFOdgLc7AX5ojGvbD4z+W1PgAAAGHEHwsEAADGIVAAAIBxCBQAAGAcAgUAABinS7zVfVeWkJAQ0vkWi0W7d+/W4MGDO2mi6DVp0qSQr7NixQolJiZ2wjTRjb0wx9+/S/e5mjt3bsg/23Bu+N74HK/i6WRWq1VLliw5p3fO8/v9+v73v699+/bp8ssvD8N
00cVqteruu+9WXFzcOZ1fUlKi9957j73oBOyFOaxWq9xut2JjY8/p/D/96U+qqqpiLzoJ3xufI1A6mdVqlcfjOee67devn/785z93y//YIo29MAd7YQ72wizsx+d4iKeTtbe3h3T+0aNHO2kSbN68OaS7pTds2KBLL720EyeKXuyFOZ5//vlz/tsokvTss8/yjt+diO+Nz3EPCgAAMA6v4jHAp59+qtWrV0d6DMAora2tqqmpifQYACKEQDFATU0Nf8U5TFpaWvToo4/qiiuu0A033KDnnnsuaN3r9apHjx4Rmg5/r7KyUikpKZEeI2osW7ZMGRkZuvvuu1VWVha09sknn3TL5ziYip9TpxAoYeDz+b70wvNOwucnP/mJVq9erYcffliZmZkqKCjQ9773vaBzeNQT0Wbp0qWaPXu2hg4dKpvNpltvvVXFxcWB9ba2Nn300UcRnDC68HPqFJ6DEgZWq1UWi+Ws636/XxaLRW1tbWGcKjpdeeWVWrx4sW677TZJ0gcffKAJEyZo3Lhxeu6551RfX6+kpCT2Igyuu+66L10/ceKEDh48yF6EwVVXXaUf/vCHuueeeyRJ27Zt08SJE/Xwww+rqKhIXq+X74sw4ufUKQRKGDgcDv3whz9UWlpah+vvv/++vve973X7/9hM0Lt3b+3fv19DhgwJHPv444+Vnp6u66+/XosWLVJycjJ7EQa9evXS5MmTz/owzqFDh7Ry5Ur2Igw6+r7Yt2+fMjIy9MADD2jmzJlR8QvRFPycOoWXGYfB6f9TvOmmmzpcj4+Pj4q760zgcrn04YcfBn3jX3rppdq8ebO++c1v6v7774/YbNFm+PDhSktL07Rp0zpc37Nnj1auXBnmqaLTJZdcotra2qDvi+HDh2vTpk1KT09XXV1d5IaLQvycOoXnoITBPffco169ep113eVyaf78+WGcKHqlp6erpKTkjONJSUnatGmTqqurIzBVdBo7dqyqqqrOut6vXz+NHz8+jBNFr3Hjxunll18+43hqaqrKysq0YcOGCEwVvfg5dQoP8SCqfPTRRzpw4ICysrI6XK+rq1Npaany8vLCPBkQOe+++64qKirO+mrCffv26aWXXuJ/pMKEn1OnECgAAMA4PMQDAACMQ6AAAADjECgAAMA4BAoAADAOgQIAAIxDoBjCarUqPT1dFRUVkR4l6qWkpGjq1Km8OZUB0tPT9eSTT+qzzz6L9ChRj70wSzT8nCJQDPHcc89p/Pjxys/Pj/QoUS8vL09tbW0aO3ZspEeJeoMGDVJZWZmGDh0a6VGiHnthlmj4OcX7oAAwns/nk91uj/QYEHuB8CFQAACAcXiIxwAffvih0tPTIz1G1Dh06JB+85vf6A9/+IOam5uD1o4fP66ioqIITRZ9Tpw4oT/96U/av3//GWsnT57U6tWrIzBVdGIvuo7a2lp997vfjfQYnY57UAzw5z//Wdddd123/9PZJti5c6cyMzPV3t6ulpYWXXrppXrllVd01VVXSZK8Xi9/Vj5MDh48qMzMTNXU1MhisWjcuHFau3atBg4cKIm9CCf2omuJlt8ZMZEeIBosXbr0S9c//vjjME2Cxx9/XN/+9rf1q1/9SsePH9ecOXN00003qbS0VNdee22kx4sqc+bM0fDhw7Vr1y41NDRo5syZGjt2rN566y0NGjQo0uNFFfbCLK+++uqXrv/lL38J0ySRxT0oYWC1WjVw4EDFxsZ2uN7c3CyPx9Pta9gECQkJ2r59u77+9a8Hji1cuFCLFi3SG2+8oUGDBvF/imHidDr15ptvasSIEZIkv9+v73//+/rDH/6gzZs3q0+fPuxFmLAXZrFarbJYLPqyX88Wi6Xb7wfPQQmDwYMHa/Hixaquru7w8vrrr0d6xKhy8uTJoI8fe+wxPf7448rMzNS2bdsiNFX0OXHihGJiPr8T12KxaPny5br99tt100036eDBgxGcLrqwF2YZOHCgXn75ZbW3t3d42b17d6R
HDAsCJQxGjRr1pW/A9lWljItn+PDhHUbII488osLCQk2ZMiUCU0WnoUOHateuXWcc/8UvfqE77rhD//zP/xyBqaITe2EWfmecQqCEQVFRke66666zrqempqq6ujqME0Wv++67T2+//XaHa48++qgWLFjAY+5h8u1vf1v/9V//1eHaL37xC02ZMiUqfgibgL0wy+zZs3XjjTeedf2KK67Q5s2bwzhRZPAcFAAAYBzuQQEAAMYhUAAAgHEIFAAAYBwCBQAAGIdAAQAAxiFQDLF69Wp9+OGHkR4DYi9MsnXrVjU2NkZ6DIi9ME007AeBYoj7779fqampmjFjRqRHiXrshTluvvlmXX755Xr66acjPUrUYy/MEg37QaAYor29XQcOHNCwYcMiPUrUYy/MUV1drd/97nfyer2RHiXqsRdmiYb94I3aAACAcbgHxQCtra2qqamJ9BgQewEApiBQDFBZWamUlJRIjwGxF+G2bNkyZWRk6O6771ZZWVnQ2ieffKLLL788QpNFH/bCLOwHgQIgQpYuXarZs2dr6NChstlsuvXWW1VcXBxYb2tr00cffRTBCaMHe2EW9uOUmEgPEA2uu+66L10/ceJEmCYBe2GOZ599VitXrtQ999wjSZo2bZomTpyoEydOqKioKMLTRRf2wizsxykEShjs379fkydPPutDB4cOHdLBgwfDPFV0Yi/MUV1dHfQn5W+88UZt2rRJGRkZamlp0cyZMyM3XJRhL8zCfpxCoITB8OHDlZaWpmnTpnW4vmfPHq1cuTLMU0Un9sIcl1xyiWprazVkyJDAseHDh2vTpk1KT09XXV1d5IaLMuyFWdiPU3gOShiMHTtWVVVVZ13v16+fxo8fH8aJohd7YY5x48bp5ZdfPuN4amqqysrKtGHDhghMFZ3YC7OwH6fwPigAIuLdd99VRUWFHnjggQ7X9+3bp5deeknz588P82TRh70wC/txCoECAACMw0M8nezdd99Ve3v7OZ9fWVmp1tbWTpwoerEX5mAvzMFemIX9+Bz3oHSyHj16yOPxaMCAAed0vt1u1549e6LiTXjCjb0wB3thDvbCLOzH53gVTyfz+/164okn1Lt373M6v7m5uZMnil7shTnYC3OwF2ZhPz5HoHSy8ePHf+mrRr7I7XYrLi6uEyeKXuyFOdgLc7AXZmE/PsdDPAAAwDg8SRYAABiHQAEAAMYhUAAAgHEIFAAAYBwCBQAAGIdAAQAAxiFQAACAcQgUAF1SW1tbSH+zBEDXQqAACKvf/e53GjFihOLi4tS/f39lZGTo97//vXr16qWGhoagc3/wgx8oPT1dkrRq1SrFx8fr1VdfVWpqqmw2m2pqaiJwCwCEA4ECIGwOHTqkKVOm6Lvf/a7ee+89vfXWW5o0aZJuvvlmxcfH66WXXgqc29bWpt/+9rfKzc0NHPvss8/005/+VL/61a9UWVmpxMTESNwMAGHA3+IBEDaHDh1Sa2urJk2apMGDB0uSRowYIUmaPHmySkpKNHXqVElSWVmZGhoalJOTE7h+S0uLli1bpquvvjr8wwMIK+5BARA2V199tW655RaNGDFCd911l1auXKlPP/1UkpSbm6u33npLdXV1kqQ1a9YoOztb8fHxgevHxsZq5MiRkRgdQJgRKADCpkePHiotLdWGDRuUmpqqn//85/rGN76h6upqXX/99fqHf/gHrV27VidOnNC6deuCHt6RpLi4OFkslghNDyCcCBQAYWWxWDR27FgtWLBA//u//6vY2FitW7dO0ql7UdasWaPXXntNVqtV2dnZEZ4WQKQQKADCZseOHXrqqae0a9cu1dTU6OWXX9bhw4c1bNgwSacCZffu3frJT36iO++8UzabLcITA4gUniQLIGzsdru2bt2qJUuWyOfzafDgwXr66ac1YcIESdIVV1yhG264Qe+8846WLFkS2WEBRJTF7/f7Iz0EAADA3+MhHgAAYBwCBQAAGIdAAQAAxiFQAACAcQgUAAB
gHAIFAAAYh0ABAADGIVAAAIBxCBQAAGAcAgUAABiHQAEAAMYhUAAAgHH+H2bIYAMNT2tMAAAAAElFTkSuQmCC", - "text/plain": [ - "
" - ] - }, - "metadata": {}, - "output_type": "display_data" - } - ], - "source": [ - "# gs = [ql.HGate, ql.SGate, ql.TGate, ql.CXGate] # approx universal set\n", - "# gs = [ql.HGate, ql.CCXGate] # approx universal set\n", - "gs = [ql.HGate, ql.CXGate]\n", - "\n", - "plot_svr_stat(num_of_qubits=3, min_gates=6, max_gates=8, gs=gs, samples=int(1e3), rng=np.random.default_rng())" - ] - }, - { - "cell_type": "markdown", - "id": "f8f449e7-9d6a-405e-81db-b5635702bed6", - "metadata": {}, - "source": [ - "## SVR Test cases" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "62d87f70-2e3c-4190-9a56-76a7985dd24e", - "metadata": {}, - "outputs": [], - "source": [ - "def test_srv(system_dims, init, target):\n", - " vec = qi.Statevector(init, dims=system_dims)\n", - " vec *= 1/np.sqrt(vec.trace())\n", - " srv = schmidt_rank_vector(qi.DensityMatrix(vec)) \n", - " assert srv == target, f\"srv: {srv}\"\n", - " print(f\"passed test, svr: {srv}\")\n", - " display(vec.draw('latex', prefix='|\\\\psi\\\\rangle = '))" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "2a277e60-537e-4c5d-b0ed-51f446852990", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "passed test, svr: [1, 1]\n" - ] - }, - { - "data": { - "text/latex": [ - "$$|\\psi\\rangle = \\frac{\\sqrt{2}}{2} |00\\rangle+\\frac{\\sqrt{2}}{2} |01\\rangle$$" - ], - "text/plain": [ - "" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "passed test, svr: [2, 2]\n" - ] - }, - { - "data": { - "text/latex": [ - "$$|\\psi\\rangle = \\frac{\\sqrt{2}}{2} |00\\rangle+\\frac{\\sqrt{2}}{2} |11\\rangle$$" - ], - "text/plain": [ - "" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "passed test, svr: [2, 2, 2]\n" - ] - }, - { - "data": { - "text/latex": [ - "$$|\\psi\\rangle = \\frac{\\sqrt{2}}{2} 
|000\\rangle+\\frac{\\sqrt{2}}{2} |111\\rangle$$" - ], - "text/plain": [ - "" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "passed test, svr: [3, 3, 3]\n" - ] - }, - { - "data": { - "text/latex": [ - "$$\\begin{align}\n", - "\n", - "|\\psi\\rangle = \n", - "\\begin{bmatrix}\n", - "\\frac{\\sqrt{3}}{3} & 0 & 0 & 0 & \\cdots & 0 & 0 & \\frac{\\sqrt{3}}{3} \\\\\n", - " \\end{bmatrix}\n", - "\\\\\n", - "\\text{dims=(3, 3, 3)}\n", - "\\end{align}$$" - ], - "text/plain": [ - "" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "passed test, svr: [2, 2, 4]\n" - ] - }, - { - "data": { - "text/latex": [ - "$$\\begin{align}\n", - "\n", - "|\\psi\\rangle = \n", - "\\begin{bmatrix}\n", - "\\frac{1}{2} & 0 & 0 & 0 & \\cdots & 0 & 0 & 0 \\\\\n", - " \\end{bmatrix}\n", - "\\\\\n", - "\\text{dims=(4, 4, 4)}\n", - "\\end{align}$$" - ], - "text/plain": [ - "" - ] - }, - "metadata": {}, - "output_type": "display_data" - } - ], - "source": [ - "#---------------- |0+> = |00>+|01>\n", - "system_dims = (2,2)\n", - "init = np.zeros(np.prod(system_dims), dtype=complex)\n", - "init[0] = 1\n", - "init[1] = 1\n", - "test_srv(system_dims, init, [1, 1])\n", - "\n", - "#----------------Bell, |00>+|11>\n", - "system_dims = (2,2)\n", - "init = np.zeros(np.prod(system_dims), dtype=complex)\n", - "init[0] = 1\n", - "init[3] = 1\n", - "test_srv(system_dims, init, [2, 2])\n", - " \n", - "#----------------GHZ, |000>+|111>\n", - "system_dims = (2,2,2)\n", - "init = np.zeros(np.prod(system_dims), dtype=complex)\n", - "init[0] = 1\n", - "init[7] = 1\n", - "test_srv(system_dims, init, [2,2,2])\n", - " \n", - "#----------------Sym, |000>+|111>+|222>\n", - "system_dims = (3,3,3)\n", - "init = np.zeros(np.prod(system_dims), dtype=complex)\n", - "init[0] = 1\n", - "init[13] = 1\n", - "init[26] = 1\n", - "test_srv(system_dims, init, [3,3,3])\n", - " 
\n", - "#----------------Wikipedia example, |000>+|101>+|210>+|311>\n", - "system_dims = (4,4,4)\n", - "init = np.zeros(np.prod(system_dims), dtype=complex)\n", - "init[0] = 1\n", - "init[17] = 1\n", - "init[36] = 1\n", - "init[53] = 1\n", - "test_srv(system_dims, init, [2, 2, 4])" - ] - }, - { - "cell_type": "markdown", - "id": "92a90911-c1ac-4212-b101-7f208855abab", - "metadata": {}, - "source": [ - "# Export -" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "c96927df-676c-4584-ae67-64fa7707eb65", - "metadata": {}, - "outputs": [], - "source": [ - "#| hide\n", - "import nbdev; nbdev.nbdev_export()" - ] - } - ], - "metadata": { - "kernelspec": { - "display_name": "python3", - "language": "python", - "name": "python3" - }, - "widgets": { - "application/vnd.jupyter.widget-state+json": { - "state": {}, - "version_major": 2, - "version_minor": 0 - } - } - }, - "nbformat": 4, - "nbformat_minor": 5 -} diff --git a/src/platform/tokenizer/base_tokenizer.ipynb b/src/platform/tokenizer/base_tokenizer.ipynb new file mode 100644 index 0000000..ee386a6 --- /dev/null +++ b/src/platform/tokenizer/base_tokenizer.ipynb @@ -0,0 +1,113 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "id": "a8980c24-d62e-462b-ba89-3195cfdcc374", + "metadata": {}, + "source": [ + "# Base tokenizer\n", + "\n", + "> Base class of corresponding tokenizers." 
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "a8832bdd-f61c-44e1-8619-a9cb352ba768", + "metadata": {}, + "outputs": [], + "source": [ + "#| default_exp platform.tokenizer.base_tokenizer" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "06272f6f-b4e3-4504-a90a-feebbf6ad821", + "metadata": {}, + "outputs": [], + "source": [ + "#| export\n", + "from genQC.imports import *" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "49c180c0-dd8c-4a69-b2f5-77944f6d3bcf", + "metadata": {}, + "outputs": [], + "source": [ + "#| export\n", + "Vocabulary = dict[str, int] | dict[Any, int]\n", + "VocabularyInverse = dict[int, str] | dict[int, Any]\n", + "\n", + "def invert_vocabulary(vocabulary: Vocabulary) -> VocabularyInverse:\n", + " vocabulary_inverse = {token:gate for gate, token in vocabulary.items()}\n", + " return vocabulary_inverse " + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "36032308-bd0e-4409-9db0-9d89fc258e5a", + "metadata": {}, + "outputs": [], + "source": [ + "#| export\n", + "class BaseTokenizer(abc.ABC):\n", + "\n", + " def __init__(self, vocabulary: Vocabulary) -> None:\n", + " self.vocabulary = vocabulary\n", + " self.vocabulary_inverse = invert_vocabulary(vocabulary)\n", + "\n", + " @abc.abstractmethod\n", + " def tokenize(self, *args, **kwargs):\n", + " raise NotImplementedError()\n", + "\n", + " @abc.abstractmethod\n", + " def encode(self, *args, **kwargs):\n", + " raise NotImplementedError()\n", + "\n", + " @abc.abstractmethod\n", + " def decode(self, *args, **kwargs):\n", + " raise NotImplementedError()" + ] + }, + { + "cell_type": "markdown", + "id": "f41f26a8-ac40-4e91-8c0e-1ef07a0fd4f4", + "metadata": {}, + "source": [ + "# Export -" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "a0474216-8e0c-4ba7-9a37-571ac7d8e82c", + "metadata": {}, + "outputs": [], + "source": [ + "#| hide\n", + "import nbdev; nbdev.nbdev_export()" + ] + } + ], + 
"metadata": { + "kernelspec": { + "display_name": "python3", + "language": "python", + "name": "python3" + }, + "widgets": { + "application/vnd.jupyter.widget-state+json": { + "state": {}, + "version_major": 2, + "version_minor": 0 + } + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/src/platform/tokenizer/circuits_tokenizer.ipynb b/src/platform/tokenizer/circuits_tokenizer.ipynb new file mode 100644 index 0000000..d4ba3ec --- /dev/null +++ b/src/platform/tokenizer/circuits_tokenizer.ipynb @@ -0,0 +1,344 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "id": "a8980c24-d62e-462b-ba89-3195cfdcc374", + "metadata": {}, + "source": [ + "# Circuits tokenizer\n", + "\n", + "> Class to tokenize quantum circuits. Encode and decode quantum circuits into and from tensor representations." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "a8832bdd-f61c-44e1-8619-a9cb352ba768", + "metadata": {}, + "outputs": [], + "source": [ + "#| default_exp platform.tokenizer.circuits_tokenizer" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "06272f6f-b4e3-4504-a90a-feebbf6ad821", + "metadata": {}, + "outputs": [], + "source": [ + "#| export\n", + "from genQC.imports import *\n", + "from genQC.platform.tokenizer.base_tokenizer import BaseTokenizer, Vocabulary\n", + "from genQC.platform.circuits_instructions import CircuitInstructions" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "36032308-bd0e-4409-9db0-9d89fc258e5a", + "metadata": {}, + "outputs": [], + "source": [ + "#| export\n", + "class CircuitTokenizer(BaseTokenizer):\n", + "\n", + " def __init__(self, vocabulary: Vocabulary, sign_labels: Optional[dict[str, int]] = None) -> None: \n", + " if 0 in vocabulary.values(): \n", + " print(f\"[WARNING]: The value 0 is reserved for background tokens, i.e. 
qubit time position which are not effected by gates.\")\n", + " print(f\"[WARNING]: Automatically incrementing all vocabulary values by one ...\")\n", + " vocabulary = {k:v+1 for k,v in vocabulary.items()}\n", + " assert 0 not in vocabulary.values()\n", + " \n", + " super().__init__(vocabulary)\n", + " self.sign_labels = default(sign_labels, {\"control_nodes\": -1, \"target_nodes\": +1})\n", + " \n", + " def tokenize(self, instructions: CircuitInstructions) -> torch.Tensor | Tuple[torch.Tensor, torch.Tensor]:\n", + " \"\"\"Convert given instructions to a tensor. Identical to `CircuitTokenizer.encode`.\"\"\"\n", + " return self.encode(instructions=instructions)\n", + " \n", + " def encode(self, \n", + " instructions: CircuitInstructions, \n", + " max_gates: Optional[int] = None, \n", + " return_params_tensor: bool = True, \n", + " params_4pi_normalization: bool = True, \n", + " randomize_params: bool = False) -> torch.Tensor | Tuple[torch.Tensor, torch.Tensor]:\n", + " \"\"\"Convert given `CircuitInstructions` to a `torch.Tensor`.\"\"\"\n", + "\n", + " assert len(instructions.tensor_shape) == 2\n", + " num_of_qubits, time = instructions.tensor_shape\n", + " max_gates = default(max_gates, time)\n", + " \n", + " tensor = torch.zeros((num_of_qubits, max_gates), dtype=torch.int32) \n", + " params = []\n", + "\n", + " for t, instruction in zip(range(max_gates), instructions.data): # this way we limit the number of gates even if there are more instructions\n", + " \n", + " if instruction.name not in self.vocabulary: raise Warning(f\"`{instruction.name}` not in vocabulary.\")\n", + " \n", + " params.append(instruction.params)\n", + " \n", + " gate_id = self.vocabulary[instruction.name] \n", + " \n", + " control_qubits, target_qubits = instruction.control_nodes, instruction.target_nodes\n", + " \n", + " for bit in control_qubits:\n", + " tensor[bit, t] = gate_id * self.sign_labels[\"control_nodes\"]\n", + " \n", + " for bit in target_qubits:\n", + " tensor[bit, t] = gate_id 
* self.sign_labels[\"target_nodes\"]\n", + "\n", + " if return_params_tensor: \n", + " num_of_max_params = max([0] + [len(para) for para in params])\n", + " params_tensor = torch.zeros((num_of_max_params, max_gates), dtype=torch.float32)\n", + " \n", + " for t, para in enumerate(params):\n", + " para = torch.tensor(para)\n", + "\n", + " if randomize_params:\n", + " para = 2.0*torch.rand_like(para) - 1.0 # rnd [-1, 1]\n", + " \n", + " elif params_4pi_normalization: \n", + " para = para % (4.0*np.pi) # limit to [0, 4pi]\n", + " para = (para-2.0*np.pi) / (2.0*np.pi) # [0, 4pi] to [-1, +1] \n", + "\n", + " params_tensor[:len(para), t] = para\n", + " \n", + " return tensor, params_tensor \n", + " return tensor\n", + " \n", + " def decode(self, \n", + " tensor: torch.Tensor, \n", + " params_tensor: Optional[torch.Tensor] = None, \n", + " params_4pi_normalization: bool = True,\n", + " ignore_errors: bool = False,\n", + " place_error_placeholders: bool = False) -> CircuitInstructions:\n", + " \"\"\"Convert a given `torch.Tensor` to `CircuitInstructions`.\"\"\"\n", + " \n", + " assert tensor.dim() == 2, f\"{tensor.shape=}\"\n", + " num_of_qubits, time = tensor.shape\n", + " \n", + " instructions = CircuitInstructions(tensor_shape=tensor.shape)\n", + " \n", + " for t in range(time): \n", + " enc_time_slice = tensor[:, t] # contains all bits at time t \n", + "\n", + " _gate_placed = False\n", + " \n", + " for gate_index, gate in self.vocabulary_inverse.items(): \n", + " \n", + " target_nodes = (enc_time_slice == (self.sign_labels[\"target_nodes\"] * gate_index)).nonzero(as_tuple=True)[0]\n", + " control_nodes = (enc_time_slice == (self.sign_labels[\"control_nodes\"] * gate_index)).nonzero(as_tuple=True)[0]\n", + "\n", + " _gate_placed = False\n", + " \n", + " if target_nodes.nelement() > 0: \n", + " params = []\n", + " if exists(params_tensor):\n", + " params = params_tensor[:, t]\n", + " if params_4pi_normalization:\n", + " params = (params+1.0) * 2.0*np.pi # [-1, 1] to [0, 
4pi]\n", + " params = params.tolist()\n", + "\n", + " instructions.add_instruction(gate, control_nodes.tolist(), target_nodes.tolist(), params)\n", + " _gate_placed = True\n", + " \n", + " break #break on first hit, per def only one gate allowed per t\n", + " \n", + " elif control_nodes.nelement() > 0: # no target but control means error\n", + " if not ignore_errors: \n", + " raise RuntimeError(\"target_nodes.nelement() <= 0 but control_nodes.nelement() > 0\")\n", + "\n", + " if not _gate_placed and place_error_placeholders:\n", + " # note we place a h gate with no qubits, so this is always an error\n", + " instructions.add_instruction(\"h\", [], [], [])\n", + " \n", + " #else # we are fine with tensors that have time steps with no action!\n", + " \n", + " return instructions\n", + "\n", + " @staticmethod\n", + " def get_parametrized_tokens(vocabulary: Vocabulary) -> List[int]:\n", + " parametrized_names = \"rx ry rz phase cp crx cry crz u u2 u3\".split()\n", + " non_parametrized_names = \"x y z h cx cy cz ch ccx swap s sdg t tdg\".split()\n", + " \n", + " parametrized_tokens = []\n", + " for name, token in vocabulary.items():\n", + "\n", + " if name in parametrized_names:\n", + " parametrized_tokens.append(token)\n", + " elif name not in non_parametrized_names:\n", + " raise NotImplementedError(f\"Unknown gate {name}! 
Please add it to the known list.\")\n", + "\n", + " return parametrized_tokens" + ] + }, + { + "cell_type": "markdown", + "id": "07373331-a7f5-4bf9-8751-008dc22f7732", + "metadata": {}, + "source": [ + "## Test" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "2500b26c-f562-45ce-9c54-96c4c2acc829", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "CircuitInstruction(name='u2', control_nodes=[], target_nodes=[0], params=[0.628318727016449, 6.91150426864624])\n", + "CircuitInstruction(name='u2', control_nodes=[], target_nodes=[1], params=[11.9380521774292, 1.8849557638168335])\n", + "CircuitInstruction(name='ccx', control_nodes=[0, 2], target_nodes=[1], params=[6.2831854820251465, 6.2831854820251465])\n", + "{'u2', 'ccx'}\n" + ] + } + ], + "source": [ + "tensor = torch.tensor([\n", + " [1, 0,-2],\n", + " [0, 1, 2],\n", + " [0, 0,-2],\n", + " ], dtype=torch.int32)\n", + "\n", + "params_tensor = torch.tensor([ # ... [max_params, time]\n", + " [-0.9, 0.9, 0],\n", + " [ 0.1, -0.7, 0]\n", + " ])\n", + "\n", + "tokenizer = CircuitTokenizer({\"u2\":1, \"ccx\":2})\n", + "instructions = tokenizer.decode(tensor, params_tensor)\n", + "\n", + "instructions.print()\n", + "print(instructions.instruction_names_set)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "0e1d1dd2-8071-464b-b5d5-484406eb7c67", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "(tensor([[ 1, 0, -2],\n", + " [ 0, 1, 2],\n", + " [ 0, 0, -2]], dtype=torch.int32),\n", + " tensor([[-0.9000, 0.9000, 0.0000],\n", + " [ 0.1000, -0.7000, 0.0000]]))" + ] + }, + "execution_count": null, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "enc_tensor, enc_params_tensor = tokenizer.encode(instructions)\n", + "enc_tensor, enc_params_tensor" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "b99b8f57-d808-4974-ae18-23fa9920a61f", + "metadata": {}, + "outputs": [], 
+ "source": [ + "assert torch.allclose(tensor, enc_tensor)\n", + "assert torch.allclose(params_tensor, enc_params_tensor)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "46ad7bae-53a9-4d36-a3ec-30f2ea92f7a4", + "metadata": {}, + "outputs": [], + "source": [ + "tokenizer = CircuitTokenizer({\"u2\":1, \"ccx\":2})\n", + "assert tokenizer.vocabulary == {'u2': 1, 'ccx': 2}" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "61579dcd-8c9e-4959-b945-6dfe21145fe0", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "[WARNING]: The value 0 is reserved for background tokens, i.e. qubit time position which are not effected by gates.\n", + "[WARNING]: Automatically incrementing all vocabulary values by one ...\n" + ] + } + ], + "source": [ + "# test background token checking\n", + "tokenizer = CircuitTokenizer({\"u2\":0, \"ccx\":1, \"h\":2, \"ry\":3})\n", + "assert tokenizer.vocabulary == {\"u2\":1, \"ccx\":2, \"h\":3, \"ry\":4}" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "0b915904-7c7b-44b5-ae83-69fcee861fa4", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "[1, 4]\n" + ] + } + ], + "source": [ + "print(CircuitTokenizer.get_parametrized_tokens(tokenizer.vocabulary))\n", + "assert CircuitTokenizer.get_parametrized_tokens(tokenizer.vocabulary) == [1, 4]" + ] + }, + { + "cell_type": "markdown", + "id": "f41f26a8-ac40-4e91-8c0e-1ef07a0fd4f4", + "metadata": {}, + "source": [ + "# Export -" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "a0474216-8e0c-4ba7-9a37-571ac7d8e82c", + "metadata": {}, + "outputs": [], + "source": [ + "#| hide\n", + "import nbdev; nbdev.nbdev_export()" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "python3", + "language": "python", + "name": "python3" + }, + "widgets": { + "application/vnd.jupyter.widget-state+json": { + "state": {}, + 
"version_major": 2, + "version_minor": 0 + } + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/src/platform/tokenizer/tensor_tokenizer.ipynb b/src/platform/tokenizer/tensor_tokenizer.ipynb new file mode 100644 index 0000000..db986f5 --- /dev/null +++ b/src/platform/tokenizer/tensor_tokenizer.ipynb @@ -0,0 +1,463 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "id": "a8980c24-d62e-462b-ba89-3195cfdcc374", + "metadata": {}, + "source": [ + "# Tensor tokenizer\n", + "\n", + "> Class to further tokenize tensor representations. " + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "a8832bdd-f61c-44e1-8619-a9cb352ba768", + "metadata": {}, + "outputs": [], + "source": [ + "#| default_exp platform.tokenizer.tensor_tokenizer" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "06272f6f-b4e3-4504-a90a-feebbf6ad821", + "metadata": {}, + "outputs": [], + "source": [ + "#| export\n", + "from genQC.imports import *\n", + "from genQC.platform.tokenizer.base_tokenizer import BaseTokenizer, Vocabulary" + ] + }, + { + "cell_type": "markdown", + "id": "6727ed63-a766-4893-8642-0d0e141c6cff", + "metadata": {}, + "source": [ + "## GatePairTokenizer" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "4a4b051b-eba3-4b3a-9bec-c5c44655bf26", + "metadata": {}, + "outputs": [], + "source": [ + "#| export\n", + "class GatePairTokenizer(BaseTokenizer):\n", + "\n", + " def __init__(self, unique_class_values, zero_token, padding_token, device):\n", + " super().__init__({})\n", + " \n", + " self.padding_token = padding_token\n", + " self.not_gates_tokens = torch.tensor([zero_token, padding_token]).to(device)\n", + "\n", + " self.current_tokens = torch.tensor(unique_class_values, device=device)\n", + " self._current_depth = 0\n", + " \n", + " self.token_lookup = {} #reduced forms, used for gadget extraction\n", + " self.token_lookup_raw = {} #the raw form, used for encoding\n", + " \n", + " self.token_depth = {tok:0 for 
tok in self.current_tokens.cpu().tolist()}\n", + " self.token_cnts = {}\n", + " \n", + " def learn(self, tensors, max_depth, max_iters):\n", + " # loop over get bets and then replace\n", + " \n", + " current_tensor = tensors\n", + " self._current_depth = 0\n", + " \n", + " for i in tqdm(range(max_iters), total=max_iters):\n", + " \n", + " overlap_pairs = self.extract_new_gate_overlap_pairs(current_tensor)\n", + " overlap_pairs_std_form = self.standardize_overlap_pairs(overlap_pairs)\n", + "\n", + " top_pairs, topv = self.get_topk_pairs(overlap_pairs_std_form, k=1)\n", + " top_pair = top_pairs[0]\n", + " \n", + " if top_pair.abs().sum() < 1:\n", + " print(\"break: top_pair.abs().sum() < 1\")\n", + " break\n", + "\n", + " if topv < 2:\n", + " print(\"break: no more pair with cnt > 1\")\n", + " break\n", + " \n", + " current_tensor = self.learn_step(current_tensor, top_pair, topv=topv)\n", + "\n", + " current_max_depth = max(self.token_depth.values())\n", + " if current_max_depth > max_depth:\n", + " print(f\"break: max_depth {max_depth} reached\")\n", + " break\n", + " \n", + " print(\"break: max_iters reached\")\n", + " return current_tensor\n", + "\n", + " def to(self, device):\n", + " self.not_gates_tokens = self.not_gates_tokens.to(device) \n", + " self.current_tokens = self.current_tokens.to(device) \n", + " \n", + " for k, v in self.token_lookup.items():\n", + " self.token_lookup[k] = self.token_lookup[k].to(device) \n", + " \n", + " for k, v in self.token_lookup_raw.items():\n", + " self.token_lookup_raw[k] = self.token_lookup_raw[k].to(device) \n", + " \n", + " return self\n", + "\n", + " def tokenize(self, tensors):\n", + " \"\"\"Identical to `GatePairTokenizer.encode`.\"\"\"\n", + " return self.encode(tensors=tensors)\n", + " \n", + " #---------------------------------------\n", + " # Replace pairs with new tokens\n", + "\n", + " def learn_step(self, current_tensor, top_pair, new_tokens: Optional[torch.Tensor] = None, topv: Optional[torch.Tensor] = 
None):\n", + "\n", + " top_pair_reduced = top_pair[top_pair.abs().sum(-1)>0].unique_consecutive(dim=0)\n", + "\n", + " if not_exists(new_tokens):\n", + " new_tokens = self.current_tokens.max() + 1 + torch.arange(top_pair_reduced.shape[0], device=current_tensor.device)\n", + " self.current_tokens = torch.cat([self.current_tokens, new_tokens])\n", + "\n", + " key = tuple(new_tokens.cpu().tolist())\n", + " self.token_lookup[key] = top_pair_reduced #top_pair[top_pair.sum(-1)>0]\n", + " self.token_lookup_raw[key] = top_pair\n", + " self.token_cnts[key] = topv\n", + " \n", + " _current_depth = max(self.token_depth[k] for k in top_pair_reduced.flatten().cpu().tolist()) + 1\n", + " \n", + " for tok in new_tokens.cpu().tolist():\n", + " self.token_depth[tok] = _current_depth\n", + "\n", + " if _current_depth > self._current_depth:\n", + " self._current_depth = _current_depth\n", + " print(f\"New depth reached {self._current_depth}\")\n", + " else:\n", + " assert top_pair_reduced.shape[0] == new_tokens.shape[0]\n", + " \n", + " # 1) Replace one all even pairs\n", + " current_overlap_pairs = self.extract_current_gate_overlap_pairs(current_tensor, odd_pairs=False)\n", + " current_tensor = self.replace_current_overlap_pairs(current_tensor, current_overlap_pairs, top_pair, top_pair_reduced, new_tokens, odd_pairs=False)\n", + " \n", + " # 2) Then Replace one all odd pairs\n", + " current_overlap_pairs = self.extract_current_gate_overlap_pairs(current_tensor, odd_pairs=True)\n", + " current_tensor = self.replace_current_overlap_pairs(current_tensor, current_overlap_pairs, top_pair, top_pair_reduced, new_tokens, odd_pairs=True)\n", + " \n", + " return current_tensor\n", + " \n", + " def extract_current_gate_overlap_pairs(self, current_tensor, odd_pairs: bool = True):\n", + " # Extract overlap_pairs\n", + " # ToDo optimize loops\n", + "\n", + " seq = current_tensor.shape[-1]\n", + " seq_half = seq // 2 \n", + " assert seq % 2 == 0\n", + "\n", + " overlap_pairs = []\n", + " for 
current_tensor_i in current_tensor:\n", + " _overlap_pairs = []\n", + "\n", + " if odd_pairs:\n", + " for t in range(seq_half-1):\n", + " _overlap_pairs.append(current_tensor_i[:, 1+2*t:1+2*(t+1)])\n", + " else:\n", + " for t in range(seq_half):\n", + " _overlap_pairs.append(current_tensor_i[:, 2*t:2*(t+1)])\n", + " \n", + " overlap_pairs.append(torch.stack(_overlap_pairs))\n", + " \n", + " overlap_pairs = torch.stack(overlap_pairs)\n", + " return overlap_pairs\n", + " \n", + " def replace_current_overlap_pairs(self, current_tensor, overlap_pairs, top_pair, top_pair_reduced, new_tokens, odd_pairs):\n", + "\n", + " overlap_pairs_std = self.standardize_overlap_pairs(overlap_pairs)\n", + " is_top_overlap_pair = (overlap_pairs_std==top_pair).all(dim=(-1,-2), keepdim=False)\n", + "\n", + " new_tensor = torch.full_like(current_tensor, self.padding_token)\n", + " \n", + " for i in range(is_top_overlap_pair.shape[0]): #ToDo: this loop can be put in parallel! is batch dim\n", + " t = 1 if odd_pairs else 0\n", + " \n", + " for j in range(is_top_overlap_pair.shape[1]):\n", + " \n", + " if is_top_overlap_pair[i, j]: #replace\n", + " \n", + " new_col = torch.zeros((current_tensor.shape[1]), dtype=overlap_pairs.dtype, device=overlap_pairs.device)\n", + " \n", + " for new_token, top_pair_reduced_i in zip(new_tokens, top_pair_reduced):\n", + " ind = (overlap_pairs[i, j]==top_pair_reduced_i).all(-1)\n", + " new_col = torch.where(ind, new_token, new_col)\n", + " \n", + " new_col = new_col.unsqueeze(-1)\n", + " \n", + " tp1 = t + 1\n", + " \n", + " else: #just copy old\n", + " new_col = overlap_pairs[i, j]\n", + " tp1 = t + 2\n", + " \n", + " new_tensor[i, :, t:tp1] = new_col\n", + " t = tp1\n", + "\n", + " if odd_pairs:\n", + " # copy first and last col\n", + " new_tensor[..., 0] = current_tensor[..., 0]\n", + " new_tensor[..., -1] = current_tensor[..., -1]\n", + " \n", + " return new_tensor \n", + " \n", + " #---------------------------------------\n", + " # Find new pairs\n", + " 
\n", + " def extract_new_gate_overlap_pairs(self, current_tensor):\n", + " #current_tensor = current_tensor.abs()\n", + "\n", + " isgate_token = 1 - torch.isin(current_tensor.abs(), self.not_gates_tokens.to(current_tensor.device)).int()\n", + "\n", + " # These are postions of the pairs (therefore shape-1) in which we have an overlap\n", + " overlaps = isgate_token[..., :-1] + isgate_token[..., 1:]\n", + " overlaps = (overlaps>1).int()\n", + "\n", + " # Number of overlaps two gates have! we can say here only take 2 overlaps, or min 2, or min 1, eg.. -> 0 means parallel!!\n", + " overlaps_cnt = torch.count_nonzero(overlaps, dim=1) \n", + " overlaps_ind = (overlaps_cnt>0)\n", + " \n", + " # Extract overlap_pairs\n", + " # ToDo optimize loops\n", + " \n", + " overlap_pairs = []\n", + " for current_tensor_i, overlaps_ind_i in zip(current_tensor, overlaps_ind):\n", + " for t in range(current_tensor_i.shape[-1]-1):\n", + " if overlaps_ind_i[t]:\n", + " overlap_pairs.append(current_tensor_i[:, t:t+2])\n", + "\n", + " overlap_pairs = torch.stack(overlap_pairs)\n", + " return overlap_pairs\n", + "\n", + " def standardize_overlap_pairs(self, overlap_pairs):\n", + " # Now we convert to std form, where the 1st gate gives the main order and the 2nd the secondory, this should remove all(?) 
redundant combinations!\n", + "\n", + " # 1) sort inner SECOND gate such that gate 2 is always on top\n", + " inner_sorted_gate2, inner_sorted_gate2_indices = torch.sort(overlap_pairs[..., 1], dim=-1, descending=True, stable=False)\n", + " inner_sorted_gate1 = torch.gather(overlap_pairs[..., 0], dim=-1, index=inner_sorted_gate2_indices)\n", + " \n", + " inner_overlap_pairs = torch.stack((inner_sorted_gate1, inner_sorted_gate2), dim=-1)\n", + " \n", + " # 2) sort outer FISRT gate such that gate 1 is always on top, NOTE WE NEED STABLE SORT TO CONSERVE INNER ORDER\n", + " outer_sorted_gate1, outer_sorted_gate1_indices = torch.sort(inner_overlap_pairs[..., 0], dim=-1, descending=True, stable=True)\n", + " outer_sorted_gate2 = torch.gather(inner_overlap_pairs[..., 1], dim=-1, index=outer_sorted_gate1_indices)\n", + " \n", + " overlap_pairs_std_form = torch.stack((outer_sorted_gate1, outer_sorted_gate2), dim=-1)\n", + "\n", + " return overlap_pairs_std_form.contiguous()\n", + "\n", + " def get_topk_pairs(self, overlap_pairs, k):\n", + " # Now we can easily count the unique valid pairs! \n", + " pot_pairs, pot_pairs_cnts = overlap_pairs.unique(dim=0, return_counts=True)\n", + " \n", + " # Get topk best pairs\n", + " topv, topi = torch.topk(pot_pairs_cnts, k)\n", + " top_pairs = pot_pairs[topi]\n", + " \n", + " return top_pairs, topv\n", + "\n", + " #---------------------------------------\n", + " # Encoding\n", + "\n", + " def encode(self, tensors):\n", + " # just replay all the pair replacements from learn, i.e. 
the vocab\n", + "\n", + " s = tensors.shape[1]\n", + " current_tensor = tensors\n", + "\n", + " for new_tokens, top_pair in tqdm(self.token_lookup_raw.items()):\n", + " top_pair = self.standardize_vocab_pair(top_pair, s, sort=True)\n", + " new_tokens = torch.tensor(new_tokens, device=top_pair.device, dtype=top_pair.dtype)\n", + " \n", + " current_tensor = self.learn_step(current_tensor, top_pair, new_tokens=new_tokens)\n", + "\n", + " return current_tensor\n", + "\n", + " def standardize_vocab_pair(self, vocab_pair, s, sort: bool = True):\n", + " \n", + " if vocab_pair.shape[0]<2: # repeat for special gadgets which have full symetric sequential connection\n", + " vocab_pair = vocab_pair.repeat(2, 1)\n", + " \n", + " vocab_pair = F.pad(vocab_pair, [0, 0, 0, s-vocab_pair.shape[0]]) # pad to full systemsize to have nice plotting\n", + "\n", + " if sort:\n", + " vocab_pair = self.standardize_overlap_pairs(vocab_pair)\n", + " \n", + " return vocab_pair.contiguous()\n", + " \n", + " #---------------------------------------\n", + " # Decoding\n", + "\n", + " def unpack_col(self, col):\n", + " # col is [s, 1]\n", + " s, _ = col.shape\n", + " \n", + " current_tokens = col.unique()\n", + " current_tokens = current_tokens[current_tokens!=0]\n", + " k = tuple(current_tokens.tolist())\n", + " \n", + " if k in self.token_lookup:\n", + " \n", + " # Unpack one col\n", + " unpacked = torch.zeros((s, 2), dtype=col.dtype, device=col.device)\n", + " new_config = self.token_lookup[k]\n", + " \n", + " for current_token, new_config_i in zip(current_tokens, new_config):\n", + " ind = (col==current_token)\n", + " unpacked = torch.where(ind, new_config_i, unpacked)\n", + " \n", + " # Repeat unpacking for both new cols\n", + " col1, col2 = unpacked.chunk(2, dim=-1)\n", + " \n", + " unpacked1 = self.unpack_col(col1)\n", + " unpacked2 = self.unpack_col(col2)\n", + " \n", + " unpacked = torch.cat([unpacked1, unpacked2], dim=-1)\n", + " return unpacked\n", + " \n", + " return col \n", + "\n", + 
" def decode(self, tensor, cut_padding: bool = False):\n", + " # split into cols we unpack, then recursively\n", + " # tensor ... [s, t]\n", + " assert tensor.dim() == 2\n", + " \n", + " cols = tensor.chunk(tensor.shape[-1], dim=-1)\n", + " unpacked = torch.cat([self.unpack_col(col) for col in cols], dim=-1)\n", + "\n", + " if cut_padding:\n", + " # Cut from right as this was added padding in packing\n", + " unpacked = unpacked[..., :tensor.shape[-1]]\n", + " \n", + " return unpacked" + ] + }, + { + "cell_type": "markdown", + "id": "035f5a62-65c7-45fa-8853-1bb242594319", + "metadata": {}, + "source": [ + "### Plot learned tokens" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "ce69148d-37df-47e3-9c63-5c9149d938ac", + "metadata": {}, + "outputs": [], + "source": [ + "#| export\n", + "def sort_config(vocab_config):\n", + " \"\"\"Sort a vocab_config for nicer plotting.\"\"\"\n", + " \n", + " t = vocab_config.shape[-1]\n", + " all_inds = set(range(t))\n", + "\n", + " # Sort one ind, gather the rest\n", + " for i in reversed(range(t)):\n", + " gather_inds = all_inds - {i}\n", + "\n", + " sorted_gates = [None] * t\n", + " \n", + " sorted_gate_i, sorted_gate_i_indices = torch.sort(vocab_config[..., i], dim=-1, descending=True, stable=True)\n", + " sorted_gates[i] = sorted_gate_i\n", + " \n", + " for gather_ind in gather_inds:\n", + " sorted_gates[gather_ind] = torch.gather(vocab_config[..., gather_ind], dim=-1, index=sorted_gate_i_indices)\n", + "\n", + " vocab_config = torch.stack(sorted_gates, dim=-1)\n", + " \n", + " return vocab_config" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "2aca2f79-49b4-43d4-8c8e-6d5759f77db0", + "metadata": {}, + "outputs": [], + "source": [ + "#| export\n", + "def get_topk_depth_unpacked(gate_pair_tokenizer, s, use_raw=False, standardize=True):\n", + " \"\"\"Useful for plotting.\"\"\"\n", + " \n", + " # Sort into depths\n", + " unpacked_vocab_configs_depths = {}\n", + " 
unpacked_vocab_configs_cnts_depths = {}\n", + "\n", + " if use_raw:\n", + " iters = zip(gate_pair_tokenizer.token_lookup_raw.items(), gate_pair_tokenizer.token_cnts.values())\n", + " else:\n", + " iters = zip(gate_pair_tokenizer.token_lookup.items(), gate_pair_tokenizer.token_cnts.values())\n", + " \n", + " for (vocab_tokens, vocab_config), vocab_config_cnts in tqdm(iters, total=len(gate_pair_tokenizer.token_cnts)):\n", + " \n", + " tok = vocab_tokens[0]\n", + " token_depth = gate_pair_tokenizer.token_depth[tok]\n", + "\n", + " if standardize:\n", + " vocab_config = gate_pair_tokenizer.standardize_vocab_pair(vocab_config, s, sort=False)\n", + " unpacked_vocab_config = gate_pair_tokenizer.decode(vocab_config)\n", + " \n", + " #--------\n", + " unpacked_vocab_config = sort_config(unpacked_vocab_config)\n", + " \n", + " if token_depth not in unpacked_vocab_configs_depths:\n", + " unpacked_vocab_configs_depths[token_depth] = []\n", + " unpacked_vocab_configs_cnts_depths[token_depth] = []\n", + " \n", + " unpacked_vocab_configs_depths[token_depth].append(unpacked_vocab_config)\n", + " unpacked_vocab_configs_cnts_depths[token_depth].append(vocab_config_cnts)\n", + "\n", + " return unpacked_vocab_configs_depths, unpacked_vocab_configs_cnts_depths" + ] + }, + { + "cell_type": "markdown", + "id": "f41f26a8-ac40-4e91-8c0e-1ef07a0fd4f4", + "metadata": {}, + "source": [ + "# Export -" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "a0474216-8e0c-4ba7-9a37-571ac7d8e82c", + "metadata": {}, + "outputs": [], + "source": [ + "#| hide\n", + "import nbdev; nbdev.nbdev_export()" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "python3", + "language": "python", + "name": "python3" + }, + "widgets": { + "application/vnd.jupyter.widget-state+json": { + "state": {}, + "version_major": 2, + "version_minor": 0 + } + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/src/printing.ipynb b/src/printing.ipynb deleted file mode 100644 index 
e198246..0000000 --- a/src/printing.ipynb +++ /dev/null @@ -1,361 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "id": "a8980c24-d62e-462b-ba89-3195cfdcc374", - "metadata": {}, - "source": [ - "# Printing functions" - ] - }, - { - "cell_type": "markdown", - "id": "38c9eb7f-7527-4ea0-98fa-217f83cabc21", - "metadata": {}, - "source": [ - "Advanced printing functions." - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "a8832bdd-f61c-44e1-8619-a9cb352ba768", - "metadata": {}, - "outputs": [], - "source": [ - "#| default_exp printing" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "06272f6f-b4e3-4504-a90a-feebbf6ad821", - "metadata": {}, - "outputs": [], - "source": [ - "#| export\n", - "from genQC.imports import *\n", - "from ipywidgets import widgets\n", - "if IN_NOTEBOOK: from IPython.display import Markdown" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "ab31972e-76d3-4cb1-8696-c3af2bf7ca33", - "metadata": {}, - "outputs": [], - "source": [ - "#| export\n", - "def display_colums(display_list, num_col=3):\n", - " \n", - " outputs = [widgets.Output() for i in range(num_col)]\n", - "\n", - " for i in range(len(display_list)//num_col+1):\n", - " \n", - " ds = display_list[i*num_col:(i+1)*num_col]\n", - " \n", - " for d,output in zip(ds,outputs):\n", - " with output:\n", - " display(d)\n", - " \n", - " columns = widgets.HBox(outputs)\n", - " display(columns) " - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "2c3f248d-fa4f-4e28-b8e9-9d044b9156d1", - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "[tensor([[0.6266, 0.3672, 0.3988],\n", - " [0.6061, 0.2596, 0.4855],\n", - " [0.3177, 0.8166, 0.3100]]),\n", - " tensor([[0.6266, 0.3672, 0.3988],\n", - " [0.6061, 0.2596, 0.4855],\n", - " [0.3177, 0.8166, 0.3100]]),\n", - " tensor([[0.6266, 0.3672, 0.3988],\n", - " [0.6061, 0.2596, 0.4855],\n", - " [0.3177, 0.8166, 0.3100]])]" - ] - }, - "execution_count": 
null, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "a = [torch.rand((3, 3))]*3\n", - "a" - ] - }, - { - "cell_type": "markdown", - "id": "fff65d71-865f-43a2-bf7b-c344acc2e35a", - "metadata": {}, - "source": [ - "v.s." - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "cdcaddd8-0cb5-4893-8b47-8542d2e00755", - "metadata": {}, - "outputs": [ - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "c5c95e9d1bb9442799097d37b615cf62", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(Output(), Output(), Output()))" - ] - }, - "metadata": {}, - "output_type": "display_data" - } - ], - "source": [ - "display_colums(a) # works only in notebook " - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "158dd169-fbd8-472d-8af9-2c1e01790fbf", - "metadata": {}, - "outputs": [], - "source": [ - "#| export\n", - "def ndarray_to_latex(arr):\n", - " \"\"\"Returns a LaTeX `{pmatrix*}[r]` as a string\"\"\"\n", - " if len(arr.shape) > 2: raise ValueError('pmatrix can at most display two dimensions')\n", - " lines = str(arr).replace('[', '').replace(']', '').splitlines()\n", - " rv = [r'\\begin{pmatrix*}[r]']\n", - " rv += [' ' + ' & '.join(l.split()) + r'\\\\' for l in lines]\n", - " rv += [r'\\end{pmatrix*}']\n", - " return '\\n'.join(rv)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "64b05f66-0840-45a1-b18f-b9a34bf48985", - "metadata": {}, - "outputs": [], - "source": [ - "#| export\n", - "def tensor_to_latex(tensor):\n", - " \"\"\"Returns a `LaTeX {pmatrix*}[r]` as a string \"\"\"\n", - " if len(tensor.shape) > 2: raise ValueError('pmatrix can at most display two dimensions')\n", - " lines = str(tensor.numpy()).replace('[', '').replace(']', '').splitlines()\n", - " rv = [r'\\begin{pmatrix*}[r]']\n", - " rv += [' ' + ' & '.join(l.split()) + r'\\\\' for l in lines]\n", - " rv += [r'\\end{pmatrix*}']\n", - " return '\\n'.join(rv)" - ] - 
}, - { - "cell_type": "code", - "execution_count": null, - "id": "7c28d89d-7d89-460a-b4a2-b5671d6a7b55", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "\\begin{pmatrix*}[r]\n", - " 2 & 2 & 2\\\\\n", - " 2 & 2 & 2\\\\\n", - " 2 & 2 & 2\\\\\n", - "\\end{pmatrix*}\n" - ] - } - ], - "source": [ - "tex = tensor_to_latex(torch.full((3,3), 2))\n", - "print(tex)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "36032308-bd0e-4409-9db0-9d89fc258e5a", - "metadata": {}, - "outputs": [], - "source": [ - "#| export\n", - "def print_markdown(text, print_raw=False):\n", - " if IN_NOTEBOOK and not print_raw: display(Markdown(text))\n", - " else: print(text)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "ca556ece-a053-401c-8b8b-470c6faf9b1d", - "metadata": {}, - "outputs": [ - { - "data": { - "text/markdown": [ - "$\\sqrt{2}$, *Test text*" - ], - "text/plain": [ - "" - ] - }, - "metadata": {}, - "output_type": "display_data" - } - ], - "source": [ - "print_markdown(\"$\\sqrt{2}$, *Test text*\")" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "ddceb791-8961-4f94-8fa9-7b08d5f8b4c9", - "metadata": {}, - "outputs": [ - { - "data": { - "text/markdown": [ - "$\\begin{pmatrix*}[r]\n", - " 2 & 2 & 2\\\\\n", - " 2 & 2 & 2\\\\\n", - " 2 & 2 & 2\\\\\n", - "\\end{pmatrix*}$" - ], - "text/plain": [ - "" - ] - }, - "metadata": {}, - "output_type": "display_data" - } - ], - "source": [ - "print_markdown(f\"${tex}$\")" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "664afc55-6c90-4550-8f70-0f9f4cf3c280", - "metadata": {}, - "outputs": [], - "source": [ - "#| export\n", - "def print_table(col_headings: list, data: np.array, row_headings=None, print_raw=False): \n", - " assert len(col_headings) == data.shape[1]\n", - " if row_headings is not None: assert len(row_headings) == data.shape[0]\n", - " \n", - " #--------------------------------\n", - " head = 
\"\"\n", - " if row_headings is not None: head = \"| \" + head \n", - " \n", - " for col_heading in col_headings: head += f\"|{col_heading}\"\n", - " head += \"|\\n\"\n", - " \n", - " #--------------------------------\n", - " seperator = \"\"\n", - " if row_headings is not None: seperator = \"|--\"\n", - " \n", - " for col_heading in col_headings: seperator += \"|--\"\n", - " seperator += \"|\\n\"\n", - " \n", - " #-------------------------------- \n", - " body = \"\"\n", - " for i, row in enumerate(data):\n", - " body_row = \"\"\n", - " for x in row:\n", - " body_row += f\"|{x:.2f}\"\n", - " \n", - " if row_headings is not None: \n", - " body_row = f\"|{row_headings[i]}\" + body_row\n", - " \n", - " body += body_row + \"|\\n\"\n", - " \n", - " #-------------------------------- \n", - " table = head + seperator + body\n", - " \n", - " print_markdown(table, print_raw)" - ] - }, - { - "cell_type": "markdown", - "id": "6f382584-1404-49d7-b672-40474207f7b1", - "metadata": {}, - "source": [ - "Print a table:" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "9659bea3-8353-4568-a5f2-927daedef3fe", - "metadata": {}, - "outputs": [ - { - "data": { - "text/markdown": [ - "| |head1|head2|head3|\n", - "|--|--|--|--|\n", - "|sample|0.32|0.23|0.27|\n", - "|dataset|0.74|0.41|0.00|\n" - ], - "text/plain": [ - "" - ] - }, - "metadata": {}, - "output_type": "display_data" - } - ], - "source": [ - "h = [\"head1\", \"head2\", \"head3\"]\n", - "r = [\"sample\", \"dataset\"]\n", - "d = np.random.rand(2, 3)\n", - "print_table(h, d, r)" - ] - }, - { - "cell_type": "markdown", - "id": "f41f26a8-ac40-4e91-8c0e-1ef07a0fd4f4", - "metadata": {}, - "source": [ - "# Export -" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "a0474216-8e0c-4ba7-9a37-571ac7d8e82c", - "metadata": {}, - "outputs": [], - "source": [ - "#| hide\n", - "import nbdev; nbdev.nbdev_export()" - ] - } - ], - "metadata": { - "kernelspec": { - "display_name": "python3", - 
"language": "python", - "name": "python3" - } - }, - "nbformat": 4, - "nbformat_minor": 5 -} diff --git a/src/scheduler/scheduler.ipynb b/src/scheduler/scheduler.ipynb index 9f1e196..183647a 100644 --- a/src/scheduler/scheduler.ipynb +++ b/src/scheduler/scheduler.ipynb @@ -5,15 +5,9 @@ "id": "a8980c24-d62e-462b-ba89-3195cfdcc374", "metadata": {}, "source": [ - "# Scheduler" - ] - }, - { - "cell_type": "markdown", - "id": "bf6ceb41-a3d4-43b1-bae8-f255902f8a9b", - "metadata": {}, - "source": [ - "Base class for schedulers." + "# Scheduler\n", + "\n", + "> Base class for schedulers." ] }, { @@ -35,8 +29,7 @@ "source": [ "#| export\n", "from genQC.imports import *\n", - "from genQC.util import virtual\n", - "from genQC.config_loader import *" + "from genQC.utils.config_loader import *" ] }, { @@ -47,22 +40,45 @@ "outputs": [], "source": [ "#| export\n", - "class Scheduler:\n", + "class Scheduler(abc.ABC):\n", " \"\"\"Base class for all diffusion schedulers\"\"\"\n", + " \n", " def __init__(self): \n", " pass\n", " \n", - " @virtual\n", + " @abc.abstractmethod\n", " def set_timesteps(self): pass\n", " \n", - " @virtual\n", + " @abc.abstractmethod\n", " def step(self): pass\n", "\n", - " @virtual\n", + " @abc.abstractmethod\n", " def add_noise(self): pass\n", "\n", + " @abc.abstractmethod\n", + " def to(self): pass\n", " #---------------------------------------\n", "\n", + " @staticmethod\n", + " def from_config(config, device: torch.device, save_path: str=None, verbose=True, silent=False): \n", + " \"\"\"Use this if we have a loaded config.\"\"\"\n", + "\n", + " _config = copy.deepcopy(config)\n", + " \n", + " if exists(device): _config[\"device\"] = device # for loading sub-models\n", + " else: device = _config.pop(\"device\", \"cpu\")\n", + "\n", + " if \"beta_schedule\" in _config[\"params\"]:\n", + " beta_schedule = _config[\"params\"][\"beta_schedule\"]\n", + " \n", + " if \"path:\" in beta_schedule:\n", + " _config[\"params\"][\"beta_schedule\"] = \"path:\" + 
save_path + beta_schedule[len(\"path:\"):]\n", + " \n", + " scheduler = instantiate_from_config(_config)\n", + " return scheduler\n", + "\n", + " #---------------------------------------\n", + " \n", " def get_config(self, without_metadata=False):\n", " if not without_metadata: \n", " config = {}\n", @@ -75,16 +91,23 @@ " return config\n", " \n", " @property\n", - " @virtual\n", + " @abc.abstractmethod\n", " def params_config(self): return None\n", "\n", " #---------------------------------------\n", "\n", " def unsqueeze_vector_to_shape(self, vec, shape):\n", - " vec = vec.flatten()\n", - " while len(vec.shape) < len(shape):\n", - " vec = vec.unsqueeze(-1)\n", - " return vec " + " return vec.view(*vec.shape, *([1] * (len(shape)-len(vec.shape))) )\n", + " \n", + " #---------------------------------------\n", + "\n", + " @classmethod\n", + " def from_scheduler(cls, scheduler, **kwargs): \n", + " _kwargs = scheduler.params_config \n", + " _kwargs = _kwargs | kwargs\n", + " \n", + " new_scheduler = cls(**_kwargs)\n", + " return new_scheduler" ] }, { @@ -112,6 +135,13 @@ "display_name": "python3", "language": "python", "name": "python3" + }, + "widgets": { + "application/vnd.jupyter.widget-state+json": { + "state": {}, + "version_major": 2, + "version_minor": 0 + } } }, "nbformat": 4, diff --git a/src/scheduler/scheduler_ddim.ipynb b/src/scheduler/scheduler_ddim.ipynb index 4df669c..7f7e75a 100644 --- a/src/scheduler/scheduler_ddim.ipynb +++ b/src/scheduler/scheduler_ddim.ipynb @@ -5,15 +5,9 @@ "id": "a8980c24-d62e-462b-ba89-3195cfdcc374", "metadata": {}, "source": [ - "# DDIMScheduler " - ] - }, - { - "cell_type": "markdown", - "id": "45487a0f-d029-461b-853e-9b59a9bc6d3e", - "metadata": {}, - "source": [ - "Denoising diffusion implicit models [(DDIM)](https://arxiv.org/abs/2010.02502)." + "# DDIM Scheduler \n", + "\n", + "> Denoising diffusion implicit models [(DDIM)](https://arxiv.org/abs/2010.02502)." 
] }, { @@ -41,7 +35,7 @@ { "cell_type": "code", "execution_count": null, - "id": "de0475b1-e0aa-42eb-9f56-12c131ef868f", + "id": "c6d3b1e2-18d3-4cab-968c-853cb3469b3a", "metadata": {}, "outputs": [], "source": [ @@ -49,10 +43,20 @@ "@dataclass\n", "class DDIMSchedulerOutput:\n", " prev_sample: torch.FloatTensor\n", - " pred_original_sample: Optional[torch.FloatTensor] = None\n", - " \n", + " pred_original_sample: Optional[torch.FloatTensor] = None" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "de0475b1-e0aa-42eb-9f56-12c131ef868f", + "metadata": {}, + "outputs": [], + "source": [ + "#| export \n", "class DDIMScheduler(DDPMScheduler):\n", " \"\"\"A `Scheduler` implementing [(DDIM)](https://arxiv.org/abs/2010.02502).\"\"\"\n", + " \n", " def __init__(self, \n", " device: Union[str, torch.device], \n", " num_train_timesteps: int = 1000,\n", @@ -60,9 +64,11 @@ " beta_end: float = 0.02,\n", " beta_schedule: str = \"linear\",\n", " input_perturbation = 0.1,\n", + " prediction_type = \"epsilon\",\n", + " enable_zero_terminal_snr = True,\n", " eta: float = 0\n", " ): \n", - " super().__init__(device, num_train_timesteps, beta_start, beta_end, beta_schedule, input_perturbation)\n", + " super().__init__(device, num_train_timesteps, beta_start, beta_end, beta_schedule, input_perturbation, prediction_type, enable_zero_terminal_snr)\n", " self.eta = eta\n", " \n", " #for stable diff ddim\n", @@ -81,8 +87,8 @@ " #------------------------------------\n", " # Inference functions\n", " \n", - " def set_timesteps(self, num_inference_steps: int): \n", - " super().set_timesteps(num_inference_steps) \n", + " def set_timesteps(self, num_inference_steps: Optional[int] = None, timesteps: Optional[torch.Tensor] = None): \n", + " super().set_timesteps(num_inference_steps=num_inference_steps, timesteps=timesteps) \n", " self.timesteps += self.steps_offset\n", " \n", " clamp_style = None # one of: None, \"static\", \"dynamic\"\n", @@ -90,11 +96,13 @@ " def step(self, 
\n", " model_output: torch.FloatTensor,\n", " timesteps: Union[int, torch.IntTensor],\n", - " sample: torch.FloatTensor\n", + " sample: torch.FloatTensor,\n", + " uncond_model_output: torch.FloatTensor = None # for CFG++\n", " ) -> DDIMSchedulerOutput:\n", " \"\"\"Denoising step\"\"\"\n", " \n", " prev_timesteps = timesteps - self.num_train_timesteps // self.num_inference_steps\n", + " # prev_timestep = torch.clamp(prev_timestep, 0, self.num_train_timesteps - 1) # NEW\n", " \n", " #get variance sched\n", " alphas_cumprod = self.unsqueeze_vector_to_shape(self.alphas_cumprod[timesteps], sample.shape)\n", @@ -108,10 +116,29 @@ " #calc vars\n", " betas_cumprod = 1.0 - alphas_cumprod\n", " betas_cumprod_tm1 = 1.0 - alphas_cumprod_tm1\n", + "\n", + " uncond_model_output = default(uncond_model_output, model_output)\n", + "\n", + " if self.prediction_type == \"epsilon\":\n", + " #estimate predicted sample\n", + " x0 = (sample - betas_cumprod.sqrt() * model_output) / alphas_cumprod.sqrt()\n", + " eps = uncond_model_output #model_output\n", " \n", - " #estimate predicted sample\n", - " x0 = (sample - betas_cumprod.sqrt() * model_output) / alphas_cumprod.sqrt()\n", + " elif self.prediction_type == \"v-type\": \n", + " a = alphas_cumprod.sqrt()\n", + " b = betas_cumprod.sqrt()\n", + " \n", + " x0 = a * sample - b * model_output \n", + " # eps = a * model_output + b * sample\n", + " eps = a * uncond_model_output + b * sample\n", + "\n", + " elif self.prediction_type == \"x0\":\n", + " x0 = model_output\n", + " eps = (sample - alphas_cumprod.sqrt() * uncond_model_output) / betas_cumprod.sqrt()\n", " \n", + " else:\n", + " raise NotImplementedError(f\"{self.prediction_type} is not implemented for {self.__class__}.step()\")\n", + " \n", " if self.clamp_style == None: pass\n", " elif self.clamp_style == \"static\": x0 = torch.clamp(x0, -1, 1)\n", " elif self.clamp_style == \"dynamic\": raise NotImplementedError(\"clamp_style == 'dynamic'\")\n", @@ -123,16 +150,14 @@ " std = 
self.eta * variance.sqrt()\n", " \n", " #direction to xt\n", - " dir_xt = ( betas_cumprod_tm1 - std.square() ).sqrt() * model_output\n", + " dir_xt = (betas_cumprod_tm1 - std.square()).sqrt() * eps\n", " \n", " #sample noise\n", - " noise = torch.randn(model_output.shape, device=self.device)\n", - " \n", + " noise = torch.randn_like(x0)\n", + " \n", " #estimate the prev sample \n", " xtm1 = alphas_cumprod_tm1.sqrt() * x0 + dir_xt + std * noise\n", " \n", - " # print(f\"{timesteps=} {prev_timesteps=} ;;; x0: {x0.mean()}+-{x0.std()} xtm1: {xtm1.mean()}+-{xtm1.std()}\")\n", - " \n", " return DDIMSchedulerOutput(prev_sample=xtm1, pred_original_sample=x0) " ] }, @@ -161,6 +186,13 @@ "display_name": "python3", "language": "python", "name": "python3" + }, + "widgets": { + "application/vnd.jupyter.widget-state+json": { + "state": {}, + "version_major": 2, + "version_minor": 0 + } } }, "nbformat": 4, diff --git a/src/scheduler/scheduler_ddpm.ipynb b/src/scheduler/scheduler_ddpm.ipynb index d5acc5c..3c30f63 100644 --- a/src/scheduler/scheduler_ddpm.ipynb +++ b/src/scheduler/scheduler_ddpm.ipynb @@ -5,15 +5,9 @@ "id": "a8980c24-d62e-462b-ba89-3195cfdcc374", "metadata": {}, "source": [ - "# DDPMScheduler" - ] - }, - { - "cell_type": "markdown", - "id": "648ff99d-293b-4e7d-b425-6af2856e2935", - "metadata": {}, - "source": [ - "Denoising diffusion probabilistic models [(DDPM)](https://arxiv.org/abs/2006.11239): reverse beta is fixed and diagonal." + "# DDPM Scheduler\n", + "\n", + "> Denoising diffusion probabilistic models [(DDPM)](https://arxiv.org/abs/2006.11239): reverse beta is fixed and diagonal." 
] }, { @@ -35,22 +29,32 @@ "source": [ "#| export\n", "from genQC.imports import *\n", - "from genQC.scheduler.scheduler import Scheduler" + "from genQC.scheduler.scheduler import Scheduler\n", + "from genQC.utils.config_loader import load_tensor" ] }, { "cell_type": "code", "execution_count": null, - "id": "de0475b1-e0aa-42eb-9f56-12c131ef868f", + "id": "ce85e7b8-1c87-48fe-b75d-bd0dbc107494", "metadata": {}, "outputs": [], "source": [ - "#| export\n", + "# | export\n", "@dataclass\n", "class DDPMSchedulerOutput:\n", " prev_sample: torch.FloatTensor\n", - " pred_original_sample: Optional[torch.FloatTensor] = None\n", - " \n", + " pred_original_sample: Optional[torch.FloatTensor] = None" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "de0475b1-e0aa-42eb-9f56-12c131ef868f", + "metadata": {}, + "outputs": [], + "source": [ + "# | export\n", "class DDPMScheduler(Scheduler):\n", " \"\"\"A `Scheduler` implementing [(DDPM)](https://arxiv.org/abs/2006.11239)\"\"\"\n", " \n", @@ -62,55 +66,82 @@ " beta_start: float = 0.0001,\n", " beta_end: float = 0.02,\n", " beta_schedule: str = \"linear\",\n", - " input_perturbation = 0.1\n", + " input_perturbation = 0.1,\n", + " prediction_type = \"epsilon\",\n", + " enable_zero_terminal_snr = True\n", " ): \n", " super().__init__()\n", " self.device = device \n", " self.num_train_timesteps = torch.tensor(num_train_timesteps)\n", " self.num_inference_steps = torch.tensor(num_train_timesteps)\n", " \n", - " self.beta_start = beta_start\n", - " self.beta_end = beta_end\n", + " self.beta_start = beta_start\n", + " self.beta_end = beta_end\n", " self.beta_schedule = beta_schedule\n", - "\n", + " \n", " self.timesteps = torch.from_numpy(np.arange(0, num_train_timesteps)[::-1].copy().astype(np.int64)) #careful is defined reversed for easy denoising looping\n", - " \n", + "\n", + " self.input_perturbation = input_perturbation # Input Perturbation Reduces Exposure Bias in Diffusion Models, 
https://arxiv.org/pdf/2301.11706.pdf \n", + " self.prediction_type = prediction_type # one of \"epsilon\", \"v-type\", \"x0\", \"mu\"\n", + " \n", + " if self.prediction_type not in [\"epsilon\", \"v-type\", \"x0\"]: \n", + " raise NotImplementedError(f\"{self.prediction_type} does is not implemented for {self.__class__}\")\n", + "\n", + " #-----------\n", + " \n", " if beta_schedule == \"linear\":\n", " self.betas = torch.linspace(beta_start, beta_end, num_train_timesteps, dtype=torch.float32)\n", " \n", - " elif beta_schedule == \"linear_sqrt\":\n", + " elif beta_schedule == \"linear_sqrt\": #LDM\n", " self.betas = torch.linspace(beta_start ** 0.5, beta_end ** 0.5, num_train_timesteps, dtype=torch.float32) ** 2\n", " \n", - " elif beta_schedule == \"cos_alpha\": #cosine-based-variance \n", - " f = lambda t: np.cos((t/self.num_train_timesteps + 0.008)*np.pi/2.016)**2 \n", + " elif beta_schedule == \"cos_alpha\": #cosine-based-variance \n", + " #print(\"[INFO]: using cos_alpha beta-schedule, ignoring beta_start and beta_end!\")\n", + " f = lambda t: np.cos((t/self.num_train_timesteps + 0.008)*np.pi/2.016)**2 # is alpha_bar\n", + " _betas = [] \n", + " for i in range(self.num_train_timesteps): \n", + " b = 1.0-(f(i+1.0)/f(i))\n", + " if not enable_zero_terminal_snr: # v-type allows zero terminal SNR\n", + " b = min(b, 0.999) # clipping disables zero terminal SNR\n", + " _betas.append(b) \n", + " self.betas = torch.tensor(_betas, dtype=torch.float32) \n", + "\n", + " elif beta_schedule == \"cos_alpha4\": #cosine-based-variance \n", + " #print(\"[INFO]: using cos_alpha4 beta-schedule, ignoring beta_start and beta_end!\")\n", + " f = lambda t: np.cos((t/self.num_train_timesteps + 0.008)*np.pi/2.016)**4 # is alpha_bar\n", " _betas = [] \n", - " for i in range(self.num_train_timesteps): \n", - " _betas.append(min(1.0-(f(i+1.0)/f(i)),0.999)) \n", - " self.betas = torch.tensor(_betas, dtype=torch.float32) \n", + " for i in range(self.num_train_timesteps): \n", + " b = 
1.0-(f(i+1.0)/f(i))\n", + " if not enable_zero_terminal_snr: # v-type allows zero terminal SNR\n", + " b = min(b, 0.999) # clipping disables zero terminal SNR\n", + " _betas.append(b) \n", + " self.betas = torch.tensor(_betas, dtype=torch.float32) \n", + " \n", + " elif \"path:\" in beta_schedule:\n", + " _save_path = beta_schedule[len(\"path:\"):]\n", + " self.betas = load_tensor(save_path=_save_path, device=device)[\"0\"]\n", + " \n", + " print(f\"[INFO]: Loaded beta_schedule ({beta_schedule}).\")\n", + " \n", " else:\n", - " raise NotImplementedError(f\"{beta_schedule} does is not implemented for {self.__class__}\")\n", - " \n", + " raise NotImplementedError(f\"{beta_schedule} is not implemented for {self.__class__}\")\n", + " \n", + " #-----------\n", + " \n", + " if (self.prediction_type in [\"v-type\", \"x0\"]) and enable_zero_terminal_snr and (beta_schedule not in [\"cos_alpha\", \"laplace\"]): # v-type allows zero terminal SNR\n", + " self.betas = self.enforce_zero_terminal_snr(self.betas)\n", + "\n", + " #-----------\n", + "\n", " self.sigmas = torch.sqrt(self.betas)\n", - " \n", " self.alphas = 1.0 - self.betas\n", - " self.alphas_cumprod = torch.cumprod(self.alphas, dim=0) \n", - " \n", - " ## Is this used anymore?\n", - " self.sqrt_alphas = torch.sqrt(self.alphas) \n", - " self.sqrt_alphas_cumprod = torch.sqrt(self.alphas_cumprod)\n", - " self.sqrt_one_minus_alphas_cumprod = torch.sqrt(1 - self.alphas_cumprod)\n", - " ##\n", - " \n", + " self.alphas_cumprod = torch.cumprod(self.alphas, dim=0) #only do cumprod witj alphas, as betas will go within precision to zero\n", + "\n", " #----------\n", - " # Input Perturbation Reduces Exposure Bias in Diffusion Models\n", - " # https://arxiv.org/pdf/2301.11706.pdf\n", - " \n", - " self.input_perturbation = input_perturbation\n", " \n", - " #----------\n", + " self.to(self.device)\n", + "\n", " \n", - " self.to_device(self.device)\n", - " \n", " @property\n", " def params_config(self): \n", " params_config = {} 
\n", @@ -119,32 +150,68 @@ " params_config[\"beta_start\"] = self.beta_start\n", " params_config[\"beta_end\"] = self.beta_end\n", " params_config[\"beta_schedule\"] = self.beta_schedule\n", - " params_config[\"input_perturbation\"] = self.input_perturbation \n", + " params_config[\"input_perturbation\"] = self.input_perturbation \n", + " params_config[\"prediction_type\"] = self.prediction_type \n", " return params_config\n", " \n", - " def to_device(self, device: Union[str, torch.device], non_blocking=False):\n", - " #non_blocking = self.non_blocking\n", - " \n", + " def to(self, device: Union[str, torch.device], non_blocking=False):\n", " self.device = device\n", " self.alphas_cumprod = self.alphas_cumprod.to(device, non_blocking=non_blocking)\n", - " self.sqrt_alphas_cumprod = self.sqrt_alphas_cumprod.to(device, non_blocking=non_blocking)\n", - " self.sqrt_one_minus_alphas_cumprod = self.sqrt_one_minus_alphas_cumprod.to(device, non_blocking=non_blocking)\n", " self.sigmas = self.sigmas.to(device, non_blocking=non_blocking)\n", - " self.sqrt_alphas = self.sqrt_alphas.to(device, non_blocking=non_blocking)\n", " self.betas = self.betas.to(device, non_blocking=non_blocking)\n", " self.num_train_timesteps = self.num_train_timesteps.to(device, non_blocking=non_blocking)\n", " self.num_inference_steps = self.num_inference_steps.to(device, non_blocking=non_blocking)\n", - " \n", + " return self\n", + " \n", + " #------------------------------------\n", + " \n", + " @property\n", + " def SNR(self):\n", + " alphas_bar = self.alphas_cumprod\n", + " betas_bar = 1.0 - alphas_bar\n", + " return alphas_bar / betas_bar\n", + " \n", " #------------------------------------\n", " # Inference functions\n", + "\n", + " def enforce_zero_terminal_snr(self, betas):\n", + " # Algorithm 1 in https://arxiv.org/pdf/2305.08891.pdf\n", + " \n", + " # Convert betas to alphas_bar_sqrt\n", + " alphas = 1 - betas\n", + " alphas_bar = alphas.cumprod(0)\n", + " alphas_bar_sqrt = 
alphas_bar.sqrt()\n", + " \n", + " # Store old values.\n", + " alphas_bar_sqrt_0 = alphas_bar_sqrt[0].clone()\n", + " alphas_bar_sqrt_T = alphas_bar_sqrt[-1].clone()\n", + " # Shift so last timestep is zero.\n", + " alphas_bar_sqrt -= alphas_bar_sqrt_T\n", + " # Scale so first timestep is back to old value.\n", + " alphas_bar_sqrt *= alphas_bar_sqrt_0 / (alphas_bar_sqrt_0 - alphas_bar_sqrt_T)\n", + " \n", + " # Convert alphas_bar_sqrt to betas\n", + " alphas_bar = alphas_bar_sqrt ** 2\n", + " alphas = alphas_bar[1:] / alphas_bar[:-1]\n", + " alphas = torch.cat([alphas_bar[0:1], alphas])\n", + " betas = 1 - alphas\n", + " return betas\n", + " \n", + " def set_timesteps(self, num_inference_steps: Optional[int] = None, timesteps: Optional[torch.Tensor] = None): \n", + " if exists(num_inference_steps):\n", + " if num_inference_steps >= self.num_train_timesteps: raise ValueError(\"num_inference_steps >= self.num_train_timesteps\") \n", + " self.num_inference_steps = torch.tensor(num_inference_steps)\n", + " step_ratio = self.num_train_timesteps // self.num_inference_steps\n", + " timesteps = (np.arange(0, num_inference_steps) * step_ratio.item()).round()[::-1].copy().astype(np.int64)\n", + " self.timesteps = torch.from_numpy(timesteps)\n", + "\n", + " elif exists(timesteps):\n", + " self.num_inference_steps = torch.tensor(timesteps.shape[0])\n", + " self.timesteps = timesteps.clone()\n", + "\n", + " else:\n", + " raise RuntimeError(\"provide `num_inference_steps` or `timesteps`\")\n", " \n", - " def set_timesteps(self, num_inference_steps: int): \n", - " if num_inference_steps >= self.num_train_timesteps: raise ValueError(\"num_inference_steps >= self.num_train_timesteps\") \n", - " self.num_inference_steps = torch.tensor(num_inference_steps)\n", - " step_ratio = self.num_train_timesteps // self.num_inference_steps\n", - " timesteps = (np.arange(0, num_inference_steps) * step_ratio.item()).round()[::-1].copy().astype(np.int64)\n", - " self.timesteps = 
torch.from_numpy(timesteps)\n", - " \n", " def step(self,\n", " model_output: torch.FloatTensor,\n", " timesteps: Union[int, torch.IntTensor],\n", @@ -152,46 +219,44 @@ " ) -> DDPMSchedulerOutput:\n", " \"\"\"Denoising step\"\"\"\n", " \n", - " sqrt_alphas_cumprod = self.unsqueeze_vector_to_shape(self.sqrt_alphas_cumprod[timesteps], sample.shape)\n", + " sqrt_alphas_cumprod = self.unsqueeze_vector_to_shape(self.sqrt_alphas_cumprod[timesteps], sample.shape)\n", " sqrt_one_minus_alphas_cumprod = self.unsqueeze_vector_to_shape(self.sqrt_one_minus_alphas_cumprod[timesteps], sample.shape)\n", - " \n", + " \n", " sigmas = self.unsqueeze_vector_to_shape(self.sigmas[timesteps], sample.shape)\n", " sqrt_alphas = self.unsqueeze_vector_to_shape(self.sqrt_alphas[timesteps], sample.shape)\n", " betas = self.unsqueeze_vector_to_shape(self.betas[timesteps], sample.shape)\n", - " \n", - " non_zero_t = (timesteps!=0).float()\n", - " \n", - " #estimate the final img\n", - " x0 = (sample - sqrt_one_minus_alphas_cumprod * model_output) / sqrt_alphas_cumprod #DDPM eq.15\n", - " \n", + " \n", + " if self.prediction_type == \"epsilon\":\n", + " #estimate the final img\n", + " x0 = (sample - sqrt_one_minus_alphas_cumprod * model_output) / sqrt_alphas_cumprod #DDPM eq.15\n", + "\n", + " xt_coeff = betas / sqrt_one_minus_alphas_cumprod \n", + " mu_t = (sample - xt_coeff * model_output) / sqrt_alphas\n", + " \n", + " elif self.prediction_type == \"v-type\":\n", + " x0 = sqrt_alphas_cumprod * sample - sqrt_one_minus_alphas_cumprod * model_output \n", + "\n", + " prev_timesteps = timesteps - self.num_train_timesteps // self.num_inference_steps\n", + " alphas_cumprod_tm1 = self.unsqueeze_vector_to_shape(self.alphas_cumprod[prev_timesteps], sample.shape)\n", + " \n", + " non_zero_tm1 = (prev_timesteps>=0.0).float() \n", + " non_zero_tm1 = self.unsqueeze_vector_to_shape(non_zero_tm1, sample.shape)\n", + " alphas_cumprod_tm1 = alphas_cumprod_tm1 * non_zero_tm1 + (1.0 - non_zero_tm1) * 
self.alphas_cumprod[0]\n", + " \n", + " mu_t = (betas * alphas_cumprod_tm1.sqrt() * x0 + sqrt_alphas * (1.0-alphas_cumprod_tm1) * sample) / sqrt_one_minus_alphas_cumprod\n", + " \n", + " else:\n", + " raise NotImplementedError(f\"{self.prediction_type} is not implemented for {self.__class__}.step()\")\n", + " \n", " #less noisy latent \n", + " non_zero_t = (timesteps>0).float()\n", " noise = torch.randn(sample.shape, device=self.device) \n", " noise = noise * non_zero_t.reshape(-1, 1, 1, 1)\n", - " \n", - " xt_coeff = betas / sqrt_one_minus_alphas_cumprod \n", - " xt = (sample - xt_coeff * model_output) / sqrt_alphas + sigmas * noise \n", + " \n", + " xt = mu_t + sigmas * noise \n", " \n", " return DDPMSchedulerOutput(prev_sample=xt, pred_original_sample=x0)\n", "\n", - " def add_noise_LEdit(self, original_samples: torch.FloatTensor):\n", - " # LEDITS: Real Image Editing with DDPM Inversion and Semantic Guidance; Note: SEGA (Semantic Guidance) is just multiple negative promts with a pixel based weight\n", - " # https://arxiv.org/pdf/2307.00522.pdf\n", - "\n", - " noisy_latents = []\n", - " noises = []\n", - "\n", - " noisy_latent_t = original_samples\n", - " \n", - " for t in self.timesteps[::-1]: #start from no noise and diffuse in non analytic fashion\n", - " noise_t = torch.randn_like(noise) \n", - " alpha_t = self.unsqueeze_vector_to_shape(self.alphas[t], original_samples.shape) \n", - " noisy_latent_t = torch.sqrt(alpha_t) * noisy_latent_t + torch.sqrt(1.0 - alpha_t) * noise_t \n", - "\n", - " noises.append(noise_t)\n", - " noisy_latents.append(noisy_latent_t)\n", - " \n", - " return noisy_latents[::-1], noises[::-1] # invert to self.timestep definition\n", - " \n", " #------------------------------------\n", " # Training functions\n", " \n", @@ -199,15 +264,24 @@ " original_samples: torch.FloatTensor,\n", " noise: torch.FloatTensor,\n", " timesteps: torch.IntTensor,\n", + " train: bool=False\n", " ) -> torch.FloatTensor:\n", " \n", - " alphas_cumprod = 
self.unsqueeze_vector_to_shape(self.alphas_cumprod[timesteps], original_samples.shape) \n", - " noisy_latents = torch.sqrt(alphas_cumprod) * original_samples + torch.sqrt(1.0 - alphas_cumprod) * noise \n", + " alphas_cumprod = self.unsqueeze_vector_to_shape(self.alphas_cumprod[timesteps], original_samples.shape) \n", + " noisy_latents = torch.sqrt(alphas_cumprod) * original_samples + torch.sqrt(1.0 - alphas_cumprod) * noise # F^2\n", " \n", - " if self.input_perturbation is not None:\n", + " if exists(self.input_perturbation) and train:\n", " noisy_latents = noisy_latents + torch.sqrt(1.0 - alphas_cumprod) * torch.randn_like(noise) * self.input_perturbation\n", " \n", - " return noisy_latents " + " return noisy_latents" + ] + }, + { + "cell_type": "markdown", + "id": "76682376-91d6-498b-8a31-2701066ab3a3", + "metadata": {}, + "source": [ + "# Export -" ] }, { @@ -227,6 +301,13 @@ "display_name": "python3", "language": "python", "name": "python3" + }, + "widgets": { + "application/vnd.jupyter.widget-state+json": { + "state": {}, + "version_major": 2, + "version_minor": 0 + } } }, "nbformat": 4, diff --git a/src/scheduler/scheduler_dpm.ipynb b/src/scheduler/scheduler_dpm.ipynb new file mode 100644 index 0000000..eb4a727 --- /dev/null +++ b/src/scheduler/scheduler_dpm.ipynb @@ -0,0 +1,202 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "id": "a8980c24-d62e-462b-ba89-3195cfdcc374", + "metadata": {}, + "source": [ + "# DPM Scheduler \n", + "\n", + "> DPM-Solver++: Fast Solver for Guided Sampling of Diffusion Probabilistic Models [(DPM-Solver)](https://arxiv.org/abs/2206.00927) [(DPM-Solver++)](https://arxiv.org/abs/2211.01095)." 
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "a8832bdd-f61c-44e1-8619-a9cb352ba768", + "metadata": {}, + "outputs": [], + "source": [ + "#| default_exp scheduler.scheduler_dpm" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "06272f6f-b4e3-4504-a90a-feebbf6ad821", + "metadata": {}, + "outputs": [], + "source": [ + "#| export\n", + "from genQC.imports import *\n", + "from genQC.scheduler.scheduler_ddpm import DDPMScheduler" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "c6d3b1e2-18d3-4cab-968c-853cb3469b3a", + "metadata": {}, + "outputs": [], + "source": [ + "#| export\n", + "@dataclass\n", + "class DPMSchedulerOutput:\n", + " prev_sample: torch.FloatTensor\n", + " pred_original_sample: Optional[torch.FloatTensor] = None" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "de0475b1-e0aa-42eb-9f56-12c131ef868f", + "metadata": {}, + "outputs": [], + "source": [ + "#| export \n", + "class DPMScheduler(DDPMScheduler):\n", + " \"\"\"A `Scheduler` implementing [(DPM-Solver++)](https://arxiv.org/abs/2211.01095).\"\"\"\n", + " \n", + " def __init__(self, \n", + " device: Union[str, torch.device], \n", + " num_train_timesteps: int = 1000,\n", + " beta_start: float = 0.0001,\n", + " beta_end: float = 0.02,\n", + " beta_schedule: str = \"linear\",\n", + " input_perturbation = 0.1,\n", + " prediction_type = \"epsilon\",\n", + " enable_zero_terminal_snr = True,\n", + " solver_order: int = 2,\n", + " **kwargs\n", + " ) -> None: \n", + " super().__init__(device, num_train_timesteps, beta_start, beta_end, beta_schedule, input_perturbation, prediction_type, enable_zero_terminal_snr)\n", + "\n", + " self.solver_order = solver_order\n", + " if self.solver_order != 2:\n", + " raise NotImplementedError(f\"{self.solver_order=} is not implemented for {self.__class__}\")\n", + " \n", + " @property\n", + " def params_config(self): \n", + " params_config = super().params_config\n", + " 
params_config[\"solver_order\"] = self.solver_order\n", + " return params_config\n", + " \n", + " #------------------------------------\n", + " # Inference functions\n", + " \n", + " def step(self, \n", + " model_output: torch.FloatTensor,\n", + " timesteps: torch.IntTensor,\n", + " sample: torch.FloatTensor,\n", + " uncond_model_output: torch.FloatTensor = None, # for CFG++\n", + " ) -> DPMSchedulerOutput:\n", + " \"\"\"\n", + " Denoising step of DPM-Solver++(2M) (Lu et al., 2022b), \n", + " implemeted as CFG++ variant (CFG++, https://arxiv.org/pdf/2406.08070)\n", + " \"\"\"\n", + "\n", + " uncond_model_output = default(uncond_model_output, model_output)\n", + " \n", + " assert timesteps.numel() == 1\n", + "\n", + " # note: here we enforce the sampling to be strictly defined by self.timesteps\n", + " is_warmup_step = (self.timesteps[0] == timesteps)\n", + " # is_last_step = (self.timesteps[-1] == timesteps)\n", + " \n", + " alphas_cumprod = self.unsqueeze_vector_to_shape(self.alphas_cumprod[timesteps], sample.shape)\n", + " betas_cumprod = 1.0 - alphas_cumprod\n", + "\n", + " prev_timesteps = timesteps - self.num_train_timesteps // self.num_inference_steps\n", + " prev_timesteps = prev_timesteps.clamp(0, self.num_train_timesteps-1)\n", + "\n", + " alphas_cumprod_tm1 = self.unsqueeze_vector_to_shape(self.alphas_cumprod[prev_timesteps], sample.shape)\n", + " betas_cumprod_tm1 = 1.0 - alphas_cumprod_tm1\n", + "\n", + " # ---------\n", + " if self.prediction_type == \"v-type\":\n", + " a = alphas_cumprod.sqrt()\n", + " b = betas_cumprod.sqrt()\n", + " \n", + " x0 = a * sample - b * model_output\n", + " x0_uncond = a * sample - b * uncond_model_output \n", + "\n", + " elif self.prediction_type == \"x0\":\n", + " x0 = model_output\n", + " x0_uncond = uncond_model_output\n", + " \n", + " else:\n", + " raise NotImplementedError(f\"{self.prediction_type} is not implemented for {self.__class__}.step()\")\n", + "\n", + " # ---------\n", + " solver_order = self.solver_order 
\n", + " # mod here for adyptive adjust, if needed\n", + " if solver_order == 2:\n", + " pass\n", + " \n", + " else:\n", + " raise NotImplementedError(f\"{solver_order} is not implemented for {self.__class__}\")\n", + "\n", + " # ---------\n", + "\n", + " lambda_t = 0.5 * torch.log(alphas_cumprod / betas_cumprod)\n", + " lambda_tm1 = 0.5 * torch.log(alphas_cumprod_tm1 / betas_cumprod_tm1)\n", + "\n", + " h_tm1 = lambda_tm1 - lambda_t\n", + " \n", + " if is_warmup_step:\n", + " x_dir = alphas_cumprod_tm1.sqrt() * (x0 - torch.exp(-h_tm1) * x0_uncond)\n", + " \n", + " else:\n", + " r_tm1 = self.last_h_tm1 / h_tm1\n", + " \n", + " sqrt_alphas_cumprod_tm1 = alphas_cumprod_tm1.sqrt()\n", + " exp_mhtm1 = torch.exp(-h_tm1)\n", + " \n", + " x_dir = sqrt_alphas_cumprod_tm1 * x0 - sqrt_alphas_cumprod_tm1 * exp_mhtm1 * x0_uncond + sqrt_alphas_cumprod_tm1 * (0.5/r_tm1) * (x0_uncond-self.last_x0_uncond) * (1.0-exp_mhtm1)\n", + " \n", + " xtm1 = (betas_cumprod_tm1/betas_cumprod).sqrt() * sample + x_dir\n", + "\n", + " # is needed for multistesp integration of DPM \n", + " self.last_x0_uncond = x0_uncond \n", + " self.last_h_tm1 = h_tm1\n", + " \n", + " return DPMSchedulerOutput(prev_sample=xtm1, pred_original_sample=x0) " + ] + }, + { + "cell_type": "markdown", + "id": "f41f26a8-ac40-4e91-8c0e-1ef07a0fd4f4", + "metadata": {}, + "source": [ + "# Export -" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "a0474216-8e0c-4ba7-9a37-571ac7d8e82c", + "metadata": {}, + "outputs": [], + "source": [ + "#| hide\n", + "import nbdev; nbdev.nbdev_export()" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "python3", + "language": "python", + "name": "python3" + }, + "widgets": { + "application/vnd.jupyter.widget-state+json": { + "state": {}, + "version_major": 2, + "version_minor": 0 + } + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/src/styles.css b/src/styles.css deleted file mode 100644 index 66ccc49..0000000 --- a/src/styles.css +++ 
/dev/null @@ -1,37 +0,0 @@ -.cell { - margin-bottom: 1rem; -} - -.cell > .sourceCode { - margin-bottom: 0; -} - -.cell-output > pre { - margin-bottom: 0; -} - -.cell-output > pre, .cell-output > .sourceCode > pre, .cell-output-stdout > pre { - margin-left: 0.8rem; - margin-top: 0; - background: none; - border-left: 2px solid lightsalmon; - border-top-left-radius: 0; - border-top-right-radius: 0; -} - -.cell-output > .sourceCode { - border: none; -} - -.cell-output > .sourceCode { - background: none; - margin-top: 0; -} - -div.description { - padding-left: 2px; - padding-top: 5px; - font-style: italic; - font-size: 135%; - opacity: 70%; -} diff --git a/src/util.ipynb b/src/util.ipynb deleted file mode 100644 index 93f6d7c..0000000 --- a/src/util.ipynb +++ /dev/null @@ -1,383 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "id": "ca8283df-e353-4e58-a726-8e75ae1007e9", - "metadata": {}, - "source": [ - "# Util" - ] - }, - { - "cell_type": "markdown", - "id": "d5b61c95-b4b1-4c6a-b345-4a91f1feb32c", - "metadata": {}, - "source": [ - "Miscellaneous util code." 
- ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "1ad65227-3d39-4f9c-832a-2d4555aeb399", - "metadata": {}, - "outputs": [], - "source": [ - "#| default_exp util" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "5b3271aa-6684-40ab-8c73-b8f9210ab423", - "metadata": {}, - "outputs": [], - "source": [ - "#| export\n", - "from genQC.imports import *\n", - "import gc, sys, traceback" - ] - }, - { - "cell_type": "markdown", - "id": "97eb37f5-8fb3-4299-b790-c7b478ed78b1", - "metadata": {}, - "source": [ - "## Memory utils" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "a5f68941-9951-42de-a3d6-7c0399281ac3", - "metadata": {}, - "outputs": [], - "source": [ - "#|export\n", - "class MemoryCleaner():\n", - " \"\"\"CLass with static methods to clean (gpu) memory.\"\"\"\n", - " \n", - " @staticmethod\n", - " def _clean_ipython_hist():\n", - " # Code in this function mainly copied from IPython source\n", - " if not 'get_ipython' in globals(): return\n", - " ip = get_ipython()\n", - " user_ns = ip.user_ns\n", - " ip.displayhook.flush()\n", - " pc = ip.displayhook.prompt_count + 1\n", - " for n in range(1, pc): user_ns.pop('_i'+repr(n),None)\n", - " user_ns.update(dict(_i='',_ii='',_iii=''))\n", - " hm = ip.history_manager\n", - " hm.input_hist_parsed[:] = [''] * pc\n", - " hm.input_hist_raw[:] = [''] * pc\n", - " hm._i = hm._ii = hm._iii = hm._i00 = ''\n", - "\n", - " @staticmethod\n", - " def _clean_tb():\n", - " if hasattr(sys, 'last_traceback'):\n", - " traceback.clear_frames(sys.last_traceback)\n", - " delattr(sys, 'last_traceback')\n", - " if hasattr(sys, 'last_type'): delattr(sys, 'last_type')\n", - " if hasattr(sys, 'last_value'): delattr(sys, 'last_value')\n", - " \n", - " @staticmethod\n", - " def purge_mem():\n", - " \"\"\"Clear all. 
Purge all memory.\"\"\"\n", - " MemoryCleaner._clean_tb()\n", - " MemoryCleaner._clean_ipython_hist()\n", - " gc.collect()\n", - " torch.cuda.empty_cache() " - ] - }, - { - "cell_type": "markdown", - "id": "f910a356-b685-44fb-98ea-4c03ab6969fb", - "metadata": {}, - "source": [ - "## Python utils" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "ca1fa9d4-aea9-4c72-a72b-bc187db868d6", - "metadata": {}, - "outputs": [], - "source": [ - "#| export\n", - "def virtual(f: callable) -> callable:\n", - " '''Decorator to enfore subclass method implementations and raises error at method calls.'''\n", - " @functools.wraps(f)\n", - " def inner(self, *args, **kwargs): raise NotImplementedError(f\"Virtual method {f.__name__} needs to be implemented by subclass {self.__class__.__name__}.\") \n", - " return inner" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "59dc4c68-4a11-4b0e-9486-3faf2d1acbbc", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "A p1 1\n", - "Exception that would be raised: Virtual method p2 needs to be implemented by subclass B.\n" - ] - } - ], - "source": [ - "class A():\n", - " def p1(self, x): print(\"A p1\", x)\n", - " \n", - " @virtual\n", - " def p2(self, x): pass\n", - " \n", - "class B(A):\n", - " def p3(self, x): print(\"B p2\", x)\n", - " \n", - "b = B()\n", - "b.p1(1)\n", - "try:\n", - " b.p2(1)\n", - "except BaseException as e:\n", - " print(\"Exception that would be raised: \", e)" - ] - }, - { - "cell_type": "markdown", - "id": "2086a56a-b142-41c2-8b67-716d6afbc574", - "metadata": {}, - "source": [ - "## Torch utils" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "1f4e15ca-cbab-4ee0-bf9a-fabe0a687c9b", - "metadata": {}, - "outputs": [], - "source": [ - "#|export\n", - "class DataLoaders:\n", - " \"\"\"Combines train and valid `DataLoader`.\"\"\"\n", - " def __init__(self, *dls: list[DataLoader]): self.train, self.valid = dls[:2]" 
- ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "61e428d1-3482-412c-8286-011a979a7fae", - "metadata": {}, - "outputs": [], - "source": [ - "#| export\n", - "def infer_torch_device(): \n", - " if torch.cuda.is_available(): \n", - " torch.backends.cudnn.benchmark = True\n", - "\n", - " dev_cap = torch.cuda.get_device_capability()\n", - " \n", - " if dev_cap[0] >= 8: # AMPERE\n", - " print(f\"[INFO]: Cuda device has a capability of {dev_cap[0]}.{dev_cap[1]} (>= 8), allowing tf32 matmul.\")\n", - " torch.backends.cuda.matmul.allow_tf32 = True\n", - " \n", - " return torch.device(\"cuda\")\n", - " return torch.device(\"cpu\")" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "84ba05ff-9d30-4c43-9b15-187c219b385d", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "[INFO]: Cuda device has a capability of 8.6 (>= 8), allowing tf32 matmul.\n" - ] - }, - { - "data": { - "text/plain": [ - "device(type='cuda')" - ] - }, - "execution_count": null, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "infer_torch_device()" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "e05ec728-70a3-402f-80cc-c58a18394710", - "metadata": {}, - "outputs": [], - "source": [ - "#| export\n", - "def number_of_paramters(model: nn.Module): return sum([p.flatten().shape[0] for p in model.parameters()])" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "e7122570-f02a-4fc2-b9db-8d65a72ceb94", - "metadata": {}, - "outputs": [], - "source": [ - "#| export\n", - "def normalize_tensor(t: torch.Tensor):\n", - " \"\"\"[0,1] to [-1,1]\"\"\"\n", - " return t * 2.0 - 1.0\n", - "\n", - "def scale_tensor(t: torch.Tensor):\n", - " \"\"\"[-1,1] to [0,1]\"\"\"\n", - " return (t / 2.0 + 0.5).clamp(0.0, 1.0)" - ] - }, - { - "cell_type": "markdown", - "id": "058de092-2f9e-4d4e-b321-c4539f618a1d", - "metadata": {}, - "source": [ - "## Plot utils" - ] - }, - { 
- "cell_type": "code", - "execution_count": null, - "id": "34d371d4-9fee-4544-81e5-4d82f6e720cd", - "metadata": {}, - "outputs": [], - "source": [ - "#| export\n", - "def savePdf(filename): plt.savefig(filename + '.pdf', bbox_inches='tight')\n", - "def savePng(filename): plt.savefig(filename + '.png', bbox_inches='tight')\n", - "def saveSvg(filename): plt.savefig(filename + '.svg', bbox_inches='tight')" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "276a1ac5-ff77-47e2-9530-faa152e2a120", - "metadata": {}, - "outputs": [], - "source": [ - "#| export\n", - "def plot_image_grid(imgs: Union[list, np.array, torch.Tensor], labels: list=None, labels_fs=\"medium\", \n", - " figsize=(16, 4), cols=8, cmap=\"Greys\", show_colorbar=False, **imshow_kwargs): \n", - " if type(imgs) is list: n = len(imgs)\n", - " elif type(imgs) is np.ndarray: n = imgs.shape[0]\n", - " elif type(imgs) is torch.Tensor: n = imgs.shape[0]\n", - " else: raise NotImplementedError(\"err type:\", type(imgs))\n", - "\n", - " if n == 0: return\n", - "\n", - " cols = min(n, cols)\n", - " rows = math.ceil(n/cols)\n", - "\n", - " fig, axs = plt.subplots(rows, cols, figsize=figsize, squeeze=False, constrained_layout=True) \n", - " for i, (r, c) in enumerate(itertools.product(range(rows), range(cols))):\n", - " plt.sca(axs[r,c])\n", - " plt.axis('off')\n", - " \n", - " if i >= n: continue\n", - "\n", - " if labels is not None: plt.title(labels[i], fontsize=labels_fs)\n", - " p = plt.imshow(imgs[i], cmap=cmap, **imshow_kwargs) #cmap ignored for RGB \n", - " if show_colorbar: plt.colorbar(p)\n", - " \n", - " plt.show()" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "46b4389e-3f3b-40ce-8360-d32fe2350a9e", - "metadata": {}, - "outputs": [ - { - "data": { - "image/png": 
"iVBORw0KGgoAAAANSUhEUgAABksAAAEpCAYAAAAzsBVMAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjguMSwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/SrBM8AAAACXBIWXMAAA9hAAAPYQGoP6dpAABosElEQVR4nO3dd3SVZbrH/SsQSgIJBAhg6L33XhNqQARBiiIKKCJ2GJXRYxcHxYKiODZAQRRpIkV67yK9hV5CCwFCh0CA7PevmfXOew75XWfMOfKe/f2sNWudNfmy95Xk2fdT7sk6IYFAIGAAAAAAAAAAAABBKsufPQAAAAAAAAAAAMCfic0SAAAAAAAAAAAQ1NgsAQAAAAAAAAAAQY3NEgAAAAAAAAAAENTYLAEAAAAAAAAAAEGNzRIAAAAAAAAAABDU2CwBAAAAAAAAAABBjc0SAAAAAAAAAAAQ1NgsAQAAAAAAAAAAQY3Nkv+D4uLibNCgQe5+2bJlFhISYufPn/9D71uyZEkbMWLEH3oNAP87WCcAeLBWAPBgrQCgsE4A8GCtwJ+NzRL8r5oyZYpVrFjRcubMadWqVbM5c+b82SMBuIPs3LnTunbtaiVLlrSQkBAuVgD8l0aNGmXNmjWzqKgoi4qKstatW9vvv//+Z48F4A4zbdo0q1u3ruXNm9dy5cplNWvWtPHjx//ZYwG4Q02cONFCQkKsc+fOf/YoAO4wY8eOtZCQkH/5T86cOf/ssfA/gM0S/K9Zs2aN9ezZ0/r162ebN2+2zp07W+fOnW3Hjh1/9mgA7hBXr1610qVL27Bhw6xw4cJ/9jgA7lDLli2znj172tKlS23t2rVWrFgxa9u2rR0/fvzPHg3AHSRfvnz26quv2tq1a23btm32yCOP2COPPGLz58//s0cDcIc5fPiwvfjii9asWbM/exQAd6jIyEhLSkr6538SExP/7JHwP4DNkiAwfvx4q1u3rkVERFjhwoXtwQcftFOnTv2nbvXq1Va9enXLmTOnNWzY8D9tYqxatcqaNWtmYWFhVqxYMXvuuefsypUr7jk+/fRTa9eunQ0ePNgqVapk77zzjtWuXds+//zzP/w9Avhj7pR1ol69evbhhx/aAw88YDly5PjD3xeAzHWnrBU//vijPfXUU1azZk2rWLGijR492tLT023x4sV/+HsE8MfdKWtFXFycdenSxSpVqmRlypSxgQMHWvXq1W3VqlV/+HsE8MfcKeuEmdmtW7esV69e9vbbb1vp0qX/0PcFIHPdSWtFSEiIFS5c+J//KVSo0B/63nBnYrMkCNy4ccPeeecd27p1q02fPt0OHz5sffv2/U/d4MGDbfjw4bZ+/XqLjo62jh072o0bN8zM7MCBA9auXTvr2rWrbdu2zSZNmmSrVq2yZ555xj3H2rVrrXXr1v/y38XHx9vatWv/0PcH4I+7U9YJAHe2O3WtuHr1qt24ccPy5cv3b78GgMxzJ64VgUDAFi9ebHv27LHmzZv/kW8PQCa4k9aJIUOGWMGCBa1fv36Z8a0ByER30lpx+fJlK1GihBUrVszuvfde27lzZ2Z8i7jTBPB/TmxsbGDgwIG3/fr69esDZha4dOlSIBAIBJYuXRows8DEiRP/2aSkpATCwsICkyZNCgQCgUC/fv0Cjz/++L+8zsqVKwNZsmQJpKamBgKBQKBEiRKBTz755Lbvmy1btsCECRP+5b/7+9//HihYsOB/59sDkAnu1HXi/+2/0wL4n/H/h7UiEAgEnnzyyUDp0qX/+e8B/O+6k9eK8+fPB3LlyhUIDQ0N5MiRIzBmzJh/4zsE8EfdqevEypUrA0WKFAmcPn06EAgEAn369Ance++9/8Z3CCAz3KlrxZo1awLjxo0LbN68ObBs2bLAPffcE4iMjAwcPXr03/xOcafiL0uCwMaNG61jx45WvHhxi4iIsNjYWDMzO3LkyL90jRo1+uf/nS9fPqtQoYLt2rXLzMy2bt1qY8eOtdy5c//
zP/Hx8Zaenm6HDh363/tmAPyPYJ0A4HEnrhXDhg2ziRMn2i+//ML/k0XgDnEnrRURERG2ZcsWW79+vQ0dOtSef/55W7Zs2R//JgH8IXfCOnHp0iV7+OGHbdSoUVagQIFM/O4AZJY7Ya34x+v37t3batasabGxsTZt2jSLjo62r7/+OpO+U9wpQv/sAfA/68qVKxYfH2/x8fH2448/WnR0tB05csTi4+MtLS3N/TqXL1+2AQMG2HPPPfefvla8eHHXaxQuXNiSk5P/5b9LTk7m/4kz8Ce7k9YJAHeuO3Gt+Oijj2zYsGG2aNEiq169+n/r3wL4n3GnrRVZsmSxsmXLmplZzZo1bdeuXfbee+9ZXFyc+zUAZK47ZZ04cOCAHT582Dp27PjP/y49Pd3MzEJDQ23Pnj1WpkwZ9zwAMtedslb8V7Jly2a1atWy/fv3/1v/HncuNkv+j9u9e7elpKTYsGHDrFixYmZmtmHDhv+y/e233/65SJw7d8727t1rlSpVMjOz2rVrW0JCwj9vNP4djRo1ssWLF9ugQYP++d8tXLjwX3Z/Afzvu5PWCQB3rjttrfjggw9s6NChNn/+fKtbt+4fei0AmedOWyv+v9LT0+369euZ+poA/nvulHWiYsWKtn379n/571577TW7dOmSffrpp/+cDcCf405ZK/4rt27dsu3bt9vdd9+daa+JOwObJf/HFS9e3LJnz24jR460J554wnbs2GHvvPPOf9kOGTLE8ufPb4UKFbJXX33VChQoYJ07dzYzs5deeskaNmxozzzzjD322GOWK1cuS0hIsIULF9rnn3/ummXgwIEWGxtrw4cPtw4dOtjEiRNtw4YN9s0332TWtwvg33AnrRNpaWmWkJDwz//7+PHjtmXLFsudOzebMMCf7E5aK95//3174403bMKECVayZEk7efKkmdk//6wewJ/nTlor3nvvPatbt66VKVPGrl+/bnPmzLHx48fbl19+mVnfLoB/w52yTuTMmdOqVq36L/9d3rx5zcz+038P4H/fnbJW/OP1GzZsaGXLlrXz58/bhx9+aImJifbYY49l1reLOwT/P0v+j4uOjraxY8falClTrHLlyjZs2DD76KOP/st22LBhNnDgQKtTp46dPHnSZs2aZdmzZzczs+rVq9vy5ctt79691qxZM6tVq5a98cYbFhMT456lcePGNmHCBPvmm2+sRo0aNnXqVJs+fToXIcCf7E5aJ06cOGG1atWyWrVqWVJSkn300UdWq1YtLkCAO8CdtFZ8+eWXlpaWZt26dbO77rrrn/+53TwA/vfcSWvFlStX7KmnnrIqVapYkyZN7Oeff7YffviB6wrgT3YnrRMA7lx30lpx7tw569+/v1WqVMnuvvtuu3jxoq1Zs8YqV66cKd8r7hwhgUAg8GcPAQAAAAAAAAAA8GfhL0sAAAAAAAAAAEBQY7MEAAAAAAAAAAAENTZLAAAAAAAAAABAUGOzBAAAAAAAAAAABDU2SwAAAAAAAAAAQFBjswQAAAAAAAAAAAQ1NksAAAAAAAAAAEBQC/WGK1askM3kyZNlU7hwYdf7bdu2TTYdOnSQzQMPPCCbuXPnumZatGiRbE6dOiWboUOHyubMmTOumY4dOyabokWLymbz5s2yiYqKcs1UrVo12SxZskQ2oaH68KxZs6ZnJJs+fbpscuTIIZvcuXPLJk+ePJ6RLCwsTDZ169aVzdatW2UTGRnpmikiIkI2sbGxGX79448/lq+RJYvep33ooYdkY2a2fPly2XiON8+xVLZsWc9IFh4eLptly5bJpn79+rLJnz+/ZySbNWuWq1MKFiwom4MHD7pe6+TJk7L5j//4D9lcuHBBNjt37nTNVLFiRdl4jt+RI0fKpl+/fq6ZZs+eLZsePXrIZu/evbLxHJdmZr169ZJNXFzcbb82atQo+e9Lly4tm8OHD8vGa8OGDbLxHP9paWmu9/v+++9lM3DgQNm0aNFCNp613czs1VdflU2
3bt1k4/mMlCxZ0jOSzZw5UzaJiYmy8VwzXr9+3TXTnDlzZOP5jKSmpsrGc51nZnbvvffKJjk5WTaHDh2STUJCgmumvn37yqZChQoZfv2TTz6Rr7F7927ZXLx4UTZmZm+88YZsrly5IpuffvpJNoUKFXLN5Fl3ypUrJxvPsXTkyBHXTBmt7/8QCARks3//ftlMmjTJM5LrWPG8luf60/M5MfOtKefPn5fNxo0bZeO9/6hXr55sPPf9Dz/8sGy+/vpr10zvvPOObNR90xdffCFfY/v27bLJli2bbMzMrl27JpvnnntONp6fUcOGDV0zqfXUzPe8xnPces67ZmZ///vfZeO5d+zYsaNszp4965pp165dslm7dq1sHn/8cdkUKVLENZPnWczx48dls3r1atm0adPGNZPnOs2zVnjWVM+9uplv/WrduvVtv/bSSy/Jf++53//1119lY+b77Kanp8vGc29dvnx510ye50Ke17p8+bJsvNewnus4z2s1btxYNnv27HHN1LRpU9l4nrN61hLvM3LPPabn5+T5eXue6ZrpZ4NmvucLnp+B57Ni5ns+Onr0aNnwlyUAAAAAAAAAACCosVkCAAAAAAAAAACCGpslAAAAAAAAAAAgqLFZAgAAAAAAAAAAghqbJQAAAAAAAAAAIKixWQIAAAAAAAAAAIIamyUAAAAAAAAAACCohXrD06dPy6ZZs2ay2b59u+v9WrRoIZsrV67IZuTIkbLZu3eva6bu3bvL5tChQ7L5/vvvZbNr1y7XTM8995xs/vKXv8jmgw8+kM2OHTtcM4WEhMjm4MGDsilfvrxszpw545rp0Ucflc2KFStkk5SUJJuzZ8+6Zrp06ZJs8ubNKxvP8fTtt996RrLly5e7uowcPnxYNi+//LJskpOT//As/1CjRg3ZlCxZUjaeY8TMt17Wq1dPNp419eOPP3bNFAgEMuX98uTJI5uyZcu6ZvKs89OnT5eN51yQkpLiGcmOHj2aKe+3b98+2cyePds106ZNm2Rz8eJF2TRu3Fg2oaG+ywLPuhMXF3fbr2XNmlX++4IFC8omOjpaNmZmM2bMkE2ZMmVkc/78edmsWbPGM5J9+OGHslm4cKFsPOcS70z9+/eXza+//iqbmJgY2cyaNcs107PPPiubmTNnysZzTbF582bXTJcvX5bNkSNHZHPixAnZlChRItNm8lzHedau9u3bu2YaM2aMbNT1Z9WqVeVr/Pbbb7LxXMObmY0YMUI2nnPcAw88IJsNGzZ4RrK33npLNg0bNpRNt27dZFOhQgXPSLZ//37Z5MqVSzaRkZGy+dvf/uaaKTExUTZdunSRzbx582TjuYYzM/vll19kU7FiRdmEh4fLplWrVq6ZPNeDVapUkc3cuXNl41njzHzX1/Hx8Rl+3XP94rnGa9u2rWzMfNcfnnuiunXryubq1auekVzPDzxz58iRQzbq9/EPa9eulc2QIUNkM3z4cNmkpqa6ZvJc7+XOnVs2njXHc242831WPOdez73VjRs3XDONHTtWNp5rNM8503PdaOa7723duvVtv9akSRP579PS0mQTFRUlGzPfOc6zLnmer3mfi3mOW8/ntnnz5rLxnG/MfNcLnnNlgwYNZON5vmbmexZ169Yt2Xjm/u6771wzec7NYWFhsvFcM3rvPzzX+p5nEJMmTZKN55memVn+/PldncJflgAAAAAAAAAAgKDGZgkAAAAAAAAAAAhqbJYAAAAAAAAAAICgxmYJAAAAAAAAAAAIamyWAAAAAAAAAACAoMZmCQAAAAAAAAAACGpslgAAAAAAAAAAgKDGZgkAAAAAAAAAAAhqod5ww4YNsqlTp45sihQp4nq/kydPyqZVq1aySU1Nlc2FCxdcMzVo0EA2CxculM2ZM2dkExER4Zpp1KhRsunevbts0tLSZHPr1i3XTN98841sBg4cKJt58+bJxvP7NTP729/+JpvIyEjZ9OnTRzbbt293zZSeni6bzZs3y6ZDhw6
ymTx5smumGzduuLqMPP/887L59ttvZRMTE+N6v9KlS8umdevWstm6datszp8/7xnJunXrJpsZM2bIxnN8P/DAA66Zrl69minNpEmTZBMdHe2aqUqVKrK5du2abDxrU+3atV0zJSYmyuahhx6Szc6dO2WTNWtW10yVK1eWzc2bN2XjOZ48nxUzsxw5cri62zl69KhsPOfm0FDfZYznvFugQAHZLFu2TDbff/+9ZyTbu3evbH777TfZxMXFyebVV1/1jGQ//fSTbMLCwmRTrFgx2dSoUcM10/r162UTFRUlmxEjRsjm2Wef9YxkuXPnls2uXbtkc/nyZdns2bPHNdOVK1dkk5SUJJvMOleYmd19992uLiOXLl2STZs2bWQzd+5c1/sNHjxYNosWLZLN6tWrZRMIBFwzea6rPNdMR44ckU2lSpVcM3l+L999951sHn/8cdl88cUXrpnKli0rG8+1wMqVK2WTPXt210x9+/aVjed8mJKSIpvY2FjPSLZixQrZTJkyRTYlSpSQTalSpVwzec518fHxGX7dcy34ySefyMa7Vpw+fVo2nmcHnvtPz3piZhYeHi4bz+dk06ZNsjl+/LhrpoYNG8pm9+7dsrnvvvtk8/bbb7tm6tevn2zKly8vG88a5/mdmJkdO3ZMNp7rr1y5csnGs+aY+dbLmTNnyuaJJ56QzVdffeWa6Y9KTk6WTUJCgmweeeQR1/t51rZatWrJxnMe7NKli2smz/MMz718Zp4rmzVrJhvPs7OJEyfKpm3btq6ZPPd8zz33nGzGjh0rG+/1sufe2NN4nsHcddddrpnuv/9+2QwbNkw2ns9U0aJFXTPt37/f1Sn8ZQkAAAAAAAAAAAhqbJYAAAAAAAAAAICgxmYJAAAAAAAAAAAIamyWAAAAAAAAAACAoMZmCQAAAAAAAAAACGpslgAAAAAAAAAAgKDGZgkAAAAAAAAAAAhqbJYAAAAAAAAAAICgxmYJAAAAAAAAAAAIaqHe8MUXX5TN+PHjZfPwww+73u+VV16RTVpammy2bNkimwULFnhGst69e8umRo0asklNTZVNy5YtXTNdvHhRNgsXLpTN2rVrZRMIBFwzPf3007L5/vvvZVOtWjXZlClTxjXTsWPHZBMTEyObI0eOyGbJkiWumSpVqiSb2rVry6ZChQqymTZtmmumJk2auLqMDB48WDb9+vWTTalSpVzvN3bsWNmcP39eNqtWrZLNtWvXHBOZTZ48WTb79+93vZYSFxfn6rJlyyabzZs3y+b++++XzQsvvOCaqX379rLxrAOTJk2STZUqVVwz5cqVSzYrV66UTYECBWTjXb/Kly8vmw8++EA2nu9t1qxZrpnatm3r6m6nRIkSssmdO7dsPOcuM9/v//jx45ky0+zZs10zVa1aVTbFihWTTd++fWXjOb+ZmdWrV082xYsXl81nn30mG88xYGZ29epV2XjWic6dO8tm3759npHsueeek81TTz0lm+7du8vGe908Y8YM2ezcuVM2hw4dks3hw4c9I1n16tVdXUYiIyNl4znneNesNWvWyOa3336Tzd133y2bGzduuGY6efJkpjQeSUlJrq5FixayqVu3rmw8946nT592zeRZ55YvXy6bAwcOyKZcuXKumebPny+bN998UzaPPfaYbD7++GPXTJ57x1q1asnGc22dnJzsGcl1rlOuX78uG8/9p+d1zMwaNGggm6NHj8pm3LhxshkwYIBrJs9nwLN+b9u2TTYbN270jGTNmjWTjec8f/bsWdk8+uijrpl27Nghm++++042sbGxsvFe63iuvzt16iSbH374QTbvvvuuaybPPf2TTz4pm2+//VY28fHxrpmKFCni6m4nf/78svGc37z38REREbLx3H97rik8x7WZWf369WWze/du2XieCZw6dco1k+ce1XNv7Tl/z5071zVTo0aNZONZv0uWLCmb9PR0z0iu+1nPOT5r1qyZNlN4eLhsPOfLL7/8UjYhISGumTz
Xnx78ZQkAAAAAAAAAAAhqbJYAAAAAAAAAAICgxmYJAAAAAAAAAAAIamyWAAAAAAAAAACAoMZmCQAAAAAAAAAACGpslgAAAAAAAAAAgKDGZgkAAAAAAAAAAAhqbJYAAAAAAAAAAICgFhIIBAKe8KOPPpLNpUuXMqUxM2vQoIFsjhw5IpusWbPKpkmTJq6ZPDw/zsmTJ8vmvvvuc73f3r17ZVO+fHnZ/Pzzz7IpWLCga6Y+ffrI5vr167JZunSpbGrXru2aafv27bIpXbq0bObMmSMbz7FrZjZv3jzZ3H///bLZuHGjbHLnzu2a6a677pJNx44dXa+VEc/MnuPWa8+ePbLZvHmzbFasWOF6v/Hjx8umTZs2sqlatapsPJ8lM7OyZcvKJk+ePLLp1q2bbAYNGuQZyQoUKCCb1NRU2QwcOFA248aNc830xhtvyGb58uWyuXnzpmzOnz/vGcny5s0rm0KFCsnGc/yGh4d7RnL9PFevXn3brx09elT+e886sWXLFtmYmaWkpMjGc9794osvZOP5rJmZzZw5UzZ9+/aVzYkTJ2QzYMAAz0i2b98+2Rw+fFg2oaGhsvn+++89I9nDDz8sm5YtW8pm9uzZsvFcM5qZHTx4UDaetdKzTkyYMME1k+f8PXjwYNlMmzZNNgsWLHDN5DnuYmNjM/z6smXL5Gv8+uuvsunQoYNszHzHwHvvvSebwoULy+att97yjGTDhw+XzenTpzPlddauXeuaKTo6WjajRo2SzZkzZ2TzyiuvuGaaMWOGbCIiImTTq1cv2Xi+fzOzOnXqyMZzX7hr1y7ZFCtWzDXTsWPHZDN37lzZ9OzZUzae6xczsx07dsjm8ccfz/DrnvPlU089JZv169fLxstzvvSs3V26dHG9n+feuVKlSrLx3BN5nwt4zpcHDhyQTY8ePWQTGRnpmsmzNpUqVUo2jRs3lo3nmDMze/vtt2XjuWbwPIu6ceOGa6Z8+fLJpmbNmrLx3Fvlz5/fM5Jrfc5opnfeeUf++6ioKNl4rpfNzJKTk2XTu3dv2eTKlUs2ns+RmdnOnTtl47knWrVqlWy8z40mTpwom65du8pm/vz5svn6669dMz355JOy8ZybPfcoV69edc3kOVcOGzZMNgsXLpSN51m7mVlSUpJsOnXqJJvp06fLZuvWrZ6RrHLlyrLx7G/wlyUAAAAAAAAAACCosVkCAAAAAAAAAACCGpslAAAAAAAAAAAgqLFZAgAAAAAAAAAAghqbJQAAAAAAAAAAIKixWQIAAAAAAAAAAIIamyUAAAAAAAAAACCosVkCAAAAAAAAAACCWqg3XLdunWzi4+NlExIS4nq//PnzyyZnzpyyyZo1q2y+/fZb10xNmjSRTSAQkE3FihVlc/78ec9IdvjwYdmUK1dONrdu3ZJNZGSkZyR7+eWXZfPMM8/I5uLFi7JJSEhwzVSpUiXZvPfee7IpVaqUbK5cueKaqVChQrL54osvZFO/fn3ZFChQwDWT5+ekvPHGG7KpXr26bG7evOl6v/nz58smLCxMNj169JBNeHi4a6bRo0fLpl+/fpnyfnny5HHNlCtXLtlER0fLZsaMGbJ5+umnXTOdO3dONtOmTZPN999/L5v+/fu7ZnrzzTdlkzdvXtl4fneJiYmekVxrhecc/eyzz8pm69atrpnq1Knj6m5n7969spk5c6ZsvJ/JcePGycZzjJw5c0Y29913n2umBx98UDY7d+6Ujec65/fff3fNdOrUKdlcvnxZNunp6bJp3769aybPa33zzTeySU5Olo1nXTYzCw3Vl89Xr16VjednmS1bNtdMjRo1ks2GDRtkExERIRvv+dl7zZ8Rzznn+eefl82JEydc7zdhwgTZvPXWW7JZv369bKZMmeIZyeLi4mSTlJQkmyNHjsimWLFinpFcvxfPsdupUyfZeD4nZmb33nuvbAoXLiybZcuWyaZo0aKekWz48OGy8RwHu3btko3n2DUz++2332RToUIF2Uy
cOFE2nmcDZmYdOnRwdRnp0qWLbFJTU2XjOUa8PGugZz1NSUlxvV/z5s1lc/36ddl4rk8951Qzs3bt2smmbNmysklLS5PNBx984Jqpc+fOsilRooRsjh07Jptq1ap5RrKlS5fKxnOML168WDbNmjVzzeT5vXjOhydPnpSN5zmbmdnp06dd3e3ExsbKZvv27bJp3Lix6/08z2COHz+eKY3neZ6Z2bVr12Tjuc71XFN6rwOHDBkiG8/zhYYNG8pm8ODBrpk8a45nnShdurRsNm7c6JrJc62/YsUK2axcuVI2ffv29Yxkc+bMkc2oUaNk43nekTt3btdMRYoUcXUKf1kCAAAAAAAAAACCGpslAAAAAAAAAAAgqLFZAgAAAAAAAAAAghqbJQAAAAAAAAAAIKixWQIAAAAAAAAAAIIamyUAAAAAAAAAACCosVkCAAAAAAAAAACCGpslAAAAAAAAAAAgqIV6w+LFi8tmz549sqlYsaLr/UaNGiWbq1evymbixImyWbRokWumMWPGyOaxxx6TTZ48eWTTsmVL10yHDx+WzbJly2STJYveN8uXL59jIrMSJUrI5tChQ7KZP3++bF5++WXXTGlpabLp1auXbOrUqSObb775xjVT27ZtZbNt2zbZZM+eXTbXr193zeT5mZctWzbDr9+6dUu+xsmTJ2WzatUq2ZiZNWrUSDY5c+aUjecYSU9Pd820c+dO2dStW1c2ixcvls0zzzzjmmns2LGyqVWrlmxq1qwpm127djkmMtu4caNs+vTpI5s1a9Zkynt532/Hjh2y2bdvn2xy587tmunKlSuyiY6Olo3nM+U9R+fPn9/V3c6cOXNkExMTI5sCBQq43u/tt9+Wjefz1rNnT9ns3r3bNdP69etlc/r0adlcunRJNo8//rhrpu3bt8smNFRfOubKlUs2FSpUcM2UlJSUKTM1bdpUNt9//71rphdeeEE2U6dOlY3nWq9UqVKumfLmzSsbz7G5ZcsW2XjOu973a968eYZfDwQC8jU8n4Gnn35aNmZmPXr0kI3n3sKzvpcrV841k+f781xT7N+/XzYdOnRwzeS55wsLC5ONZ+4XX3zRNVNkZKRsBg0aJJsff/xRNg8++KBnJKtUqZJsihQpIpuoqCjZrFixwjWT5/x97tw52Xjud6ZPn+4Zyb777jvZvPbaaxl+PSUlRb6G517Wex20ZMkS2Vy+fFk2W7dulU1iYqJrJs/9XkJCgmyee+452XivYY8fPy6bAwcOyObee+/NtJmKFSsmG8+9hef5mLpv/gfP8fvBBx/IZvDgwbKpV6+ea6a33npLNp6f5UsvvSSbH374wTOS67PQpk2b235txowZ8t8fPXpUNrVr15aNmVl4eLhsatSoIZtx48bJxnM8mpm1a9dONh9++KFs4uPjZXP27FnXTF26dJFNixYtZONZcz2/EzPfPWZqaqpssmXLJhvPfbyZ2fLly2XjeW7geeZz7Ngx10ye4+7UqVOyuXDhgmy81/Lnz593dQp/WQIAAAAAAAAAAIIamyUAAAAAAAAAACCosVkCAAAAAAAAAACCGpslAAAAAAAAAAAgqLFZAgAAAAAAAAAAghqbJQAAAAAAAAAAIKixWQIAAAAAAAAAAIIamyUAAAAAAAAAACCosVkCAAAAAAAAAACCWqg3fPnll2WzcuVK2bz++uuu94uNjZXNPffcI5v169fLZu/eva6ZXnvtNdkUK1ZMNt99951s/va3v7lmeuCBB2Rz/fp12eTNm1c2a9as8YxkycnJsmnVqpVsPD+DhIQE10zR0dGyOXXqlGzmz58vm7CwMNdMjz/+uGy++eYb2Rw4cEA23t9dvnz5XF1Gjh07Jpu+fftm2iyeY/fatWuySUtLk83WrVs9I1mOHDlkc+vWLdk0atRINjt37nTN9OCDD8qmdOnSsvEctzVr1vSMZA899JBsRowYIZtq1arJpkiRIp6RrGjRorJ5++23ZdO5c2fZxMfHe0aykyd
PyiY1NVU22bNnl8327dtdM3mOlYw8/PDDsjl69KhsvOeAqKgo2fTq1Us2EydOlE16erprJs+5wvP7aNmypWw837+ZWaVKlWTTvHlz2ZQrV042L7zwgmsmz8/zwoULsmnYsKFsnnrqKddMx48fl81dd90lmxkzZsjGcz1s5juePNeDPXv2lM2hQ4dcM2WGDh06yMZzHvz0009d7+dZU86cOSMbz3FbpUoV10yzZ8+WTSAQkM2SJUtkc/bsWddMBQsWlE3x4sVls2HDBtkMHjzYNdPFixdlc/PmTdl07dpVNp7rADOz/v37yyZ37tyy8dyjlC9f3jXT6dOnZXPu3DnZPPHEE7LxnC/MzAoXLuzqMuK5//Bce3uOIzPfZ8Vz7m3fvr1sEhMTXTNduXJFNvXr15fNtm3bZHPp0iXXTN27d5eNZ931rPONGzd2zeR5XrNixQrZeNZmr+rVq8umVq1asvGcV1588UXXTAMHDpTNyJEjZTN27FjZhIeHe0b6w2uFZ731rJGeZwtmZmXLlpXNqFGjZOO5rvZ8/s1813Ce6+EsWfT/7j4mJsY1k+c+1nOMeO6bvNf6v/32m2w854GFCxfKxnuu9Bx3NWrUkI3nesFzTjXzXZ94js3Lly/LpkCBAq6ZFi9eLJt7771XNvxlCQAAAAAAAAAACGpslgAAAAAAAAAAgKDGZgkAAAAAAAAAAAhqbJYAAAAAAAAAAICgxmYJAAAAAAAAAAAIamyWAAAAAAAAAACAoMZmCQAAAAAAAAAACGpslgAAAAAAAAAAgKAW6g2/+OIL2ezcuVM2LVq0cL3fo48+KpsFCxbIpnz58rJ59tlnXTNlzZpVNqtWrZJNzpw5ZeP9ORUsWFA2q1evls3AgQNlc+HCBddMS5culU1CQoJsunfvLpt3333XNVPr1q1lU7NmTdnMnz9fNiEhIZ6RXDN5fi9ffvmlbPbu3euaqUuXLq4uIx999JFsXn75ZdnExcW53m///v2yqV27tmyyZ88uG88xaWZ29OhR2ezZs0c2OXLkkE2VKlVcM5UrV042ycnJssmTJ0+mvI6Z79jt1auXbDw/S8+6a2b2yy+/yOaee+6RzaFDh2TjOaeYmX3++eeyefPNN2XjOS5v3LjhmqlUqVKu7nY854mYmBjZbNu2zfV+HTt2lM0jjzwim4oVK8rm7Nmzrpny5s0rG8/1ief9jh8/7hnJIiIiZOO57vjuu+9c7+dRoEAB2YwaNUo2ly9fls26detcM3nWgBo1asjGcy4sVqyYa6aNGzfKxnMe8KyV1atXd8104MABV5eRyMhI2XiuTydPnvyHZ/mH9u3by2bZsmWy8fzOzMyyZcsmm7lz58omNFTf9jVp0sQ1U7t27WSzfv162cyaNUs2Tz75pGumgwcPyub8+fOy2b59u2xGjBjhmMhsyZIlsvFcV7Vs2VI2nmPOzKxZs2ayOXbsmGyioqJk4732atu2ravLiGcd8KxdnmceZmavvvqqbFJTU2XjOUbq1avnmunSpUuy8ZzDK1SoIBvvdUXJkiVlc+rUKdncfffdmfJeZmYffPCBbDzX6IMGDZLNmTNnPCO5Piue48lzXD7xxBOumTzXcpUrV5aNZ01t06aNa6ZatWq5utvxXOd6nglVq1bN9X7Dhg2TTb9+/WQzZswY2XjXCc+5wvM7a9q0qWyuX7/uGcmio6Nlc+TIEdl47h2952/PPbHnePI8q/I+E/B0p0+flo1nzR08eLBrpvHjx8vG83vxXMN57p3N/PcpCn9ZAgAAAAAAAAAAghqbJQAAAAAAAAAAIKixWQIAAAAAAAAAAIIamyUAAAAAAAAAACCosVkCAAAAAAAAAACCGpslAAAAAAAAAAAgqLFZAgAAAAAAAAAAghqbJQAAAAAAAAAAIKiFBAKBgCfcvXu3bBYtWiSb/Pnze97O1q9fL5v4+HjZJCUlySZbtmyumY4dO5YpM504cUI2YWFhrpmWLFkimwMHDsjm/vv
vl822bdtcM+3fv1821atXl03NmjVlkytXLs9ItmPHDtmUKVMmU17n9OnTrpkqVKggm0uXLsnm1KlTsklJSXHNVKRIEdn89a9/zfDrw4cPl6/Rv39/2WzYsEE2ZmYvvPCCbH744QfXayl9+/Z1dT169JCNZ63YvHmzbDyfbzOzOnXqyObMmTOyadasmev9PLp16yabwYMHy2bFihWyyZEjh2umAgUKyCZv3ryy8fwsCxcu7BnJ9Tu+du2abJ5++mnZTJkyxTVT+fLlZdOrV6/bfm3evHny30dFRcnG8xkxMytYsKBsVq5cKZuNGzfKpkaNGq6Zbt68KZssWfT/pqVevXqy8XxvZmYDBw6UzbBhw2STO3du2TRu3Ng1U+nSpWXj+VkWKlRINufPn/eMZIcPH5bNoUOHZOO5XvKs3Wa+c+bzzz8vG881qudnaeZbu7p3757h1//2t7/J1/B85jzXb2ZmTZo0kc0nn3wim5YtW8qmatWqrpnq168vm88++0w2jz32mGymT5/uGcm17ixdulQ2R48elU1sbKxrJs+5OTw8XDajR4+WTdOmTT0jWeXKlWWTM2dO2Tz88MOy8fxOzMzmz58vG8/vxXNNcdddd7lm8vw81fXn2rVr5WtMmDBBNpcvX5aNme98kdF10D/MmTNHNhcuXPCMZLVr15ZNgwYNZHP16lXZeH7eZr7r75iYGNlUrFhRNtHR0a6ZZs+eLZsqVarIZvv27bJJT093zZSQkCCbunXryqZs2bKy8fx+zXz3RJs2bZKN5zmE5zrOzCwuLk427du3v+3XfvzxR/nvp06dKpvU1FTZmPnuZTzrpOc+Jk+ePK6ZWrduLZu0tDTZeK47O3Xq5JppwYIFsvGszc2bN5eN5/mwmdm0adNkc+PGDdl4njV7f06eewvPucnzzG/16tWekSxfvnyy8Zx3Dh48KJuIiAjXTOfOnZPNoEGDZMNflgAAAAAAAAAAgKDGZgkAAAAAAAAAAAhqbJYAAAAAAAAAAICgxmYJAAAAAAAAAAAIamyWAAAAAAAAAACAoMZmCQAAAAAAAAAACGpslgAAAAAAAAAAgKDGZgkAAAAAAAAAAAhqod5w9uzZssmRI4ds8ubN63q/MmXKyOann36STadOnWRz7tw510whISGyOXjwoGx+/vln2eTLl88105NPPimbAwcOyObIkSOy6du3r2ckGzVqlGzOnj0rm9BQfXgWKlTINdPChQsz5f2io6NlU6VKFddMq1evlo3n++vQoYNsLl++7Jpp7969ri4jTZo0kc2+fftkU61aNdf79e/fXzYbN26Uzc2bN2UzYsQIz0g2duxY2fTo0UM2ns9l5cqVPSPZrl27ZOM5TkqUKCGb48ePu2bq3bt3pryf53NSsGBB10zr16+XTVhYmGwuXLggm1KlSrlmuueee2TjObd6zk8tWrTwjGQnT550dbcTCARkM378eNn07NnT9X6e771Lly6yqV27tmwSEhJcM3l+Z55zTtasWWWTkpLiGcmSkpJk41lzTp8+LRvPdZ6Z2Zdffikbz8+yY8eOspkyZYpnJIuIiJBNfHy8bDzr0rJlyzwjWXp6umw8180vvPCCbEaPHu2aybNOdO/ePcOvly5dWr6G5zo3e/bssjEz27p1q2waNGggmxo1asgmKirKNdOPP/4oG89nbt26dbLxXFOZmY0cOVI2nnW+fv36srl69aprJs/9x1//+lfZNG3aVDaeewYzs+vXr8vGcxw88cQTsjl16pRrplatWsnm4sWLsvGcV0qWLOkZyfV7Wbt2bYZfnz9/vnyNYsWKyebYsWOyMfMd355zqucaz3PfbGYWExMjm++//142nnt+z/W5t8uZM6dsPHN7rtHMzLp27SqbV155RTZFihSRzY0bN1wzedZez73qzp07ZVO1alXXTJ5za3h4uGxy584tm2vXrrlm8p43byd//vyyefnll2Xzyy+/uN7Pc43umcnzbHDHjh2umTxrnOc
c4Dk3e59TeJ6Ptm7dWjaLFy+Wjfc+zXNsez5vtWrVko33d7dnzx7ZeM4pnmPAc19s5ntG7nku4rm38j4f9Dz38uAvSwAAAAAAAAAAQFBjswQAAAAAAAAAAAQ1NksAAAAAAAAAAEBQY7MEAAAAAAAAAAAENTZLAAAAAAAAAABAUGOzBAAAAAAAAAAABDU2SwAAAAAAAAAAQFBjswQAAAAAAAAAAAQ1NksAAAAAAAAAAEBQC/WGCQkJsvmP//iPPzTM/1tERIRsVq5cKZvSpUvL5ueff3bNdNddd8kmNFT/SOPj42Uza9Ys10z58uWTjef7K1++vGw+//xz10yPPfaYbN555x3ZbN++XTaJiYmumXr16iWbgwcPyiYyMlI2qamprpl+//132VSvXl021apVk82ePXtcM61Zs0Y2/fv3z/Dr27Ztk6/h+TlWrFhRNmZmS5culc1zzz0nm2vXrsnG8/k2Mxs+fLhsxo8fL5sWLVrI5vjx466Z7r77btl41pOvvvpKNsuWLfOMZGPGjJFNzpw5ZZOUlCSbunXrumaqVKmSbDZs2CCbmjVrysbzvZmZJScny8bzubvvvvtk8+WXX7pmevLJJ13d7cyfP182L730kmzmzp3rer/du3fLpkqVKrIpUaKEbFatWuWaKS4uTjbZsmWTzaeffiobz/Fo5rsWSElJkc369etlU7BgQddM3bp1k03Dhg1l47lGrVevnmumb7/9VjaBQCBTmvr167tm8qxxCxculM2wYcNk4zmHm5nlzZvX1WVk9OjRsvnwww9ls2XLFtf7Xbp0STZ58uSRzdatW2Vz5coV10y//PKLbJ5//nnZeNbLEydOuGbyXFfu3LlTNp5zgeccaOY7D8bGxsrmt99+k82RI0dcM3nWy7S0NNl4rlFv3LjhmsmzVniudz0zee51zMzef/99V5eRLFn0/wbUc0/sOeeYmS1ZskQ2J0+elE25cuVkU6BAAddMc+bMkc0zzzwjG8917qhRo1wzvf7667LxfMY7duwoG8+1npnZ119/LZs6derI5sKFC7Lp06ePa6Zjx47JxnO/41l3161b55qpVatWsvFcx4SFhWXaTCEhIbLJ6JrQc33avXt32cTExMjGzOzcuXOyadSokWw89xbR0dGumcqUKSMbz7HtuaaYOnWqaybP81HPMy/PutSkSRPXTIsXL5ZNs2bNZON5Nuh9fuh5Rj5u3LhMeR3PemPmu9596qmnZOO5JvY89zPzraee58P8ZQkAAAAAAAAAAAhqbJYAAAAAAAAAAICgxmYJAAAAAAAAAAAIamyWAAAAAAAAAACAoMZmCQAAAAAAAAAACGpslgAAAAAAAAAAgKDGZgkAAAAAAAAAAAhqbJYAAAAAAAAAAICgFhIIBAKe8KeffpLN6dOnZVOzZk3P29ncuXNl895778nm1VdflU3WrFldM0VHR8tm3759sundu7dsQkNDXTN5fubXr1+XTaVKlWRz6dIl10zZs2eXzbZt22Tj+XlHRES4Zjp48KBsZsyYIZtWrVrJpnHjxq6ZPLNPnTpVNnny5JFN/vz5XTN5jqfHHnssw6/fvHlTvobnZ71z507ZmJn16dNHNkOHDpXNww8/LBvncmk5cuSQzapVq2STmJgom2zZsrlmeuCBB2TjOSY9x8iWLVs8I1lsbKxsvv76a9lERUXJxvP7NTMbPXq0bBo2bCgbz2fu2rVrrpk8n/Fp06bJpl27drIZP368a6b69evL5qmnnrrt11588UX57+vWrSubqlWrysbM7Pz587KZOXOmbJo2bSqbtLQ0z0iuz+7Ro0czpYmMjHTN1LNnT9ns2bNHNp5rr4EDB7pmGjNmjGyGDBkim3Xr1smmQIECrpk813qeNaBkyZKy8Vy/mJmlp6fL5rPPPpNN6dKlZeM5p5qZPffcc7JR14MrVqyQr7FhwwbZnDhxQjZmvuMyJSVFNlu3bpVNcnKyaybPujNr1izZfPjhh7KZP3++ayb
PuuNZd7dv3y6b48ePe0bK8JzzDxcuXJDNpk2bZOP9OeXLl082ns/Td999J5tcuXK5ZgoPD5dNTEyMbCpUqCCbNWvWuGaKi4uTTevWrV2vlZFDhw7Jpl+/fq7XatasmWyuXLkim1KlSmXK65j57vnvvfde2Xg+u7/88otnJKtdu7ZsDhw4IJsqVarIxvPZNfNdE3nu55OSkjKlMTObOHGibHr16iWb4sWLy+bWrVuumXbs2CGb1NRU2XjW3U6dOrlm8lwTZPSM0PP5LlGihGw2b94sGzOz3Llzy6ZJkyay8Vwveq8pMut6uF69erJ5//33XTN16dJFNnv37pWNZ27P/Z6Z73rYc14KCwuTzfLly10zeZ55eO4vQ0JCZON97uU5Nj3nMM/52fM7MfM9Q/A81+UvSwAAAAAAAAAAQFBjswQAAAAAAAAAAAQ1NksAAAAAAAAAAEBQY7MEAAAAAAAAAAAENTZLAAAAAAAAAABAUGOzBAAAAAAAAAAABDU2SwAAAAAAAAAAQFBjswQAAAAAAAAAAAS1UG945MgR2Zw7d042CxcudL1fhw4dZDN37lzZxMTEyCYsLMw106xZs2RTtGhR2Rw4cEA2VatWdc20Zs0a2dSsWVM2N2/elM3KlSs9I7l+ToMHD5ZNVFSUbM6ePeuaKTw8XDahofrj4DmeRo8e7ZrJ4/r167K5du2abPbs2eN6v6FDh7q6jHiOyfr168umdevWrvfbvn27bF577TXZbNiwQTalS5d2zXT48GHZnD9/Xjae47t79+6OiXxruGdt8vwM7r77btdML774omzuu+8+2Vy+fFk2K1ascM2UPXt22TRt2lQ2BQoUkM3kyZNdMy1dulQ23bp1k41nHfD8TszM8uTJ4+pup2vXrrLxfCZnzJjher/atWvLJj09XTaez+3Vq1c9I7l+HxUqVJDNTz/9JJtvvvnGNdOWLVtks3fvXtl4PiOrV6/2jGT333+/bM6cOSMbz7nJc5yYmcXHx2fKTGlpabJZtWqVa6YPP/xQNiVLlpRNSEiIbE6ePOkZyb7++mtXlxHPebB69eqy8dxXmPmOb8+a3KBBA9kkJye7Zjp69KhsPOfKcuXKyeb33393zdSyZUvZrF+/Xjaec0GRIkVcM23cuFE2nvuBQCAgG891rJlZixYtZOO5B3viiSdks3jxYtdM+fLlk83x48dlM2LECNk8+uijnpEsNTXV1WXE87m8dOmSbNq3b+96P89x6bmXmThxomxq1KjhmslzzbBo0SLZeJ4dPPDAA56RLCkpSTbZsmWTTUpKimzKlCnjmqlEiRKy2blzp2w8x1OPHj1cM504cUI2nmdtnmcHV65ccc3Uq1cv2XzyySey6dOnj2w2b97smsmzNmWkefPmsklISJDNX/7yF9f7ea7zPPcDns+I917Xcy3ouYb1HGsDBgzwjGRVqlSRjec6znMff+HCBddMnvV0//79ssms+xgz3zVq3759ZeO53/Nc55mZ/fbbb7LJkSOHbFq1aiUbz9xmvnsnz/vxlyUAAAAAAAAAACCosVkCAAAAAAAAAACCGpslAAAAAAAAAAAgqLFZAgAAAAAAAAAAghqbJQAAAAAAAAAAIKixWQIAAAAAAAAAAIIamyUAAAAAAAAAACCosVkCAAAAAAAAAACCWqg3PHnypGxu3bolmwoVKrjeL1u2bLKJiYmRTXJysmwSEhJcM73wwguy+fvf/y6b06dPy2bNmjWumY4fPy6bc+fOyaZp06ayqVGjhmumRYsWyWbq1KmyKVOmjGxSUlJcM0VGRsqmXLlyspk4caJsWrZs6ZqpZMmSstm0aZNsQkJCZOP93S1YsEA2sbGxf/g1ihYtKpuKFSvKxszs999/l83MmTNlU6pUKdnkypXLNVOlSpVkU7t2bdkcPXpUNqtWrXLN1L17d9l4jqUdO3bIpmbNmp6RrEWLFrJp2LChbObNmyeb3Llzu2bKkyePbJKSkmTz448/yiZv3ry
ekeyxxx6TzcaNG2Xj+UxduXLFNdP27dtl07Nnz9t+bcWKFfLfe9bI5s2by8bMbMqUKbJZu3Ztpryf57xs5jvvHjlyRDaea5hJkya5Zjpz5oxsqlSpIhvP3F27dnXNtHLlStlcvXpVNpl1jjczu3z5smw81wLFihVzvZ/H2LFjZdOkSRPZeK6/s2fP7hnJihQp4uoyEh0dLZtdu3bJxnOPYub7XHrWyTp16sjGc49iZvbFF1/IplWrVrIpUKCAbFJTU10zedZwz3rieT/v57JWrVqy8XzmQkP17bFnHTTzXS94Pk8ff/yxbMaMGeOayfN58Zx/77vvPtmUKFHCM5J99913sunYsWOGXz927Jh8jcTERNl4rjvNzLJk0f+bU881s+dzsmTJEtdMgwYNcnXK8uXLZeO51zEz27t3r2x69eolmw0bNsjGe5/2+eefy+arr76SzfXr12WzZcsWz0iuc0bVqlVlc/PmTdncuHHDNdP3338vG8850/Ns4MEHH3TNdPfdd7u62/E8p/I8E/A+P/Tc66Wnp8vGc53r+d2bmYWFhcnGsw4ePHhQNgcOHHDNFB4eLpvJkyfLxvPzfuqppzwj2WuvvSYbz3XH+++/LxvvuXLgwIGy8ZzjPcec95rC8zOoXr26bD799FPZPPPMM5k2kwd/WQIAAAAAAAAAAIIamyUAAAAAAAAAACCosVkCAAAAAAAAAACCGpslAAAAAAAAAAAgqLFZAgAAAAAAAAAAghqbJQAAAAAAAAAAIKixWQIAAAAAAAAAAIIamyUAAAAAAAAAACCosVkCAAAAAAAAAACCWqg37Natm2wiIiJk89VXX7ne7+LFi7L5+uuvZVO9enXZhIWFuWb69ddfZeP5GeTLl082a9eudc10zz33yCYqKko2gwYNks3gwYM9I9lPP/0km8aNG8smJiZGNikpKa6Z8ubNK5vmzZvLJjk5WTa///67ZySrWbOmbFJTU2XTpk0b2Rw5csQzks2dO9fVZWTAgAGyGT58uGy8n4H33ntPNgsXLpRNlSpVZLN3717XTPv27ZNNIBCQzfHjx2Xj/QyMGzdONmPGjJFNp06dZLNjxw7XTFmy6P36LVu2yObUqVOyadasmWckS0pKks3Nmzdl41lPXnvtNddMkZGRslm9erVsSpcuLZusWbO6ZmratKmrux3Psb1o0SLZVKxY0fV+nvNg9+7dZbNq1SrZXLlyxTVTqVKlZLN//37ZLFiwQDb169d3zbRu3TrZrFy5MlPer3Dhwq6ZunbtKpvly5fLpmTJkrLxnne+++472Wzbtk02nrUyf/78rpk839+0adNk4zl/ff75556RLDTUfZtxW55rpYSEBNnkyZPH9X4ffvihbK5duyabhg0byiZHjhyumR555BHZnD59Wjae363nfs/M7PXXX5dNgwYNZOM5FxQvXtw1U+/evWWzZMkS2XTs2FE2Fy5ccM0UHh4uG8913F//+lfZzJo1yzVToUKFZFOjRg3ZHDx4UDaetdnMrFWrVq4uI57fSZMmTWSze/du1/uFhITIpkOHDrIZOXKkbDzrkpnZxIkTZeM53jZt2iSb+fPnu2Z64403ZONZmzzXcStWrHDN9Mwzz8hm2bJlsrnrrrtks3PnTs9I1rNnT9l4rlE8nwPvuSc+Pl423377rWyGDBkim7Fjx3pGspw5c8rmxRdfvO3XqlatKv+95/lhWlqabMzMypYtK5tPPvlENuXLl5eN5xmjme/54cyZM2Xj+dx6vn8z331sXFycbDzXjFOmTHFM5HtOcffdd8smMTFRNt5rrx9//FE2uXLlko3nOZTnWsHMdxx4rr08r+P5WZplzv2HGX9ZAgAAAAAAAAAAghybJQAAAAAAAAAAIKixWQIAAAAAAAAAAIIamyUAAAAAAAAAACCosVkCAAAAAAAAAACCGpslAAAAAAAAAAAgqLFZAgAAAAAAAAAAghqbJQAAAAAAAAAAIKiFBAKBgCccPHiwbB5
55BHZrFq1yvN2VqFCBdns2bNHNhcuXJDNzz//7JopPj5eNiVKlJBNdHS0bBYvXuyaKU+ePLJp3bq1bFJSUmSzcuVK10zLly+Xzd///nfZzJ49WzYdOnRwzTR+/HjZNGrUSDYbNmyQTZcuXVwzeY7fyMhI2SQmJsomW7ZsrpnS0tJk89JLL2X49aFDh8rXqFWrlmxy5colGzOzc+fOyWb69OmyeeCBB2Rz+fJlz0hWvnx52ezcuVM2ffr0kc1rr73mmqlq1aqy2b9/v2w86+C+fftcM3k0bdpUNm3btpWN93NZqlQp2Xg+c8nJybLp1KmTa6a9e/fKpl27drJZt26dbDzfv5nZpUuXZNO+ffvbfm3KlCny3xcqVEg2o0ePlo2ZWVxcnGw8l0Oez23WrFk9I1n27Nll88ILL8hm3LhxsmnRooVrpvz588tm9+7dsvEca82bN3fNdPHiRdl4zpWedclzLWTm+5zUr19fNr///rtsPOcvM9/x++abb8pmzpw5sqlTp45rJs/14Ouvv57h1xcuXChfw/NzzJEjh2zMzKpUqSKbX375RTYRERGyefXVV10zeY7d9PR02XjWXe/65bnf8fwsDx06JJuwsDDXTI8++qhsevXqJZvevXvL5uuvv3bN5LlP8Vxbrl69Wjb9+vVzzeT5XHrWXc/56bfffnPN5FnDW7ZsmeHXv/zyS/ka169fl41nDTTzHUsenpk8nyUz31rhWZtOnDghmzZt2rhm8pzDzp8/L5uCBQvKJiEhwTGR7x7T83xo7NixsvE+01HHt5fnXHfw4EHXa506dSpTGs9zvTNnzrhm8nwWMrq/6tq1q/z3nutq73rreT767bffyqZZs2ay8T7vqV27tmyOHj0qG881hed5l5nZoEGDZON5rut5JuC5rjQzK1mypGw8a254eLhsKlWq5BnJdb3reZ7nWQO9zwQmTJggm7p168qmcOHCsjly5IhrptOnT8tmxIgRsuEvSwAAAAAAAAAAQFBjswQAAAAAAAAAAAQ1NksAAAAAAAAAAEBQY7MEAAAAAAAAAAAENTZLAAAAAAAAAABAUGOzBAAAAAAAAAAABDU2SwAAAAAAAAAAQFBjswQAAAAAAAAAAAS10Mx8saioKNlcunTJ9VrTpk2TTZkyZWQTHh4uG8/cZmYnTpyQzd69ezNlpq5du7pmKleunGwSExNlU7JkSdl06dLFM5KtXLlSNikpKbIpW7asbA4fPuwZyWJjY2XTsmVL2ZQuXVo27777rmum3Llzy+bFF1+UzYoVK2TjOS7NzGrVquXqMuL5/bdv3142x48fd73f5s2bZTNs2DDZeD4noaG+JdOzfrVu3Vo2Bw4ckM0bb7zhmql79+6yGTVqlOu1lJw5c7o6z8/Tczzdf//9sgkEAq6Z6tevL5ssWfT/zsAz0/r1610z9ezZUzaDBg2STZ06dWRTvHhxz0h25coVV3c7c+fOlU3jxo1lky1bNtf7eX7/lStXlk3z5s1ls3r1atdM27dvl02nTp1k07BhQ9l88cUXrpnKly8vm61bt8rGczx6rwdr164tG8/a5fndxcTEuGaaNGmSbJKTk2XjOQY813lmvnPhRx99JJtChQrJxvP5NTNr0KCBq8uI5/Pk+Zy89957rvdbtWqVbDzXb88//7xsFixY4JqpevXqsvnxxx9ls2XLFtk0bdrUM5LVq1dPNseOHZON53f366+/umaaOXOmbPbt2yeb/fv3y+bmzZuumTzXsp07d5bN2rVrZeO5bzIzy5o1q2wSEhJk8/rrr8vmiSeecM302WefyUbdp3nurytVqiSbGzduyMbMbM2aNbKpVq2abDy/tzFjxrhm6tChg2wKFiwoG8/5JCwszDWT5z51wIABssmePbtsLl686JrJs4Z/9dVXsvF8Lr2fAc+5x/PZTUpKks3ixYtdMzVp0kQ2Tz31lGy+++472dxzzz2umTZt2uTqbuexxx6Tzffffy+bVq1aud5v2bJ
lsnnmmWdkM3bsWNmkpaU5JjJr06aNbEaOHCmbPn36yMZ7v7hnzx7ZeH53ntfxfI68rxUfHy8bz+ft999/d83kuW/wPPPxPP/2HLtmvuPu8uXLsvFcd5QqVco1k/eZncJflgAAAAAAAAAAgKDGZgkAAAAAAAAAAAhqbJYAAAAAAAAAAICgxmYJAAAAAAAAAAAIamyWAAAAAAAAAACAoMZmCQAAAAAAAAAACGpslgAAAAAAAAAAgKDGZgkAAAAAAAAAAAhqod7wvvvuk83w4cNlc+LECdf71apVSzZXr16VTUREhGy6dOnimmnFihWyyZUrl2zq1q0rm4ULF7pmOnPmjGyyZs0qm127dsnmwIEDrpneffdd2YwcOVI2e/bskc3o0aNdM/Xp00c2Q4YMkU2ePHlk07dvX89INn/+fNmsXbtWNp5j7u2333bNlJSU5Ooy0rRpU9l4PksXLlxwvV/Dhg1ls3r1atmULFlSNmlpaZ6RrFevXrL5+uuvZdOgQQPZDB482DVTaKhe7mvUqCEbz5p69uxZ10xdu3aVze7du2XjWSsKFSrkmikqKko2y5cvz5T385znzMwOHjwom7vuuks2nuPpsccec830+eefu7rb8ayT06dPl03FihVd7zd16lTZDBs2TDbvvPOObDp16uSa6dy5c7LxfEYOHz4smw4dOnhGcn2WYmNjZbN06VLZZMni+9/r5MiRQzbZs2eXjeecUqZMGddMnvfLnTu3bMqVKyebevXquWYKCQmRzbp162Rz7do12XjWQDPfeadz584Zft1zDfvxxx/LplWrVrIx860pnp9j27ZtZfPss8+6Ztq0aZNsGjduLJt77rlHNp5zvJnvGqZ+/fqyefDBB2XzwQcfuGbyrF9FihSRzZEjR2QTExPjmik6Olo2Y8eOlY3n9/vbb795RnLd88XHx8vGc028aNEi10z58+d3dRm5cuWKbLZv3y6blJQU1/s9/vjjsvH8bhcvXiwbz72VmdmGDRtk4/m9ValSRTbeZzqea0/P9ZDnOjdfvnyumSpVqiQbzz1xIBCQjed7877frVu3ZFOsWDHZDBw40DXTyZMnZbN582bZVK5cOVMaM7MvvvhCNk8++eRtvzZlyhT57z3nCc+1ife1evfuLRvP76xq1aqumTzXMJ7fR4ECBWTjOS+bmTVv3lw2nmceAwYMkM3ly5ddM+3fv182169fd72W4l27POvgjRs3ZOM5BsLCwlwzRUZGyubTTz+Vjef+0rPmmvnXE4W/LAEAAAAAAAAAAEGNzRIAAAAAAAAAABDU2CwBAAAAAAAAAABBjc0SAAAAAAAAAAAQ1NgsAQAAAAAAAAAAQY3NEgAAAAAAAAAAENTYLAEAAAAAAAAAAEGNzRIAAAAAAAAAABDU2CwBAAAAAAAAAABBLdQbnj9/XjZt2rTJlNcxM1u+fLlsGjRoIJvSpUvLZsuWLZ6RXK+1ePFi2dSqVUs2kZGRrplOnz4tmwoVKsgmJCRENp7vzcysZMmSspkwYYJsmjdvLpvixYt7RrJRo0bJpmzZsrI5c+aMbMLDw10z3bp1SzbHjh2TTc+ePWWzY8cO10wLFy6UTdeuXTP8er58+eRrNG3aVDbff/+9bMzMnnzySdl0795dNiVKlJBNjhw5XDPt27dPNhEREbIpWLCgbLZv3+6aqVixYpnyfsePH5eNZw0wM0tJSZHNZ599Jpvhw4fL5vr1666ZLly4IJvKlSvLZt26dbLxng9z5colm6NHj8omPT1dNkOGDHHNlJiY6OpuJ0sW/b/VyJ49u2yuXLnier8vv/xSNp6foWftatKkiWsmD8/35/k5edeJTp06yWbevHmyiYqKkk358uVdM3mOldq1a8vm559/ls0DDzzgmsmznrz22muy6dGjh2yKFCnimuny5cuyqVGjhmw8P+9hw4a5ZvKsXcobb7whmzfffFM2nnOumVlycrJsDhw4IJtVq1bJxnNtauY7f+/evVs2V69
elY33/uOhhx6STbZs2WTjuR+YP3++aybPOc5zz+dZB3r16uUZyb799lvZeK5Rp02bJpuiRYu6ZqpUqZJsLl26JBvP/aznuDQzi42NdXUZqVixomw8n906deq43s9zfvbc6z366KOymTlzpmumkydPyqZw4cKyyZ8/v2ySkpJcMzVs2FA2nrViwYIFsomPj3fN5PndZc2aVTZlypSRzdmzZ10zRUdHyyZPnjyymTx5smyqV6/umslzzoiJiXG9ljJmzBhX5z2X347nucHEiRNl4znfmPmew7300kuy8dyj58yZ0zVTamqqbDzPPT2v410nJk2aJBvPeupZ472/u0AgIBvP/YDn87ZixQrXTHFxcbLxnAc8683s2bM9I1nNmjVl47lXvffee2Vz8+ZNz0iu2Xv37i0b/rIEAAAAAAAAAAAENTZLAAAAAAAAAABAUGOzBAAAAAAAAAAABDU2SwAAAAAAAAAAQFBjswQAAAAAAAAAAAQ1NksAAAAAAAAAAEBQY7MEAAAAAAAAAAAENTZLAAAAAAAAAABAUAv1hlmy6H2V9evXyyYmJsb1fvfff79s1q1bJ5utW7fKZsOGDa6ZChYsKJuxY8fKZufOna7388iXL59spk+fLpvevXvL5oknnvCMZJ9++qls3nrrLdmcP39eNo888ohjIrOTJ0/KZsaMGbLxHJeTJk1yzVS8eHHZXLhwQTZr166VTUJCgmsmzzGuREdHy+Zvf/ubbN59913X+3nWlNjYWNlERkbKZt68ea6Z+vfvLxvP3KmpqbIpVqyYa6Zx48bJpmzZsrJp0KCBbLzH208//SSbwYMHu15L8R7bnvfz/Jw86/zQoUNdM82fP182N2/elM3SpUtlkzt3btdM3nP57TRt2lQ2ns+k91g7ceKEbJYsWSKbW7duyWbbtm2umTzXMPXr15fNww8/LJtZs2a5ZpoyZYpsWrRoIRvPeeno0aOumTzXOWlpabKpWrWqbH7//XfXTDly5JDNRx99JJsVK1bI5tdff3XN1Lp1a9l4rtMbNWokm2zZsrlmun79uqvLyNdffy0bz7XStGnTXO83ZMgQ2aSnp8vmyy+/lI1n3TYzq1y5smw8a7dnzSlatKhrJs/v1vP9ff7557KJiIhwzVSrVi3ZTJ48WTZPP/20bDzXQmZmefPmlc0PP/wgmzNnzsimXr16npFc63z37t1l47neDw31PWooUaKEq8tIIBCQjedz4jmfmJlNnDhRNr/99ptsmjdvLpv8+fO7ZnrppZdkk5SUJJusWbPKxvNZMvMduxUqVJDNs88+K5tXX33VNVOVKlVk47lvaNy4sWwWLlzommnBggWy8Vw7e67RZs+e7Zpp0KBBsvE8++rbt69svGvA1KlTXd3tjBgxQjae5579+vVzvd+OHTtk4znvfvLJJ7J5+eWXXTN5jsl27drJxnNv5bmPMfPdg3mu4wsUKJBpM3nuCTzr6fLly2UTHh7umqlIkSKy8Vyjf/zxx7LxXud4not4nkd7no0WLlzYM5K1bdvW1Sn8ZQkAAAAAAAAAAAhqbJYAAAAAAAAAAICgxmYJAAAAAAAAAAAIamyWAAAAAAAAAACAoMZmCQAAAAAAAAAACGpslgAAAAAAAAAAgKDGZgkAAAAAAAAAAAhqbJYAAAAAAAAAAICgFhIIBAKesHv37rK5cuWKbL788kvP29mIESNk06ZNG9nkyZNHNlmy+PaMNm3aJJucOXPKJjo6Wjaen6WZ2eXLlzOlKVu2rGw835uZ2aFDh2QTEhIim507d8omKSnJNVOtWrVkc+3aNdlcvHhRNmvXrnXN1LZtW9ns27dPNq1atZJN9erVXTMlJibKplu3bhl+/eeff5avsXv3btmkpaXJxsx37JYsWVI28+fPl80zzzzjGcleeOEF2Xi+P8/r/PTTT66ZqlSpIpvevXvLZvr06bKpX7++ZyR79913ZeP57HosWbLE1X388ceymT17tmw8a3hUVJRrJs/
szZo1k02LFi1kM27cONdM4eHhsnn99ddv+7X33ntP/vvjx4/L5v3335eNmdknn3wimwYNGshm8uTJsomJiXHN1K5dO9lkzZpVNp5rihIlSrhmevXVV2XTpEkT2WTLlk02ixcvds20Z88e2TRv3lw2qampmdKYmZ0+fVo2/fr1k8369etlU6xYMddMCQkJsrl586ZsPNdedevWdc1UqFAh2Tz88MMZft1zjvO8z9mzZ2VjZjZv3jzZ1K5dWzY1atSQjWc9MfOdTzzrZfv27WXj/Qx07NhRNnPnzpXNE088IZtJkya5ZgoLC5ONZ33esWOHbDz3Oma+n9PQoUNlU758edl47ivMzI4dOyabxo0by+bcuXOyee2111wzPffcc7K5//77M/x6jx495Gt4zqmffvqpbLydZ6YJEybIJj093TVThw4dZONZCz3X+o888ohnJNu1a5dsihQpIhvPZ/fSpUuumTz3YDNmzJDN/v37ZeO91u/cubNstm7dKhvPfZPnOs7M95wlJSVFNp61omXLlq6ZRo8eLZuM1p0VK1bIfx8bGyubd955RzZmZhUqVHB1yqpVq2TjuRY2M0tOTpZN6dKlZeN5TlW4cGHXTJ7r0xMnTshm7969svHcw5qZVatWTTalSpWSjeeYK1iwoGsmz7VHly5dZPPKK6/IxvuMadCgQbLxXMN41uV8+fJ5RnI9i4qLi5MNf1kCAAAAAAAAAACCGpslAAAAAAAAAAAgqLFZAgAAAAAAAAAAghqbJQAAAAAAAAAAIKixWQIAAAAAAAAAAIIamyUAAAAAAAAAACCosVkCAAAAAAAAAACCGpslAAAAAAAAAAAgqIV6w1GjRslmwoQJslm7dq3r/YYPHy6bd955RzaRkZGyOX/+vGcka9SokWy2bdsmm/vvv182K1ascM2UkpIim0KFCsmmSJEisvH8fs3MoqKiZLNv3z7ZFC9eXDZJSUmumbJk0fuCS5Yskc2DDz4oG88xZ2bWpEkT2TRv3lw2N2/elE3Xrl1dM61Zs8bVZSQ9PV02bdq0kY3n821mFh4eLpsKFSrI5umnn5bN6NGjXTPVrl1bNk2bNpXNTz/9JJtu3bq5Zvrmm29kc/bsWdnExMTI5vfff3fN9NFHH8nm9ddfl01sbGymvI6Z2bJly1yd4lmbGzRo4HqtM2fOyKZLly6yefHFF2Xz3HPPuWa6du2aq7ud3Llzy+aVV16RzY8//uh6v3Llysnmrbfekk2fPn1ks2vXLs9INnnyZNlcuXJFNp5rCs95wsysYsWKsvGc42bOnCmbHTt2uGZq166dbC5evCibypUryyYhIcE1088//yybHj16yMZzzXjgwAHXTOvXr5eN5/dy+fJl2XiP8ePHj7u6jBQoUEA2s2bNko3ns2RmVrRoUdmUKlVKNp410nPuMvOddz1rquderlevXq6Ztm7dKpsSJUrIZuTIkbKJi4vzjGQXLlyQTWZd6w8dOtQ1k+eeKDo6WjbXr1/PlPcy8933zp07N1Nep3v37o6JfL87pXXr1rLx/By917CnTp2Sjeec4lnjIiIiXDMVLlxYNtmyZZNNhw4dMuV1zMwaN24sG8/a5HkucNddd7lm8siXL59s6tevL5vSpUu73s9zX+h5fpKYmCib06dPu2ZKS0uTTd68eWXjuW7csmWLYyKzc+fOubrbWb58uWw89xbTp093vV/WrFll47mG9bzOiRMnXDMdPXrU1Sl169aVjffndPjwYdnkz59fNiVLlpSN91md515+xowZsnnggQdk47k+NzOrV6+ebH799VfZDBgwQDbHjh1zzeRZKwOBgGw8v1/PecDMLDk52dUp/GUJAAAAAAAAAAAIamyWAAAAAAAAAACAoMZmCQAAAAAAAAAACGpslgAAAAAAAAAAgKDGZgkAAAAAAAAAAAhqbJYAAAAAAAAAAICgxmYJAAAAAAAAAAAIamyWAAAAAAAAAACAoMZmCQAAAAAAAAAACGohgUA
g4AknTpwom/Lly8tm5MiRnrezW7duyaZ48eKyiYiIkE379u1dM02dOlU2FStWlM3MmTNlU7NmTc9I1rBhQ9nMmDFDNnXr1pVNnTp1XDO99tprmfJ+nt+v5zgxM9u7d69scufOLZs2bdrI5tq1a66Z9u3bJ5tatWrJxvP79f7u9uzZI5u//OUvGX599erVmfI+zqXJ8ubNK5vIyEjZ7N+/Xza7d+/2jOR6v3LlyskmW7ZssvEct2Zmf/3rX2Xz4YcfysYzd2pqqmumpUuXyqZRo0ayWbhwoWyio6NdM50/f142BQoUkM2JEydk8+KLL3pGspdfflk2cXFxsilbtqzr/TzmzZsnm4yOuRdeeEH++6ioKNk0adJENmZmixcvlk2hQoVk47nOyZUrl2smz7ni0KFDsvEc2xs2bHDNlD17dtnkzJlTNjVq1JDNmjVrXDNdunRJNmfPnpWN5zzoOZ+YmZUqVUo2r7zyimxCQ0Nl079/f9dMW7dulY3nd+f5Wfbq1cs1008//SSbESNGZPj1FStWyNfwfO8pKSmyMfN95jy/N8954pFHHnHNNHjwYNk8/fTTstm1a5ds2rZt65rp66+/lo3nWPLcM5w8edI1U0JCgmwaN24sm4sXL8rGc/1iZnbkyBHZfPvtt7JJTEyUzaJFi1wzeY5xz8/AszZ99NFHrpk8n4XY2NgMv96lSxf5Gj169JDNgQMHZGNm9tBDD8nG8ztp166dbMaMGeOaqXXr1rLxXHuqddnMd79nZta5c2fZrFu3TjYXLlyQjedaz8zsvvvuk03lypVlk5SUJBvPz9LMrG/fvrI5duyYbAoWLCibTZs2eUZyHU+e9ztz5oxstm/f7pqpadOmssno3sBzP+y5t/bcd5mZjRs3TjYDBgyQTdasWWXjueYyM+vUqZNsTp06JRvP78zzPM/MrEyZMrLxnCtbtmwpmxIlSrhm8jz/9vycOnbsKBvvMyaPmzdvysbz/NvzrNLMLF++fLK5ceOGbJKTk2XjeaZrZpY/f37ZvPfee7LhL0sAAAAAAAAAAEBQY7MEAAAAAAAAAAAENTZLAAAAAAAAAABAUGOzBAAAAAAAAAAABDU2SwAAAAAAAAAAQFBjswQAAAAAAAAAAAQ1NksAAAAAAAAAAEBQY7MEAAAAAAAAAAAEtVBvmDVrVtkUKlRINidPnnS9X/fu3WWzbt062ZQvX142L7/8smumSpUqZUpTrlw52WTLls01008//SSbd999VzZTpkyRzapVq1wztWzZUja3bt2STWJiomyqVKnimilXrlyyCQQCshk1apRsoqOjXTOVLl1aNtu3b5fNtm3bZFO8eHHXTMuWLZPNX/7ylwy/vnLlSvkasbGxstm6datszMx27dolm/DwcNn0799fNp7PiZlZfHy8bDw/p+vXr8tm3759rpnGjBkjG8+aWrlyZdls3LjRNdOpU6dk4/kZXLx4UTZ16tRxzVSiRAnZeL6/pKQk2SxevNg1U4MGDWTj+f5++OEH1/t5eM51GWnfvr1sPJ+RLFl8/5uPuLg42XjWkrCwMNl4165q1arJ5sKFC7JZs2aNbDyfWzOz8ePHy8azft+4cUM2efPm9YxkUVFRsilatKhs7rvvPtkMHz7cNZNnzWndurVsdu/eLZuOHTu6ZvIcm/ny5ZON57rD8/2bmUVERLi6jMyZM0c2rVq1ko3nXGLmu4Z95ZVXZPPss8/KxnNeNjPr2rWrbH7//XfZeK7jPde5ZmabN2+WzYIFC2Tz4Ycfysaz5piZPfroo7LxHCsPPvigbLzHtufewnMcbNmyRTY5c+b0jGRVq1aVzeTJk2XjOeYaNmzomsm7pmTkrrvuks2RI0dk4z1fes5htWrVko3n3sLzXMTM7OzZs7LxHG9fffWVbGrUqOGaafDgwbK59957ZeO5tnrhhRdcM3nOB+np6bLxXKd6zrtmZgUKFJBNaKh+dDd79mzZeNZKM7P58+fLpnPnzrLxHCv
eZxWe46BJkya3/Vr27Nnlv/fcd33xxReyMTMrWLCgbDz3w5711vO8w8xs+fLlsvE8q7t69apsTp8+7ZrJcw/mufc8c+ZMps3kuU+7cuWKbDzP6urXr++a6b333pNNzZo1ZfPxxx/Lxnve8ewTeK4/Pb9f7zXF+vXrXZ3CX5YAAAAAAAAAAICgxmYJAAAAAAAAAAAIamyWAAAAAAAAAACAoMZmCQAAAAAAAAAACGpslgAAAAAAAAAAgKDGZgkAAAAAAAAAAAhqbJYAAAAAAAAAAICgxmYJAAAAAAAAAAAIaqHe8NixY7JZvny5bNq2bet6v3HjxslmwIABslmyZIlshg4d6prpzJkzsklKSpJNQkKCbAoWLOiaydMtWLBANlevXpVNtmzZXDN16dJFNqNHj5ZNSkqKbNLT010zlSxZUjazZ8+WTZUqVWRTqlQpz0j2yy+/yKZNmzayefjhh2Wzfft210yff/65q8tI9uzZZRMeHi6bOnXquN7v+vXrstmwYYNsvvjiC9lUrFjRNdP06dNlExERIZucOXPK5ty5c56RbOPGjZnSxMTEyKZo0aKumRITE2VTpEgR2dx3332y+eijj1wzde/eXTavv/66bObMmSObtWvXumaqWbOmbKZOnSqbZs2ayWbp0qWekVzHZkY861/lypVlM378eNf7PfbYY7LxHP+TJk2SzaOPPuqaKUeOHLLxrN1du3aVTUhIiGump556KlNmOnDggGzy5cvnmslz/s6TJ49sjh8/LpsWLVp4RrJ9+/bJJj4+XjatW7eWzWuvveaaqUCBArLJlSuXbLp16yabefPmuWbq3Lmzq8tI3759ZbNq1SrZeH7WZma5c+eWzSOPPCIbz3XulStXXDN5rr/j4uJk41kHLl265BnJPv74Y9kkJyfLxvN78VwHmJl9++23silfvrxsihcvLpudO3e6ZvLcE3h+v9u2bZONZ80xM1u2bJls3nzzTdlERUXJ5oMPPvCMZJcvX5ZNx44dM/z66dOnM+V9bt68KRszs6xZs8rGc97x3A94j7dFixbJxnN//eyzz8rm0KFDrpk855RixYrJxnPe9d7vetam5s2by+bxxx+XjecYMDMrV66cbEaMGCEbzzOdGTNmeEay6OjoTHm/3bt3y8ZzDJj57vsz4rkO8jwTeuutt1zvt3XrVtnkzZtXNoFAQDbeZwKen/WRI0dk47nu2L9/v2ck1+ekWrVqstm7d69s7r77btdMnnXC89zLc871vI6ZWcOGDWXTtGlT2Xie7ZcpU8Y1U2pqqmwqVKggG89aEhYW5pqpffv2rk7hL0sAAAAAAAAAAEBQY7MEAAAAAAAAAAAENTZLAAAAAAAAAABAUGOzBAAAAAAAAAAABDU2SwAAAAAAAAAAQFBjswQAAAAAAAAAAAQ1NksAAAAAAAAAAEBQY7MEAAAAAAAAAAAEtVBvGBERIZubN2/KpmDBgq73e+mll2SzdOlS2bRv3142W7dudc0UFhYmm8jISNlkyaL3qA4ePOia6f7775dNcnJypjQ//vija6YyZcrI5t5775XNsmXLZJOSkuIZyWJjY2WzevVq2Xi+t9KlS7tmKly4sGwSEhIy5f26du3qmmnUqFGyGTJkSIZf379/v3yNNm3ayGb9+vWy8bp06ZJsunXrJhvv8XblyhXZeH4nQ4cOlU3NmjU9I7mMGTNGNp999plsateu7Xq/q1evyub69euyuXHjhmw++OAD10w7duyQzfLly2VTqFAh2Rw6dMg1U40aNWRz69Yt2Wzbtk02Fy9edM00b9482dx99923/dqRI0fkv69fv75svOfK2bNny8ZzrPXq1Us2qamprplmzZolmxw5csjGc22SK1cu10yeY7t///6ymTBhgmw810Jmvs/Jgw8+KJu8efPKxnMNZ2a2adMm2SxcuFA2nnOFd6YNGzbI5pFHHpHNDz/8IJuTJ0+6ZipatKiry4hnTdq9e7dsSpQo4Xo/z/d26tQ
p2Vy+fFk23p9P2bJlZfPpp5/KxnO/ExcX5xnJ9u7dK5u33npLNlOnTpWN59rUzKxixYqy8awDSUlJsvFc65qZFShQQDaJiYmy6dy5s2wuXLjgGcn69u0rmwMHDsjGszY1atTIM5LrPlR59tlnZXPmzBnZeK7fzHzXw55r9OLFi8vGc89oZta8eXPZeM6pnuuYypUru2ZauXKlbPbs2SObu+66Szaez66Z73pvzZo1svF8dg8fPuwZyXU94Lm/qlevnmw81+BmZrVq1ZKNZ332fKaqVavmmmnOnDmyefTRR2/7Nc/ads8998jm559/lo2ZWbFixWTj+UyGhurHtp5jyMz3zNazlgwYMEA2H3/8sWumxo0by8ZzjmvdurVsvM9+Pc99PM+PunfvLhvPcWLmux7cuHGjbDz3hd57x8WLF8smJiZGNp41wHuP7bl/9uAvSwAAAAAAAAAAQFBjswQAAAAAAAAAAAQ1NksAAAAAAAAAAEBQY7MEAAAAAAAAAAAENTZLAAAAAAAAAABAUGOzBAAAAAAAAAAABDU2SwAAAAAAAAAAQFBjswQAAAAAAAAAAAQ1NksAAAAAAAAAAEBQC/WGjRs3lk3WrFllM3r0aNf7Xbt2TTa3bt2STdGiRWVTqlQp10yTJ0+WzcKFC2XTvn172TRr1sw103vvvSebYsWKyeb06dOyqVKlimum/Pnzy+azzz6TTVJSkmxCQ32H8OrVq2UTGRkpm0KFCslm3rx5rpny5Mkjm5s3b8pmxIgRsvnLX/7iGcmaN2/u6jKSK1cu2SxatEg2ZcuWdb2fZx3YsWOHbKZPny6bbt26eUayGTNmyGbZsmWyueeee2Rz4cIFz0gWFRUlG8+xGx0dLZsDBw64Zqpbt65sJkyYIJv69evLJm/evJ6RXGvF008/LZvExETZ7N692zWTZ33etGmTbN5//33ZzJ492zVT9erVXd3thIeHyyYtLU02MTExrvd7+OGHZbN161bZbNmyRTY1a9Z0TGR29OhR2Sxfvlw2Dz74oGw8a6CZ73rIc57wfG8JCQmumSZNmiSbX375RTae67ORI0e6Zlq8eLFsHnroIdmsWrVKNsWLF3fNVKdOHdmcO3dONh9//LFs7r//ftdMnt+duib+8ccf5Wu0bNlSNp5reDOzJ554QjZ79uyRjef35r2G9ZxTPdfMPXv2lI33d3vixAnZeK9PlDJlyrg6zzVFliz6fyd48uRJ2Xjuec3MihQpIhvPfZNnjfPMbea7n0tPT5dNWFhYps3kXecy4rn2rlGjhmwGDx7ser8hQ4bIpkSJErKpWrWqbDznCjOzdevWZcr7LVmyRDanTp1yzTRu3DjZvPPOO7LxrLvez6Xn/spz3eS5Zu7atatrpnz58rk6ZcGCBbLxXAuYmZUuXVo29erVk43n9+I5p5j5rq0y4rl/uXTpkmy8vy/Pz3Ds2LGyCQkJkc3Vq1c9I1lcXJxsdu3aJZsPPvjA9X4enjXuyy+/lI3nWY3n/tLMrFy5crJZu3atbDzXHZ5zrpnZ3LlzZfPrr7/KpkePHrJZunSpa6YzZ87IxnO9cP78edl413jPfZrn3oG/LAEAAAAAAAAAAEGNzRIAAAAAAAAAABDU2CwBAAAAAAAAAABBjc0SAAAAAAAAAAAQ1NgsAQAAAAAAAAAAQY3NEgAAAAAAAAAAENTYLAEAAAAAAAAAAEGNzRIAAAAAAAAAABDUQgKBQODPHgIAAAAAAAAAAODPwl+WAAAAAAAAAACAoMZmCQAAAAAAAAAACGpslgAAAAAAAAAAgKDGZgkAAAAAAAAAAAhqbJYAAAAAAAAAAICgxmYJAAAAAAAAAAAIamyWAAAAAAAAAACAoMZmCQAAAAAAAAAACGpslgAAAAAAAAAAgKD2/wAtfM6YRDDWfgAAAABJRU5ErkJggg==", - "text/plain": [ - "
" - ] - }, - "metadata": {}, - "output_type": "display_data" - } - ], - "source": [ - "n = 6\n", - "plot_image_grid(torch.randn((n,28,28,1)), [f\"label {i}\" for i in range(n)])" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "a9d2dca8-d73f-49fc-a56c-1c148b1abeab", - "metadata": {}, - "outputs": [], - "source": [ - "#| export\n", - "def latents_to_pil(latents:torch.Tensor, channels=None):\n", - " if channels is None: \n", - " channels = latents.shape[1] if len(latents.shape) > 3 else 1\n", - " \n", - " images = scale_tensor(latents)\n", - " images = images.detach().cpu().permute(0, 2, 3, 1).numpy()\n", - " \n", - " if channels == 1: images = images[:, :, :, 0]\n", - "\n", - " images = (images * 255).round().astype(np.uint8)\n", - " \n", - " pil_images = [Image.fromarray(image) for image in images]\n", - " return pil_images " - ] - }, - { - "cell_type": "markdown", - "id": "82152d0d-bfac-4196-ba10-3dfe21fc705f", - "metadata": {}, - "source": [ - "# Export -" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "11d3d56b-0aba-4ccc-9299-86bf132fcc99", - "metadata": {}, - "outputs": [], - "source": [ - "#| hide\n", - "import nbdev; nbdev.nbdev_export()" - ] - } - ], - "metadata": { - "kernelspec": { - "display_name": "python3", - "language": "python", - "name": "python3" - }, - "widgets": { - "application/vnd.jupyter.widget-state+json": { - "state": {}, - "version_major": 2, - "version_minor": 0 - } - } - }, - "nbformat": 4, - "nbformat_minor": 5 -} diff --git a/src/utils/async_fn.ipynb b/src/utils/async_fn.ipynb new file mode 100644 index 0000000..bdfed6d --- /dev/null +++ b/src/utils/async_fn.ipynb @@ -0,0 +1,153 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "id": "a8980c24-d62e-462b-ba89-3195cfdcc374", + "metadata": {}, + "source": [ + "# Async functions\n", + "\n", + "> Basic functions for async executions." 
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "a8832bdd-f61c-44e1-8619-a9cb352ba768", + "metadata": {}, + "outputs": [], + "source": [ + "#| default_exp utils.async_fn" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "06272f6f-b4e3-4504-a90a-feebbf6ad821", + "metadata": {}, + "outputs": [], + "source": [ + "#| export\n", + "from genQC.imports import *\n", + "from joblib import Parallel, delayed\n", + "\n", + "from tensordict.tensordict import MemoryMappedTensor\n", + "import tempfile" + ] + }, + { + "cell_type": "markdown", + "id": "2bae3d8c-7970-42ee-8723-d0ba4f701fb8", + "metadata": {}, + "source": [ + "# Joblib" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "7c3dbea0-513a-4754-b24b-23bfde48d423", + "metadata": {}, + "outputs": [], + "source": [ + "#| export\n", + "def run_parallel_jobs(f: callable, loop_set, n_jobs: int = 1): \n", + " if n_jobs > 1: res = Parallel(n_jobs=n_jobs)(delayed(f)(x) for x in loop_set) \n", + " else: res = [f(x) for x in loop_set] \n", + " return res " + ] + }, + { + "cell_type": "markdown", + "id": "18d7f6ef-051c-42db-bce0-0450ac87880c", + "metadata": {}, + "source": [ + "# MemMap" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "141766e2-4e5f-45d8-bda8-f606d56e6774", + "metadata": {}, + "outputs": [], + "source": [ + "#| export\n", + "class MemoryMappedArray():\n", + " def __init__(self, obj, type=\"tensor\"):\n", + " self.obj = obj\n", + " self.type = type\n", + " assert type in [\"tensor\", \"numpy\"]\n", + " \n", + " with tempfile.NamedTemporaryFile(delete=False) as file: \n", + " # Note can bes simplified with python 3.12 as we can set delete=true, and delete_on_close=True, so it will be kept and we dont need to delete\n", + " # see https://docs.python.org/3.12/library/tempfile.html\n", + " \n", + " self.temporaryFileName = file.name\n", + " file.close()\n", + "\n", + " if self.type == \"numpy\":\n", + " self.obj_memmap = 
np.memmap(filename=self.temporaryFileName, dtype=obj.dtype, mode='w+', shape=obj.shape)\n", + " self.obj_memmap[:] = self.obj[:]\n", + " self.obj_memmap.flush()\n", + " \n", + " elif self.type == \"tensor\": \n", + " self.obj_memmap = MemoryMappedTensor.from_tensor(self.obj.cpu(), filename=self.temporaryFileName, existsok=True) \n", + " \n", + " else: \n", + " raise NotImplementedError()\n", + "\n", + " def get_obj(self):\n", + " if self.type == \"numpy\":\n", + " self.obj = self.obj_memmap.copy()\n", + " \n", + " elif self.type == \"tensor\":\n", + " self.obj = self.obj_memmap.contiguous().clone().to(self.obj.device)\n", + "\n", + " del self.obj_memmap\n", + " return self.obj, self.temporaryFileName\n", + "\n", + " @staticmethod\n", + " def clean(temp_files):\n", + " for temp_file in temp_files:\n", + " try: os.remove(temp_file)\n", + " except Exception as e: print(f\"[ERROR]: {e}\") " + ] + }, + { + "cell_type": "markdown", + "id": "f41f26a8-ac40-4e91-8c0e-1ef07a0fd4f4", + "metadata": {}, + "source": [ + "# Export -" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "a0474216-8e0c-4ba7-9a37-571ac7d8e82c", + "metadata": {}, + "outputs": [], + "source": [ + "#| hide\n", + "import nbdev; nbdev.nbdev_export()" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "python3", + "language": "python", + "name": "python3" + }, + "widgets": { + "application/vnd.jupyter.widget-state+json": { + "state": {}, + "version_major": 2, + "version_minor": 0 + } + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/src/utils/config_loader.ipynb b/src/utils/config_loader.ipynb new file mode 100644 index 0000000..1e911c3 --- /dev/null +++ b/src/utils/config_loader.ipynb @@ -0,0 +1,375 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "id": "a8980c24-d62e-462b-ba89-3195cfdcc374", + "metadata": {}, + "source": [ + "# Config loader\n", + "\n", + "> Functions to load and store models and datasets." 
+ ] + }, + { + "cell_type": "markdown", + "id": "1bb62f14-03c7-4d64-b1b9-f1d3ae309b01", + "metadata": {}, + "source": [ + "Code using `omegaconf` to handle IO." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "a8832bdd-f61c-44e1-8619-a9cb352ba768", + "metadata": {}, + "outputs": [], + "source": [ + "#| default_exp utils.config_loader" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "06272f6f-b4e3-4504-a90a-feebbf6ad821", + "metadata": {}, + "outputs": [], + "source": [ + "#| export\n", + "from genQC.imports import *\n", + "from omegaconf import OmegaConf\n", + "\n", + "from safetensors.torch import load_file as load_safetensors\n", + "from safetensors.torch import save_file as save_safetensors\n", + "from safetensors.numpy import load_file as load_safetensors_np\n", + "from safetensors.numpy import save_file as save_safetensors_np\n", + "from safetensors import safe_open" + ] + }, + { + "cell_type": "markdown", + "id": "9b6c0b5e-4779-4c4a-98e9-46a3dca8bee6", + "metadata": {}, + "source": [ + "## IO" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "cb173637-3d18-4f94-8b95-76cda4117b1e", + "metadata": {}, + "outputs": [], + "source": [ + "#| export\n", + "def class_to_str(cls):\n", + " return str(cls)[8:-2]" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "e0d5bc35-cc53-42fb-8fcd-8f2bc66c7c9b", + "metadata": {}, + "outputs": [], + "source": [ + "#| export\n", + "def load_config(file_path):\n", + " return OmegaConf.load(f\"{file_path}\")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "b105c04a-66d1-4450-8ee0-87aae618e60a", + "metadata": {}, + "outputs": [], + "source": [ + "#| export\n", + "def config_to_dict(config):\n", + " return OmegaConf.to_container(config)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "e3d81e5c-cf3d-4152-ab66-acd6e42ec3c9", + "metadata": {}, + "outputs": [], + "source": [ + "#| export\n", + "def 
save_dataclass_yaml(data_obj, file_path):\n", + " conf = OmegaConf.structured(data_obj)\n", + " with open(file_path, 'w') as f:\n", + " OmegaConf.save(config=conf, f=f)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "bf89bbfc-9d4e-442f-96ea-db1ab99505e9", + "metadata": {}, + "outputs": [], + "source": [ + "#| export\n", + "def save_dict_yaml(dict_obj, file_path):\n", + " conf = OmegaConf.create(dict_obj)\n", + " with open(file_path, 'w') as f:\n", + " OmegaConf.save(config=conf, f=f)" + ] + }, + { + "cell_type": "markdown", + "id": "ef21ca53-aa2c-4faa-877f-a9b39eeb8ff4", + "metadata": {}, + "source": [ + "Test" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "ed2b17fc-e4d9-4967-89cb-4a0bb28e39a2", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "{'target': 'omegaconf.omegaconf.OmegaConf', 'clr_dim': 80, 'features': [1, 2, 3]}" + ] + }, + "execution_count": null, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "@dataclass\n", + "class MyConfig: \n", + " target:str = class_to_str(OmegaConf)\n", + " clr_dim: int = 80\n", + " features: list[int]=None\n", + " \n", + "c = MyConfig()\n", + "c.features = [1,2,3]\n", + "\n", + "OmegaConf.structured(c)" + ] + }, + { + "cell_type": "markdown", + "id": "a3cbe4ce-7e90-413b-b55e-e07a9eeb6d8f", + "metadata": {}, + "source": [ + "## Object config load" + ] + }, + { + "cell_type": "markdown", + "id": "3398beb4-8b77-4a8b-9075-b3f6a9775bcd", + "metadata": {}, + "source": [ + "Adapted from: https://github.com/Stability-AI/generative-models" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "debf2c16-b2c4-4aa1-a52e-cdd8aafb4ba3", + "metadata": {}, + "outputs": [], + "source": [ + "#| export\n", + "def get_obj_from_str(string, reload=False, invalidate_cache=True):\n", + " module, cls = string.rsplit(\".\", 1)\n", + " if invalidate_cache:\n", + " importlib.invalidate_caches()\n", + " if reload:\n", + " module_imp = 
importlib.import_module(module)\n", + " importlib.reload(module_imp)\n", + " return getattr(importlib.import_module(module, package=None), cls)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "ffc7fa40-81aa-42ed-ac23-8562ffdc8e4f", + "metadata": {}, + "outputs": [], + "source": [ + "#| export\n", + "def instantiate_from_config(config):\n", + " if not \"target\" in config: raise KeyError(\"Expected key `target` to instantiate.\")\n", + " if not \"params\" in config: print(\"[WARNING] Expected key `params` to instantiate.\")\n", + " return get_obj_from_str(config[\"target\"])(**config.get(\"params\", dict()))" + ] + }, + { + "cell_type": "markdown", + "id": "d63da861-63cf-4f6e-8724-8bcfd2ffc9bc", + "metadata": {}, + "source": [ + "### Models" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "2dfa8063-d1d1-4ea6-b4ed-055e905a669f", + "metadata": {}, + "outputs": [], + "source": [ + "#| export\n", + "def store_model_state_dict(state_dict, save_path):\n", + " print(f\"[INFO]: Saving model to `{save_path}`.\")\n", + "\n", + " if save_path.endswith(\"ckpt\") or save_path.endswith(\"pt\"):\n", + " torch.save(state_dict, save_path)\n", + "\n", + " elif save_path.endswith(\"safetensors\"): \n", + " save_safetensors(state_dict, save_path)\n", + " \n", + " else:\n", + " raise NotImplementedError(f\"unknown filetype: {save_path}\")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "06c24366-a179-492e-adc2-5d696f7485fa", + "metadata": {}, + "outputs": [], + "source": [ + "#| export\n", + "def load_model_state_dict(save_path, device):\n", + " print(f\"[INFO]: Loading model from `{save_path}` onto device: {device}.\")\n", + "\n", + " if save_path.endswith(\"ckpt\") or save_path.endswith(\"pt\"):\n", + " state_dict = torch.load(save_path, map_location=torch.device(device).type, weights_only=True)\n", + "\n", + " elif save_path.endswith(\"safetensors\"): \n", + " state_dict = load_safetensors(save_path, 
device=torch.device(device).type)\n", + " \n", + " else:\n", + " raise NotImplementedError(f\"unknown filetype: {save_path}\")\n", + "\n", + " return state_dict" + ] + }, + { + "cell_type": "markdown", + "id": "c199cdb0-8a79-46ff-ab49-d4b7071c0450", + "metadata": {}, + "source": [ + "### Tensors and numpy" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "3cc82c8d-4e82-4d87-9123-28fd9e144cb6", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "2" + ] + }, + "execution_count": null, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "torch.serialization.DEFAULT_PROTOCOL" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "ec27728c-c8a9-4adf-bdbb-cf9dd9fc4abc", + "metadata": {}, + "outputs": [], + "source": [ + "#| export\n", + "def store_tensor(tensor, save_path, type=\"tensor\"):\n", + " print(f\"[INFO]: Saving tensor to `{save_path}`.\")\n", + "\n", + " if type==\"numpy\" and save_path.endswith(\"safetensors\"):\n", + " save_path = save_path.replace(\".safetensors\", \".pt\")\n", + " \n", + " if save_path.endswith(\"ckpt\") or save_path.endswith(\"pt\") or type==\"numpy\":\n", + " # serializing a string larger than 4 GiB requires pickle protocol 4 or higher; Protocol version 5 was added in Python 3.8.\n", + " torch.save(tensor, save_path, pickle_protocol=5) \n", + "\n", + " elif save_path.endswith(\"safetensors\") and type==\"tensor\": \n", + " save_safetensors(tensor, save_path)\n", + " \n", + " else:\n", + " raise NotImplementedError(f\"unknown filetype: {save_path} or unknown type {type}\")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "cc632206-1779-462d-968d-1debdbe83c91", + "metadata": {}, + "outputs": [], + "source": [ + "#| export\n", + "def load_tensor(save_path, device, type=\"tensor\"):\n", + " print(f\"[INFO]: Loading tensor from `{save_path}` onto device: {device}.\")\n", + "\n", + " if type==\"numpy\" and 
save_path.endswith(\"safetensors\"):\n", + " save_path = save_path.replace(\".safetensors\", \".pt\")\n", + " \n", + " if save_path.endswith(\"ckpt\") or save_path.endswith(\"pt\") or type==\"numpy\":\n", + " tensor = torch.load(save_path, map_location=torch.device(device).type, weights_only=False)\n", + "\n", + " elif save_path.endswith(\"safetensors\") and type==\"tensor\": \n", + " tensor = load_safetensors(save_path, device=torch.device(device).type)\n", + "\n", + " else:\n", + " raise NotImplementedError(f\"unknown filetype: {save_path} or unknown type {type}\")\n", + "\n", + " return tensor" + ] + }, + { + "cell_type": "markdown", + "id": "f41f26a8-ac40-4e91-8c0e-1ef07a0fd4f4", + "metadata": {}, + "source": [ + "# Export -" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "a0474216-8e0c-4ba7-9a37-571ac7d8e82c", + "metadata": {}, + "outputs": [], + "source": [ + "#| hide\n", + "import nbdev; nbdev.nbdev_export()" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "python3", + "language": "python", + "name": "python3" + }, + "widgets": { + "application/vnd.jupyter.widget-state+json": { + "state": {}, + "version_major": 2, + "version_minor": 0 + } + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/src/utils/math.ipynb b/src/utils/math.ipynb new file mode 100644 index 0000000..b90640f --- /dev/null +++ b/src/utils/math.ipynb @@ -0,0 +1,134 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "id": "42ea1626-1c62-43c5-b4c1-0267268179d2", + "metadata": {}, + "source": [ + "# Math and algorithms\n", + "\n", + "> Miscellaneous math and algorithm code" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "fe7b3562-52ab-457f-80be-a9fb1801d31a", + "metadata": {}, + "outputs": [], + "source": [ + "#| default_exp utils.math" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "983ccb27-8049-48ad-97e1-c1763fbb71e6", + "metadata": {}, + "outputs": [], + "source": [ + "#| export\n", + "from 
genQC.imports import *" + ] + }, + { + "cell_type": "markdown", + "id": "5ea057fd-ba30-4fda-9d19-a1c44d0d5d15", + "metadata": {}, + "source": [ + "## Matrix functions" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "fbf98023-d46f-46da-8342-c73e8b08d592", + "metadata": {}, + "outputs": [], + "source": [ + "#| export\n", + "def matrix_power(x: torch.Tensor, p: float) -> torch.Tensor:\n", + " \"\"\"\n", + " Power of a matrix using Eigenspace Decomposition. Assuming decomposition of `x` exists.\n", + " \"\"\"\n", + " \n", + " vals, vecs = torch.linalg.eig(x)\n", + " vals_pow = torch.pow(vals, p)\n", + " matrix_pow = torch.matmul(vecs, torch.matmul(torch.diag(vals_pow), torch.inverse(vecs)))\n", + " \n", + " return matrix_pow" + ] + }, + { + "cell_type": "markdown", + "id": "f77969d8-4d00-472e-bd2f-f75751da1f42", + "metadata": {}, + "source": [ + "## Algorithms" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "35135abe-a5d2-476d-92fc-e9e1bbe300de", + "metadata": {}, + "outputs": [], + "source": [ + "#| export\n", + "def gram_schmidt(X: torch.Tensor):\n", + " \"\"\"\n", + " Perform Gram–Schmidt orthonormalization on the vectors given by the rows of matrix X.\n", + " \"\"\"\n", + " assert X.dim() == 2, \"Only 2-dim tensor supported.\"\n", + " \n", + " X_type = X.dtype\n", + " X = X.to(torch.float64) \n", + " Q = []\n", + " for q in X:\n", + " # Take the current row vector \n", + " # Subtract projec+tions onto existing basis vectors\n", + " for v in Q:\n", + " q = q - torch.dot(q, v) * v\n", + " # Normalize the vector\n", + " q = q / torch.norm(q)\n", + " Q.append(q)\n", + " return torch.stack(Q).to(X_type)" + ] + }, + { + "cell_type": "markdown", + "id": "798f2242-a888-470f-831e-977c78959524", + "metadata": {}, + "source": [ + "# Export -" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "d2a0e934-0bf3-48dd-b706-ab1d3a87b0a6", + "metadata": {}, + "outputs": [], + "source": [ + "#| hide\n", + "import 
nbdev; nbdev.nbdev_export()" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "python3", + "language": "python", + "name": "python3" + }, + "widgets": { + "application/vnd.jupyter.widget-state+json": { + "state": {}, + "version_major": 2, + "version_minor": 0 + } + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/src/utils/misc_utils.ipynb b/src/utils/misc_utils.ipynb new file mode 100644 index 0000000..613627e --- /dev/null +++ b/src/utils/misc_utils.ipynb @@ -0,0 +1,552 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "id": "ca8283df-e353-4e58-a726-8e75ae1007e9", + "metadata": {}, + "source": [ + "# Miscellaneous util\n", + "\n", + "> Miscellaneous util code" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "1ad65227-3d39-4f9c-832a-2d4555aeb399", + "metadata": {}, + "outputs": [], + "source": [ + "#| default_exp utils.misc_utils" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "5b3271aa-6684-40ab-8c73-b8f9210ab423", + "metadata": {}, + "outputs": [], + "source": [ + "#| export\n", + "from genQC.imports import *\n", + "import gc, traceback, inspect" + ] + }, + { + "cell_type": "markdown", + "id": "97eb37f5-8fb3-4299-b790-c7b478ed78b1", + "metadata": {}, + "source": [ + "## Memory utils" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "a5f68941-9951-42de-a3d6-7c0399281ac3", + "metadata": {}, + "outputs": [], + "source": [ + "#|export\n", + "class MemoryCleaner():\n", + " \"\"\"CLass with static methods to clean (gpu) memory.\"\"\"\n", + " \n", + " @staticmethod\n", + " def _clean_ipython_hist():\n", + " # Code in this function mainly copied from IPython source\n", + " if not 'get_ipython' in globals(): return\n", + " ip = get_ipython()\n", + " user_ns = ip.user_ns\n", + " ip.displayhook.flush()\n", + " pc = ip.displayhook.prompt_count + 1\n", + " for n in range(1, pc): user_ns.pop('_i'+repr(n),None)\n", + " user_ns.update(dict(_i='',_ii='',_iii=''))\n", + " hm = 
ip.history_manager\n", + " hm.input_hist_parsed[:] = [''] * pc\n", + " hm.input_hist_raw[:] = [''] * pc\n", + " hm._i = hm._ii = hm._iii = hm._i00 = ''\n", + "\n", + " @staticmethod\n", + " def _clean_tb():\n", + " if hasattr(sys, 'last_traceback'):\n", + " traceback.clear_frames(sys.last_traceback)\n", + " delattr(sys, 'last_traceback')\n", + " if hasattr(sys, 'last_type'): delattr(sys, 'last_type')\n", + " if hasattr(sys, 'last_value'): delattr(sys, 'last_value')\n", + " \n", + " @staticmethod\n", + " def purge_mem():\n", + " \"\"\"Clear all. Purge all memory.\"\"\"\n", + " MemoryCleaner._clean_tb()\n", + " MemoryCleaner._clean_ipython_hist()\n", + " gc.collect()\n", + " torch.cuda.empty_cache() \n", + " \n", + " @staticmethod\n", + " def free_memory(to_delete: list):\n", + " \"\"\"Remove objs of `to_delete` from namespace\"\"\"\n", + " calling_namespace = inspect.currentframe().f_back\n", + " for _var in to_delete:\n", + " del _var\n", + " calling_namespace.f_locals.pop(_var, None)\n", + " gc.collect()\n", + " torch.cuda.empty_cache()" + ] + }, + { + "cell_type": "markdown", + "id": "f910a356-b685-44fb-98ea-4c03ab6969fb", + "metadata": {}, + "source": [ + "## Python utils" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "ca1fa9d4-aea9-4c72-a72b-bc187db868d6", + "metadata": {}, + "outputs": [], + "source": [ + "#| export\n", + "def virtual(f: callable) -> callable:\n", + " '''Decorator to enfore subclass method implementations and raises error at method calls.'''\n", + " @functools.wraps(f)\n", + " def inner(self, *args, **kwargs): raise NotImplementedError(f\"Virtual method {f.__name__} needs to be implemented by subclass {self.__class__.__name__}.\") \n", + " return inner" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "59dc4c68-4a11-4b0e-9486-3faf2d1acbbc", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "A p1 1\n", + "Exception that would be raised: Virtual method 
p2 needs to be implemented by subclass B.\n" + ] + } + ], + "source": [ + "class A():\n", + " def p1(self, x): print(\"A p1\", x)\n", + " \n", + " @virtual\n", + " def p2(self, x): pass\n", + " \n", + "class B(A):\n", + " def p3(self, x): print(\"B p2\", x)\n", + " \n", + "b = B()\n", + "b.p1(1)\n", + "try:\n", + " b.p2(1)\n", + "except BaseException as e:\n", + " print(\"Exception that would be raised: \", e)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "92760394-9f81-4dc4-89e8-ba9e365fbcd0", + "metadata": {}, + "outputs": [], + "source": [ + "#| export\n", + "def cache_data(file_name, force_recompute):\n", + " \"\"\"\n", + " A decorator that memorizes the result of a function and stores it.\n", + " Note, if the function or its arguments change we ignore it, we only check if the file exists!\n", + "\n", + " Parameters:\n", + " - file_name (str): The name of the file to store the memoized results.\n", + " - force_recompute (bool): If True, existing cache is ignored.\n", + " \"\"\"\n", + " \n", + " #-------------------\n", + " def load(): \n", + " if os.path.exists(file_name) and not force_recompute: \n", + " return torch.load(file_name)\n", + " return None\n", + " \n", + " #-------------------\n", + " def save(cache): \n", + " if exists(cache): \n", + " os.makedirs(file_name[:file_name.rfind(\"/\")] + \"/\", exist_ok=True)\n", + " torch.save(cache, file_name)\n", + " \n", + " #-------------------\n", + " def decorator(func: callable) -> callable: \n", + " @functools.wraps(func)\n", + " def inner(*args, **kwargs):\n", + " \n", + " cache = load()\n", + " \n", + " if not exists(cache): # run function normally\n", + " print(f\"Computing: {func.__name__}\")\n", + " cache = func(*args, **kwargs)\n", + " \n", + " save(cache)\n", + " print(f\"Result saved\")\n", + " \n", + " else: # loaded already from cache\n", + " print(f\"Result retrieved from cache: {func.__name__}\")\n", + " \n", + " return cache\n", + " return inner\n", + " return 
decorator" + ] + }, + { + "cell_type": "markdown", + "id": "2086a56a-b142-41c2-8b67-716d6afbc574", + "metadata": {}, + "source": [ + "## Torch utils" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "1f4e15ca-cbab-4ee0-bf9a-fabe0a687c9b", + "metadata": {}, + "outputs": [], + "source": [ + "#|export\n", + "class DataLoaders:\n", + " \"\"\"Combines train and valid `DataLoader` objects.\"\"\"\n", + " def __init__(self, *dls: list[DataLoader]): self.train, self.valid = dls[:2]" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "61e428d1-3482-412c-8286-011a979a7fae", + "metadata": {}, + "outputs": [], + "source": [ + "#| export\n", + "def infer_torch_device(): \n", + " if torch.cuda.is_available(): \n", + " torch.backends.cudnn.benchmark = True\n", + "\n", + " dev_cap = torch.cuda.get_device_capability()\n", + " \n", + " if dev_cap[0] >= 8: # AMPERE and up\n", + " print(f\"[INFO]: Cuda device has a capability of {dev_cap[0]}.{dev_cap[1]} (>= 8), allowing tf32 matmul.\")\n", + " torch.backends.cuda.matmul.allow_tf32 = True\n", + " torch.backends.cudnn.allow_tf32 = True\n", + " \n", + " return torch.device(\"cuda\")\n", + " return torch.device(\"cpu\")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "84ba05ff-9d30-4c43-9b15-187c219b385d", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "[INFO]: Cuda device has a capability of 8.6 (>= 8), allowing tf32 matmul.\n" + ] + }, + { + "data": { + "text/plain": [ + "device(type='cuda')" + ] + }, + "execution_count": null, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "infer_torch_device()" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "e05ec728-70a3-402f-80cc-c58a18394710", + "metadata": {}, + "outputs": [], + "source": [ + "#| export\n", + "def number_of_paramters(model: nn.Module): return sum([p.flatten().shape[0] for p in model.parameters()])" + ] + }, + { + 
#| export
def normalize_tensor(t: torch.Tensor):
    """Map values from [0,1] to [-1,1]."""
    return t * 2.0 - 1.0

def scale_tensor(t: torch.Tensor):
    """Map values from [-1,1] to [0,1], clamping out-of-range input."""
    return (t / 2.0 + 0.5).clamp(0.0, 1.0)

#| export
def savePdf(filename): plt.savefig(filename + '.pdf', bbox_inches='tight')
def savePng(filename): plt.savefig(filename + '.png', bbox_inches='tight')
def saveSvg(filename): plt.savefig(filename + '.svg', bbox_inches='tight')

#| export
def plot_image_grid(imgs: Union[list, np.ndarray, torch.Tensor], labels: list=None, labels_fs="medium",
                    figsize=(16, 4), cols=8, cmap="Greys", show_colorbar=False, **imshow_kwargs):
    """Show `imgs` in a grid with at most `cols` images per row.

    Args:
        imgs: list of images, or an array/tensor whose first axis indexes images.
        labels: optional per-image titles (must have at least `len(imgs)` entries).
        labels_fs: fontsize passed to `plt.title`.
        figsize, cols, cmap, show_colorbar, **imshow_kwargs: forwarded to matplotlib.
    """
    # isinstance (vs the original `type(...) is`) also accepts subclasses,
    # e.g. np.ndarray subclasses; behavior is otherwise unchanged.
    if isinstance(imgs, list):
        n = len(imgs)
    elif isinstance(imgs, (np.ndarray, torch.Tensor)):
        n = imgs.shape[0]
    else:
        raise NotImplementedError("err type:", type(imgs))

    if n == 0: return  # nothing to draw

    cols = min(n, cols)
    rows = math.ceil(n / cols)

    fig, axs = plt.subplots(rows, cols, figsize=figsize, squeeze=False, constrained_layout=True)
    for i, (r, c) in enumerate(itertools.product(range(rows), range(cols))):
        plt.sca(axs[r, c])
        plt.axis('off')

        if i >= n: continue  # trailing empty cells in the last row

        if labels is not None:
            plt.title(labels[i], fontsize=labels_fs)
        p = plt.imshow(imgs[i], cmap=cmap, **imshow_kwargs)  # cmap ignored for RGB
        if show_colorbar:
            plt.colorbar(p)

    plt.show()
MkCU6ZMCQQCgUCnTp0CXbt2/ZfHWb16dSBBggSBe/fuBQKBQCBnzpyBb7/99onPmyhRosDEiRP/5X8bOnRoIGPGjP+dXw9APHhajxP/b/+dFsD/jP8/HCsCgUDgtddeC+TOnfuf/x7A/66n+VgRHR0dSJEiRSA0NDSQJEmSwOjRo/+N3xDAn/W0HidWr14dyJo1a+Dy5cuBQCAQePnllwONGzf+N35DAPHhaT1WrFu3LvDLL78Etm/fHlixYkWgQYMGgdSpUwdOnz79b/6meFrxlyVBYOvWrdawYUPLkSOHpUqVyqpVq2ZmZqdOnfqXrmLFiv/8f4eHh1uBAgVs//79Zma2c+dOGzt2rKVMmfKf/1e3bl2LjY2148eP/+/9MgD+R3CcAODxNB4rBgwYYJMnT7aZM2fy/8ki8JR4mo4VqVKlsh07dtjmzZvtq6++snfeecdWrFjx539JAH/K03CcuHXrlr344os2cuRIS58+fTz+dgDiy9NwrPiPx3/ppZesZMmSVq1aNfvtt98sQ4YMNnz48Hj6TfG0CP2rB8D/rDt37ljdunWtbt269uuvv1qGDBns1KlTVrduXXv48KH7cW7fvm3dunWzt9566z/9LEeOHK7HiIiIsIsXL/7L/3bx4kX+P3EG/mJP03ECwNPraTxWDBo0yAYMGGBLly614sWL/7f+LYD/GU/bsSJBggSWN29eMzMrWbKk7d+/3/r372/Vq1d3PwaA+PW0HCeOHj1qJ06csIYNG/7zf4uNjTUzs9DQUDt48KDlyZPHPQ+A+PW0HCv+K4kSJbKoqCg7cuTIv/Xv8fRis+T/uAMHDtjVq1dtwIABlj17djMz27Jly3/Zbtiw4Z8HievXr9uhQ4esUKFCZmZWqlQp27dv3z8vNP4dFStWtGXLllnPnj3/+b8tWbLkX3Z/Afzve5qOEwCeXk/bseLvf/+7ffXVV7Zo0SIrU6bMn3osAPHnaTtW/H/FxsbagwcP4vUxAfz3PC3HiYIFC9ru3bv/5X/7+OOP7datW/bdd9/9czYAf42n5VjxX3n8+LHt3r3b6tevH2+PiacDmyX/x+XIkcMSJ05sP/zwg7366qu2Z88e++KLL/7L9vPPP7d06dJZpkyZ7KOPPrL06dNbkyZNzMysT58+VqFCBXvjjTesc+fOliJFCtu3b58tWbLEfvzxR9csPXr0sGrVqtk333xjzz//vE2ePNm2bNliI0aMiK9fF8C/4Wk6Tjx8+ND27dv3z//32bNnbceOHZYyZUo2YYC/2NN0rPjb3/5m/fr1s4kTJ1pkZKRduHDBzOyff1YP4K/zNB0r+vfvb2XKlLE8efLYgwcPbP78+TZ+/Hj76aef4uvXBfBveFqOE0mTJrWiRYv+y/+WNm1aM7P/9L8D+N/3tBwr/uPxK1SoYHnz5rXo6GgbOHCgnTx50jp37hxfvy6eEvz/WfJ/XIYMGWzs2LE2bdo0K1y4sA0YMMAGDRr0X7YDBgywHj16WOnSpe3ChQs2Z84cS5w4sZmZFS9e3FauXGmHDh2yKlWqWFRUlPXr18+yZMninqVSpUo2ceJEGzFihJUoUcKmT59us2bNYhEC/MWepuPEuXPnLCoqyqKiouz8+fM2aNAgi4qKYgECPAWepmPFTz/9ZA8fPrQWLVpY5syZ//l/T5oHwP+ep+lYcefOHevevbsVKVLEKleubDNmzLAJEyawrgD+Yk/TcQLA0+tpOlZcv37dunTpYoUKFbL69evbzZs3bd26dVa4cOF4+V3x9AgJBAKBv3oIAAAAAAAAAACAvwp/WQIAAAAAAAAAAIIamyUAAAAAAAAAACCosVkCAAAAAAAAAACCGpslAAAAAAAAAAAgqLFZAgAAAAAAAAAAghqbJQAAAAAAAAAAIKixWQIAAAAAAAAAAIJaqDccN26cbHLnzi2bV155xfV8DRo0kE369Oll8+jRI9ls2bLFNVPlypVlc/fuXdkkTJhQNp06dXLNNHHiR
NmsXr1aNr/++qtstm7d6prJ8/vNmzdPNlevXpVNqVKlXDNlzJhRNo0bN5ZNhQoVZPPBBx+4ZqpTp45sli5dKpuBAwfKJiIiwjXT+++/Lxv1Gng+J9HR0bIpUKCAbMx8n900adLIJlWqVLJp3769a6aDBw/KZurUqbKpXbu2bBYtWuSaqWLFirLZu3evbG7evCmbDBkyuGYqVqyYbBInTiwbz+udI0cO10xjx46VTYcOHWTzyy+/yMZzzDEzu3Llimw857py5crJ5sSJE56RbPny5bL57rvvnvizlStXyn9/5MgR2Vy4cEE2ZmZt27aVzYEDB2SzatUq1/N55MmTJ16aN954QzZ9+vRxzTRmzBjZ1KxZUzbPPfecbHbu3Omaadq0abJp1aqVbBYvXiybjh07umbynFMyZcokm+nTp8vG87uZmf3888+yKViwoGyOHj0qG88xycy35i9ZsmScP581a5Z8DM/1gOd3NzPbvXu3bI4dOyYbz7nLs+4w861hPcemyMhI2XjX1fPnz5dN6tSpZeM57lavXt0zkk2YMCFenu/HH3+UzaRJk1wzXbx4UTafffaZbL788kvZeL4HZr71guczd/z4cdncuXPHNVOKFClko9ZnLVu2lI/RpEkT2Sxbtkw2ZmZdu3aVjef9v3//vmzKli3rmmnYsGGyyZIli2w6d+4sG+99Ac86bfv27bLxfL6910RdunSRTdq0aWWzY8cO2Vy/ft0xkW/d5HkNNmzYIBvP/Sozs8ePH8smLCxMNrdu3ZKNZ24zs7feeks25cuXf+LPWrRoIf990aJFZeM93nquPz3X3zlz5pTN4cOHXTN57tnu27dPNqVLl5aN5/Phfb7mzZvLxrMO8Bxzzcw++eQT2Xz99deyadasmWw819VmZlWqVJGN5xz/8ssvy8ZzP97MLFmyZLLx3Iv0rBc8awUzs8uXL8vGc9+TvywBAAAAAAAAAABBjc0SAAAAAAAAAAAQ1NgsAQAAAAAAAAAAQY3NEgAAAAAAAAAAENTYLAEAAAAAAAAAAEGNzRIAAAAAAAAAABDU2CwBAAAAAAAAAABBLSQQCAQ8YefOnWVTpEgR2ezbt8/zdBYWFiab7t27y2bUqFGyefbZZ10zbdmyRTZVqlSRzaNHj2SzZ88e10w3b96UTb169WSzdu1a2WzcuNE1U44cOWQTGRkpG89H8/Lly56RLDQ0VDatWrWSzcmTJ2Vz7tw510zVq1eXjedz4HmdhgwZ4pjIrFatWrLp2bPnn36ucuXKyeb48eOyMfN956ZOnSqbJEmSyCY8PNw10+7du2Vz584d2fTo0UM206dPd830yiuvyKZXr16yadasmWxSpEjhGcm2bdsmmwQJ9J5+5syZZZMlSxbXTJ7vk+cYnipVKtlER0d7RrLs2bPLZt26dbI5ceKEbDzfJzOzpUuXyiauY0Hr1q3lv4+KipJNwoQJZWNmVrFiRdl4zqexsbGyWbJkiWumTz/9VDY///yzbC5evCibkiVLOiYy27p1q2xq164tm5QpU8rG83k0M0uXLp1sduzYIZuQkBDZeNYmZr7XPHHixLIZPnx4vDyOmVnfvn1lc+bMGdl4zs/37993zXTlyhXZNGrUKM6fz5gxQz7Grl27ZFOgQAHZmPnWC23atJFNtmzZZJM0aVLXTKNHj5aN5/ztef8XLFjgmqls2bKyOXLkiGySJ08um8WLF7tmKlOmjGw81zKeNUVERIRrppYtW8rm448/lk2fPn1ks2LFCs9ItnPnTtlUrVpVNp7rHc96ycx37fjWW2/F+fNFixbJx1i1apVsunbtKhszs4cPH8rm+++/l02nTp1kkzp1atdMGzZskI3n+5QpUybZeD7bZr7v+IULF2Tz5ptvyqZOnTqumTz3Rjyvwe3bt2Xj+Vya+c69Dx48iJfGe+9r8+bNsvEcdxIlSiSbZMmSeUaymJgY2Xz55ZdP/Jln7e1Zn3qu483MihUrJptnnnlGNuPHj
5dNkyZNPCO57md5rps9j+O91vWcT9R60cxs2bJlsvFeE3nubd+7d082nrWw95oob968spkzZ45sPMdTz/0sM99n/Ndff5WN5zq8adOmrpmOHTsmG89eAn9ZAgAAAAAAAAAAghqbJQAAAAAAAAAAIKixWQIAAAAAAAAAAIIamyUAAAAAAAAAACCosVkCAAAAAAAAAACCGpslAAAAAAAAAAAgqLFZAgAAAAAAAAAAghqbJQAAAAAAAAAAIKiFBAKBgCc8dOiQbIYNGyab27dve57OXnjhBdkkS5ZMNlu2bJFNiRIlXDN5HD9+XDb79u2TTYYMGVzP9+yzz8pm4MCBsqlTp45sHj9+7JrpzJkzssmRI4dsJk+eLJurV6+6Zho7dqxsfv/9d9lkzJhRNtevX/eMZEePHpWN53UqX768bDzfAzPfe/zBBx/E+fNdu3bJxxgzZoxsihQpIhszs5iYGNns3LlTNp7vwK1bt1wzZcuWTTae1+nBgweymTBhgmumSZMmyaZ27dqymTVrlmy834GHDx/KpmDBgrL5+9//LpvChQu7ZvK8BvPnz5fNhg0bZPPOO++4ZvKcDzzfcc/p3vN6m5ndv39fNtWqVXviz/r37y///dy5c2VTq1Yt2Xh5fvfZs2fLpmTJkq7nK1SokGyuXLkiG8950HO8MfOd4zzP98wzz8jG8/6amZ08eVI2Q4YMkU2aNGlkM2LECM9IVrNmTdl41s2emXLmzOmaKSwsTDYLFiyQTbp06WTjOTeZmd25c0c2X375ZZw/r1GjhnyMxo0by2b06NGyMfNdf1SpUkU2ixcvlk1ERIRrJs/v51nre47bHTt2dM3kWQ/t2bNHNqdOnZLN4cOHXTNFRUXJJjQ0VDae60vvejBJkiSy8cw9btw42bz66quume7evSub5cuXy+bFF1+UjWfNaGZ26dIl2Xz//fdx/vzGjRvyMTznioMHD8rGzHcNvm7dOtl07dpVNmvWrHHN1LlzZ9l4znOez8jEiRNdMw0fPlw2ixYtko3n2mrVqlWumUqVKiUbz3f33LlzssmfP79rJs85fN68ebKJjIyUjec6xsx3DH/vvfdk47m/kChRItdMnveld+/eT/xZixYt/tS//w9Lly6VjZlZeHi4bDzHnHLlysnGe617+fJl2XiOAY8ePZKN91zpWQ957qGePn1aNvXr13fNdODAAdn88ssvshk6dKhsPGtGM993yfMdad26tWw++eQT10wNGjSQjef+8KZNm2Tjfe/atGkjG8/rxF+WAAAAAAAAAACAoMZmCQAAAAAAAAAACGpslgAAAAAAAAAAgKDGZgkAAAAAAAAAAAhqbJYAAAAAAAAAAICgxmYJAAAAAAAAAAAIamyWAAAAAAAAAACAoMZmCQAAAAAAAAAACGpslgAAAAAAAAAAgKAW6g0fPHggm3r16slm3bp1rudbtWqVbIYNGyab1q1by+bkyZOumd58803ZTJ8+XTavvvqqbJYvX+6aydPVqVNHNj/99JNshg8f7prp9OnTsilcuLBsXnvtNdkkSpTINdO2bdtkc/jwYdns3r1bNp7318ysS5cusunWrZtscubMKRvPd9PM9zop169fl03jxo1lc+rUqT89y3/Inj27bK5duyab/Pnzu55v/Pjxsnn55ZdlkzhxYtmkSJHCNdMPP/wgm507d8rm6NGjspk2bZprpipVqshmwoQJssmUKZNsxo0b55rp0qVLsgkPD5eN57jbpEkTz0jWrl072Xhey7Rp08pmxowZnpGsZMmSru5J8ubNK5umTZvKJmnSpK7n83y/c+TIIZsSJUrIxnOeMDOrXbu2bCIiImSzdetW2fTp08c1U4cOHWTjOcaXK1dONp7jsplZpUqVZDNp0iTZ5MmTRzalS5d2zeQ5nhYrVkw269evl02SJElcM7Vs2VI2N27ckI3nfbl48aJrpowZM7q6uHjWOJs3b5ZNZGSk6/n27t0rm3Pnzsnm0KFDsunVq
5drpp9//lk25cuXl82dO3dk4/n9zXxrtMyZM8smQ4YMskmVKpVrpk6dOsnmb3/7m2xSp04tm7CwMNdM+fLlk82YMWNk4zk/edYmZmZr166Vjec12LRpk2w8x10zs86dO7u6uHjOTZ61h+f1MTPbv3+/bDzXX57H8RxPzHzrgZCQENlUqFBBNunSpXPNtGXLFtmEhupbUgkS6P/G13NONfNdN3juISVMmFA27733nmumUaNGySZNmjSy8XwGcufO7Zpp4sSJshk9erRsPOt5r969e/+pplatWvLfe74jnjWemVlMTIxsPOdKzzonOjraM5I988wzslm6dKlsPN9bz+fRzPd989xjfOWVV2SzZMkSz0jWvn172XjOF6tXr5aN9/zdqFEj2YwcOVI23377rWw89yHNzLZv3y4bzz00z+fyyJEjrpmGDh0qm3feeUc2/GUJAAAAAAAAAAAIamyWAAAAAAAAAACAoMZmCQAAAAAAAAAACGpslgAAAAAAAAAAgKDGZgkAAAAAAAAAAAhqbJYAAAAAAAAAAICgxmYJAAAAAAAAAAAIamyWAAAAAAAAAACAoBbqDTdu3CibJEmSyCYmJsb1fGXLlpXN5MmTZTNq1CjZNGrUyDXT0KFDZdO8eXPZTJgwQTYhISGumVavXi2bpk2byiZ//vyyGTZsmGumxYsXyyY6Olo2efLkkU22bNk8I9mFCxdk061bN9lcunRJNp999plrpvr168vG89lMly6dbDyfXTPfZ0XZunWrbB49eiSb7du3u57P83pfuXJFNvPmzZPN4MGDXTMtWLBANkuXLpXNrl27ZOM9fuXMmVM2y5Ytk43nON++fXvXTEOGDJFNtWrVZON5LadOneoZyW7cuCGbGTNmyObw4cOyef31110zRUREyObx48eyuXz5smyuXr3qmun06dOu7kk8r/OtW7dk4/lcm5ldvHhRNjly5JBN+fLlZVOlShXXTJ988olsKleuLBvPeXDbtm2umd555x3ZeF4nz3G5YsWKrplKlSolm4EDB8pmw4YNsjl+/LhrpgoVKsjGcx7wrKvCw8NdM40YMUI2uXLlkk3RokVlM2nSJNdMDRs2dHVxKVmypGzmz58vm+7du7uez3N896yVRo4cKZsdO3Z4RrLr16/LxvOetG3bVjb79+93zdSyZUvZvPTSS/Ey06FDh1wzec6pjRs3lo1nrR8IBFwzedZM7777rmx2794tG895zswsb968svG85rlz55aN5/tkZnbmzBnZ9O7dO86fjxs3Tj5Gvnz5ZON5z8x8xwHP2uy7776Tjec9MzM7deqUbDxrqwcPHsjmjTfecM20YsUK2Xjue3jWjf369fOMZFFRUbL56aefZHPu3DnZeD5zZmaJEiWSzdq1a2WTIIH+b6GfffZZ10yzZ8+Wjef97dChg2wGDBjgGcl69Ojh6p7k5s2bsvEcb2fOnOl6Ps/9LM99qho1ashm7969rpmKFy8um6pVq8rGc80YGRnpGckyZswom9q1a8tmzpw5svHej/Z8tj3XO2PGjJGNZ61rZrZnzx7ZlChRQjYFCxaUzW+//eaayfN98XwObt++LZukSZN6RrIsWbK4OoW/LAEAAAAAAAAAAEGNzRIAAAAAAAAAABDU2CwBAAAAAAAAAABBjc0SAAAAAAAAAAAQ1NgsAQAAAAAAAAAAQY3NEgAAAAAAAAAAENTYLAEAAAAAAAAAAEGNzRIAAAAAAAAAABDUQt1hqE5jY2Nlc//+fdfz7du3TzbXr1+XTcOGDWWTIIFvzyhXrlyyKVKkiGyuXbsmG8/rbWaWMmVK2Vy5ckU27dq1k82hQ4dcMxUrVkw2JUuWlM22bdtkEx4e7hnJ9XrOnDlTNp07d5bNo0ePXDN5vi+LFi2STZ8+fWRTvHhx10z58uVzdXFp2bKlbI4fPy6bEiVKuJ7v008/lc0bb7whm6ioKNnMmzfPM5Lt2rVLNpcvX5ZNq1atZHPgwAHXTJ7v0
yeffCKbwYMHy8Zz3DUzS5IkiWyyZcsmmzJlyshm69atrplKly4tm3Tp0snmww8/lM27777rmikiIkI2v/76q2wqVaokm+bNm7tmOnbsmKt7kuTJk8vmhx9+kI3neGNmljhxYtl4Po9du3aVTf/+/V0z9e3bVzaeY+WwYcNk06BBA9dM69atk021atVkU7t27Xh5LjOzs2fPysbz+V++fLnr+TxSpEghm4oVK8rm0qVLsvGuUT3fqXr16sXL87Vo0cI104kTJ1xdXLZs2SKb6dOny+bFF190PZ9nfeo573qO257rGDOzVKlSxcvzjR07Vjbly5f3jGQHDx6UzWeffSabn376STa9evVyzeQxYMAA2XjOK48fP3Y934oVK2TjuXZctmyZbDznJzPfsTdr1qyyyZ8/v2w8a10zs/bt27u6uCRMmFA2R44ckc3Nmzddz7dw4ULZZMqUSTae9Unq1KldM1WpUkU2adOmlY3ndRo5cqRnJLt69apsPMeTTZs2ycZ7/eG53s2bN6/rsRTP99vMLBAIyMZzL2rGjBmy8VyjmJmFhITI5o8//pDNa6+9JhvPtbqZ/5jyJJ77VJ5rT89a2Mxs+/btsvGcB2/fvi2byMhIz0i2Y8cO2TzzzDOy8awpPI9j5ruujImJkY3n/prnPrOZ7/MfHR0tm8qVK8vGc61jZrZ48WLZeK75RowYIRvv56l69eqyefjwoWzCwsJkM3z4cM9I7s+dwl+WAAAAAAAAAACAoMZmCQAAAAAAAAAACGpslgAAAAAAAAAAgKDGZgkAAAAAAAAAAAhqbJYAAAAAAAAAAICgxmYJAAAAAAAAAAAIamyWAAAAAAAAAACAoMZmCQAAAAAAAAAACGqh3jBr1qyyWblypWxiY2Ndz1enTh3ZREdHy+bAgQOy+emnnzwjWc2aNWXz2WefySZjxoyy6du3r2umPXv2yGbkyJGy2bRpk2xat27tmmn27NmyqVGjhmzu3Lkjm99//901U86cOWVToEAB2Xz11Veyeffdd10zbd26VTaPHz+WTcKECWVTtWpV10zjx4+Xzaeffhrnz7dt2yYfI23atLI5deqUbMzMOnToIJsNGzbIJk+ePLJp3769a6Y0adLIJnXq1LIJBAKyCQkJcc1UuHBh2QwbNkw2DRs2lM20adNcMzVq1Eg2169fj5eZ5syZ45rp/v378fJY9+7dk03btm1dM928eVM2zz//vGyOHTsmm+zZs7tm8r7HT5I0aVLZ9OjRQzZFihRxPV/+/PllM3ToUNl4Xmfva1O8eHHZPHjwQDY5cuSQjfd4miFDBtk8evRINhUqVJDNuXPnXDNlyZJFNp51XK5cuWRTt25d10yez9PPP/8sG89xefPmza6Z7t69KxvPmuny5cuyKVu2rGumS5cuubq4dO7cWTZdunSRjWd9Y2b2wQcfyMZz/Dpz5oxsoqKiXDM1bdpUNp61wNmzZ2Wza9cu10yZM2eWTcqUKWXjOX+vXr3aNdP+/ftlM2rUKNl4zj3Jkyd3zVSiRAnZFCtWTDaetb5n7WlmduXKFdlUqVJFNiNGjJCN55xpZnbkyBHZlC9fPs6fe47L5cqVk01MTIxszMwSJUokm3z58skmIiJCNp7PtpnvGO/5zr3xxhuy6d+/v2smz3Hg6NGjsunXr59sPK+3me98eevWLdnUr19fNp77GWZmGzdulE2mTJlk89prr8nmyy+/dM3kOQ541qCea97jx4+7ZmrcuLGre5IxY8bIplq1arLx3lvxdJ7PyN69e2Vz/vx510zxtUavXLmybHbu3OmaybP+VvegzMymT58um5IlSzomMtu+fbtsPOcdz/etV69erpk856fDhw/LpmvXrrKZMGGCaybPNYFnvfvxxx/LxnMtZ+a/nlP4yxIAAAAAAAAAABDU2CwBAAAAAAAAAABBjc0SAAAAAAAAAAAQ1NgsAQAAAAAAAAAAQY3NEgAAAAAAAAAAENTYLAEAAAAAAAAAA
EGNzRIAAAAAAAAAABDU2CwBAAAAAAAAAABBjc0SAAAAAAAAAAAQ1EICgUDAE77++uvx8oTlypVzdcuWLZNN8eLFZfP48WPZFChQwDXTtm3bZHP69GnZtGzZUjYPHz50zRQbGyubTZs2yaZ79+6y+f33310zZcuWTTanTp2Kl8dJnz69a6YzZ87IZufOnbJ55plnZPPTTz+5Zurbt69sLl68KJvVq1fLpmzZsq6ZduzYIZsBAwbE+fNr167Jx/D87g0aNJCNmVnRokVl88EHH8imRIkSsgkNDXXNdOXKFdl06dJFNlOnTpXNvn37XDN17txZNvPmzZNN5cqVZbN//37XTFu2bJFN//79ZbNmzRrZLFmyxDXTjz/+KJtPPvlENjly5JDN5s2bXTPVqFFDNnfu3JHN8ePHZdOrVy/XTPPnz5fNq6+++sSfnTx5Uv77sWPHymbXrl2yMTOrUKGCbO7evSubjBkzysZznjAzmzt3rmw8v1/hwoVl41mbmPmOg/fv35eN5/ydJk0a10xHjx6VzZ49e2STIIH+74N69uzpGclGjBghG8/3Nl26dLJZu3ata6azZ8/KJlGiRLLxrJfy5s3rmungwYOymTRpUpw/96ypYmJiZBMSEiIbM7MsWbLIxvMd8Kxh1q9f75pp4cKFskmVKpVsPMeKOnXquGbyXFtER0fLJjw8XDbedU727Nll41mfLV26VDbec+X48eNlkydPHtmUKlVKNitXrnTN9Pbbb8vGcy4YOXKkbDxrODOz1q1by6Zr165x/rx3797yMZIlSyabunXrysbM7LfffpON5zvg+dyeP3/eM5Lr3Ov5/Tzn1Js3b7pm8lwTeI6XU6ZMkU2hQoVcM507d042SZIkkc3169dl470GL1mypGzy5csnG8/9mgsXLnhGsqxZs8rGs770HOcPHz7smilz5syy6dOnzxN/5rl+8aw7vWsKz+/lWVN51sy3bt1yzeT5/Tz3GMPCwmTjOZaYmVWrVk02ns+/5zXIkCGDaybP7Pfu3ZNNZGSkbLz3KRo3biwbz/1vz3W459rZzGzGjBmy8dw/2rBhg2ycWxdWu3Zt2VSvXl02/GUJAAAAAAAAAAAIamyWAAAAAAAAAACAoMZmCQAAAAAAAAAACGpslgAAAAAAAAAAgKDGZgkAAAAAAAAAAAhqbJYAAAAAAAAAAICgxmYJAAAAAAAAAAAIamyWAAAAAAAAAACAoBYSCAQCnvCnn36Szblz52Szb98+z9NZ7dq1ZbNnzx7ZFCpUSDbZs2d3zZQ2bVrZ3Lx5UzbLly+XTb169TwjuV6DkiVLyubIkSOyiYmJ8YxkQ4YMkc37778vm1KlSslm9+7dnpHs/PnzsilWrJhsPvzwQ9k0bNjQNZPns7l161bZlC5dWjYFChRwzXT//n3ZVKpUKc6f//rrr/IxQkNDZZM+fXrZmJlduHBBNp7X+vjx47LZvHmza6ZEiRLJpm7durI5duyYbC5duuSayXP8ioyMlM2VK1dkc/XqVcdEZpUrV5bN3r17ZeOZ+/bt256RLGHChLLZsWOHbB48eCAbz2fAzGzp0qWyuXfvnmyuX78um2TJkrlm6t69u2xy5sz5xJ999tln8t97Ptve81K6dOlkU6VKFdksWrRINhs3bnTNpI6lZr7zyeeffy4bz/nUzCxFihSyOXPmjGzy588vm8OHD7tmmjFjhmyGDRsmm+TJk8smc+bMrpk++OAD2XjOTXXq1JFNeHi4a6a2bdvKZvHixbLJlCmTbN555x3XTJ71gPrebdq0ST7Gt99+K5sBAwbIxsxs+vTpsqlevbpsUqVKJRvvGjYsLEw2nnV8RESEbNauXeuaKa7j+3/wvAaec87JkyddM3nOle+9955sPK+BZ01lZhYdHS2b2NhY2bRs2VI2nrWJmVmCBPq/lfRcG3teb+852nM74ssvv4zz5z/++KN8DM93YNCgQbLxzGPmu97xnL8877+Z2TfffCMbzzVRj
x49ZDNq1CjXTB06dJDNtWvXZOP5LG3YsME1k+e+gGed67kvcOfOHddMY8aMkY1nHeO51zZ37lzXTPny5ZNNkSJFZHPgwAHZeK73zMxKlCghm86dOz/xZ1988YX890mTJpVNjhw5ZGNmtmDBAtl47kF53nvP62zmO397fj/PubJw4cKumR49eiSbcuXKycbz3fa8lmZmS5YskY3nXOk5xz/33HOumTzX+6dPn5ZN0aJFZfPdd9+5ZqpWrZps9u/fLxvP/W/PdbiZ756851jAX5YAAAAAAAAAAICgxmYJAAAAAAAAAAAIamyWAAAAAAAAAACAoMZmCQAAAAAAAAAACGpslgAAAAAAAAAAgKDGZgkAAAAAAAAAAAhqbJYAAAAAAAAAAICgxmYJAAAAAAAAAAAIaqHecP369bKpWLGibKpWrep6vhMnTsimbNmysilfvrxsvvvuO89INmfOHNl89dVXssmaNatszp8/75opJiZGNteuXXM9llKzZk1Xd+XKFdn07dtXNsOHD5fN9evXXTNlypRJNjNmzJBN3bp1ZZM9e3bXTNHR0bLxfA4iIiJkc/fuXc9Idvz4cdlUqlQpzp97PpM5c+aUzfbt22VjZpYlSxbZ7N27VzZp06aVzfz58z0j2dChQ2WTMmVK2dy5c0c2ns+RmVmNGjVkM2vWLNm8+eabsunVq5dnJLt69aps2rVrFy+P8+uvv7pmKlGihGy2bNkiG8/rFB4e7prJc/7t3bu3bDzH3XLlyrlmOnz4sGzi+p4nTpxY/nvPd6Rjx46yMTMbMmSIbL755hvZ1K9fXzaeY7KZWbZs2WTjOX83a9ZMNi1atHDN5DkPTps2TTYJEyaUTZkyZVwzJU2aVDYhISGySZYsmWzGjh3rGcl1nm/YsKFsPGsYz9xmvvcuRYoUsvG83q+88opnJBs0aJBsqlSpEufPjx49Kh8jderUskmUKJFszMzWrFkjG891Q5MmTWTTrVs3z0i2ceNG2Zw8eVI2nmOT57tkZpYhQwbZeN6XHTt2uJ7Po3Tp0rIJBAKyuXjxomw862Uzs7CwMNm0bNlSNp5rosuXL7tmun//vmw8a8aSJUvKpkCBAp6RXOtPxfPZ9awFPfcOzMwWLVokG8/x+969e7LxnHfNzJo2bSobdR1n5rsP8/bbb3tGslOnTsnGsx7evXu3bLyfI885zHPN51nr79+/3zOS/eMf/5BNwYIFZeM5z+/bt881U4UKFWTjucfi+a54zplmZmfPnnV1TzJ69GjZeNannvtdZmavvfaabDzXeUmSJJFNrly5XDN57qFOmTJFNp7zqfeepuc+suc8mC9fPtnkzp3bNVOtWrVkc+nSJdl47jEdO3bMM5IdOXJENp57Yz/++KNs8ubN65rpwYMHsvFcN3nu5zRo0MA1065du1ydwl+WAAAAAAAAAACAoMZmCQAAAAAAAAAACGpslgAAAAAAAAAAgKDGZgkAAAAAAAAAAAhqbJYAAAAAAAAAAICgxmYJAAAAAAAAAAAIamyWAAAAAAAAAACAoMZmCQAAAAAAAAAACGqh3vD111+XTdu2bWXz1VdfuZ7vwYMHssmbN69sXnzxRdmkS5fONdP7778vm0SJEskmNjZWNnfv3nXNNHnyZNm0adNGNkePHpXNjBkzXDN9+umnsnn8+LFs0qRJI5v9+/d7RnJ9Ng8cOCAbz+fp4cOHrplu374tm/Pnz8vmwoULsqlevbpnJLt+/bqri0tUVJRsFi5cKJskSZK4nm/RokWy6d27t2wuXbokG8/7b2Z28eJF2cTExMgmU6ZMssmdO7drpkOHDskmT548somOjpZN/fr1PSO5jr07duyQTebMmWXz3nvveUay4cOHyyZBAv3fGXg+A+Hh4a6ZunTpIhvPMbVy5cqyOXnypGumQYMGyaZWrVpP/NnXX38t//27774rG8850Mzs5s2bsilWrJhsPK/z6tWrXTM1atRINjlz5pTNggULZOM5LpuZ7
d69WzZ9+vSRTZUqVWTj+QyYmaVOnVo2Z8+elY1nvZA8eXLXTJ7zheezUqBAAdkcPnzYNZPn/F2+fHnZ3Lt3TzbZs2d3zeQ5pyiec06PHj1ks2zZMtfzdejQQTae4+Szzz4bL49jZnbjxg3ZeNYUv/32m+v5PHr27Cmb7t27y6ZVq1ayGTJkiGMis+bNm8tm5MiRsvG8d9u3b3fNlC1bNtn84x//kE1oqL5k9669SpQoIRvPdeHly5dlU7FiRddMO3fudHVxmT17tmzatWsnmzNnzrier0KFCrLxrL09a8p69eq5Zvrll19kU7ZsWdkMGzZMNhkzZnTNVLBgQdl4zjtJkyaVzdChQ10zJU6cWDa7du2SjWe917JlS9dMnu/41KlTZeNZV0RGRnpGsmnTpslm3bp1svFcx3jvQXjOBy+99NITf1a7dm3578uVKyebffv2ycbM7ODBg7I5ceKEbDzXKJ71spnZpk2bZFO4cGHZrFq1Sjae383M7MMPP5TNuHHjZOO5F5s2bVrPSK7jkuexPK9TWFiYZyTXetBzDvPc0/JcE5qZbdy4UTaeuT2NZ11p5vscePCXJQAAAAAAAAAAIKixWQIAAAAAAAAAAIIamyUAAAAAAAAAACCosVkCAAAAAAAAAACCGpslAAAAAAAAAAAgqLFZAgAAAAAAAAAAghqbJQAAAAAAAAAAIKixWQIAAAAAAAAAAIIamyUAAAAAAAAAACCohXrDzZs3y+aFF16Qzf79+13Pd/LkSdmMHz9eNv369ZPN2LFjPSNZTEyMbLJnzy6b8PBw2URGRnpGssGDB8vmxo0bsqlatapsChcu7JrJ87506dJFNp7PgOdxzMxCQkJkkzhxYtnMmTNHNm+//bZrJs/nLjo6WjY5cuSQzS+//OKYyKxSpUquLi4LFy6UTc6cOWWzd+9e1/ONGDFCNn//+99lU69ePdmcO3fONdPhw4dl8+mnn8pm9uzZslm5cqVnJNfsnpk++eSTeGnMzH788UfZdOvWTTaLFy+WzZkzZ1wz1apVSzb379+XzYkTJ2STIkUKz0iWIIH+7xomTJggm4iICNm88cYbrpkGDBjg6p5k1apVspk7d65sfv/9d9fz5cmTRzZt27aVjWfuDz74wDXT9OnTZXPlyhXZxMbGymbfvn2umSpWrCibBQsWyMZzDPSsz8zM5s2bJxvP+dtz3kmXLp1rpvPnz8vGcwzwfJ7atWvnGcm19ho9erRsPOuAa9euuWbKkiWLq4vL5cuXZROf54BUqVLJpm7durL5448/ZJMrVy7XTLlz55bN5MmTZdOmTRvZXL161TXTnj17ZONZV4WFhcmmdevWrpk812BRUVGy8XzmvDONGTNGNr169ZKN5zNXtmxZ10z/+Mc/ZON5Xzzryps3b7pmatCggauLS9++fWXz22+/yaZ9+/au5/Ocmzzn+YkTJ7qez8PzHfAcvzds2CCbN9980zWT5ziwceNG2Rw7dkw2w4cPd81UvHhx2XjWFZ5z3LJly1wzeda85cqVk82sWbNkU7lyZc9IrnVMkyZNZHP9+nXZ3L171zGRWZEiRVzdk3jW1YUKFfpTz/H/tnPnTtnUqFFDNrdv35aN9z5FsmTJZHPgwAHZlClTRjY1a9Z0zeR5nRo1aiSbS5cuyeb06dOumfLmzSsbz/3hDBkyyGbt2rWumTxrdM99T89aL0mSJK6ZBg4cKBvP+vPRo0eySZ48uWum5cuXy6ZZs2ay4S9LAAAAAAAAAABAUGOzBAAAAAAAAAAABDU2SwAAAAAAAAAAQFBjswQAAAAAAAAAAAQ1NksAAAAAAAAAAEBQY7MEAAAAAAAAAAAENTZLAAAAAAAAAABAUGOzBAAAAAAAAAAABLVQbxgVFSWb33//XTYJEvj2Zzp27Bgvz7dx40bZ9OjRwzVTIBCQzffffy+brFmzyubq1auumcaPHy+b1157TTbvv/++bIYMGeIZyerUqSObfv36ySZz5syy8
X6ebt68KZvHjx/LxvM9WLRokWsmT9eoUSPZDB8+XDYnT550zXT8+HHZVK1aNc6fr169Wj5GbGysbKZOnSobM7MSJUrIJiwsTDY7duyQzYABAzwj2Z49e2Szfv162RQpUkQ2V65ccc2UOHFi2XiOJ2+++aZs1q5d65opZ86csrl27ZpstmzZIpssWbK4ZkqZMqVszp8/L5uYmBjZeH5/M993/NKlS7K5f/++bH799VfXTBUqVHB1T3L69GnZHD16VDbFihVzPV/p0qVlkyZNGtkkS5ZMNidOnPCMZPv375dN3rx5ZeM5ns6bN881U8+ePWVTqFAh2XjOpx999JFnJOvatats3nvvPdl88803srl165ZrJs/327NmLF++vGzOnDnjmumTTz6RjecY8MEHH8gmIiLCNZP39YxLixYtZBMdHS2bhAkTup6vSpUqspk9e7ZsevXqJZsDBw64Znr48KFsmjVrJhvP8dJzzDEzW7x4sWw6dOggG8/n+9GjR66ZSpUqJZvs2bPL5uLFi7LxfAbMfJ+7JEmSyKZ58+aymT9/vmsmz/OVK1dONp51h+e7GV8ePHggm2eeeUY2nuO7me8c/s4778hmxIgRsvGsT8x89zQ8c3vWX95rouTJk8smvtY6nvWCmdnBgwdls337dtl47kNUrlzZNVOmTJlks3fvXtnkyZNHNuvWrXPN5PncLVu2TDYTJ06UjXdNqO5DKKGh+vbnmDFjZOO5R2Pmu0a9e/eubFKlSiUbz7HdzHdfxPM6edZ43s/a5cuXZeO5BvPch1yxYoVnJNdx0LOm2L17t2yqVavmmslzPPGcmz3fbe89pn379skmV65cssmWLZtsvv76a9dMnnu2HvxlCQAAAAAAAAAACGpslgAAAAAAAAAAgKDGZgkAAAAAAAAAAAhqbJYAAAAAAAAAAICgxmYJAAAAAAAAAAAIamyWAAAAAAAAAACAoMZmCQAAAAAAAAAACGpslgAAAAAAAAAAgKAW6g1Pnz4tmytXrsjm7Nmzrue7ePGibEJCQmRTpkwZ2cycOdM1k+c1aNu2rWxy5Mghm7/97W+umVavXi2bjh07ymbt2rWyqVatmmum69evy6Z48eKyefDggWzOnz/vmqlcuXKyyZcvn2zmzp0rmypVqrhmKlasmGw8n7lu3brJ5urVq66Ztm/f7uri8sUXX8gmWbJkssmVK5fr+S5fviwbz2e3b9++svHMbeb7LHmOXzNmzJBNqVKlXDN5js+ez+769etlU6RIEddMGTNmlE2WLFlk06BBA9mUL1/eNdPhw4dlExUVJZuRI0fKZuPGja6ZWrVqJZusWbPKZvLkybIJBAKumdKkSePqnuS3336TTbZs2WRTtmxZ1/MNHz5cNkmTJpVN+/btZfPNN9+4Znrrrbdk41mffPzxx7J59OiRa6ZRo0bJJnny5LLxvJZ169Z1zeQ5Dno+B573zvNcZmZnzpyRTc6cOWXjOc8NGjTINZPnWOk5BnjWAfPmzXPNdOLECVcXl4ULF8pmz549svGev4sWLSqbO3fuyGb37t2y+eqrr1wzhYbqy7WIiAjZeK4/PMdKM7NKlSrJ5rnnnpPN4MGDZeNdD3766aeySZkypWwSJkwomwQJfP+9oec89ssvv8jGc91UtWpV10wbNmyQjef6w3MO81xfmplNmTJFNm+//XacP/dcD+TJk0c2v//+u2zMzLp27Sobz/2MY8eOyeaFF15wzTRw4EDZvPTSS7LxHHc9c5uZ3bhxQzYlSpSQzZgxY2TjOc+b+a75PNfp69atk03atGk9I7nuVXhmmj17tmw8x0EzswwZMsjGc+7xfFf++OMP10ze694nqVixomw875nnutrMrHLlyrLxrKnq1KkjG8+9BTOzVatWySZv3ryyOXLkiGw8320z3+t5//592Xz77beyef31110zLVq0SDZHjx6VTaJEiWTjubYyM9uxY4ds7t27J5tt27bJxvudbNiwoWwKFCggm
/Tp08vG+zp5O4W/LAEAAAAAAAAAAEGNzRIAAAAAAAAAABDU2CwBAAAAAAAAAABBjc0SAAAAAAAAAAAQ1NgsAQAAAAAAAAAAQY3NEgAAAAAAAAAAENTYLAEAAAAAAAAAAEGNzRIAAAAAAAAAABDUQuPzwb777jvZDBkyxPVY6dKlk82FCxdkU758edls377dNdPVq1dlM336dNlkz55dNn379nXNFBYWJps5c+bI5vLly7IJDfV9XA4cOCAbz/tbtGhR2Xz00Ueumfbu3Sub1q1by6Z+/fqymTBhgmsmz2s+depU2Xjm3rZtm2umIkWKuLq43LlzRzYpUqSQze+//+56vn79+slm7dq1shk4cKBsZs2a5RnJcuXKJZukSZPKpmDBgrIpUaKEaybPYw0ePFg2LVq0kI3nOGhm1qxZM9l4jvPPPvusbAYNGuSayXNMfeaZZ2TjOX7du3fPNdPMmTNl8+qrr8pm9erVsvn6669dM33zzTd/aqZy5crJf3///n3ZBAIB2ZiZ3bx5UzaXLl2SzYcffiibBQsWuGZKkiSJbGJjY2UzZcqUeHkuM7OWLVvK5uLFi7KZO3duvDyOmdmSJUtkExUVJZvatWvH20znzp2TzZgxY2TToEED2XiON2a+NWrFihVlU7x4cdkUKlTINdPKlStdXVw867fGjRvLxnteevjwoWw8n2/POsBzjWJmVrlyZdl41lUnTpyQTY8ePTwj2Zo1a2Tjub6qW7eubBYtWuSaqV27drIZO3asbL788kvZeI7NZmY5cuSQzdatW2XjOR/OmDHDNVPKlCll4zlnetfpHh07dvzTj+FZD0yePFk2nnWumVl4eLhsfvnlF9l4vgM///yza6ZPP/1UNp614OPHj2XTpEkTx0S+a/4ffvhBNp5rcM9awMysbdu2sjl06JBsMmfOLJs8efK4Zjp16pRszp4963osxXMvysz32fSseefPny+bL774wjWT9zj3JH//+99lU6xYMdl4P2uez5Fn7Z0xY0bZeNYvZr5zgGem69evy2bUqFGumTznuEqVKsnGc72TPHly10znz5+XTUhIiGzy5s0rG++awvP7ec6FiRMnjreZPPe0ChcuLJudO3fKxnMtZ2Z29+5dV6fwlyUAAAAAAAAAACCosVkCAAAAAAAAAACCGpslAAAAAAAAAAAgqLFZAgAAAAAAAAAAghqbJQAAAAAAAAAAIKixWQIAAAAAAAAAAIIamyUAAAAAAAAAACCosVkCAAAAAAAAAACCGpslAAAAAAAAAAAgqIUEAoGAJ3zppZdkU7RoUdnUrl3b83T2xx9/yGb37t2y8fx6ZcuWdc1UqFAh2WzevFk26dKlk03Dhg1dM3388ceySZw4sWxKly4tm7lz57pmatmypWyOHTsmm6pVq8pm+vTprpmyZcsmmyVLlsimW7durufzWLlypWw8712ZMmVks3btWtdMUVFRsunUqVOcP//73/8uH6NSpUqyiY6Olo2Z2aNHj2Szfv162TzzzDOyuXr1qmumChUqyGbs2LGySZ48uWych3DXce7OnTuyuX79umySJUvmmil16tSy8by/efPmlc25c+dcM3nOK5kzZ5bNw4cPZRMWFuaa6ejRo7LJkyePbNKnTy+bAwcOuGby/H5vvPHGE3925swZ+e8XLFggmytXrsjGzKxkyZKyefXVV2WTMWNG2bRq1cozkt24cUM2nuOS53G8x67Lly/LJkOGDLLJnz+/bHbs2OEZyVKmTCmbI0eOyObatWuySZs2rWck13o3SZIkshk9erRsXnjhBddMS5culY1n/ZkmTRrZPPfcc66ZTp8+LZu2bdvG+fPy5cvLx3j77bdlExkZKRszs3nz5smmdevWstm1a5dsFi1a5JrJc24uUKCAbDxrCu/30nO9U6tWLdnUrVtXNrdu3XLNtHr1atl41hRNmjSRTZUqVTwj2cmTJ2Uzf/582YwcOVI2/fr1c8109+5d2XjWFClSpJCNZ
24z33nl+++/j/Pnw4YNk48RHh4uG88x0My3hp01a5ZsYmJiZONdV9epU0c2nvPzO++8I5vXX3/dNdPevXtlU6xYMdmkSpVKNgsXLnTN5DkOeK5Vc+TIIZv+/fu7ZvKsK1asWCGbDz/8UDbvv/++ZyR79tlnZdOuXTvZhISEyGbDhg2umVq0aCGb0NDQJ/7M8xnJmjWrbDyfazPfMeD555+XjWcd4Lk2MzNr06aNbMaMGSMbz3cyV65crpkmTpwoG897v2rVKtl4zl1mvntsU6ZMkY3nfOq5ZjDz3YNYs2aNbDzHNy/P587z+fVcP3uPXU2bNpWNZy3PX5YAAAAAAAAAAICgxmYJAAAAAAAAAAAIamyWAAAAAAAAAACAoMZmCQAAAAAAAAAACGpslgAAAAAAAAAAgKDGZgkAAAAAAAAAAAhqbJYAAAAAAAAAAICgxmYJAAAAAAAAAAAIaqHesHnz5rI5duyYbCZOnOh6vjfffFM2586dk03VqlVlExMT45rpxo0bsgkEArLJmDGjbPr16+eaafDgwbL55ZdfZJMnTx7ZvPXWW66Z5syZI5uWLVvKZsOGDbJ59913XTMtWbJENj169JCN5zNw4sQJz0g2c+ZM2TRr1kw2xYoVk83AgQNdMzVo0MDVxaV48eKyOXnypGyyZ8/uer7169fLxvPZfeedd2TjOZ6YmY0dO1Y2U6dOlc2oUaNk891333lGsueee042nu9uo0aNZHPw4EHXTOPHj5fNxx9/LJsrV67IZsCAAa6Zpk2bJptNmzbJZvv27bJJly6da6bGjRvLpn///rLxfDdz5MjhmmnHjh2u7kk8592wsDDZeM65Zmb79u2Tjedz9M0338gmV65crpnWrl0rm7lz58omTZo0sjl79qxrpty5c8vmxx9/lE2lSpVk8/rrr7tmOnLkiGyio6Nl07ZtW9n88MMPnpFs165dslmzZo1sXn75Zdls3LjRNdNrr70mG8+5d9KkSbLxfFfMzPr06ePq4lK7dm3ZNGnSRDYTJkxwPd/+/ftl4zmfhIbqSyzPWtjMt17InDmzbIoWLSqbUqVKuWbyfMdjY2PjpfHMbWa2Z88e2VSuXFk2R48elY3nWs7Mtx6KjIyUTcOGDWXz8OFDz0iuc9Tly5dl4/mM169f3zWT59ikXLhwQTae97ZNmzau5/Mc4z1ryldeeUU2zz77rGckGzdunGwSJUokm2TJksnm5s2brplSpkwpmylTpsgmbdq0sgkPD/eMZFWqVJHNtm3bZFOiRAnZeL7fXtWrV5fNoEGDZFOxYkXX83k+43nz5pXNli1bZFOkSBHXTJ5rmbjW/J7zqWdN4bkPaWZWq1Yt2cTX/cPHjx+7Zrp165ZsPN/b+/fvy8ZzPWxm9sILL8jGcx/Oczz1rDvMfMe4jz76SDae19J7Xe35TrZv3142nrV+zZo1XTN5zimpUqWSjec8kCCB72894uu4y1+WAAAAAAAAAACAoMZmCQAAAAAAAAAACGpslgAAAAAAAAAAgKDGZgkAAAAAAAAAAAhqbJYAAAAAAAAAAICgxmYJAAAAAAAAAAAIamyWAAAAAAAAAACAoMZmCQAAAAAAAAAACGqh3vD+/fuyqVOnjmzGjh3rer5169bJ5uDBg7JJkyaNbJIlS+aaKXv27LK5cOGCbC5evCibhg0bumbq2bOnbCpVqiSb2bNny6ZAgQKekaxGjRqyGTZsmGxSpUolm++//94106pVq2RTq1Yt2fTo0UM2n3zyiWumkSNHymbcuHGux1JOnjzp6qZOnSqbevXqxfnzW7duycc4duyYbHbs2CEbM7OrV6/KpmjRorL56quvZFOkSBHXTNmyZZNNwoQJZfPw4UPZfPnll66ZOnfuLJu//e1vsomJiZGN5zhoZla4cGHZJEmSRDaZMmWSTdeuXV0z7dmzRzZhYWGy2bdvn2xKlizpGckWLlwoG8/nwDPTzJkzXTPlzJnT1T3JvHnzZHPp0iXZN
G7c2PV8GzZskI3nHLBo0SLZPPfcc66Z7t69K5t27drJpnjx4rIZPHiwa6bz58/LplWrVrLxrAM8n0czswcPHshmypQpssmSJYtscuXK5Zopd+7csqlevbpsPMeuR48eeUayESNGyCZr1qyyqVmzpmzSp0/vmslz3vnjjz/i/HmFChXkY3z99dey8Z6/z5w5I5uIiAjZtGnTRjae44mZ75rg7NmzsgkN1Zd9s2bN8oxkd+7ckc2nn34qm8uXL8tm06ZNnpFcXbVq1WTjWTMfPXrUNVOePHlk47l+Xrx4sWxatmzpmmnNmjWyefbZZ2WzdOlS2Xg+J2b+Y29c7t27J5vHjx/LJnXq1K7n83ze3nvvPdl4jsv58uVzzeT5LK1fv142nuPl6tWrXTN5jqkTJ06UTa9evWTjOTebmR0+fFg227Ztk03SpEll4znumpnlzZtXNjdu3JCN573z3NMyM0ubNq1sPL+fZ/3l+Vyamb3yyiuu7knefvtt2XjeV8+5y8zsyJEjskmcOLFsrl+/Lpv8+fPH20ye82BkZKRsPPdrzczKlCkjG89Mnu+t5x6Mme+Y47l35jmneD4DZr613pIlS2Tjuc/qOXabmYWEhMjG89mMjY2Nl+cy890XmTNnjmz4yxIAAAAAAAAAABDU2CwBAAAAAAAAAABBjc0SAAAAAAAAAAAQ1NgsAQAAAAAAAAAAQY3NEgAAAAAAAAAAENTYLAEAAAAAAAAAAEGNzRIAAAAAAAAAABDU2CwBAAAAAAAAAABBLdQbzp8/XzbXr1+XTSAQcD3fiRMnZNO9e3fZ3Lx5UzYPHjzwjGQxMTGy6dOnj2wmT54sG8/cZmZlypSRzd27d2Vz8OBB2eTNm9c108yZM2VTr1492Vy9elU2GzZscM30yiuvyGbVqlWyGTlypGwSJkzoGckyZ84smwYNGsjm2rVrsvnggw9cM1WuXNnVxWXixImy+frrr2Xz5Zdfup4vIiJCNlOnTpWN5/u9detW10ye75znPfnjjz9kc+zYMddMkyZNks2iRYtkc+HCBdmkSJHCNVOPHj1kM2LECNkkT57c9Xwezz77rGyOHj0qm7p168rm119/dc2UI0cO2Zw5c0Y2t27dks29e/dcM/3ZY0WGDBlkU7RoUdksW7bM9XyJEyeWjed4u3v3btkkSOD771DWr18vm+rVq8vm22+/lU2yZMk8I7m6yMhI2QwbNkw2xYoV84zkWjdOmDBBNp7zTps2bVwzeb5vpUuXlo3neFOwYEHXTIkSJZLNjRs3ZONZLy1cuNAzkj333HOuLi4HDhyQjWdNOWfOHNfzValSJV5mev7552WzZ88e10z79u2TzZEjR2TjOb4PHz7cNdPSpUtl4/n9Hj9+LJs333zTNZPn2PTw4UPZtG3bVjbnz5/3jOQ693jeO8+6avPmza6ZUqdOLZsVK1bI5tChQ7LJnz+/ZyTX9dwLL7wQ58/LlSsnH8NzDR4dHS0bM7Nq1arJZvv27bLxvG+e63Qzs4sXL8rG8z3xvP+NGjVyTOT7vC1YsEA2nmu5bdu2uWbynOvv3Lkjm127dsmmZs2arplSpUolm8KFC8umbNmysvHcrzIzGzp0qGw81/2e45fnnpaZ2aZNm1zdk5w8eVI2586dk02WLFlcz+f5/GfMmFE2O3fulI3nmGxmdunSJdnMnTtXNp51gHcNe/r0adnUqFFDNmvXrpWN5z6Nme98kSlTJtl47qF6j12eNWq/fv1k4/neeo65ZmbNmzeXzerVq2VTu3Zt2XjWVGa+z4oHf1kCAAAAAAAAAACCGpslAAAAAAAAAAAgqLFZAgAAAAAAAAAAghqbJQAAAAAAAAAAIKixWQIAAAAAAAAAAIIamyUAAAAAAAAAACCosVkCAAAAAAAAAACCGpslAAAAAAAAAAAgqLFZAgAAAAAAAAAAglpofD7YokWLZJM1a1bXY92+fVs2q1evlk2HDh1ks3v3btdMCRMmlM2ZM2dkc
/LkSdkkS5bMNdOOHTtkU7BgQdnUrl1bNhcuXPCMZBkyZJCN5zVPmTKlbLJnz+6a6dChQ7Lp2LGjbM6fPy+bFClSuGbyfF+KFCkim9mzZ8vm7t27rpnmzZsnm+XLl8f58/fff18+xrvvviubiIgI2ZiZ3bt3L14ea/78+bJJnz69a6aHDx/KZtSoUbKpUaOGbLzfS8/nrVSpUrLZu3evbG7cuOGayfM65c+f3/VYSpkyZVzd1atXZTNlyhTZ5MqVSzZNmzZ1zbR9+3bZJE6cWDYjR46UzbfffuuaqVevXrKZO3fuE3+WNm1a+e8937dLly7JxsysWLFiskmXLl28PF/x4sVdM8XExMjG8x3xfCfLly/vmikqKko2W7ZskY3ndfJ8BszMKlSoIJuvvvpKNnXq1JHNihUrPCNZ69atZTN58mTZTJo0STZTp051zeRZoz5+/Fg2ntfg+eef94xk4eHhri4uDRo0kM2sWbNk4z0HPHr0SDZ58uSRjecz0qhRI9dMEyZMiJfHunnzpmy6dOnimik0VF9Ceq5lPMem7t27u2YqV66cbPLmzSsbz/EkMjLSM5Lr+sqzZmrfvr1sjh075prJc6zwrC1jY2NlU6lSJddMI0aMcHVx8XzePMcuz3fXzPc5yZYtm2xWrVolm06dOrlm2rBhg2yyZMkimzfffFM2nrWwmdmVK1dkM23atHhpZsyY4Zpp3bp1sjl8+LBsqlevLpuePXs6JjLr1q2bbDxrQs867sMPP3TNNGTIENl47lV4jrve9cLZs2dd3ZN47nV4viNhYWGu51uwYIFsunbtKptr167JJnny5K6ZevToIZsCBQrIpn///rLx3s/xvP8HDx6UjWcNlzp1atdMJUqUkM3AgQNlU7p0adl4vkdmvvWn53p/3LhxsmnRooVrpkAgIJu3335bNt9//71sPPf/zXz3Gj34yxIAAAAAAAAAABDU2CwBAAAAAAAAAABBjc0SAAAAAAAAAAAQ1NgsAQAAAAAAAAAAQY3NEgAAAAAAAAAAENTYLAEAAAAAAAAAAEGNzRIAAAAAAAAAABDU2CwBAAAAAAAAAABBLdQb3r59WzY3b96UzerVq13Pt3btWtkMHjxYNkWKFJHN+PHjXTMlSpRINvfu3ZNNsWLFZLN582bXTJcuXYqX5ytXrpxsrl+/7ppp9uzZsunWrZtsPO9Lnjx5XDOFhYXJZtWqVbLJnz+/bEqUKOGa6cMPP5RNTEyMbF588UXZeN4TM7P333/f1cXl7t27snnppZdks3v3btfzJU2aVDaffvqpbD7//HPZjBo1yjOS3blzRzYVKlSQzfr162Vz4sQJz0iu96V27dqy8RxTPXObmc2YMUM2NWvWlM2ECRNk4/1eLly4UDYVK1aUzeHDh2WTLVs210yRkZGyefz4sWzKli0rG8+53sysVatWru5JlixZIpvY2FjZNGnSxPV8nseaNWuWbDZs2CCb5MmTe0ayAQMGyGbp0qWySZkypWyuXLnimunQoUOy8byWpUuXlo33OHHhwgXZeF7L7t27y+bVV191zeRZex04cEA2u3btkk3x4sVdM3nOO57PZq1atWQzceJE10zt27d3dXHZtGmTbB48eCCb06dPu56vXr16spk3b55sPOvOzJkzu2bynL8KFCggG88xzjO3mdkLL7wgm4wZM8rm4MGDsunQoYNrpnPnzsnm1q1bssmXL59sqlat6ppp8eLFsvGsqzxr3TNnzrhm8pznPd/dt956SzY5c+Z0zeQ51ympUqWSzb59+2TTu3dv1/MtW7ZMNh07dpRN48aNZTNixAjXTD/99JNsPOc5z/2Mli1bumbau3evbJ555hnZrFy5Ujae766Z2aRJk+Kl6dWrl2zq1Knjmun8+fOyyZ49u2zWrVsnG+/5MF26dLKJiIiQzf79+2VTo0YN10ydOnVydU8SHh4uG89aadiwYa7nq1+/vmw899gSJND/jXuWLFlcM73++uuy8dxj8
1yjX7t2zTXT2bNnZeNZD3q+b571spnvXNmzZ0/ZeO5pHT161DOS67vr+fw+99xzsvHei/MYO3asbKpVqyYbz7nJzGzo0KGy8Zwv+csSAAAAAAAAAAAQ1NgsAQAAAAAAAAAAQY3NEgAAAAAAAAAAENTYLAEAAAAAAAAAAEGNzRIAAAAAAAAAABDU2CwBAAAAAAAAAABBjc0SAAAAAAAAAAAQ1NgsAQAAAAAAAAAAQS0kEAgEPGGrVq1k07VrV9nExMR4ns5GjBghmyZNmsgmTZo0somMjHRMZPb777/LZs+ePbJJnz69bFKmTOmaqWjRorLZsmWLbDJnziwbz9xmZgkTJpSN52OXLVs22YwePdo1U6lSpWSTOnVq2dy9e1c2J06c8Ixk9+7dk03t2rVl43m9jx496prp9u3bsvnyyy/j/PmOHTvkY0yePFk2OXPmlI2Z2bFjx2RTvnx52Tx69Eg2efPmdc20b98+2Vy4cEE2LVq0kI33vT148KBsypYtKxvPe7dr1y7XTM2bN5fN3LlzZbNx40bZfPbZZ66ZUqVKJZuZM2fKxnNM9X6eTp06FS+PVbhwYdl4X6dixYrJZujQoU/8mWdN4Tn+TZw4UTZmvmNAuXLlZOP5fISEhLhm8qw9Zs2aJZvQ0FDZxMbGOibyHXcfPnwoG895wHMONDP7/vvvZdO3b1/Z5MqVSzYff/yxa6YZM2bIZtCgQbIpU6aMbLzvnef3mzNnjmySJUsmG89308z3fWnQoEGcPx81apR8jKxZs8omRYoUsjEz27lzp2w81xa//PKLbDyfWzPfNZFnjV6gQAHZnD592jVT6dKlZTNkyBDZVKhQQTae19vMrE+fPrJ57733ZOO5Znjw4IFrpmvXrslGfQfMzCZNmiSb6tWre0ayc+fOyWbhwoWyuX//vmzef/9910yeNXHjxo3j/Lln7fnbb7/JZt68ebIxM+vZs6dssmfPLhvP++F5rc1810SVK1eWjWet71lXmZnlz59fNmvWrJHN1q1bZfPBBx+4ZlqyZIlsPMfUw4cPy8Z7n2n9+vWy8VwPNGzYUDaeex5mZmfPnpWN5/y7YcMG2XjW4Ga+e4lvv/32E3/mee9v3rwpG8/vbWb28ssvy+a1116TjefeQnh4uGumqKgo2Zw5c0Y2adOmlY3n3pmZ71rm4sWLssmRI4dsvN9Jz3nXs7bs3r27bMaNG+eaqVmzZrK5fv26bP744w/ZeO4bmJnt379fNp57PqtXr5aN53xiZjZ9+nTZeK4v+MsSAAAAAAAAAAAQ1NgsAQAAAAAAAAAAQY3NEgAAAAAAAAAAENTYLAEAAAAAAAAAAEGNzRIAAAAAAAAAABDU2CwBAAAAAAAAAABBjc0SAAAAAAAAAAAQ1NgsAQAAAAAAAAAAQS3UG9apU0c2CxYskE3OnDldz3fo0CHZ5M6dWzYrV66UzahRo1wzffLJJ7JZvny5bKpXry6b8PBwz0g2evRo2dSsWVM2a9eulU2XLl1cM/3++++yyZ49u2yuX78um3v37rlmqlSpkmx27twpm3PnzsmmaNGirpk8n5UsWbLIZvbs2bK5ffu2a6bUqVO7uri8/fbbsvnb3/4mm1KlSrmez/N58/xeadKkkc26detcM3mOTYULF5bNoEGDZPPMM8+4ZvIcd7JlyyYbz+c7NNR3akmSJIls+vTpIxvPa+n5npiZRUZGyiYsLEw21apVi5fGzGzcuHGyadSokWw6duwom7Jly7pmat68uat7kpCQENl4jsnez3+ePHlk4zkvedYUFy9edM2UNWtW2ezZs0c2nmNlIBBwzXTixAnZnD59WjZvvvmmbGbMmOEZyYYNGyabhAkTysbzvnTt2tU1k2f2/Pnzy6ZIkSKymTp1qmumS5cuyaZv376y8azlvefCiIgI2TRo0CDOn3vWL57vd+vWrWVjZvbSSy/JpmnTprJZtWqVbDzHEzOzqKgo2XjWp82aNZPN8ePHXTPduHFDN
h9//LFsduzYIZsKFSp4RnJdy3iOFY8ePZJNdHS0ZyQ7c+aMbObPny+b2rVry8Z7nPesqwYPHiyb9evXy2bixImumerWrevq4jJ9+nTZeK4tW7Zs6Xq+GjVqyGbz5s3x0hw5csQ1U8WKFWXjXaMruXLlcnWedYVnDXv+/HnZeF5LM7OUKVPKJlOmTLLxXA9s27bNNdNHH30km/79+8smefLkstm7d69rJs+aaOjQobLxrNUPHDjgmslzLRMXz/WgZw07adIk1/M1bNhQNp7rj7Rp08rm2rVrnpFc6/jdu3fLxvNeeO6vmJlNmzZNNsWKFZONZw3j+Y6Ymb377ruy8XyeLl++LJtChQq5ZvJ8d69evSqbmzdvyubs2bOumWJiYmTjWct79gnu3LnjmqlNmzauTuEvSwAAAAAAAAAAQFBjswQAAAAAAAAAAAQ1NksAAAAAAAAAAEBQY7MEAAAAAAAAAAAENTZLAAAAAAAAAABAUGOzBAAAAAAAAAAABDU2SwAAAAAAAAAAQFBjswQAAAAAAAAAAAQ1NksAAAAAAAAAAEBQCwkEAgFP+O2338rm8ePHskmcOLHn6ezWrVuyuX79umyio6Nl8/DhQ89I1qRJE9mkTp1aNjdv3pTNpk2bPCPZhQsXZPP222/LJn369LI5ceKEZyRr27atbKZNmyYbz2elS5curpnq168vm3bt2snm/fffl02ZMmVcM1WtWlU2ns94hQoVZOP5/pqZ5cuXTzadOnWK8+fFihWLl3n27dsnGzOzzJkzyyZRokSyGT9+vGxSpUrlmum5556TTdKkSWWTPHly2ezZs8c1U4MGDWRz5MgR2Vy7dk02R48edc1UsWJF2Wzbtk021atXl01ISIhnJDt+/Lhszp07JxvP9/LYsWOumTznw1GjRsmmUqVKsilcuLBrJs9586233nriz8aMGSP/fbJkyWSzc+dO2ZiZpUiRQjYrVqyQjec8ceDAAc9Irvc/S5Yssjlz5oxs3nnnHddM/fv3l02uXLlk4zl/ex7HzLdm8hy7Tp8+LRvPesnMbNmyZbKJjY2Vjed86T121atXTzYbN26Uzbp161zP5zFv3jzZ7N+/P86f9+vXTz6G5/ojT548sjEz69u3r2zeeOONeJnJe7z1nJdKliwpG8/38quvvvKMZD/88INsPMfnBAn0f7fn+b6Z+c6VzZo1k82lS5dk4712rFGjhmzu378vmytXrsjGc840861hYmJiZOO5hTB69GjXTJ5zVOPGjeP8+csvvywfw/N+eNYCZmYNGzaUjecc/tlnn8nGcxw0M1uzZo1sbt++LZvixYvL5tGjR66ZPMc5z2N51gLDhg1zzVSqVCnZeNZ7nutZ71rH06nzpZnZ8uXLZVO7dm3XTBkyZJDNnTt3ZOM5xnnv6Xheg7juxXz66afy37dq1Uo2hw8flo2Z2ZYtW2Rz9epV2aRJk0Y2LVq0cM3kWVN47kEdOnRINidPnnTNFBUVJZu7d+/GS6POJf/Bcy/Ks/YaPny4bBImTOgZyXVPq1u3brLZsGGDbDz3tc181ymeNczmzZtlU6tWLddMnvOO554tf1kCAAAAAAAAAACCGpslAAAAAAAAAAAgqLFZAgAAAAAAAAAAghqbJQAAAAAAAAAAIKixWQIAAAAAAAAAAIIamyUAAAAAAAAAACCosVkCAAAAAAAAAACCGpslAAAAAAAAAAAgqIV6wxQpUsjm+eefl80nn3zier7XX39dNj/++KNsnnvuOdmEhYW5ZlqwYIFsChcuLJv79+/L5vTp066ZIiIiZHP8+HHZzJgxQzblypVzzdS0aVPZ3Lt3z/VYSu/evV3dsWPHZDN48GDZlC5dWjZnz551zbR7927ZJEig9zP37dsnm6ioKNdMt2/fdnVxeeWVV2Rz8+ZN2Zw8edL1fFOmTJFNly5d4qVp2bKla6Y33nhDNrlz55ZNv379ZFO9enXPSDZy5EjZXLlyRTYvv/yybG7duuWay
fMeZ86cWTae79y4ceNcMzVr1kw2ntfpzp07sqlYsaJrpmHDhsnG8754vneNGjVyzbRlyxZX9yRnzpyRzYMHD2STLVs21/NNnjxZNmXLlpWNZy2ULFky10wvvviibGbPni2bkiVLymb+/Pmekez8+fOyOXHihGw8r/fAgQM9I1mnTp1kExsbK5uePXvKZuHChZ6RXK9Tt27dZLNz507ZeNde7777rmw836mJEyfKZurUqa6ZRo8e7eriEh4eLpvVq1fLxnPONfN9TnLkyCGbokWLymb69OmekSx58uSy8bwGZcqUkc3777/vmmnJkiWyOXTokGw85wLvOmfmzJmy2bBhg2w8x/BatWq5Zvriiy9k06tXL9l43t8PP/zQNZPnc+c5NuXNm1c2pUqVcs0UGRnp6uKSPXt22RQqVEg2nt/LzGzevHmy8Ry/OnfuLJv33nvPNVPt2rVl47k3cvDgQdnUrFnTNZPnvLN+/XrZpEmTRjaff/65aybP9e6lS5dkc/XqVdmUKFHCNZPnuiG+1h7R0dGekWzXrl2yCQ3VtxM9r7fnOtXMtw6P61y3bt06+e8rV64smwIFCsjGzGzVqlWy8XwnV65cKZuPP/7YNVP79u1l47nWzZUrl2xeffVV10x79+6VjefcvHjxYtnMmTPHNVP9+vXj5bE89+Fy5szpmmnNmjWy8dz39NyP9t4j99yvnDVrlmw6duwom3PnznlGstSpU8vGsybmL0sAAAAAAAAAAEBQY7MEAAAAAAAAAAAENTZLAAAAAAAAAABAUGOzBAAAAAAAAAAABDU2SwAAAAAAAAAAQFBjswQAAAAAAAAAAAQ1NksAAAAAAAAAAEBQY7MEAAAAAAAAAAAEtVBvePfuXdmsX79eNl27dnU938yZM2VTvnx52YSG6l/x0KFDrpkePHggm2PHjskmEAjIpnjx4q6Zli5dKpuMGTPKplKlSrLZvn27a6b06dPL5v79+7JZvXq1bJo1a+aa6fz587LJnj27bGrWrCmbjRs3umY6cOCAbCIiImRTo0YN2ezZs8c105QpU1xdXDJkyCCbrVu3yqZUqVKu5/vwww9lc/ToUdns27dPNlFRUa6ZVq1aJZtatWrJ5rXXXpPN2rVrXTPlzJlTNq+//rpsjhw5Ipt27dq5Zho2bJhssmbNKpurV6/Kpk2bNq6ZTp8+LZvSpUvLplChQrL5xz/+4Zqpbt26sjl79qxsateuLZtevXq5ZmrdurWre5J79+7JxvN7R0dHu56vc+fOskmQQP/3I55jadmyZV0zeT5rp06dkk3v3r1lM3jwYNdMns9tvnz5ZPPrr7/KZsGCBa6ZPJ/JsLAw2XheyzJlyrhmqlixomzWrFkjm4cPH8rGc5w0MwsJCZFNgQIFZOP5PHnWw2b+40lcEiVKJBvPNYpn7W1mtn//ftls2LBBNgsXLpSNZx1gZjZq1CjZlCxZUjae18C71i9RooRskiRJIpvChQvLJlWqVK6Z6tevLxvP9cCjR49k41nHmvnWQwcPHpRNpkyZZOM57pqZjR8/Xjaea8fHjx/LxvP9NTObNGmSbNRnznP+atu2rWymTZsmGzPf2tOzRvG81p65zcyKFSsmm0WLFsnGcy2/adMm10wrVqyQjWf95TmnfvbZZ56RXGvmc+fOyaZevXqyuXLlimumcuXKycZzzedZ63vusZiZZcuWTTY7duyQzYULF2QTExPjGcl69Oghmy5dujzxZ3369JH/3nPfwDOH9/k817oeDRs2dHVFihSJl+fz3O9ImjSp67EuX74sm9jYWNl4zt+e46SZ2TfffCMbz72hM2fOyCZ58uSumTz3iCdOnCgbzz1Nz7HbzDdT06ZNZXPx4kXZNGnSxDOSTZgwIV4ei78sAQAAAAAAAAAAQY3NEgAAAAAAAAAAENTYLAEAAAAAAAAAAEGNzRIAAAAAAAAAABDU2CwBAAAAAAAAAABBjc0SAAAAAAAAAAAQ1NgsAQAAAAAAA
AAAQY3NEgAAAAAAAAAAENRCvWF4eLhstmzZIpt79+65ni9PnjyySZo0qWwWLVokm+eff941048//iibd999VzYrV66UTaFChVwz9evXTzY7duyQzd27d2Xz4MEDz0h2+/Zt2SxdulQ2VapUkc3MmTNdMyVMmFA2hQsXls39+/dlc/z4cddMOXPmlI3ne9ehQwfZfPTRR66ZcuTI4ericuXKFdnUqlVLNokTJ3Y938OHD2WTIIHeF/a8t/3793fNtGnTJtn07t1bNpkzZ5bN5s2bXTN5Zj958qRsKlWqJJuvv/7aNVOnTp1k4zmv7Nq1SzanT592zZQqVSrZvPLKK7I5duyYbFq2bOkZycLCwmSzfv162SRJkkQ2TZs2dc1UrFgxV/ckyZIlk83u3btlkzFjRtfzpUyZUjae4+1PP/0kmwIFCrhmSp8+vWyKFCkiG8/3KCoqyjVTgwYNZHPnzh3ZjBgxQja5cuVyzbR161bZeNYwL730kmz69u3rmslz/p4/f75s6tatKxvv6+RZN69du1Y2sbGxsvEcJ83iZ03hOVa0bt1aNt41RfLkyWVTvXp12Sxfvlw2nnOXmdmYMWNkM23aNNlkypRJNtOnT3fN5FmfeD5v5cqVk43n/GbmOw40atRINn/88YdsMmTI4JrJs/YoX768bDzfyxUrVnhGch13Hj16JJs0adLIJiIiwjXTrVu3XF1cPNepNWvWlM2hQ4dcz+d5HU+dOiUbz+uYIkUK10zp0qWTjedc8fjxY9lER0d7RnJdg3nue6ROnVo2nvswZmZfffWVbDzHL8/n9urVq66ZqlatKhvPOsZzDfr999+7ZurVq5ds9u/fLxvPWt3zmTMzW7hwoat7Es9xwnNeGjt2rOv5Ro0aJRvP/dEKFSrIxntfbO/evbJJlCiRbHLnzi0bz30hM989iI4dO8pm4MCBskmbNq1nJCtbtqxsPPcrPWuYPXv2uGaqXLmybF599VXZeK7lPMckM7ONGzfKxnO++OWXX1zP5+F9jxX+sgQAAAAAAAAAAAQ1NksAAAAAAAAAAEBQY7MEAAAAAAAAAAAENTZLAAAAAAAAAABAUGOzBAAAAAAAAAAABDU2SwAAAAAAAAAAQFBjswQAAAAAAAAAAAQ1NksAAAAAAAAAAEBQY7MEAAAAAAAAAAAEtVBveOnSJdlUr15dNq1atXI939dffy2b+vXry+batWuymTVrlmck++abb2QTGxsrmzfffFM23333nWumyMhI2WTJkkU2Z8+elU3Hjh09I1nChAlls2bNGtls3bpVNp7fzcz3HterV082GzdulE3Lli09I9m6detks3nzZtl8/PHHslm7dq1rpuXLl8umS5cucf48WbJk8jEKFSokm5UrV8rGzOzmzZuyqVSpkmw8x7hHjx65Zjpw4IBsMmTIIJtDhw7JxnPcNTPLlCmTbMqVKyebPHnyyObgwYOume7cuSObLVu2yCZFihSyee+991wzbdu2TTanTp2STfLkyWWzePFi10w7duyQzcsvvyybixcvysbz2TXzvea5c+d+4s+OHz8u//0//vEP2bRo0UI2ZmapU6eWjef77XlfPedTM7OMGTPKJleuXLIpVqyYbDznGzPf+//48WPZeNZn69evd83kWcfFxMTIZtCgQbK5ceOGa6Zly5bJ5vXXX5eN533Jnz+/aybP8aRkyZKyKV++vGxGjBjhGcl1/H7xxRfj/PmuXbvkY7Rr1042OXPmlI2Z7xzw888/y6ZIkSKy8bzWZmbDhw+XjefYFF/XVma+z5tnPTxz5kzZ5MiRwzVT4sSJZeNZCzVr1kw2V65ccc1069Yt2YwbN042nTt3ls2zzz7rmslzPty9e7dsPK+B5z0x810XqusPz9rTs871nJvNfNe74eHhsvGcd7Nly+aayXOcmz9/vmw8c1+9etU1k+d18lx/xrWm/A+e766Z77PiuceSNm1a2YSFhTkm8l3Pe675AoGAbD7//HPPS
JYvXz7ZeNZWderUkc3ChQtdM3nWhFWrVn3iz86dOyf/vec76Vl7m5mFhurbrdHR0bLx3F/xNGa+7/eFCxdk4znH165d2zWT517NmDFjZJM+ffp4m8nzWVuwYIFsHj58KBvvWt/zWfGsmTz3vTzrYTPfeT5dunSy8XyeSpcu7ZrJe9xV+MsSAAAAAAAAAAAQ1NgsAQAAAAAAAAAAQY3NEgAAAAAAAAAAENTYLAEAAAAAAAAAAEGNzRIAAAAAAAAAABDU2CwBAAAAAAAAAABBjc0SAAAAAAAAAAAQ1NgsAQAAAAAAAAAAQS0kEAgE/uohAAAAAAAAAAAA/ir8ZQkAAAAAAAAAAAhqbJYAAAAAAAAAAICgxmYJAAAAAAAAAAAIamyWAAAAAAAAAACAoMZmCQAAAAAAAAAACGpslgAAAAAAAAAAgKDGZgkAAAAAAAAAAAhqbJYAAAAAAAAAAICgxmYJAAAAAAAAAAAIav8PYIzWKp7tXZYAAAAASUVORK5CYII=", + "text/plain": [ + "
" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "n = 6\n", + "plot_image_grid(torch.randn((n,28,28,1)), [f\"label {i}\" for i in range(n)])" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "a9d2dca8-d73f-49fc-a56c-1c148b1abeab", + "metadata": {}, + "outputs": [], + "source": [ + "#| export\n", + "def latents_to_pil(latents:torch.Tensor, channels=None):\n", + " if channels is None: \n", + " channels = latents.shape[1] if len(latents.shape) > 3 else 1\n", + " \n", + " images = scale_tensor(latents)\n", + " images = images.detach().cpu().permute(0, 2, 3, 1).numpy()\n", + " \n", + " if channels == 1: images = images[:, :, :, 0]\n", + "\n", + " images = (images * 255).round().astype(np.uint8)\n", + " \n", + " pil_images = [Image.fromarray(image) for image in images]\n", + " return pil_images " + ] + }, + { + "cell_type": "markdown", + "id": "79d6cbc5-61cc-4ed3-bf0f-93ebd7b8c742", + "metadata": {}, + "source": [ + "## Inference utils" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "f7be07ff-99cb-4d36-9b41-0fcab28cfbf0", + "metadata": {}, + "outputs": [], + "source": [ + "#| export\n", + "def set_seed(seed: int):\n", + " \"\"\"Sets a seed to pytorch, numpy and python. 
Additionally sets cuda flags.\"\"\"\n", + "\n", + " torch.manual_seed(seed)\n", + " np.random.seed(seed)\n", + " random.seed(seed)\n", + "\n", + " # see https://pytorch.org/docs/stable/notes/randomness.html\n", + " torch.backends.cudnn.benchmark = False\n", + " torch.backends.cudnn.deterministic = True\n", + " torch.use_deterministic_algorithms(True)\n", + "\n", + " # see https://docs.nvidia.com/cuda/cublas/index.html#results-reproducibility\n", + " os.environ[\"CUBLAS_WORKSPACE_CONFIG\"] = \":4096:8\"" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "fedabe68-7767-49f3-a2d8-73a2c105c64f", + "metadata": {}, + "outputs": [], + "source": [ + "#| export\n", + "def get_element_matching_indices(a: torch.Tensor, b: torch.Tensor) -> torch.Tensor:\n", + " \"\"\"Compares (2d) `a` with `b`. Returns the indices of `b`, where a element of `a` matches with `b`.\"\"\"\n", + " # Expand dimensions of a to match the shape of b for element-wise comparison\n", + " expanded_a = a.unsqueeze(0).expand(b.shape[0], *a.shape) # [b0, a0, a1]\n", + " expanded_b = b.unsqueeze(1) # [b0, 1, b1]\n", + " \n", + " # Compare all vector entries of a with all vectors of b\n", + " matches = torch.all(expanded_a == expanded_b, dim=-1)\n", + "\n", + " matching_indices = torch.nonzero(torch.any(matches, dim=1)).squeeze()\n", + " \n", + " if matching_indices.dim() == 0: matching_indices = torch.tensor([matching_indices])\n", + "\n", + " return matching_indices" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "547b1bae-d899-4b8e-8368-f1c1968c9e76", + "metadata": {}, + "outputs": [], + "source": [ + "#| export\n", + "def get_entanglement_bins(num_of_qubits: int) -> Tuple[List[List], List[str]]:\n", + " \"\"\"Returns all SRV sorted in entangle bins, corresponding to a number of entangled qubits.\"\"\"\n", + " \n", + " dist_srvs = [x for x in itertools.product(*([[1,2]]*num_of_qubits))]\n", + " dist_srvs = np.array(dist_srvs, dtype=int)[np.sum(dist_srvs, 
axis=1)!=num_of_qubits+1].tolist()\n", + " dist_srvs = sorted(dist_srvs, key=lambda x: sum(x))\n", + " dist_srvs = np.array(dist_srvs)\n", + " \n", + " entangle = [1] + [scipy.special.comb(num_of_qubits, i, exact=True) for i in range(2, num_of_qubits)]\n", + " \n", + " entanglement_bins = np.split(dist_srvs, np.cumsum(entangle))\n", + " \n", + " ent_bits = [f\"{sum(n[0])-num_of_qubits} qubit entangled\" for n in entanglement_bins]\n", + " \n", + " return [x.tolist() for x in entanglement_bins], ent_bits" + ] + }, + { + "cell_type": "markdown", + "id": "b3ea52bf-f57f-40b3-bb4b-9a1a40010088", + "metadata": {}, + "source": [ + "Print the Schmidt-rank-vector bins for 5 qubits:" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "f949cb38-42c7-48d0-bf7c-520c841cd273", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "0 qubit entangled : [[1, 1, 1, 1, 1]]\n", + "2 qubit entangled : [[1, 1, 1, 2, 2], [1, 1, 2, 1, 2], [1, 1, 2, 2, 1], [1, 2, 1, 1, 2], [1, 2, 1, 2, 1], [1, 2, 2, 1, 1], [2, 1, 1, 1, 2], [2, 1, 1, 2, 1], [2, 1, 2, 1, 1], [2, 2, 1, 1, 1]]\n", + "3 qubit entangled : [[1, 1, 2, 2, 2], [1, 2, 1, 2, 2], [1, 2, 2, 1, 2], [1, 2, 2, 2, 1], [2, 1, 1, 2, 2], [2, 1, 2, 1, 2], [2, 1, 2, 2, 1], [2, 2, 1, 1, 2], [2, 2, 1, 2, 1], [2, 2, 2, 1, 1]]\n", + "4 qubit entangled : [[1, 2, 2, 2, 2], [2, 1, 2, 2, 2], [2, 2, 1, 2, 2], [2, 2, 2, 1, 2], [2, 2, 2, 2, 1]]\n", + "5 qubit entangled : [[2, 2, 2, 2, 2]]\n" + ] + } + ], + "source": [ + "for srvs,label in zip(*get_entanglement_bins(5)):\n", + " print(label, \":\", srvs)" + ] + }, + { + "cell_type": "markdown", + "id": "82152d0d-bfac-4196-ba10-3dfe21fc705f", + "metadata": {}, + "source": [ + "# Export -" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "11d3d56b-0aba-4ccc-9299-86bf132fcc99", + "metadata": {}, + "outputs": [], + "source": [ + "#| hide\n", + "import nbdev; nbdev.nbdev_export()" + ] + } + ], + "metadata": { + "kernelspec": { 
+ "display_name": "python3", + "language": "python", + "name": "python3" + }, + "widgets": { + "application/vnd.jupyter.widget-state+json": { + "state": {}, + "version_major": 2, + "version_minor": 0 + } + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/src/webpage/api_reference.css b/src/webpage/api_reference.css new file mode 100644 index 0000000..a572149 --- /dev/null +++ b/src/webpage/api_reference.css @@ -0,0 +1,8 @@ +h2 { + border-bottom: none; +} + +#execution table, +div[id^=table-] { + margin-top: 30px; +} \ No newline at end of file diff --git a/src/webpage/api_reference.qmd b/src/webpage/api_reference.qmd new file mode 100644 index 0000000..bf106a3 --- /dev/null +++ b/src/webpage/api_reference.qmd @@ -0,0 +1,22 @@ +--- +format: + html: + code-copy: false + css: api_reference.css +listing: + - id: reference-links + template: ./links.ejs + sort: false + type: grid + contents: + - "../!(webpage|examples|.ipynb_checkpoints)/**/*.ipynb" +--- + +# Modules Overview + +The library `genQC` consists of the below listed modules and corresponding files. 
+ +::: {#reference-links} +::: + + diff --git a/src/assets/inference.png b/src/webpage/assets/inference.png similarity index 100% rename from src/assets/inference.png rename to src/webpage/assets/inference.png diff --git a/src/assets/logo.png b/src/webpage/assets/logo.png similarity index 100% rename from src/assets/logo.png rename to src/webpage/assets/logo.png diff --git a/src/webpage/assets/qft_4qubit_circuit_15s_wpause.gif b/src/webpage/assets/qft_4qubit_circuit_15s_wpause.gif new file mode 100644 index 0000000..47e804b Binary files /dev/null and b/src/webpage/assets/qft_4qubit_circuit_15s_wpause.gif differ diff --git a/src/webpage/custom.scss b/src/webpage/custom.scss new file mode 100644 index 0000000..d352792 --- /dev/null +++ b/src/webpage/custom.scss @@ -0,0 +1,242 @@ +//----------------------------- +/*-- scss:defaults --*/ + + +// Fonts +@import url('https://fonts.googleapis.com/css2?family=Lexend:wght@100..900&family=Ubuntu+Mono:ital,wght@0,400;0,700;1,400;1,700&display=swap'); + +$ubuntu_mono: "Ubuntu Mono", serif; +$lexend: "Lexend", serif; +$academicons: "Academicons"; + +// Color system +$dark-green: #858E79; +$light-green: #D1D9CE; +$cream: #FDFBF7; +$gray: #64605f; +$purple: #9158A2; +$orange: #ad7237; +$yellow: #fcee63; +$darkblue: #053660; +$teal: #147C91; +$darkgray: #333333; + +// Quarto Sass Variables + +//$h2-font-size: 1.6rem !default; +//$headings-font-weight: 500 !default; + +$link-color: #d9230f; // hyperlinks + +$background-clr-page: #3b3f57; /*linear-gradient(135deg, #2b2d42, #1c1e29);*/ +$background-clr-navbar: #201232; + +$dark-hl-clr: #eb8a80; +$text-clr: #ffffff; + +$navbar-bg: $background-clr-navbar; +$navbar-fg: $text-clr; +$navbar-hl: $dark-hl-clr; + +// $sidebar-bg: #dde8eabd; +$sidebar-fg: #674ea7; +/* $sidebar-hl: $dark-hl-clr; */ + +//----------------------------- +// CSS goes here +/*-- scss:rules --*/ + +/*h1, h2, h3, h4, h5, h6 { + font-family: $ubuntu_mono; +}*/ + +h3, h4, h5, h6 { + color: $teal; +} + +p, ul, ol { + 
font-family: $lexend; + font-weight: 300; +} + +a { //, .sourceCode, code + font-family: $ubuntu_mono; +} + +/*-- Fixes wrong links on floating source links inside API docs--*/ +a { + z-index: 10; + position: relative; +} + + +.listing-description { + font-family: $lexend; + font-weight: 300; +} + +.quarto-listing-category .category.active { + color: $link-color; +} + + +.quarto-grid-item .card-title { + font-size: 130%; + font-weight: bold; + margin-bottom: 20px; +} + +#quarto-sidebar { + border-right: 2px solid #c5c6c7; +} + +.quarto-grid-item.card-left:hover { + box-shadow: 0 8px 16px 0 rgba(99, 42, 110, 0.245); + transform: scale(1.05); +} + +.quarto-grid-item p.card-img-top>img { + object-fit: contain; +} + +.quarto-grid-item.card-left { + border-style: solid; + border-width: 4px; + border-radius: 25px; + background-color: rgb(229 229 229 / 29%); +} + +.button-styling { + background: $teal; + color: $darkblue; + border-color: $darkgray; +} + +.sidebar-header-item { + font-size: 200%; + font-weight: 400; +} + +.sidebar-menu-container * { + font-weight: 400; +} + +/*.sidebar-item-container { + font-size: 110%; + font-family: $lexend; + +}*/ + +.sidebar-item-container .active { + box-shadow: inset 5px 0px 0 0 #d9230f; +} + +.sidebar-item-text.sidebar-link{ + font-family: $lexend; + font-size: 100%; + margin-left: 8px; + padding-left: 8px; + padding-bottom: 2px; + padding-top: 2px; +} + +.sidebar-item-text.sidebar-link.text-start{ + font-family: $lexend; + font-size: 105%; + margin-left: 0px; + margin-top: 0px; + color: $teal; +} + +.menu-text { + font-family: $lexend; +} + +.nav-link.active { + font-weight: 350; +} + +.navbar { + border-radius: 0px; +} + +.navbar-brand img { + max-height: 45px; + width: auto; + padding-right: 0px; +} + +.navbar-brand.navbar-brand-logo { + margin-right: 10px; +} + +.navbar-logo { + background: #4B555B; + border-radius: 8px; +} + +.navbar-title { + font-size: 110%; + font-family: $lexend; + font-weight: 400; + background: 
linear-gradient(240.01deg, #4B555B 15.29%, rgba(67, 78, 84, 0) 138.75%); + padding: 4px 10px; + border-radius: 8px; +} + +.nav-item { + font-size: 120%; + margin-left: 10px; + font-weight: 300; +} + +.px-0 { + padding-top: 10px; +} + +#toc-title{ + font-size: 110%; +} + +.toc-active * { + font-size: 100%; + +} + +.toc-actions li{ + font-size: 120%; +} + + +#quarto-back-to-top { + border-radius: 15px; +} + +#quarto-back-to-top:hover { + box-shadow: 0 8px 16px 0 rgba(99, 42, 110, 0.245); + color: $link-color; +} + + +//----------------------------- +// fOR THE ARCHIVE BUTTONS USE THIS + + +/* .hero-buttons */ +/* { */ + /* position: absolute; */ + /* border: black 1px solid; */ + /* top: 100px; */ + /* left:100px; */ + /* width: 10px; */ + /* height: 10px; */ +/* } */ + +/* .hero-buttons:hover{ */ + /* width: 30px; */ + /* height: 30px; */ + /* margin-left:-10px; */ + /* margin-top:-10px; */ +/* } */ + + diff --git a/src/webpage/fonts/academicons/css/academicons.css b/src/webpage/fonts/academicons/css/academicons.css new file mode 100644 index 0000000..233678b --- /dev/null +++ b/src/webpage/fonts/academicons/css/academicons.css @@ -0,0 +1,571 @@ +/* + * Academicons 1.9.4 by James Walsh (https://github.com/jpswalsh) and Katja Bercic (https://github.com/katjabercic) + * Fonts generated using FontForge - https://fontforge.org + * Square icons designed to be used alongside Font Awesome square icons - https://fortawesome.github.io/Font-Awesome/ + * Licenses - Font: SIL OFL 1.1, CSS: MIT License + */ +@font-face { + font-family: 'Academicons'; + font-style: normal; + font-weight: 400; + font-display: block; + src:url('../fonts/academicons.eot'); + src:url('../fonts/academicons.eot') format('embedded-opentype'), + url('../fonts/academicons.ttf') format('truetype'), + url('../fonts/academicons.woff') format('woff'), + url('../fonts/academicons.svg') format('svg'); +} +.ai { + font-family: 'Academicons'; + font-weight: 400; + -moz-osx-font-smoothing: grayscale; + 
-webkit-font-smoothing: antialiased; + display: inline-block; + font-style: normal; + font-variant: normal; + text-rendering: auto; + line-height: 1; +} +.ai-academia:before { + content: "\e9af"; +} +.ai-academia-square:before { + content: "\e93d"; +} +.ai-acclaim:before { + content: "\e92e"; +} +.ai-acclaim-square:before { + content: "\e93a"; +} +.ai-acm:before { + content: "\e93c"; +} +.ai-acm-square:before { + content: "\e95d"; +} +.ai-acmdl:before { + content: "\e96a"; +} +.ai-acmdl-square:before { + content: "\e9d3"; +} +.ai-ads:before { + content: "\e9cb"; +} +.ai-ads-square:before { + content: "\e94a"; +} +.ai-africarxiv:before { + content: "\e91b"; +} +.ai-africarxiv-square:before { + content: "\e90b"; +} +.ai-archive:before { + content: "\e955"; +} +.ai-archive-square:before { + content: "\e956"; +} +.ai-arxiv:before { + content: "\e974"; +} +.ai-arxiv-square:before { + content: "\e9a6"; +} +.ai-biorxiv:before { + content: "\e9a2"; +} +.ai-biorxiv-square:before { + content: "\e98b"; +} +.ai-ceur:before { + content: "\e96d"; +} +.ai-ceur-square:before { + content: "\e92f"; +} +.ai-ciencia-vitae:before { + content: "\e912"; +} +.ai-ciencia-vitae-square:before { + content: "\e913"; +} +.ai-clarivate:before { + content: "\e924"; +} +.ai-clarivate-square:before { + content: "\e925"; +} +.ai-closed-access:before { + content: "\e942"; +} +.ai-closed-access-square:before { + content: "\e943"; +} +.ai-conversation:before { + content: "\e94c"; +} +.ai-conversation-square:before { + content: "\e915"; +} +.ai-coursera:before { + content: "\e95f"; +} +.ai-coursera-square:before { + content: "\e97f"; +} +.ai-crossref:before { + content: "\e918"; +} +.ai-crossref-square:before { + content: "\e919"; +} +.ai-cv:before { + content: "\e9a5"; +} +.ai-cv-square:before { + content: "\e90a"; +} +.ai-datacite:before { + content: "\e91c"; +} +.ai-datacite-square:before { + content: "\e91d"; +} +.ai-dataverse:before { + content: "\e9f7"; +} +.ai-dataverse-square:before { + content: 
"\e9e4"; +} +.ai-dblp:before { + content: "\e94f"; +} +.ai-dblp-square:before { + content: "\e93f"; +} +.ai-depsy:before { + content: "\e97a"; +} +.ai-depsy-square:before { + content: "\e94b"; +} +.ai-doi:before { + content: "\e97e"; +} +.ai-doi-square:before { + content: "\e98f"; +} +.ai-dryad:before { + content: "\e97c"; +} +.ai-dryad-square:before { + content: "\e98c"; +} +.ai-elsevier:before { + content: "\e961"; +} +.ai-elsevier-square:before { + content: "\e910"; +} +.ai-figshare:before { + content: "\e981"; +} +.ai-figshare-square:before { + content: "\e9e7"; +} +.ai-google-scholar:before { + content: "\e9d4"; +} +.ai-google-scholar-square:before { + content: "\e9f9"; +} +.ai-hal:before { + content: "\e92c"; +} +.ai-hal-square:before { + content: "\e92d"; +} +.ai-hypothesis:before { + content: "\e95a"; +} +.ai-hypothesis-square:before { + content: "\e95b"; +} +.ai-ideas-repec:before { + content: "\e9ed"; +} +.ai-ideas-repec-square:before { + content: "\e9f8"; +} +.ai-ieee:before { + content: "\e929"; +} +.ai-ieee-square:before { + content: "\e9b9"; +} +.ai-impactstory:before { + content: "\e9cf"; +} +.ai-impactstory-square:before { + content: "\e9aa"; +} +.ai-inaturalist:before { + content: "\e900"; +} +.ai-inaturalist-square:before { + content: "\e901"; +} +.ai-inpn:before { + content: "\e902"; +} +.ai-inpn-square:before { + content: "\e903"; +} +.ai-inspire:before { + content: "\e9e9"; +} +.ai-inspire-square:before { + content: "\e9fe"; +} +.ai-isidore:before { + content: "\e936"; +} +.ai-isidore-square:before { + content: "\e954"; +} +.ai-isni:before { + content: "\e957"; +} +.ai-isni-square:before { + content: "\e958"; +} +.ai-jstor:before { + content: "\e938"; +} +.ai-jstor-square:before { + content: "\e944"; +} +.ai-lattes:before { + content: "\e9b3"; +} +.ai-lattes-square:before { + content: "\e99c"; +} +.ai-mathoverflow:before { + content: "\e9f6"; +} +.ai-mathoverflow-square:before { + content: "\e97b"; +} +.ai-mendeley:before { + content: "\e9f0"; 
+} +.ai-mendeley-square:before { + content: "\e9f3"; +} +.ai-moodle:before { + content: "\e907"; +} +.ai-moodle-square:before { + content: "\e908"; +} +.ai-mtmt:before { + content: "\e950"; +} +.ai-mtmt-square:before { + content: "\e951"; +} +.ai-nakala:before { + content: "\e940"; +} +.ai-nakala-square:before { + content: "\e941"; +} +.ai-obp:before { + content: "\e92a"; +} +.ai-obp-square:before { + content: "\e92b"; +} +.ai-open-access:before { + content: "\e939"; +} +.ai-open-access-square:before { + content: "\e9f4"; +} +.ai-open-data:before { + content: "\e966"; +} +.ai-open-data-square:before { + content: "\e967"; +} +.ai-open-materials:before { + content: "\e968"; +} +.ai-open-materials-square:before { + content: "\e969"; +} +.ai-openedition:before { + content: "\e946"; +} +.ai-openedition-square:before { + content: "\e947"; +} +.ai-orcid:before { + content: "\e9d9"; +} +.ai-orcid-square:before { + content: "\e9c3"; +} +.ai-osf:before { + content: "\e9ef"; +} +.ai-osf-square:before { + content: "\e931"; +} +.ai-overleaf:before { + content: "\e914"; +} +.ai-overleaf-square:before { + content: "\e98d"; +} +.ai-philpapers:before { + content: "\e98a"; +} +.ai-philpapers-square:before { + content: "\e96f"; +} +.ai-piazza:before { + content: "\e99a"; +} +.ai-piazza-square:before { + content: "\e90c"; +} +.ai-preregistered:before { + content: "\e906"; +} +.ai-preregistered-square:before { + content: "\e96b"; +} +.ai-protocols:before { + content: "\e952"; +} +.ai-protocols-square:before { + content: "\e953"; +} +.ai-psyarxiv:before { + content: "\e90e"; +} +.ai-psyarxiv-square:before { + content: "\e90f"; +} +.ai-publons:before { + content: "\e937"; +} +.ai-publons-square:before { + content: "\e94e"; +} +.ai-pubmed:before { + content: "\e99f"; +} +.ai-pubmed-square:before { + content: "\e97d"; +} +.ai-pubpeer:before { + content: "\e922"; +} +.ai-pubpeer-square:before { + content: "\e923"; +} +.ai-researcherid:before { + content: "\e91a"; +} 
+.ai-researcherid-square:before { + content: "\e95c"; +} +.ai-researchgate:before { + content: "\e95e"; +} +.ai-researchgate-square:before { + content: "\e99e"; +} +.ai-ror:before { + content: "\e948"; +} +.ai-ror-square:before { + content: "\e949"; +} +.ai-sci-hub:before { + content: "\e959"; +} +.ai-sci-hub-square:before { + content: "\e905"; +} +.ai-scirate:before { + content: "\e98e"; +} +.ai-scirate-square:before { + content: "\e99d"; +} +.ai-scopus:before { + content: "\e91e"; +} +.ai-scopus-square:before { + content: "\e91f"; +} +.ai-semantic-scholar:before { + content: "\e96e"; +} +.ai-semantic-scholar-square:before { + content: "\e96c"; +} +.ai-springer:before { + content: "\e928"; +} +.ai-springer-square:before { + content: "\e99b"; +} +.ai-ssrn:before { + content: "\e916"; +} +.ai-ssrn-square:before { + content: "\e917"; +} +.ai-stackoverflow:before { + content: "\e920"; +} +.ai-stackoverflow-square:before { + content: "\e921"; +} +.ai-viaf:before { + content: "\e933"; +} +.ai-viaf-square:before { + content: "\e934"; +} +.ai-wiley:before { + content: "\e926"; +} +.ai-wiley-square:before { + content: "\e927"; +} +.ai-zenodo:before { + content: "\e911"; +} +.ai-zotero:before { + content: "\e962"; +} +.ai-zotero-square:before { + content: "\e932"; +} +/* Duplication of the FontAwesome style classes using 'ai' in place of 'fa'. 
*/ +.ai-lg { + font-size: 1.33333em; + line-height: 0.75em; + vertical-align: -.0667em; +} +.ai-xs { + font-size: .75em; +} +.ai-sm { + font-size: .875em; +} + +.ai-1x { + font-size: 1em; +} +.ai-2x { + font-size: 2em; +} +.ai-3x { + font-size: 3em; +} +.ai-4x { + font-size: 4em; +} +.ai-5x { + font-size: 5em; +} +.ai-6x { + font-size: 6em; +} +.ai-7x { + font-size: 7em; +} +.ai-8x { + font-size: 8em; +} +.ai-9x { + font-size: 9em; +} +.ai-10x { + font-size: 10em; +} + +.ai-fw { + text-align: center; + width: 1.25em; +} + +.ai-ul { + list-style-type: none; + margin-left: 2.5em; + padding-left: 0; +} +.ai-ul > li { + position: relative; +} +.ai-li { + left: -2em; + position: absolute; + text-align: center; + width: 2em; + line-height: inherit; +} + +.ai-border { +border: solid 0.08em #eee; +border-radius: .1em; + padding: .2em .25em .15em; +} + +.ai-pull-left { + float: left; +} +.ai-pull-right { + float: right; +} +.ai.ai-pull-left { + margin-right: .3em; +} +.ai.ai-pull-right { + margin-right: .3em; +} + +.ai-stack { + display: inline-block; + height: 2em; + line-height: 2em; + position: relative; + vertical-align: middle; + width: 2.5em; +} +.ai-stack-1x, +.ai-stack-2x { + left: 0; + position: absolute; + text-align: center; + width: 100%; +} +.ai-stack-1x { + line-height: inherit; +} +.ai-stack-2x { + font-size: 2em; +} +.ai-inverse { + color: #fff; +} diff --git a/src/webpage/fonts/academicons/css/academicons.min.css b/src/webpage/fonts/academicons/css/academicons.min.css new file mode 100644 index 0000000..29a4f84 --- /dev/null +++ b/src/webpage/fonts/academicons/css/academicons.min.css @@ -0,0 +1 @@ + @font-face {font-family: 'Academicons';font-style: normal;font-weight: 400;font-display: block;src:url('../fonts/academicons.eot');src:url('../fonts/academicons.eot') format('embedded-opentype'), url('../fonts/academicons.ttf') format('truetype'), url('../fonts/academicons.woff') format('woff'), url('../fonts/academicons.svg') format('svg');}.ai {font-family: 
'Academicons';font-weight: 400;-moz-osx-font-smoothing: grayscale;-webkit-font-smoothing: antialiased;display: inline-block;font-style: normal;font-variant: normal;text-rendering: auto;line-height: 1;}.ai-academia:before {content: "\e9af";}.ai-academia-square:before {content: "\e93d";}.ai-acclaim:before {content: "\e92e";}.ai-acclaim-square:before {content: "\e93a";}.ai-acm:before {content: "\e93c";}.ai-acm-square:before {content: "\e95d";}.ai-acmdl:before {content: "\e96a";}.ai-acmdl-square:before {content: "\e9d3";}.ai-ads:before {content: "\e9cb";}.ai-ads-square:before {content: "\e94a";}.ai-africarxiv:before {content: "\e91b";}.ai-africarxiv-square:before {content: "\e90b";}.ai-archive:before {content: "\e955";}.ai-archive-square:before {content: "\e956";}.ai-arxiv:before {content: "\e974";}.ai-arxiv-square:before {content: "\e9a6";}.ai-biorxiv:before {content: "\e9a2";}.ai-biorxiv-square:before {content: "\e98b";}.ai-ceur:before {content: "\e96d";}.ai-ceur-square:before {content: "\e92f";}.ai-ciencia-vitae:before {content: "\e912";}.ai-ciencia-vitae-square:before {content: "\e913";}.ai-clarivate:before {content: "\e924";}.ai-clarivate-square:before {content: "\e925";}.ai-closed-access:before {content: "\e942";}.ai-closed-access-square:before {content: "\e943";}.ai-conversation:before {content: "\e94c";}.ai-conversation-square:before {content: "\e915";}.ai-coursera:before {content: "\e95f";}.ai-coursera-square:before {content: "\e97f";}.ai-crossref:before {content: "\e918";}.ai-crossref-square:before {content: "\e919";}.ai-cv:before {content: "\e9a5";}.ai-cv-square:before {content: "\e90a";}.ai-datacite:before {content: "\e91c";}.ai-datacite-square:before {content: "\e91d";}.ai-dataverse:before {content: "\e9f7";}.ai-dataverse-square:before {content: "\e9e4";}.ai-dblp:before {content: "\e94f";}.ai-dblp-square:before {content: "\e93f";}.ai-depsy:before {content: "\e97a";}.ai-depsy-square:before {content: "\e94b";}.ai-doi:before {content: 
"\e97e";}.ai-doi-square:before {content: "\e98f";}.ai-dryad:before {content: "\e97c";}.ai-dryad-square:before {content: "\e98c";}.ai-elsevier:before {content: "\e961";}.ai-elsevier-square:before {content: "\e910";}.ai-figshare:before {content: "\e981";}.ai-figshare-square:before {content: "\e9e7";}.ai-google-scholar:before {content: "\e9d4";}.ai-google-scholar-square:before {content: "\e9f9";}.ai-hal:before {content: "\e92c";}.ai-hal-square:before {content: "\e92d";}.ai-hypothesis:before {content: "\e95a";}.ai-hypothesis-square:before {content: "\e95b";}.ai-ideas-repec:before {content: "\e9ed";}.ai-ideas-repec-square:before {content: "\e9f8";}.ai-ieee:before {content: "\e929";}.ai-ieee-square:before {content: "\e9b9";}.ai-impactstory:before {content: "\e9cf";}.ai-impactstory-square:before {content: "\e9aa";}.ai-inaturalist:before {content: "\e900";}.ai-inaturalist-square:before {content: "\e901";}.ai-inpn:before {content: "\e902";}.ai-inpn-square:before {content: "\e903";}.ai-inspire:before {content: "\e9e9";}.ai-inspire-square:before {content: "\e9fe";}.ai-isidore:before {content: "\e936";}.ai-isidore-square:before {content: "\e954";}.ai-isni:before {content: "\e957";}.ai-isni-square:before {content: "\e958";}.ai-jstor:before {content: "\e938";}.ai-jstor-square:before {content: "\e944";}.ai-lattes:before {content: "\e9b3";}.ai-lattes-square:before {content: "\e99c";}.ai-mathoverflow:before {content: "\e9f6";}.ai-mathoverflow-square:before {content: "\e97b";}.ai-mendeley:before {content: "\e9f0";}.ai-mendeley-square:before {content: "\e9f3";}.ai-moodle:before {content: "\e907";}.ai-moodle-square:before {content: "\e908";}.ai-mtmt:before {content: "\e950";}.ai-mtmt-square:before {content: "\e951";}.ai-nakala:before {content: "\e940";}.ai-nakala-square:before {content: "\e941";}.ai-obp:before {content: "\e92a";}.ai-obp-square:before {content: "\e92b";}.ai-open-access:before {content: "\e939";}.ai-open-access-square:before {content: "\e9f4";}.ai-open-data:before 
{content: "\e966";}.ai-open-data-square:before {content: "\e967";}.ai-open-materials:before {content: "\e968";}.ai-open-materials-square:before {content: "\e969";}.ai-openedition:before {content: "\e946";}.ai-openedition-square:before {content: "\e947";}.ai-orcid:before {content: "\e9d9";}.ai-orcid-square:before {content: "\e9c3";}.ai-osf:before {content: "\e9ef";}.ai-osf-square:before {content: "\e931";}.ai-overleaf:before {content: "\e914";}.ai-overleaf-square:before {content: "\e98d";}.ai-philpapers:before {content: "\e98a";}.ai-philpapers-square:before {content: "\e96f";}.ai-piazza:before {content: "\e99a";}.ai-piazza-square:before {content: "\e90c";}.ai-preregistered:before {content: "\e906";}.ai-preregistered-square:before {content: "\e96b";}.ai-protocols:before {content: "\e952";}.ai-protocols-square:before {content: "\e953";}.ai-psyarxiv:before {content: "\e90e";}.ai-psyarxiv-square:before {content: "\e90f";}.ai-publons:before {content: "\e937";}.ai-publons-square:before {content: "\e94e";}.ai-pubmed:before {content: "\e99f";}.ai-pubmed-square:before {content: "\e97d";}.ai-pubpeer:before {content: "\e922";}.ai-pubpeer-square:before {content: "\e923";}.ai-researcherid:before {content: "\e91a";}.ai-researcherid-square:before {content: "\e95c";}.ai-researchgate:before {content: "\e95e";}.ai-researchgate-square:before {content: "\e99e";}.ai-ror:before {content: "\e948";}.ai-ror-square:before {content: "\e949";}.ai-sci-hub:before {content: "\e959";}.ai-sci-hub-square:before {content: "\e905";}.ai-scirate:before {content: "\e98e";}.ai-scirate-square:before {content: "\e99d";}.ai-scopus:before {content: "\e91e";}.ai-scopus-square:before {content: "\e91f";}.ai-semantic-scholar:before {content: "\e96e";}.ai-semantic-scholar-square:before {content: "\e96c";}.ai-springer:before {content: "\e928";}.ai-springer-square:before {content: "\e99b";}.ai-ssrn:before {content: "\e916";}.ai-ssrn-square:before {content: "\e917";}.ai-stackoverflow:before {content: 
"\e920";}.ai-stackoverflow-square:before {content: "\e921";}.ai-viaf:before {content: "\e933";}.ai-viaf-square:before {content: "\e934";}.ai-wiley:before {content: "\e926";}.ai-wiley-square:before {content: "\e927";}.ai-zenodo:before {content: "\e911";}.ai-zotero:before {content: "\e962";}.ai-zotero-square:before {content: "\e932";}.ai-lg {font-size: 1.33333em;line-height: 0.75em;vertical-align: -.0667em;}.ai-xs {font-size: .75em;}.ai-sm {font-size: .875em;}.ai-1x {font-size: 1em;}.ai-2x {font-size: 2em;}.ai-3x {font-size: 3em;}.ai-4x {font-size: 4em;}.ai-5x {font-size: 5em;}.ai-6x {font-size: 6em;}.ai-7x {font-size: 7em;}.ai-8x {font-size: 8em;}.ai-9x {font-size: 9em;}.ai-10x {font-size: 10em;}.ai-fw {text-align: center;width: 1.25em;}.ai-ul {list-style-type: none;margin-left: 2.5em;padding-left: 0;}.ai-ul > li {position: relative;}.ai-li {left: -2em;position: absolute;text-align: center;width: 2em;line-height: inherit;}.ai-border {border: solid 0.08em #eee;border-radius: .1em;padding: .2em .25em .15em;}.ai-pull-left {float: left;}.ai-pull-right {float: right;}.ai.ai-pull-left {margin-right: .3em;}.ai.ai-pull-right {margin-right: .3em;}.ai-stack {display: inline-block;height: 2em;line-height: 2em;position: relative;vertical-align: middle;width: 2.5em;}.ai-stack-1x, .ai-stack-2x {left: 0;position: absolute;text-align: center;width: 100%;}.ai-stack-1x {line-height: inherit;}.ai-stack-2x {font-size: 2em;}.ai-inverse {color: #fff;} diff --git a/src/webpage/fonts/academicons/fonts/academicons.eot b/src/webpage/fonts/academicons/fonts/academicons.eot new file mode 100644 index 0000000..37ae5e3 Binary files /dev/null and b/src/webpage/fonts/academicons/fonts/academicons.eot differ diff --git a/src/webpage/fonts/academicons/fonts/academicons.svg b/src/webpage/fonts/academicons/fonts/academicons.svg new file mode 100644 index 0000000..d72201d --- /dev/null +++ b/src/webpage/fonts/academicons/fonts/academicons.svg @@ -0,0 +1,1859 @@ + + + + + +Created by FontForge 20190801 
at Thu Jun 1 11:28:32 2023 + By Nicolas + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/src/webpage/fonts/academicons/fonts/academicons.ttf b/src/webpage/fonts/academicons/fonts/academicons.ttf new file mode 100644 index 0000000..ca7b48c Binary files /dev/null and b/src/webpage/fonts/academicons/fonts/academicons.ttf differ diff --git a/src/webpage/fonts/academicons/fonts/academicons.woff b/src/webpage/fonts/academicons/fonts/academicons.woff new file mode 100644 index 0000000..9d631dd Binary files /dev/null and b/src/webpage/fonts/academicons/fonts/academicons.woff differ diff --git a/src/webpage/index.scss b/src/webpage/index.scss new file mode 100644 index 0000000..e8639d6 --- /dev/null +++ b/src/webpage/index.scss @@ -0,0 +1,343 @@ +//----------------------------- +/*-- scss:defaults --*/ + +// Color system +$background-clr-page: #3b3f57; /*linear-gradient(135deg, #2b2d42, #1c1e29);*/ +$background-clr-navbar: #201232; + +$dark-hl-clr: #eb8a80; +// $text-clr: #e0f7fa; +$text-clr: #ffffff; + +// nice purple to test #800080 + +$navbar-bg: $background-clr-navbar; +$navbar-fg: $text-clr; +$navbar-hl: $dark-hl-clr; + +$feature-header-clr: #5de4ff; + + +$arrow-box-clr-bg: rgba(128, 81, 139, 0.52); //#88b7d5; +$arrow-box-clr-ed: rgba(128, 81, 139, 0.00); //#c2e1f5; + + +//----------------------------- +// CSS goes here +/*-- scss:rules --*/ + +body { + /* margin: 10px; */ + /* font-family: Arial, sans-serif; */ + background: $background-clr-page; + color: $text-clr; + /* width: 100vw; */ +} + +#title-block-header { + display: none; +} + +.navbar { + /* border-width: 0px; */ + /* border-style: none; */ + /* border-color: linear-gradient(135deg, #6a0572, #00bcd4); */ + /* background: 
$background-clt-navbar; linear-gradient(135deg, #6a0572, #00bcd4);*/ + /* color: $text-clr; */ +} + +.hero-banner { + margin-top: 3%; + display: flex; + flex-direction: column; + align-items: center; + justify-content: center; + text-align: center; + /* height: 400px; */ + /* width: 100vw; */ + height: 60%; + /* background: linear-gradient(135deg, #6a0572, #00bcd4); */ + background: linear-gradient(110deg, rgb(59, 63, 87) 10%, rgb(128 81 139 / 52%) 40%, rgb(59, 63, 87) 90%); + // color: #ffffff; + position: relative; +} + +.hero-banner h1 { + font-size: 3rem; + margin: 0; + text-shadow: 2px 2px 5px rgba(0, 0, 0, 0.5); +} + +.hero-banner p { + font-size: 1.5rem; + margin: 10px 0 20px; + text-shadow: 1px 1px 3px rgba(0, 0, 0, 0.3); +} + +.features { + display: flex; + justify-content: center; + flex-wrap: wrap; + gap: 20px; + padding: 20px; +} + +.feature { + background: rgba(255, 255, 255, 0.1); + border-radius: 10px; + padding: 20px; + width: 400px; + text-align: center; + box-shadow: 0 4px 6px rgba(0, 0, 0, 0.2); + transition: transform 0.3s; +} + +.feature:hover { + transform: translateY(-10px); +} + +.feature h3 { + color: $feature-header-clr; + margin-bottom: 23px; +} + +.animated-svg { + position: absolute; + bottom: 20px; + width: 200px; + height: 200px; + animation: float 4s infinite ease-in-out; +} + +@keyframes float { + 0%, 100% { + transform: translateY(0); + } + 50% { + transform: translateY(-10px); + } +} + +.hero-banner .hero-buttons { + display: flex; + gap: 40px; +} + +.hero-banner .hero-buttons .btn { + padding: 10px 20px; + font-size: 1rem; + font-weight: bold; + color: #fff; + background-color: #5a20cb; + border: none; + border-radius: 5px; + cursor: pointer; + transition: transform 0.3s, background-color 0.3s; + margin: 5px; +} + +.hero-banner .hero-buttons .btn:hover { + background-color: #1cb5e0; + transform: scale(1.05); +} + +.container .btn { + padding: 10px 20px; + font-size: 1rem; + font-weight: bold; + color: #fff; + background-color: 
#5a20cb; + border: none; + border-radius: 5px; + cursor: pointer; + transition: transform 0.3s, background-color 0.3s; +} + +.container .btn:hover { + background-color: #1cb5e0; + transform: scale(1.05); +} + +//----------------------------- + +.container { + display: flex; + justify-content: space-between; + align-items: flex-start; + max-width: 1267px; + margin: 0 auto; + margin-top: 30px; + margin-bottom: 30px; +} + +.left { + flex: 1; + padding-right: 30px; + max-width: 500px; +} + +.left h1 { + font-size: 2.5em; + margin-bottom: 10px; + margin-top: 0rem; +} + +.left h1 span { + color: #7bd3ef; +} + +.left p { + font-size: 1.1em; + line-height: 1.6; + margin-bottom: 20px; +} + +/* +.tutorial-btn { + background-color: white; + border: 1px solid #ccc; + color: #333; + padding: 10px 20px; + font-size: 1em; + border-radius: 8px; + cursor: pointer; +}*/ + +.right { + flex: 1; + background: #ffffff; + color: #000000; + padding-top: 5px; + padding-bottom: 5px; + padding-right: 20px; + padding-left: 20px; + border-radius: 12px; + // font-family: monospace; + box-shadow: 0 2px 12px rgba(0, 0, 0, 0.1); + position: relative; +} + +.code-block { + background: #ffffff; + color: #000000; + padding: 20px; + border-radius: 8px; + //overflow-x: auto; + //max-height: 500px; + // font-size: 0.9em; + //white-space: pre-wrap; + // line-height: 1.4; +} + +.run-btn { + margin-top: 10px; + background-color: #20232a; + color: white; + border: none; + padding: 12px 18px; + font-size: 1em; + border-radius: 8px; + cursor: pointer; +} + +.left .left-grid { + display: flex; + flex-wrap: nowrap; + gap: 10px; + padding: 0px; + justify-content: flex-start; + align-items: center; + flex-direction: row; +} + +.left .left-grid-text { + margin-top: 0px; +} + +.left .left-grid-text p { + margin: auto; +} + +.left .left-grid-src { + margin: 0px; +} + +.left .sourceCode { + background: #ffffff; + color: #000000; + max-width: 185px; + float: right; + position: relative; + min-width: 180px; + margin: 
0px; +} + +.left .sourceCode * { + //color: #000000; + color: $link-color; + padding-left: 3px; +} + +//----------------------------- + +.figure { + display: flex; + align-items: center; + justify-content: center; + // height: 100vh; +} + +.figure .arrow { + width: 0; + height: 0; + border-top: 30px solid transparent; + border-bottom: 30px solid transparent; + border-left: 40px solid red; + margin: 0 10px; +} + + + +.arrow_box { + position: relative; + background: $arrow-box-clr-bg; + border: 5px solid $arrow-box-clr-ed; + border-radius: 10px; + margin-right: 2em; + // background: rgb(59, 63, 87); + padding: 8px; + // width: 400px; + text-align: center; + // box-shadow: 0 4px 6px rgba(0, 0, 0, 0.2); + // transition: transform 0.3s; +} + +.arrow_box h3 { + margin-top: 2rem; + margin-bottom: 2rem; + color: #ffffff; +} + +.arrow_box:after, .arrow_box:before { + left: 104%; + top: 50%; + border: solid transparent; + content: ""; + height: 0; + width: 0; + position: absolute; + pointer-events: none; +} + +.arrow_box:after { + border-color: rgba(136, 183, 213, 0); + border-left-color: $arrow-box-clr-bg; + border-width: 40px; + margin-top: -40px; +} +.arrow_box:before { + border-color: rgba(194, 225, 245, 0); + border-left-color: $arrow-box-clr-ed; + border-width: 47px; + margin-top: -47px; +} \ No newline at end of file diff --git a/src/webpage/links.ejs b/src/webpage/links.ejs new file mode 100644 index 0000000..83c976b --- /dev/null +++ b/src/webpage/links.ejs @@ -0,0 +1,91 @@ +<% + function myCallback({ outputHref }) + { + return outputHref.split("/")[0]; + } + + const groupitems = Object.groupBy(items, myCallback); +%> + +<%/* for (const [key, value] of Object.entries(groupitems) ){ + console.log(`${key}: ${value}`); + } +*/%> + +<%/* for (const [key, value] of Object.entries(groupitems) ){ + for (const item of value ){ + console.log(item); + } + } +*/%> + +::: {.link-cards} + +<% for (const [key, value] of Object.entries(groupitems) ){ %> + +::: {.card} +::: 
{.card-body} + +::: {.card-title} +<% let card_title = key.charAt(0).toUpperCase() + key.slice(1); %> +#### <%- card_title %> + +::: {.card-subtitle .text-muted} +<% + let website_meta = value[0]["website"]; + let module_link = website_meta["repo-url"] + "/tree/" + website_meta["repo-branch"] + "/" + website_meta["title"] + "/" + key; +%> +[`genQC.<%- key %>`](<%- module_link %>) +::: +::: + + + +<% + function myCallback2({ outputHref }) + { + let s = outputHref.split("/"); + + if (s.length < 3) { + return "000"; + } + + return s[1]; + } + + const groupitems2 = Object.groupBy(value, myCallback2); + var keys2 = Object.keys(groupitems2).sort(); +%> + +<% for (const key2 of keys2 ) { + var value2 = groupitems2[key2]; + var module_link2 = module_link + "/" + key2; +%> + +<% if (key2 != "000") { %> +::: {.card-title} +::: {.card-subtitle .text-muted} +[`genQC.<%- key %>.<%- key2 %>`](<%- module_link2 %>) +::: +::: +<% } %> + +<% for (const item of value2 ){ %> +- [<%- item.title %>](<%- item.path %>)<% } %> + +<% } %> + +::: +::: + +<% } %> + +::: + + + + + diff --git a/src/webpage/research.qmd b/src/webpage/research.qmd new file mode 100644 index 0000000..21550ea --- /dev/null +++ b/src/webpage/research.qmd @@ -0,0 +1,128 @@ +--- +toc: true +format: + html: + header-includes: | + +theme: + light: [simplex, research.scss] +--- + +# Synthesis of discrete-continuous quantum circuits with multimodal diffusion models +::: {.authors} +

Florian Fürrutter, Zohim Chandani, Ikko Hamamura, Hans J. Briegel and Gorka Muñoz-Gil.

+::: + +::: {.paper} +::: {.links} +```{=html} +
+
+``` +::: +::: {.tldr} +

TL;DR:

+

We develop a multi-modal generative diffusion model to compile quantum operations in parameterized quantum circuits.

+::: +::: {.abstract} +

Abstract

+

Efficiently compiling quantum operations remains a major bottleneck in scaling quantum computing. Today’s state-of-the-art methods achieve low compilation error by combining search algorithms with gradient-based parameter optimization, but they incur long runtimes and require multiple calls to quantum hardware or expensive classical simulations, making their scaling prohibitive. Recently, machine-learning models have emerged as an alternative, though they are currently restricted to discrete gate sets. Here, we introduce a multimodal denoising diffusion model that simultaneously generates a circuit’s structure and its continuous parameters for compiling a target unitary. It leverages two independent diffusion processes, one for discrete gate selection and one for parameter prediction. We benchmark the model over different experiments, analyzing the method’s accuracy across varying qubit counts, circuit depths, and proportions of parameterized gates. Finally, by exploiting its rapid circuit generation, we create large datasets of circuits for particular operations and use these to extract valuable heuristics that can help us discover new insights into quantum circuit synthesis.

+::: +::: + + +# Quantum circuit synthesis with diffusion models +::: {.authors} +

Florian Fürrutter, Gorka Muñoz-Gil and Hans J. Briegel.

+::: + +::: {.paper} +::: {.links} +```{=html} +
+
+``` +::: +::: {.tldr} +

TL;DR:

+

We use a generative diffusion model to synthesize quantum circuits for entanglement generation and unitary compilation.

+::: +::: {.abstract} +

Abstract

+

Quantum computing has recently emerged as a transformative technology. Yet, its promised advantages rely on efficiently translating quantum operations into viable physical realizations. In this work, we use generative machine learning models, specifically denoising diffusion models (DMs), to facilitate this transformation. Leveraging text-conditioning, we steer the model to produce desired quantum operations within gate-based quantum circuits. Notably, DMs allow to sidestep during training the exponential overhead inherent in the classical simulation of quantum dynamics — a consistent bottleneck in preceding ML techniques. We demonstrate the model’s capabilities across two tasks: entanglement generation and unitary compilation. The model excels at generating new circuits and supports typical DM extensions such as masking and editing to, for instance, align the circuit generation to the constraints of the targeted quantum device. Given their flexibility and generalization abilities, we envision DMs as pivotal in quantum circuit synthesis, enhancing both practical applications but also insights into theoretical quantum computation.

+::: +::: {.cite} +

BibTeX

+``` latex +@article{furrutter2024quantum, + title={Quantum circuit synthesis with diffusion models}, + author={F{\"u}rrutter, Florian and Mu{\~n}oz-Gil, Gorka and Briegel, Hans J}, + journal={Nature Machine Intelligence}, + doi = {https://doi.org/10.1038/s42256-024-00831-9}, + vol = {6}, + pages = {515-–524}, + pages={1--10}, + year={2024}, + publisher={Nature Publishing Group UK London} +} +``` +::: +::: + + + diff --git a/src/webpage/research.scss b/src/webpage/research.scss new file mode 100644 index 0000000..42994d5 --- /dev/null +++ b/src/webpage/research.scss @@ -0,0 +1,164 @@ + +//----------------------------- +/*-- scss:defaults --*/ + + + +//----------------------------- +// CSS goes here +/*-- scss:rules --*/ + + +//----------------------------- + +h1 { + text-align: center !important; + margin-bottom: 0.2rem; +} + +.authors h3 { + // color: $link-color; + text-align: center !important; + margin-top: 10px; + font-size: 1.25rem; +} + +.paper { + display: block; + flex-basis: 0; + flex-grow: 1; + flex-shrink: 1; + padding: .75rem; + // background: #c5c6c7; + background: #3e2d5d42; + border-radius: 20px; + margin-bottom: 3em; +} + +.paper .links { + +} + +.paper .tldr { + background: #3b3f57; + margin: 5px; + padding: 18px; + // background: white; + background: linear-gradient(110deg, rgb(59, 63, 87) 10%, rgba(128, 81, 139, 0.52) 40%, rgb(59, 63, 87) 90%); + border-radius: 20px; +} + +.paper .tldr h4 { + margin-top: 0.1rem; + margin-bottom: 0.1rem; + // color: white; + color: #ffccc2; +} + +.paper .tldr p { + margin-top: 0.4rem; + margin-bottom: 0rem; + font-size: 1.25rem; + color: white; +} + + +.paper .abstract { + margin: 5px; + padding: 18px; + background: white; + border-radius: 20px; +} + +.paper .abstract h4 { + margin-top: 0.1rem; +} + +.paper .cite { + margin: 5px; + padding: 18px; + background: white; + border-radius: 20px; +} + +.paper .cite h4 { + margin-top: 0.1rem; +} + +//----------------------------- + +.has-text-centered { + text-align: 
center !important; +} + +.publication-links { + width: 100%; + height: auto; + display:inline-block; + margin: .75rem; + margin-top: 0.5rem; +} + +.link-block a { + margin-top: 5px; + margin-bottom: 5px; + font-size: 120%; + text-decoration: none; + padding: 10px; + margin: 3px; +} + +.link-block .button.is-rounded { + border-radius: 290486px; + //padding-left: calc(1em + .25em); + //padding-right: calc(1em + .25em); +} + +/* .link-block .button.is-normal { */ + /* font-size: 1rem; */ +/* } */ + +.link-block .button.is-dark { + background-color: #363636; + border-color: transparent; + color: #fff; +} + +/* .ai { */ + /* font-family: $academicons; */ + /* font-weight: 400; */ + /* -moz-osx-font-smoothing: grayscale; */ + /* -webkit-font-smoothing: antialiased; */ + /* display: inline-block; */ + /* font-style: normal; */ + /* font-variant: normal; */ + /* text-rendering: auto; */ + /* line-height: 1; */ +/* } */ + +.icon { + align-items: center; + display: inline-flex; + justify-content: center; + height: 1.5rem; + width: 1.5rem; + scale: 110%; +} + +/* .button .icon, .button .icon.is-large, .button .icon.is-medium, .button .icon.is-small { */ + /* height: 1.5em; */ + /* width: 1.5em; */ +/* } */ + +/* .button .icon:first-child:not(:last-child) { */ + /* margin-left: calc(-.5em - 1px); */ + /* margin-right: .25em; */ +/* } */ + +/* .column { */ + /* display: inline-block; */ + /* flex-basis: 0; */ + /* flex-grow: 1; */ + /* flex-shrink: 1; */ + /* padding: .75rem; */ +/* } */ \ No newline at end of file diff --git a/src/webpage/styles.css b/src/webpage/styles.css new file mode 100644 index 0000000..92f0389 --- /dev/null +++ b/src/webpage/styles.css @@ -0,0 +1,139 @@ +.cell { + margin-bottom: 1rem; +} + +.cell > .sourceCode { + margin-bottom: 0; +} + +.cell-output > pre { + margin-bottom: 0; +} + +.cell-output > pre, .cell-output > .sourceCode > pre, .cell-output-stdout > pre { + margin-left: 0.8rem; + margin-top: 0; + background: none; + border-left: 2px solid 
lightsalmon; + border-top-left-radius: 0; + border-top-right-radius: 0; +} + +.cell-output > .sourceCode { + border: none; +} + +.cell-output > .sourceCode { + background: none; + margin-top: 0; +} + +div.description { + padding-left: 2px; + padding-top: 5px; + font-style: italic; + font-size: 135%; + opacity: 70%; +} + +/* --------------------------------------- */ + +/* show_doc signature */ +blockquote > pre { + font-size: 14px; +} + +.table { + font-size: 16px; + /* disable striped tables */ + --bs-table-striped-bg: var(--bs-table-bg); +} + +.quarto-figure-center > figure > figcaption { + text-align: center; +} + +.figure-caption { + font-size: 75%; + font-style: italic; +} + + +/* --------------------------------------- */ + +@media (max-width: 1200px) { + +.link-cards .card { + margin-bottom: 20px; + margin-right: 10px; +} + +} + +@media (min-width: 400px) { +.link-cards { + display: flex; + flex-direction: row; + flex-wrap: wrap; +} + +.link-cards .card { + width: 455px; + margin: 0 20px 10px 0; +} + +} + +.link-cards .card { + border: none; + padding: 0; + background-color: #add8e300; + border-radius: 25px; + padding: 5px; + padding-left: 15px; +} + +.link-cards .card:hover { + box-shadow: 0 8px 16px 0 rgba(99, 42, 110, 0.245); +} + +.link-cards .card-title h4 { + margin-top: 0; +} + +.link-cards .card-title p { + margin-bottom: 0px; + margin-top: 5px; + padding-left: 0.2rem; +} + +.link-cards .card-subtitle { + margin-bottom: 0.1rem; + margin-top: 0.6rem; + text-decoration: none; + padding-left: 0.5rem; +} + +.link-cards .card-body { + padding: 0.5rem; + padding-left: 0.5rem; +} + +.link-cards .card-body ul { + margin-bottom: 0; + padding-left: 1.5rem; + list-style-type: none; + font-size: 95%; +} + +.link-cards .card-body ul a { + text-decoration: none; +} + +.link-cards .card-body ul li { + padding-bottom: 0.01rem; +} + +.card .source-code { + margin-top: 3px; +} \ No newline at end of file