diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS
index 352049ffbd9211..b1882c395c2873 100644
--- a/.github/CODEOWNERS
+++ b/.github/CODEOWNERS
@@ -97,7 +97,7 @@
 # QA Tests:
 /tests/ @openvinotoolkit/openvino-tests-maintainers
-/tests/layer_tests/ @openvinotoolkit/openvino-tests-maintainers @openvinotoolkit/openvino-mo-maintainers
+/tests/layer_tests/ @openvinotoolkit/openvino-tests-maintainers @openvinotoolkit/openvino-ovc-maintainers
 /tests/layer_tests/pytorch_tests/ @openvinotoolkit/openvino-pytorch-frontend-maintainers
 /tests/layer_tests/tensorflow_tests @openvinotoolkit/openvino-tf-frontend-maintainers
 /tests/layer_tests/jax_tests @openvinotoolkit/openvino-tf-frontend-maintainers @openvinotoolkit/openvino-jax-frontend-maintainers
@@ -110,7 +110,7 @@
 /tools/legacy/ @openvinotoolkit/openvino-samples-maintainers
 /tools/openvino_dev/ @openvinotoolkit/openvino-tools-maintainers @openvinotoolkit/openvino-ie-python-api-maintainers
 /tools/mo/ @openvinotoolkit/openvino-mo-maintainers
-/tools/ovc/ @openvinotoolkit/openvino-mo-maintainers
+/tools/ovc/ @openvinotoolkit/openvino-ovc-maintainers
 /thirdparty/open_model_zoo/ @openvinotoolkit/omz-maintainers

 # Documentation
@@ -118,7 +118,7 @@
 /docs/CMakeLists.txt @openvinotoolkit/openvino-ie-maintainers
 /**/*.md @openvinotoolkit/openvino-docs-maintainers
 /**/*.svg @openvinotoolkit/openvino-docs-maintainers
-/docs/MO_DG/ @openvinotoolkit/openvino-docs-maintainers @openvinotoolkit/openvino-mo-maintainers
+/docs/MO_DG/ @openvinotoolkit/openvino-docs-maintainers @openvinotoolkit/openvino-ovc-maintainers
 /docs/OV_Runtime_UG/ @openvinotoolkit/openvino-docs-maintainers @openvinotoolkit/openvino-ie-maintainers
 /docs/IE_PLUGIN_DG/ @openvinotoolkit/openvino-docs-maintainers @openvinotoolkit/openvino-ie-maintainers
 /docs/Extensibility_UG/ @openvinotoolkit/openvino-docs-maintainers @openvinotoolkit/openvino-ie-maintainers
diff --git a/.github/actions/create_manifest/action.yml b/.github/actions/create_manifest/action.yml
new file mode 100644
index 00000000000000..66d59930e93712
--- /dev/null
+++ b/.github/actions/create_manifest/action.yml
@@ -0,0 +1,44 @@
+name: 'Create manifest'
+description: 'Creates manifest containing versions of the product and the corresponding repositories'
+inputs:
+  repos:
+    description: "Multi-line list of repositories to include in the manifest"
+    required: true
+  product_type:
+    description: "Unique string to reflect product configuration"
+    required: true
+  save_to:
+    description: "Path to save manifest to"
+    required: true
+  action_path:
+    description: "Action path, if not set - taken from github context"
+    required: false
+  target_arch:
+    description: "Target architecture"
+    required: true
+  build_type:
+    description: "Build type: release | debug | release_with_debug"
+    required: true
+
+
+runs:
+  using: "composite"
+  steps:
+    - name: Install Python dependencies
+      shell: ${{ runner.os == 'Windows' && 'pwsh' || 'bash' }}
+      run: >-
+        pip install -r ${{ env.ACTION_PATH }}/requirements.txt
+      env:
+        ACTION_PATH: ${{ runner.os == 'Windows' && '$env:GITHUB_ACTION_PATH' || '$GITHUB_ACTION_PATH' }}
+
+    - name: 'Create manifest'
+      id: create_manifest
+      shell: ${{ runner.os == 'Windows' && 'pwsh' || 'bash' }}
+      run: >-
+        python ${{ env.ACTION_PATH }}/create_manifest.py
+        --target_arch "${{ inputs.target_arch }}" --build_type "${{ inputs.build_type }}"
+        --save_to "${{ inputs.save_to }}" --product_type "${{ inputs.product_type }}" -r "${{ inputs.repos }}"
+      env:
+        BASE_SHA: ${{ github.event.pull_request.base.sha }}
+        PR_HEAD_SHA: ${{ github.event.pull_request.head.sha }}
+        ACTION_PATH: ${{ runner.os == 'Windows' && '$env:GITHUB_ACTION_PATH' || '$GITHUB_ACTION_PATH' }}
diff --git a/.github/actions/create_manifest/create_manifest.py b/.github/actions/create_manifest/create_manifest.py
new file mode 100644
index 00000000000000..1fb3a4712807e6
--- /dev/null
+++ b/.github/actions/create_manifest/create_manifest.py
@@ -0,0 +1,128 @@
+from __future__ import annotations
+
+import argparse
+import logging
+import os
+from datetime import timezone
+from pathlib import Path
+import re
+import git
+
+from manifest_manager import Manifest, Repository, Component
+
+
+def parse_args():
+    parser = argparse.ArgumentParser(description='Creates manifest with product and repository versions')
+    parser.add_argument('-e', '--event_name', help='Name of GitHub event', required=False)
+    parser.add_argument('-r', '--repos', type=str, help='Paths to repositories to include in the manifest',
+                        required=True)
+    parser.add_argument('--product_type', help='Unique string to reflect product configuration', required=True)
+    parser.add_argument('--target_arch', help='Target architecture', required=True)
+    parser.add_argument('--build_type', help='Build type: release | debug | release_with_debug', required=True)
+    parser.add_argument('--save_to', help='Path to save manifest to', required=True)
+    args = parser.parse_args()
+    return args
+
+
+def init_logger():
+    logging.basicConfig(level=logging.DEBUG,
+                        format='%(asctime)s %(name)-15s %(levelname)-8s %(message)s',
+                        datefmt='%m-%d-%Y %H:%M:%S')
+
+
+def set_github_output(name: str, value: str, github_output_var_name: str = 'GITHUB_OUTPUT'):
+    """Sets output variable for a GitHub Action"""
+    logger = logging.getLogger(__name__)
+    # GHA stores the path to the file for writing outputs in the "GITHUB_OUTPUT" environment variable
+    with open(os.environ.get(github_output_var_name), 'a+') as file:
+        logger.info(f"Add {name}={value} to {github_output_var_name}")
+        print(f'{name}={value}', file=file)
+
+
+def get_repo_data(repo_dir: str | Path) -> dict:
+    repo = git.Repo(str(repo_dir))
+    repo_url = next(repo.remote().urls)
+    repo_name_match = re.search(r'github\.com/[^/]+/([^/]+)', repo_url)
+    repo_name = repo_name_match.group(1) if repo_name_match else None
+
+    trigger_repo_url = f"{os.getenv('GITHUB_SERVER_URL')}/{os.getenv('GITHUB_REPOSITORY')}"
+    is_trigger_repo = repo_url == trigger_repo_url
+
+    branch = os.getenv('GITHUB_REF') if is_trigger_repo else repo.references[0].name
+    target_branch = os.getenv('GITHUB_BASE_REF') if is_trigger_repo else None
+    revision = os.getenv('PR_HEAD_SHA') or os.getenv('GITHUB_SHA') if is_trigger_repo else repo.head.commit.hexsha
+    target_revision = os.getenv('BASE_SHA') if is_trigger_repo else None
+    # Commit time of a merge commit (in case of PR merged to target)
+    # TODO: Save commit time of a head commit in PR as well?
+    commit_time = repo.head.commit.committed_datetime.astimezone(timezone.utc)
+    merge_target = branch.endswith('/merge')
+    return {
+        'name': repo_name,
+        'url': repo_url,
+        'branch': branch.replace('refs/heads/', ''),  # To align with internal manifest
+        'target_branch': target_branch,
+        'revision': revision,
+        'target_revision': target_revision,
+        'commit_time': commit_time,
+        'merge_target': merge_target,
+        'trigger': is_trigger_repo,
+    }
+
+
+def parse_ov_version(header_file: str | Path) -> str:
+    header_code = Path(header_file).read_text()
+    major, minor, patch = (re.search(rf"#define OPENVINO_VERSION_{name} (\d+)", header_code).group(1)
+                           for name in ["MAJOR", "MINOR", "PATCH"])
+    return f"{major}.{minor}.{patch}"
+
+
+def generate_manifest(repos: list, product_type: str, event_type: str, build_type: str, target_arch: str) -> Manifest:
+    manifest = Manifest()
+    component_name = 'dldt'  # historical, keep for internal compatibility
+    repositories = []
+    ov_version = None
+    trigger_repo = None
+
+    for repo_dir in repos:
+        repo = Repository(**get_repo_data(repo_dir))
+        repositories.append(repo)
+        if repo.name == 'openvino':
+            version_file = Path(repo_dir) / 'src' / 'core' / 'include' / 'openvino' / 'core' / 'version.hpp'
+            ov_version = parse_ov_version(version_file)
+        if repo.trigger:
+            trigger_repo = repo
+
+    custom_branch_name = f'-{trigger_repo.branch}' if trigger_repo.branch != 'master' else ''
+    run_number_postfix = f'-{os.environ.get("GITHUB_RUN_NUMBER")}' if os.environ.get("GITHUB_RUN_NUMBER") else ''
+    product_version = f"{ov_version}{run_number_postfix}-{trigger_repo.revision[:11]}{custom_branch_name}"
+    ci_build_dev_tag = f'dev{trigger_repo.commit_time.strftime("%Y%m%d")}'
+    wheel_product_version = f'{ov_version}.{ci_build_dev_tag}'
+
+    set_github_output('CI_BUILD_NUMBER', product_version, 'GITHUB_ENV')
+    set_github_output('CI_BUILD_DEV_TAG', ci_build_dev_tag, 'GITHUB_ENV')
+
+    component = Component(name=component_name, version=product_version, product_type=product_type,
+                          target_arch=target_arch, build_type=build_type, build_event=event_type,
+                          repositories=repositories, custom_params={'wheel_product_version': wheel_product_version})
+
+    manifest.add_component(component)
+    return manifest
+
+
+def main():
+    init_logger()
+    logger = logging.getLogger(__name__)
+    args = parse_args()
+
+    event_name = args.event_name or os.getenv('GITHUB_EVENT_NAME')
+    event_type = 'pre_commit' if event_name == 'pull_request' else 'commit'
+
+    repos = args.repos.split()
+    manifest = generate_manifest(repos, args.product_type, event_type, args.build_type, args.target_arch)
+
+    logger.info(f"Saving manifest to {args.save_to}")
+    manifest.save_manifest(args.save_to)
+
+
+if __name__ == '__main__':
+    main()
diff --git a/.github/actions/create_manifest/manifest_manager.py b/.github/actions/create_manifest/manifest_manager.py
new file mode 100644
index 00000000000000..9978f65c0d560e
--- /dev/null
+++ b/.github/actions/create_manifest/manifest_manager.py
@@ -0,0 +1,336 @@
+# Copyright (C) 2024 Intel Corporation
+# SPDX-License-Identifier: Apache-2.0
+
+from __future__ import annotations
+
+import yaml
+from pathlib import Path
+from copy import deepcopy
+from typing import Optional, Dict, List, Union, Iterator, Any
+
+
+class ManifestException(Exception):
+    """Base Manifest file manager exception"""
+
+
+class ManifestDoesNotExist(ManifestException):
+    """ManifestDoesNotExist Manifest file manager exception"""
+
+
+class ManifestSavingError(ManifestException):
+    """ManifestSavingError Manifest file manager exception"""
+
+
+class WrongComponentFormatError(ManifestException):
+    """WrongComponentFormatError Manifest file manager exception"""
+
+
+class WrongRepositoryFormatError(ManifestException):
+    """WrongRepositoryFormatError Manifest file manager exception"""
+
+
+class Manifest:
+    """Manifest wrapper"""
+
+    default_manifest_name = "manifest.yml"
+
+    def __init__(self, manifest_path: Optional[str] = None):
+        """
+        :param manifest_path: Path to a manifest file
+        """
+        self._manifest_file = Path(manifest_path or self.default_manifest_name)
+        if self._manifest_file.is_dir():
+            self._manifest_file = self._manifest_file / self.default_manifest_name
+
+        self._manifest_version = "1.0"
+        self._components: Dict[str, Component] = {}
+
+        if manifest_path is not None:
+            self._prepare_manifest()
+
+    def __repr__(self) -> str:
+        return str(self._manifest_file)
+
+    def _prepare_manifest(self) -> None:
+        """Read manifest file and convert its data to objects"""
+        if not self._manifest_file.is_file():
+            raise ManifestDoesNotExist(f'Cannot find manifest "{self._manifest_file}"')
+
+        with self._manifest_file.open("r") as manifest:
+            manifest_info = yaml.safe_load(manifest)
+
+        if not isinstance(manifest_info, dict):
+            raise ManifestDoesNotExist(f'Incorrect manifest "{self._manifest_file}"')
+
+        self._manifest_version = manifest_info.get("manifest_version", self._manifest_version)
+
+        for name, info in manifest_info["components"].items():
+            self._components[name] = Component.from_dict({
+                "name": name,
+                "version": info["version"],
+                "repository": info["repository"],
+                "product_type": info["product_type"],
+                "target_arch": info["target_arch"],
+                "build_type": info["build_type"],
+                "build_event": info["build_event"],
+                "custom_params": info.get("custom_params")
+            })
+
+    @property
+    def version(self) -> str:
+        return self._manifest_version
+
+    @property
+    def components(self) -> List[Component]:
+        return list(self._components.values())
+
+    def get_component(self, component_name: str) -> Optional[Component]:
+        return self._components.get(component_name)
+
+    def add_component(self, component: Component, replace: bool = False) -> bool:
+        if not replace and component.name in self._components:
+            return False
+        self._components[component.name] = component
+        return True
+
+    def delete_component(self, component_name: str) -> bool:
+        return self._components.pop(component_name, None) is not None
+
+    def save_manifest(self, save_to: Union[str, Path]) -> None:
+        class YamlDumper(yaml.SafeDumper):
+            """Formatting PyYAML dump() output"""
+
+            def write_line_break(self, data=None):
+                super().write_line_break(data)
+                if len(self.indents) in {1, 2, 4}:
+                    super().write_line_break()
+
+        path_to_save = Path(save_to)
+        if path_to_save.is_dir():
+            path_to_save = path_to_save / self.default_manifest_name
+        else:
+            path_to_save.parent.mkdir(parents=True, exist_ok=True)
+
+        manifest_data = {"components": {}, "manifest_version": self._manifest_version}
+        for comp_name, comp_data in self._components.items():
+            comp = dict(comp_data)
+            manifest_data["components"][comp_name] = {
+                "version": comp["version"],
+                "product_type": comp["product_type"],
+                "target_arch": comp["target_arch"],
+                "build_type": comp["build_type"],
+                "build_event": comp["build_event"],
+                "trigger_repo_name": comp["trigger_repo_name"],
+                "custom_params": comp["custom_params"],
+                "repository": comp["repositories"],
+            }
+
+        try:
+            with path_to_save.open("w") as manifest:
+                yaml.dump(manifest_data, stream=manifest, Dumper=YamlDumper, default_flow_style=False,
+                          sort_keys=False)
+        except Exception as ex:
+            raise ManifestSavingError(ex) from ex
+
+    def as_dict(self) -> Dict[str, Union[str, Dict]]:
+        """Return manifest as dictionary"""
+        if not self._manifest_file.is_file():
+            raise ManifestDoesNotExist(f'Cannot find manifest "{self._manifest_file}"')
+
+        with self._manifest_file.open("r") as manifest:
+            manifest_dict = yaml.safe_load(manifest)
+
+        if not isinstance(manifest_dict, dict):
+            raise ManifestDoesNotExist(f'Incorrect manifest "{self._manifest_file}"')
+
+        return manifest_dict
+
+
+class Repository:
+    def __init__(self, **kwargs) -> None:
+        self._state: dict = {
+            "name": None,
+            "url": None,
+            "branch": None,
+            "revision": None,
+            "commit_id": None,
+            "commit_time": None,
+            "target_branch": None,
+            "target_revision": None,
+            "target_commit_id": None,
+            "merge_target": False,
+            "revert_time": None,
+            "trigger": False,
+            "default_branch": None,
+            "type": "git",
+        }
+        for arg_name, arg_value in kwargs.items():
+            if arg_name in self._state:
+                self._state[arg_name] = arg_value
+
+    def __getattr__(self, attr_name: str) -> Any:
+        if attr_name in self._state:
+            return self._state.get(attr_name)
+        raise AttributeError(f"'{self.__class__.__name__}' object has no attribute '{attr_name}'")
+
+    def __iter__(self) -> Iterator:
+        for name in self._state:
+            yield name, self._state.get(name)
+
+    def get_git_repo_state(self) -> dict:
+        state = deepcopy(self._state)
+        state.pop("revision")
+        state.pop("target_revision")
+        state.pop("commit_time")
+        state.pop("type")
+        state["commit_id"] = self._state["revision"]
+        state["target_commit_id"] = self._state["target_revision"]
+        return state
+
+
+class Component:
+    def __init__(
+        self,
+        name: str,
+        version: str,
+        repositories: list,
+        product_type: str,
+        target_arch: str,
+        build_type: str,
+        build_event: str,
+        custom_params: Optional[dict] = None
+    ):
+        """
+        Initialize the product component.
+
+        :param name: Name of component
+        :param version: Version of component
+        :param repositories: List of repositories
+        :param product_type: Unique key to describe a product type (can include OS, arch, build variant, etc)
+        :param target_arch: Target architecture
+        :param build_type: Type of build (release, debug)
+        :param build_event: Build event (pre_commit, commit)
+        :param custom_params: Custom parameters (optional)
+        """
+        self._name = name
+        self._version = version
+        self._repositories = {}
+        self._product_type = product_type
+        self._target_arch = target_arch
+        self._build_type = build_type
+        self._build_event = build_event
+        self._custom_params = custom_params if custom_params is not None else {}
+        self._trigger_repo_name = None
+
+        self._prepare_repositories(repositories)
+
+    def __iter__(self) -> Iterator:
+        yield "name", self._name
+        yield "version", self._version
+        yield "product_type", self._product_type
+        yield "target_arch", self._target_arch
+        yield "build_type", self._build_type
+        yield "build_event", self._build_event
+        yield "trigger_repo_name", self._trigger_repo_name
+        yield "custom_params", self._custom_params
+        yield "repositories", [dict(repo) for repo in self._repositories.values()]
+
+    def _prepare_repositories(self, repositories: list) -> None:
+        for repo in repositories:
+            repo_name, repo_obj = self._parse_repository(repo)
+            self._repositories[repo_name] = repo_obj
+
+            if repo_obj.trigger:
+                if self._trigger_repo_name:
+                    raise WrongRepositoryFormatError(
+                        f"Found trigger repo duplicates: {self._trigger_repo_name}, {repo_name}"
+                    )
+                self._trigger_repo_name = repo_name
+
+    @staticmethod
+    def _parse_repository(repo: Union[dict, Repository]) -> tuple[str, Repository]:
+        if isinstance(repo, dict):
+            repo_name = repo["name"]
+            repo_obj = Repository(**repo)
+        elif isinstance(repo, Repository):
+            repo_name = repo.name
+            repo_obj = repo
+        return repo_name, repo_obj
+
+    @staticmethod
+    def from_dict(comp_data: dict) -> Component:
+        """
+        Convert a dictionary to a Component object.
+
+        :param comp_data: Component data dictionary
+        :return: Component object
+        """
+        try:
+            return Component(
+                comp_data["name"],
+                comp_data["version"],
+                comp_data["repository"],
+                comp_data["product_type"],
+                comp_data["target_arch"],
+                comp_data["build_type"],
+                comp_data["build_event"],
+                comp_data.get("custom_params"),
+            )
+        except Exception as ex:
+            raise WrongComponentFormatError(ex) from ex
+
+    @property
+    def name(self) -> str:
+        return self._name
+
+    @property
+    def version(self) -> str:
+        return self._version
+
+    @property
+    def product_type(self) -> str:
+        return self._product_type
+
+    @property
+    def target_arch(self) -> str:
+        return self._target_arch
+
+    @property
+    def build_type(self) -> str:
+        return self._build_type
+
+    @property
+    def build_event(self) -> str:
+        return self._build_event
+
+    @property
+    def repositories(self) -> List[Repository]:
+        return list(self._repositories.values())
+
+    @property
+    def trigger_repo_name(self) -> Optional[str]:
+        return self._trigger_repo_name
+
+    @property
+    def trigger_repository(self) -> Optional[Repository]:
+        return next((repo for repo in self._repositories.values() if repo.trigger), None)
+
+    def get_repository(self, repository_name: str) -> Optional[Repository]:
+        return self._repositories.get(repository_name)
+
+    def add_repository(self, repository: Repository, replace: bool = False) -> bool:
+        if not replace and repository.name in self._repositories:
+            return False
+        self._repositories[repository.name] = repository
+        return True
+
+    def delete_repository(self, repository_name: str) -> bool:
+        return self._repositories.pop(repository_name, None) is not None
+
+    def get_custom_param(self, name: str) -> Optional[Any]:
+        return self._custom_params.get(name)
+
+    def add_custom_param(self, name: str, value: Any) -> None:
+        self._custom_params[name] = value
+
+    def delete_custom_param(self, name: str) -> bool:
+        return self._custom_params.pop(name, None) is not None
diff --git a/.github/actions/create_manifest/requirements.txt b/.github/actions/create_manifest/requirements.txt
new file mode 100644
index 00000000000000..eb0abf060b23b0
--- /dev/null
+++ b/.github/actions/create_manifest/requirements.txt
@@ -0,0 +1,2 @@
+GitPython~=3.1.43
+pyyaml~=6.0.1
diff --git a/.github/actions/store_artifacts/action.yml b/.github/actions/store_artifacts/action.yml
new file mode 100644
index 00000000000000..d9c4184a622cce
--- /dev/null
+++ b/.github/actions/store_artifacts/action.yml
@@ -0,0 +1,39 @@
+name: 'Store artifacts'
+description: 'Store given artifacts in a proper place on a shared drive'
+inputs:
+  artifacts:
+    description: "Multi-line list of artifacts to store"
+    required: true
+  storage_dir:
+    description: "Directory name to store artifacts in"
+    required: true
+  storage_root:
+    description: "Root path of the storage to place artifacts to"
+    required: true
+
+
+outputs:
+  artifacts_storage_path:
+    description: "Path where the artifacts are stored"
+    value: ${{ steps.copy_artifacts.outputs.artifacts_storage_path }}
+
+runs:
+  using: "composite"
+  steps:
+    - name: Install Python dependencies
+      shell: ${{ runner.os == 'Windows' && 'pwsh' || 'bash' }}
+      run: >-
+        pip install -r ${{ env.ACTION_PATH }}/requirements.txt
+      env:
+        ACTION_PATH: ${{ runner.os == 'Windows' && '$env:GITHUB_ACTION_PATH' || '$GITHUB_ACTION_PATH' }}
+
+    - name: 'Copy artifacts'
+      id: copy_artifacts
+      shell: ${{ runner.os == 'Windows' && 'pwsh' || 'bash' }}
+      run: >-
+        python ${{ env.ACTION_PATH }}/store_artifacts.py
+        --storage_dir "${{ inputs.storage_dir }}"
+        --storage_root "${{ inputs.storage_root }}"
+        -a "${{ inputs.artifacts }}"
+      env:
+        PR_HEAD_SHA: ${{ github.event.pull_request.head.sha }}
+        ACTION_PATH: ${{ runner.os == 'Windows' && '$env:GITHUB_ACTION_PATH' || '$GITHUB_ACTION_PATH' }}
diff --git a/.github/actions/store_artifacts/requirements.txt b/.github/actions/store_artifacts/requirements.txt
new file mode 100644
index 00000000000000..eb0abf060b23b0
--- /dev/null
+++ b/.github/actions/store_artifacts/requirements.txt
@@ -0,0 +1,2 @@
+GitPython~=3.1.43
+pyyaml~=6.0.1
diff --git a/.github/actions/store_artifacts/store_artifacts.py b/.github/actions/store_artifacts/store_artifacts.py
new file mode 100644
index 00000000000000..7dde088dc91593
--- /dev/null
+++ b/.github/actions/store_artifacts/store_artifacts.py
@@ -0,0 +1,134 @@
+# Copyright (C) 2024 Intel Corporation
+# SPDX-License-Identifier: Apache-2.0
+
+from __future__ import annotations
+
+import argparse
+import logging
+import os
+import re
+import sys
+import git
+import shutil
+from contextlib import contextmanager
+from pathlib import Path
+
+
+def parse_args():
+    parser = argparse.ArgumentParser(description='Stores given artifacts in a shared storage location')
+    parser.add_argument('-e', '--event_name', help='Name of GitHub event', required=False)
+    parser.add_argument('-b', '--branch_name', help='Name of GitHub branch', required=False)
+    parser.add_argument('-s', '--commit_sha', help='Commit hash for which artifacts were generated', required=False)
+    parser.add_argument('-a', '--artifacts', type=str, help='Paths to artifacts to store (files/dirs)', required=True)
+    parser.add_argument('--storage_dir', help='Directory name to store artifacts in', required=True)
+    parser.add_argument('--storage_root', help='Root path of the storage to place artifacts to', required=True)
+    args = parser.parse_args()
+    return args
+
+
+def init_logger():
+    logging.basicConfig(level=logging.DEBUG,
+                        format='%(asctime)s %(name)-15s %(levelname)-8s %(message)s',
+                        datefmt='%m-%d-%Y %H:%M:%S')
+
+
+def set_github_output(name: str, value: str, github_output_var_name: str = 'GITHUB_OUTPUT'):
+    """Sets output variable for a GitHub Action"""
+    logger = logging.getLogger(__name__)
+    # GHA stores the path to the file for writing outputs in the "GITHUB_OUTPUT" environment variable
+    with open(os.environ.get(github_output_var_name), 'a+') as file:
+        logger.info(f"Add {name}={value} to {github_output_var_name}")
+        print(f'{name}={value}', file=file)
+
+
+@contextmanager
+def preserve_stats_context():
+    """
+    Workaround for copying to a Samba share on Linux
+    to avoid issues while setting Linux permissions.
+    """
+    _orig_copystat = shutil.copystat
+    shutil.copystat = lambda x, y, follow_symlinks=True: x
+    try:
+        yield
+    finally:
+        shutil.copystat = _orig_copystat
+
+
+def rotate_dir(directory: Path) -> bool:
+    """
+    Renames the directory if it exists:
+    dir -> dir_1
+    """
+    log = logging.getLogger('rotate_dir')
+
+    if not directory.exists():
+        return False
+
+    dir_parent = directory.parent
+    dir_name = directory.name
+    max_dir_num = 0
+    for redir in dir_parent.iterdir():
+        dir_num = redir.name.split('_')[-1]
+        if redir.name.startswith(dir_name) and dir_num.isdigit() and int(dir_num) > max_dir_num:
+            max_dir_num = int(dir_num)
+
+    duplicate = dir_parent / f'{dir_name}_{max_dir_num + 1}'
+    log.info(f"Move previous directory to {duplicate}")
+    directory.rename(duplicate)
+    return True
+
+
+def main():
+    init_logger()
+    logger = logging.getLogger(__name__)
+    args = parse_args()
+
+    event_name = args.event_name or os.getenv('GITHUB_EVENT_NAME')
+    branch_name = args.branch_name or os.getenv('GITHUB_BASE_REF') or os.getenv('GITHUB_REF_NAME')
+
+    # TODO: return, once we decide to get rid of post-commit and choose artifacts generated for a merged PR in queue?
+    # merge_queue_matcher = re.search(r'gh-readonly-queue/(.*?)/pr-', branch_name)
+    # if merge_queue_matcher:
+    #     branch_name = merge_queue_matcher.group(1)
+
+    commit_hash = args.commit_sha or os.getenv('PR_HEAD_SHA') or os.getenv('GITHUB_SHA')
+    event_type = 'pre_commit' if event_name == 'pull_request' else 'commit'
+    storage_root = args.storage_root or os.getenv('ARTIFACTS_SHARE')
+
+    storage = Path(storage_root) / 'dldt' / branch_name / event_type / commit_hash / args.storage_dir
+    set_github_output("artifacts_storage_path", str(storage))
+
+    logger.info(f"Storing artifacts to {storage}")
+    rotate_dir(storage)  # TODO: use more stable approach to handle storing artifacts from re-runs
+
+    error_found = False
+    for artifact in args.artifacts.split():
+        artifact_path = Path(artifact)
+        logger.debug(f"Copying {artifact_path} to {storage / artifact_path.name}")
+        try:
+            with preserve_stats_context():
+                if artifact_path.is_dir():
+                    shutil.copytree(artifact_path, storage / artifact_path.name)
+                else:
+                    storage.mkdir(parents=True, exist_ok=True)
+                    shutil.copy2(artifact_path, storage / artifact_path.name)
+        except Exception as e:
+            logger.error(f'Failed to copy {artifact}: {e}')
+            error_found = True
+
+    github_server = os.getenv('GITHUB_SERVER_URL')
+    if github_server:  # If running from GHA context
+        # TODO: write an exact job link, but it's not trivial to get
+        workflow_link = f"{github_server}/{os.getenv('GITHUB_REPOSITORY')}/actions/runs/{os.getenv('GITHUB_RUN_ID')}"
+        with open(storage / 'workflow_link.txt', 'w') as file:
+            file.write(workflow_link)
+
+    logger.debug("Copying finished")
+    (storage / 'copying_finished').touch()
+    if error_found:
+        sys.exit(1)
+
+
+if __name__ == '__main__':
+    main()
diff --git a/.github/components.yml b/.github/components.yml
index 9c5d7c0089c9ca..8de51a2ced3343 100644
--- a/.github/components.yml
+++ b/.github/components.yml
@@ -111,6 +111,7 @@ IR_FE:
 ONNX_FE:
   revalidate:
     - MO
+    - OVC
     - ONNX_RT
   build:
     - CPU
     - Python_API
@@ -119,6 +120,7 @@ ONNX_FE:
 PDPD_FE:
   revalidate:
     - MO
+    - OVC
   build:
     - CPU
     - Python_API
@@ -126,6 +128,7 @@ PDPD_FE:
 TF_FE:
   revalidate:
     - MO
+    - OVC
   build:
     - CPU
     - Python_API
@@ -134,6 +137,7 @@ TF_FE:
 TFL_FE:
   revalidate:
     - MO
+    - OVC
  build:
     - CPU
     - Python_API
@@ -141,6 +145,7 @@ TFL_FE:
 PyTorch_FE:
   revalidate:
     - MO
+    - OVC
  build:
     - CPU
     - Python_API
@@ -148,6 +153,7 @@ PyTorch_FE:
 JAX_FE:
   revalidate:
     - MO
+    - OVC
build: - CPU - Python_API @@ -165,6 +171,7 @@ Python_API: revalidate: - samples - MO + - OVC - tools - TF_FE build: @@ -207,6 +214,18 @@ IE_Tests: build: - IR_FE +OVC: + revalidate: + - PyTorch_FE + - TF_FE + - TFL_FE + - ONNX_FE + - PDPD_FE + - JAX_FE + build: + - Python_API + - TOKENIZERS # TF_FE tests depends on tokenizers build + MO: revalidate: - PyTorch_FE diff --git a/.github/dependency_review.yml b/.github/dependency_review.yml index 11639f4d2d4b22..5636a441501fc8 100644 --- a/.github/dependency_review.yml +++ b/.github/dependency_review.yml @@ -13,6 +13,7 @@ allow-licenses: - '0BSD' - 'Python-2.0' - 'LGPL-3.0' + - 'MPL-2.0' fail-on-scopes: - 'runtime' - 'development' diff --git a/.github/github_org_control/config.json b/.github/github_org_control/config.json index 717403f27d13ea..7fc23b7888c170 100644 --- a/.github/github_org_control/config.json +++ b/.github/github_org_control/config.json @@ -37,6 +37,7 @@ "openvino-onnx-frontend-maintainers": "category: ONNX FE", "openvino-ie-tests-maintainers": "category: IE Tests", "openvino-mo-maintainers": "category: MO", + "openvino-ovc-maintainers": "category: OVC", "openvino-ngraph-maintainers": "category: Core", "openvino-scripts-maintainers": "category: build", "openvino-tests-maintainers": "category: IE Tests", diff --git a/.github/labeler.yml b/.github/labeler.yml index 64a8661cf1e2e8..49aeac7325aa4f 100644 --- a/.github/labeler.yml +++ b/.github/labeler.yml @@ -97,8 +97,10 @@ 'category: MO': - 'tools/mo/**/*' -- 'tools/ovc/**/*' - 'tests/layer_tests/mo_python_api_tests/**/*' + +'category: OVC': +- 'tools/ovc/**/*' - 'tests/layer_tests/ovc_python_api_tests/**/*' 'category: ONNX FE': diff --git a/.github/scripts/workflow_rerun/errors_to_look_for.json b/.github/scripts/workflow_rerun/errors_to_look_for.json index 51e8106944ca9c..3d59bb9a1e569f 100644 --- a/.github/scripts/workflow_rerun/errors_to_look_for.json +++ b/.github/scripts/workflow_rerun/errors_to_look_for.json @@ -58,5 +58,9 @@ { "error_text": "status_string: \"Timeout was reached\"", "ticket": 142653 + }, + { + "error_text": "ERROR 502: Bad Gateway", + "ticket": 146254 } ] \ No newline at end of file diff --git a/.github/workflows/android_arm64.yml b/.github/workflows/android_arm64.yml index de5b6c0011e34d..25081433f48f10 100644 --- a/.github/workflows/android_arm64.yml +++ b/.github/workflows/android_arm64.yml @@ -175,7 +175,7 @@ jobs: # Upload build logs # - name: Upload build logs - uses: actions/upload-artifact@65462800fd760344b1a7b4382951275a0abb4808 # v4.3.3 + uses: actions/upload-artifact@0b2256b8c012f0828dc542b3febcab082c67f72b # v4.3.4 if: always() with: name: build_logs diff --git a/.github/workflows/build_doc.yml b/.github/workflows/build_doc.yml index 66d4c8067edea9..b590d093207e39 100644 --- a/.github/workflows/build_doc.yml +++ b/.github/workflows/build_doc.yml @@ -72,13 +72,13 @@ jobs: echo "PR_NUMBER=$PR_NUMBER" >> $GITHUB_ENV - name: 'Upload sphinx.log' - uses: actions/upload-artifact@65462800fd760344b1a7b4382951275a0abb4808 # v4.3.3 + uses: actions/upload-artifact@0b2256b8c012f0828dc542b3febcab082c67f72b # v4.3.4 with: name: sphinx_build_log_${{ env.PR_NUMBER }}.log path: build/docs/sphinx.log - name: 'Upload docs html' - uses: actions/upload-artifact@65462800fd760344b1a7b4382951275a0abb4808 # v4.3.3 + uses: actions/upload-artifact@0b2256b8c012f0828dc542b3febcab082c67f72b # v4.3.4 with: name: openvino_docs_html_${{ env.PR_NUMBER }}.zip path: build/docs/openvino_docs_html.zip @@ -95,7 +95,7 @@ jobs: - name: 'Upload test results' if: failure() - uses: 
actions/upload-artifact@65462800fd760344b1a7b4382951275a0abb4808 # v4.3.3 + uses: actions/upload-artifact@0b2256b8c012f0828dc542b3febcab082c67f72b # v4.3.4 with: name: openvino_docs_pytest path: build/docs/_artifacts/ diff --git a/.github/workflows/code_style.yml b/.github/workflows/code_style.yml index 223a7418156e43..f43e60538a6089 100644 --- a/.github/workflows/code_style.yml +++ b/.github/workflows/code_style.yml @@ -31,7 +31,7 @@ jobs: - name: suggester / clang-format if: startsWith(github.event_name, 'pull_request') - uses: reviewdog/action-suggester@a1d57ff096639094e0ba35ef3039e79316364796 # v1.15.0 + uses: reviewdog/action-suggester@63b8f8cc21dfa052ac44436e65ed31edcffcb6c1 # v1.17.0 with: github_token: ${{ secrets.GITHUB_TOKEN }} level: warning @@ -60,7 +60,7 @@ jobs: # always provide suggestions even for skipped scripts in ov_shellcheck tagret - name: ShellCheck action if: always() - uses: reviewdog/action-shellcheck@52f34f737a16c65b8caa8c51ae1b23036afe5685 # v1.23.0 + uses: reviewdog/action-shellcheck@d99499e855260c9c56f7a1d066933b57326e9e7c # v1.26.0 with: level: style reporter: github-pr-review diff --git a/.github/workflows/coverity.yml b/.github/workflows/coverity.yml index ef0fd80e7813cf..da790552c239c1 100644 --- a/.github/workflows/coverity.yml +++ b/.github/workflows/coverity.yml @@ -139,7 +139,7 @@ jobs: run: ${COVERITY_TOOL_DIR}/cov-analysis*/bin/cov-configure -c ${COVERITY_TOOL_DIR}/cov-analysis-linux64-2023.6.2/config/coverity_config.xml -lscc text - name: Upload Coverity build log - uses: actions/upload-artifact@65462800fd760344b1a7b4382951275a0abb4808 # v4.3.3 + uses: actions/upload-artifact@0b2256b8c012f0828dc542b3febcab082c67f72b # v4.3.4 if: always() with: name: coverity_logs @@ -147,7 +147,7 @@ jobs: if-no-files-found: 'error' - name: Upload Coverity build archive - uses: actions/upload-artifact@65462800fd760344b1a7b4382951275a0abb4808 # v4.3.3 + uses: actions/upload-artifact@0b2256b8c012f0828dc542b3febcab082c67f72b # v4.3.4 if: always() with: name: coverity_archive diff --git a/.github/workflows/fedora.yml b/.github/workflows/fedora.yml index 02cd0abf018319..5833c1d2000fa7 100644 --- a/.github/workflows/fedora.yml +++ b/.github/workflows/fedora.yml @@ -189,7 +189,7 @@ jobs: # Upload build artifacts and logs # - name: Upload build logs - uses: actions/upload-artifact@65462800fd760344b1a7b4382951275a0abb4808 # v4.3.3 + uses: actions/upload-artifact@0b2256b8c012f0828dc542b3febcab082c67f72b # v4.3.4 if: always() with: name: build_logs @@ -198,7 +198,7 @@ jobs: - name: Upload openvino package if: ${{ always() }} - uses: actions/upload-artifact@65462800fd760344b1a7b4382951275a0abb4808 # v4.3.3 + uses: actions/upload-artifact@0b2256b8c012f0828dc542b3febcab082c67f72b # v4.3.4 with: name: openvino_package path: ${{ env.BUILD_DIR }}/openvino_package.tar.gz @@ -206,7 +206,7 @@ jobs: - name: Upload openvino RPM packages if: ${{ always() }} - uses: actions/upload-artifact@65462800fd760344b1a7b4382951275a0abb4808 # v4.3.3 + uses: actions/upload-artifact@0b2256b8c012f0828dc542b3febcab082c67f72b # v4.3.4 with: name: openvino_rpm_packages path: ${{ env.BUILD_DIR }}/*.rpm @@ -214,7 +214,7 @@ jobs: - name: Upload openvino tests package if: ${{ always() }} - uses: actions/upload-artifact@65462800fd760344b1a7b4382951275a0abb4808 # v4.3.3 + uses: actions/upload-artifact@0b2256b8c012f0828dc542b3febcab082c67f72b # v4.3.4 with: name: openvino_tests path: ${{ env.BUILD_DIR }}/openvino_tests.tar.gz @@ -234,7 +234,7 @@ jobs: steps: - name: Download OpenVINO RPM packages - uses: 
actions/download-artifact@65a9edc5881444af0b9093a5e628f2fe47ea3b2e # v4.1.7 + uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8 with: name: openvino_rpm_packages path: ${{ env.RPM_PACKAGES_DIR }} diff --git a/.github/workflows/job_cpu_functional_tests.yml b/.github/workflows/job_cpu_functional_tests.yml index 04fa0c8860ab66..986c2c42315371 100644 --- a/.github/workflows/job_cpu_functional_tests.yml +++ b/.github/workflows/job_cpu_functional_tests.yml @@ -33,13 +33,13 @@ jobs: PARALLEL_TEST_CACHE: ${{ github.workspace }}/install/tests/test_cache.lst steps: - name: Download OpenVINO package - uses: actions/download-artifact@65a9edc5881444af0b9093a5e628f2fe47ea3b2e # v4.1.7 + uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8 with: name: openvino_package path: ${{ env.INSTALL_DIR }} - name: Download OpenVINO tests package - uses: actions/download-artifact@65a9edc5881444af0b9093a5e628f2fe47ea3b2e # v4.1.7 + uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8 with: name: openvino_tests path: ${{ env.INSTALL_TEST_DIR }} @@ -108,7 +108,7 @@ jobs: key: ${{ runner.os }}-${{ runner.arch }}-tests-functional-cpu-stamp-${{ github.sha }} - name: Upload Test Results - uses: actions/upload-artifact@65462800fd760344b1a7b4382951275a0abb4808 # v4.3.3 + uses: actions/upload-artifact@0b2256b8c012f0828dc542b3febcab082c67f72b # v4.3.4 if: always() with: name: test-results-functional-cpu diff --git a/.github/workflows/job_cxx_unit_tests.yml b/.github/workflows/job_cxx_unit_tests.yml index 92c12dfcd71251..29c656f416ecbc 100644 --- a/.github/workflows/job_cxx_unit_tests.yml +++ b/.github/workflows/job_cxx_unit_tests.yml @@ -35,13 +35,13 @@ jobs: INSTALL_TEST_DIR: ${{ github.workspace }}/install/tests steps: - name: Download OpenVINO package - uses: actions/download-artifact@65a9edc5881444af0b9093a5e628f2fe47ea3b2e # v4.1.7 + uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8 with: name: openvino_package path: ${{ env.INSTALL_DIR }} - name: Download OpenVINO tests package - uses: actions/download-artifact@65a9edc5881444af0b9093a5e628f2fe47ea3b2e # v4.1.7 + uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8 with: name: openvino_tests path: ${{ env.INSTALL_TEST_DIR }} @@ -255,7 +255,7 @@ jobs: ${INSTALL_TEST_DIR}/ov_hetero_func_tests --gtest_print_time=1 --gtest_output=xml:${INSTALL_TEST_DIR}/TEST-OVHeteroFuncTests.xml --gtest_filter="*smoke*" - name: Upload Test Results - uses: actions/upload-artifact@65462800fd760344b1a7b4382951275a0abb4808 # v4.3.3 + uses: actions/upload-artifact@0b2256b8c012f0828dc542b3febcab082c67f72b # v4.3.4 if: ${{ !cancelled() }} with: name: test-results-cpp diff --git a/.github/workflows/job_debian_packages.yml b/.github/workflows/job_debian_packages.yml index a7547a2483dd16..a8f2731563f779 100644 --- a/.github/workflows/job_debian_packages.yml +++ b/.github/workflows/job_debian_packages.yml @@ -33,7 +33,7 @@ jobs: run: echo 'Acquire::Retries "10";' > /etc/apt/apt.conf.d/80-retries - name: Download OpenVINO debian packages - uses: actions/download-artifact@65a9edc5881444af0b9093a5e628f2fe47ea3b2e # v4.1.7 + uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8 with: name: openvino_debian_packages path: ${{ env.DEBIAN_PACKAGES_DIR }} diff --git a/.github/workflows/job_gpu_tests.yml b/.github/workflows/job_gpu_tests.yml index 7a5af97cdcde49..5d9fb1172e62cb 100644 --- a/.github/workflows/job_gpu_tests.yml +++ 
b/.github/workflows/job_gpu_tests.yml @@ -38,13 +38,13 @@ jobs: GTEST_PARALLEL_SCRIPT: ${{ github.workspace }}/gtest_parallel.py steps: - name: Download OpenVINO package - uses: actions/download-artifact@65a9edc5881444af0b9093a5e628f2fe47ea3b2e # v4.1.7 + uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8 with: name: 'openvino_package' path: ${{ env.INSTALL_DIR }} - name: Download OpenVINO tests package - uses: actions/download-artifact@65a9edc5881444af0b9093a5e628f2fe47ea3b2e # v4.1.7 + uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8 with: name: 'openvino_tests' path: ${{ env.INSTALL_TEST_DIR }} @@ -128,7 +128,7 @@ jobs: - name: Upload Test Results - uses: actions/upload-artifact@65462800fd760344b1a7b4382951275a0abb4808 # v4.3.3 + uses: actions/upload-artifact@0b2256b8c012f0828dc542b3febcab082c67f72b # v4.3.4 if: always() with: name: test-results-${{ inputs.test_type }}-${{ inputs.device }} diff --git a/.github/workflows/job_onnx_models_tests.yml b/.github/workflows/job_onnx_models_tests.yml index 3fac0998d88ced..19bf3b23482b89 100644 --- a/.github/workflows/job_onnx_models_tests.yml +++ b/.github/workflows/job_onnx_models_tests.yml @@ -38,13 +38,13 @@ jobs: if: ${{ github.event_name != 'merge_group' }} steps: - name: Download OpenVINO package - uses: actions/download-artifact@65a9edc5881444af0b9093a5e628f2fe47ea3b2e # v4.1.7 + uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8 with: name: openvino_package path: ${{ env.INSTALL_DIR }} - name: Download OpenVINO tests package - uses: actions/download-artifact@65a9edc5881444af0b9093a5e628f2fe47ea3b2e # v4.1.7 + uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8 with: name: openvino_tests path: ${{ env.INSTALL_TEST_DIR }} diff --git a/.github/workflows/job_onnx_runtime.yml b/.github/workflows/job_onnx_runtime.yml index ae0f21bf58ab37..b7da6d827d542d 100644 --- a/.github/workflows/job_onnx_runtime.yml +++ b/.github/workflows/job_onnx_runtime.yml @@ -43,7 +43,7 @@ jobs: ONNX_RUNTIME_BUILD_DIR: ${{ github.workspace }}/onnxruntime/build steps: - name: Download OpenVINO package - uses: actions/download-artifact@65a9edc5881444af0b9093a5e628f2fe47ea3b2e # v4.1.7 + uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8 with: name: openvino_package path: ${{ env.INSTALL_DIR }} diff --git a/.github/workflows/job_openvino_js.yml b/.github/workflows/job_openvino_js.yml index 25e29dd3f3f9c3..880726bd0d5878 100644 --- a/.github/workflows/job_openvino_js.yml +++ b/.github/workflows/job_openvino_js.yml @@ -45,7 +45,7 @@ jobs: echo "OPENVINO_JS_LIBS_DIR=$GITHUB_WORKSPACE/openvino/src/bindings/js/node/bin" >> "$GITHUB_ENV" - name: Download OpenVINO JS package - uses: actions/download-artifact@65a9edc5881444af0b9093a5e628f2fe47ea3b2e # v4.1.7 + uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8 with: name: openvino_js_package path: ${{ env.OPENVINO_JS_LIBS_DIR }} diff --git a/.github/workflows/job_python_unit_tests.yml b/.github/workflows/job_python_unit_tests.yml index 4c7a14e891b49e..e1bd58fb781d69 100644 --- a/.github/workflows/job_python_unit_tests.yml +++ b/.github/workflows/job_python_unit_tests.yml @@ -41,13 +41,13 @@ jobs: steps: - name: Download OpenVINO package - uses: actions/download-artifact@65a9edc5881444af0b9093a5e628f2fe47ea3b2e # v4.1.7 + uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8 with: name: openvino_package path: ${{ 
env.INSTALL_DIR }} - name: Download OpenVINO tests package - uses: actions/download-artifact@65a9edc5881444af0b9093a5e628f2fe47ea3b2e # v4.1.7 + uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8 with: name: openvino_tests path: ${{ env.INSTALL_TEST_DIR }} @@ -306,7 +306,7 @@ jobs: --ignore=${INSTALL_TEST_DIR}/pyopenvino/tests/test_utils/test_utils.py - name: Upload Test Results - uses: actions/upload-artifact@65462800fd760344b1a7b4382951275a0abb4808 # v4.3.3 + uses: actions/upload-artifact@0b2256b8c012f0828dc542b3febcab082c67f72b # v4.3.4 if: ${{ !cancelled() }} with: name: test-results-python diff --git a/.github/workflows/job_pytorch_models_tests.yml b/.github/workflows/job_pytorch_models_tests.yml index c740cd89079ec2..b910d9242647b1 100644 --- a/.github/workflows/job_pytorch_models_tests.yml +++ b/.github/workflows/job_pytorch_models_tests.yml @@ -49,19 +49,19 @@ jobs: fi - name: Download OpenVINO package - uses: actions/download-artifact@65a9edc5881444af0b9093a5e628f2fe47ea3b2e # v4.1.7 + uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8 with: name: openvino_package path: ${{ env.INSTALL_DIR }} - name: Download OpenVINO tokenizers extension - uses: actions/download-artifact@65a9edc5881444af0b9093a5e628f2fe47ea3b2e # v4.1.7 + uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8 with: name: openvino_tokenizers_wheel path: ${{ env.INSTALL_DIR }} - name: Download OpenVINO tests package - uses: actions/download-artifact@65a9edc5881444af0b9093a5e628f2fe47ea3b2e # v4.1.7 + uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8 with: name: openvino_tests path: ${{ env.INSTALL_TEST_DIR }} @@ -134,7 +134,7 @@ jobs: if: ${{ inputs.model_scope == 'precommit' || inputs.model_scope == 'nightly_scope1' }} run: | export PYTHONPATH=${MODEL_HUB_TESTS_INSTALL_DIR}:$PYTHONPATH - python3 -m pytest ${MODEL_HUB_TESTS_INSTALL_DIR}/pytorch/ -m ${TYPE} --html=${INSTALL_TEST_DIR}/TEST-torch_model_timm_tv_tests.html --self-contained-html -v -n 4 -k "TestTimmConvertModel or TestTorchHubConvertModel" + python3 -m pytest ${MODEL_HUB_TESTS_INSTALL_DIR}/pytorch/ -m ${TYPE} --html=${INSTALL_TEST_DIR}/TEST-torch_model_timm_tv_${{ inputs.model_scope }}_tests.html --self-contained-html -v -n 2 -k "TestTimmConvertModel or TestTorchHubConvertModel" env: TYPE: ${{ inputs.model_scope == 'precommit' && 'precommit' || 'nightly' }} TEST_DEVICE: CPU @@ -144,7 +144,7 @@ jobs: if: ${{ inputs.model_scope == 'precommit' || inputs.model_scope == 'nightly_scope2' }} run: | export PYTHONPATH=${MODEL_HUB_TESTS_INSTALL_DIR}:$PYTHONPATH - python3 -m pytest ${MODEL_HUB_TESTS_INSTALL_DIR}/pytorch -m ${TYPE} --html=${INSTALL_TEST_DIR}/TEST-torch_model_tests.html --self-contained-html -v -k "not (TestTimmConvertModel or TestTorchHubConvertModel)" + python3 -m pytest ${MODEL_HUB_TESTS_INSTALL_DIR}/pytorch -m ${TYPE} --html=${INSTALL_TEST_DIR}/TEST-torch_model_${{ inputs.model_scope }}_tests.html --self-contained-html -v -k "not (TestTimmConvertModel or TestTorchHubConvertModel)" env: TYPE: ${{ inputs.model_scope == 'precommit' && 'precommit' || 'nightly' }} TEST_DEVICE: CPU @@ -155,7 +155,7 @@ jobs: if: ${{ inputs.model_scope == 'precommit' }} run: | export PYTHONPATH=${MODEL_HUB_TESTS_INSTALL_DIR}:$PYTHONPATH - python3 -m pytest ${MODEL_HUB_TESTS_INSTALL_DIR}/transformation_tests/test_pa_transformation.py -m precommit --html=${INSTALL_TEST_DIR}/TEST-torch_pagedattention_tests.html --self-contained-html -v --tb=short -n 4 + 
python3 -m pytest ${MODEL_HUB_TESTS_INSTALL_DIR}/transformation_tests/test_pa_transformation.py -m precommit --html=${INSTALL_TEST_DIR}/TEST-torch_pagedattention_tests.html --self-contained-html -v --tb=short -n 2 env: TEST_DEVICE: CPU USE_SYSTEM_CACHE: False @@ -180,10 +180,10 @@ jobs: df -h - name: Upload Test Results - uses: actions/upload-artifact@65462800fd760344b1a7b4382951275a0abb4808 # v4.3.3 + uses: actions/upload-artifact@0b2256b8c012f0828dc542b3febcab082c67f72b # v4.3.4 if: ${{ !cancelled() }} with: - name: test-results-torch-models + name: test-results-torch-models-${{ inputs.model_scope == 'precommit' }} path: | ${{ env.INSTALL_TEST_DIR }}/TEST-torch* if-no-files-found: 'error' diff --git a/.github/workflows/job_samples_tests.yml b/.github/workflows/job_samples_tests.yml index e453210d58b13b..2fce9965e36b6c 100644 --- a/.github/workflows/job_samples_tests.yml +++ b/.github/workflows/job_samples_tests.yml @@ -34,13 +34,13 @@ jobs: BUILD_DIR: ${{ github.workspace }}/build steps: - name: Download OpenVINO package - uses: actions/download-artifact@65a9edc5881444af0b9093a5e628f2fe47ea3b2e # v4.1.7 + uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8 with: name: openvino_package path: ${{ env.INSTALL_DIR }} - name: Download OpenVINO tests package - uses: actions/download-artifact@65a9edc5881444af0b9093a5e628f2fe47ea3b2e # v4.1.7 + uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8 with: name: openvino_tests path: ${{ env.INSTALL_TEST_DIR }} diff --git a/.github/workflows/job_tensorflow_layer_tests.yml b/.github/workflows/job_tensorflow_layer_tests.yml index 168d9bf61308d7..9c2392093ab446 100644 --- a/.github/workflows/job_tensorflow_layer_tests.yml +++ b/.github/workflows/job_tensorflow_layer_tests.yml @@ -44,19 +44,19 @@ jobs: LAYER_TESTS_INSTALL_DIR: ${{ github.workspace }}/install/tests/layer_tests steps: - name: Download OpenVINO package - uses: actions/download-artifact@65a9edc5881444af0b9093a5e628f2fe47ea3b2e # v4.1.7 + uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8 with: name: openvino_package path: ${{ env.INSTALL_DIR }} - name: Download OpenVINO tests package - uses: actions/download-artifact@65a9edc5881444af0b9093a5e628f2fe47ea3b2e # v4.1.7 + uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8 with: name: openvino_tests path: ${{ env.INSTALL_TEST_DIR }} - name: Download OpenVINO tokenizers extension - uses: actions/download-artifact@65a9edc5881444af0b9093a5e628f2fe47ea3b2e # v4.1.7 + uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8 with: name: openvino_tokenizers_wheel path: ${{ env.INSTALL_DIR }} @@ -158,7 +158,7 @@ jobs: TEST_PRECISION: FP16 - name: Upload Test Results - uses: actions/upload-artifact@65462800fd760344b1a7b4382951275a0abb4808 # v4.3.3 + uses: actions/upload-artifact@0b2256b8c012f0828dc542b3febcab082c67f72b # v4.3.4 if: ${{ !cancelled() }} with: name: test-results-python-tf-layers diff --git a/.github/workflows/job_tensorflow_models_tests.yml b/.github/workflows/job_tensorflow_models_tests.yml index 1a452c94db0ace..ab8163139e4a2b 100644 --- a/.github/workflows/job_tensorflow_models_tests.yml +++ b/.github/workflows/job_tensorflow_models_tests.yml @@ -37,19 +37,19 @@ jobs: NUMBER_OF_REPLICAS: 2 steps: - name: Download OpenVINO package - uses: actions/download-artifact@65a9edc5881444af0b9093a5e628f2fe47ea3b2e # v4.1.7 + uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8 
with: name: openvino_package path: ${{ env.INSTALL_DIR }} - name: Download OpenVINO tokenizers extension - uses: actions/download-artifact@65a9edc5881444af0b9093a5e628f2fe47ea3b2e # v4.1.7 + uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8 with: name: openvino_tokenizers_wheel path: ${{ env.INSTALL_DIR }} - name: Download OpenVINO tests package - uses: actions/download-artifact@65a9edc5881444af0b9093a5e628f2fe47ea3b2e # v4.1.7 + uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8 with: name: openvino_tests path: ${{ env.INSTALL_TEST_DIR }} @@ -114,7 +114,7 @@ jobs: TEST_DEVICE: CPU - name: Upload Test Results - uses: actions/upload-artifact@65462800fd760344b1a7b4382951275a0abb4808 # v4.3.3 + uses: actions/upload-artifact@0b2256b8c012f0828dc542b3febcab082c67f72b # v4.3.4 if: ${{ !cancelled() }} with: name: test-results-tensorflow-models-${{ inputs.model_scope }} diff --git a/.github/workflows/job_tokenizers.yml b/.github/workflows/job_tokenizers.yml index 9cf1acc05e7220..e1ef48b14ee7d9 100644 --- a/.github/workflows/job_tokenizers.yml +++ b/.github/workflows/job_tokenizers.yml @@ -73,7 +73,7 @@ jobs: ref: 'master' - name: Download OpenVINO package - uses: actions/download-artifact@65a9edc5881444af0b9093a5e628f2fe47ea3b2e # v4.1.7 + uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8 with: name: openvino_package path: ${{ env.INSTALL_DIR }} @@ -137,7 +137,7 @@ jobs: - name: Upload openvino tokenizers wheel if: ${{ always() }} - uses: actions/upload-artifact@65462800fd760344b1a7b4382951275a0abb4808 # v4.3.3 + uses: actions/upload-artifact@0b2256b8c012f0828dc542b3febcab082c67f72b # v4.3.4 with: name: openvino_tokenizers_wheel path: ${{ env.EXTENSION_BUILD_DIR }}/*.whl diff --git a/.github/workflows/linux.yml b/.github/workflows/linux.yml index da0309e20b37bd..80ad7ffa92c4f1 100644 --- a/.github/workflows/linux.yml +++ b/.github/workflows/linux.yml @@ -109,6 +109,9 @@ jobs: BUILD_DIR: /__w/openvino/openvino/openvino_build SCCACHE_AZURE_KEY_PREFIX: ubuntu20_x86_64_Release ONNX_RUNTIME_UTILS: /__w/openvino/openvino/openvino/src/frontends/onnx/tests/ci_utils/onnxruntime + ARTIFACTS_SHARE: "/mount/build-artifacts" + MANIFEST_PATH: '/__w/openvino/openvino/manifest.yml' + PRODUCT_TYPE: 'public_linux_ubuntu_20_04_release' if: "!needs.smart_ci.outputs.skip_workflow" steps: @@ -135,6 +138,18 @@ jobs: submodules: 'true' ref: 'master' + - name: Generate product manifest and set CI_BUILD_NUMBER & CI_BUILD_DEV_TAG + id: create_manifest + uses: ./openvino/.github/actions/create_manifest + with: + repos: | + ${{ env.OPENVINO_REPO }} + ${{ env.OPENVINO_CONTRIB_REPO }} + product_type: ${{ env.PRODUCT_TYPE }} + target_arch: 'intel64' + build_type: 'release' + save_to: ${{ env.MANIFEST_PATH }} + # # Print system info # @@ -254,7 +269,7 @@ jobs: # Upload build artifacts and logs # - name: Upload build logs - uses: actions/upload-artifact@65462800fd760344b1a7b4382951275a0abb4808 # v4.3.3 + uses: actions/upload-artifact@0b2256b8c012f0828dc542b3febcab082c67f72b # v4.3.4 if: always() with: name: build_logs @@ -263,7 +278,7 @@ jobs: - name: Upload openvino package if: ${{ always() }} - uses: actions/upload-artifact@65462800fd760344b1a7b4382951275a0abb4808 # v4.3.3 + uses: actions/upload-artifact@0b2256b8c012f0828dc542b3febcab082c67f72b # v4.3.4 with: name: openvino_package path: ${{ env.BUILD_DIR }}/openvino_package.tar.gz @@ -271,7 +286,7 @@ jobs: - name: Upload openvino js package if: 
fromJSON(needs.smart_ci.outputs.affected_components).JS_API - uses: actions/upload-artifact@65462800fd760344b1a7b4382951275a0abb4808 # v4.3.3 + uses: actions/upload-artifact@0b2256b8c012f0828dc542b3febcab082c67f72b # v4.3.4 with: name: openvino_js_package path: ${{ env.INSTALL_DIR_JS }} @@ -279,7 +294,7 @@ jobs: - name: Upload openvino developer package if: ${{ always() }} - uses: actions/upload-artifact@65462800fd760344b1a7b4382951275a0abb4808 # v4.3.3 + uses: actions/upload-artifact@0b2256b8c012f0828dc542b3febcab082c67f72b # v4.3.4 with: name: openvino_developer_package path: ${{ env.BUILD_DIR }}/openvino_developer_package.tar.gz @@ -287,7 +302,7 @@ jobs: - name: Upload openvino debian packages if: ${{ always() }} - uses: actions/upload-artifact@65462800fd760344b1a7b4382951275a0abb4808 # v4.3.3 + uses: actions/upload-artifact@0b2256b8c012f0828dc542b3febcab082c67f72b # v4.3.4 with: name: openvino_debian_packages path: ${{ env.BUILD_DIR }}/*.deb @@ -295,12 +310,34 @@ jobs: - name: Upload openvino tests package if: ${{ always() }} - uses: actions/upload-artifact@65462800fd760344b1a7b4382951275a0abb4808 # v4.3.3 + uses: actions/upload-artifact@0b2256b8c012f0828dc542b3febcab082c67f72b # v4.3.4 with: name: openvino_tests path: ${{ env.BUILD_DIR }}/openvino_tests.tar.gz if-no-files-found: 'error' + - name: Prepare debian packages for storage on share + if: ${{ always() }} + continue-on-error: true + run: | + pushd ${{ env.BUILD_DIR }} + mkdir deb && mv *.deb deb/ + popd + + - name: Store artifacts to a shared drive + id: store_artifacts + if: ${{ always() }} + uses: ./openvino/.github/actions/store_artifacts + with: + artifacts: | + ${{ env.BUILD_DIR }}/openvino_package.tar.gz + ${{ env.BUILD_DIR }}/openvino_developer_package.tar.gz + ${{ env.BUILD_DIR }}/openvino_tests.tar.gz + ${{ env.BUILD_DIR }}/deb + ${{ env.MANIFEST_PATH }} + storage_dir: ${{ env.PRODUCT_TYPE }} + storage_root: ${{ env.ARTIFACTS_SHARE }} + Debian_Packages: name: Debian Packages needs: Build @@ -360,13 +397,13 @@ jobs: # - name: Download OpenVINO package - uses: actions/download-artifact@65a9edc5881444af0b9093a5e628f2fe47ea3b2e # v4.1.7 + uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8 with: name: openvino_package path: ${{ env.INSTALL_DIR }} - name: Download OpenVINO tests package - uses: actions/download-artifact@65a9edc5881444af0b9093a5e628f2fe47ea3b2e # v4.1.7 + uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8 with: name: openvino_tests path: ${{ env.INSTALL_TEST_DIR }} @@ -425,7 +462,7 @@ jobs: - name: Upload Conformance Artifacts if: ${{ always() }} - uses: actions/upload-artifact@65462800fd760344b1a7b4382951275a0abb4808 # v4.3.3 + uses: actions/upload-artifact@0b2256b8c012f0828dc542b3febcab082c67f72b # v4.3.4 with: name: conformance_artifacts_${{ matrix.TEST_TYPE }}-${{ env.TEST_DEVICE }} path: ${{ env.CONFORMANCE_ARTIFACTS_DIR }}/conformance_artifacts.tar.gz @@ -451,7 +488,7 @@ jobs: - name: Upload Conformance Artifacts if: ${{ matrix.TEST_TYPE == 'API' }} - uses: actions/upload-artifact@65462800fd760344b1a7b4382951275a0abb4808 # v4.3.3 + uses: actions/upload-artifact@0b2256b8c012f0828dc542b3febcab082c67f72b # v4.3.4 with: name: conformance_artifacts_${{ matrix.TEST_TYPE }}-TEMPLATE path: ${{ env.CONFORMANCE_ARTIFACTS_DIR }}/conformance_artifacts.tar.gz @@ -553,7 +590,7 @@ jobs: # - /mount:/mount PyTorch_Models_Tests: name: PyTorch Models tests - if: fromJSON(needs.smart_ci.outputs.affected_components).PyTorch_FE.test + if: ${{ github.event_name != 
'schedule' && fromJSON(needs.smart_ci.outputs.affected_components).PyTorch_FE.test }} needs: [ Build, Smart_CI, Openvino_tokenizers ] uses: ./.github/workflows/job_pytorch_models_tests.yml with: @@ -612,13 +649,13 @@ jobs: steps: - name: Download OpenVINO package - uses: actions/download-artifact@65a9edc5881444af0b9093a5e628f2fe47ea3b2e # v4.1.7 + uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8 with: name: openvino_package path: ${{ env.INSTALL_DIR }} - name: Download OpenVINO Developer package - uses: actions/download-artifact@65a9edc5881444af0b9093a5e628f2fe47ea3b2e # v4.1.7 + uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8 with: name: openvino_developer_package path: ${{ env.INSTALL_DIR }} diff --git a/.github/workflows/linux_arm64.yml b/.github/workflows/linux_arm64.yml index 825a4b076d631d..d38eda93e7d2b8 100644 --- a/.github/workflows/linux_arm64.yml +++ b/.github/workflows/linux_arm64.yml @@ -104,6 +104,9 @@ jobs: BUILD_DIR: /__w/openvino/openvino/openvino_build SCCACHE_AZURE_KEY_PREFIX: 'ubuntu20_aarch64_Release' ONNX_RUNTIME_UTILS: /__w/openvino/openvino/openvino/src/frontends/onnx/tests/ci_utils/onnxruntime + ARTIFACTS_SHARE: "/mount/build-artifacts" + MANIFEST_PATH: '/__w/openvino/openvino/manifest.yml' + PRODUCT_TYPE: 'public_linux_ubuntu_20_04_arm64_release' if: "!needs.smart_ci.outputs.skip_workflow" steps: @@ -121,6 +124,18 @@ jobs: submodules: 'true' ref: 'master' + - name: Generate product manifest and set CI_BUILD_NUMBER & CI_BUILD_DEV_TAG + id: create_manifest + uses: ./openvino/.github/actions/create_manifest + with: + repos: | + ${{ env.OPENVINO_REPO }} + ${{ env.OPENVINO_CONTRIB_REPO }} + product_type: ${{ env.PRODUCT_TYPE }} + target_arch: 'aarch64' + build_type: 'release' + save_to: ${{ env.MANIFEST_PATH }} + # # Print system info # @@ -167,7 +182,7 @@ jobs: -DCMAKE_COMPILE_WARNING_AS_ERROR=ON \ -DCMAKE_CXX_COMPILER_LAUNCHER=${{ env.CMAKE_CXX_COMPILER_LAUNCHER }} \ -DCMAKE_C_COMPILER_LAUNCHER=${{ env.CMAKE_C_COMPILER_LAUNCHER }} \ - -DOV_CPU_AARCH64_USE_MULTI_ISA=OFF \ + -DOV_CPU_AARCH64_USE_MULTI_ISA=ON \ -S ${OPENVINO_REPO} \ -B ${BUILD_DIR} @@ -247,7 +262,7 @@ jobs: # Upload build artifacts and logs # - name: Upload build logs - uses: actions/upload-artifact@65462800fd760344b1a7b4382951275a0abb4808 # v4.3.3 + uses: actions/upload-artifact@0b2256b8c012f0828dc542b3febcab082c67f72b # v4.3.4 if: always() with: name: build_logs @@ -256,7 +271,7 @@ jobs: - name: Upload openvino package if: ${{ always() }} - uses: actions/upload-artifact@65462800fd760344b1a7b4382951275a0abb4808 # v4.3.3 + uses: actions/upload-artifact@0b2256b8c012f0828dc542b3febcab082c67f72b # v4.3.4 with: name: openvino_package path: ${{ env.BUILD_DIR }}/openvino_package.tar.gz @@ -264,7 +279,7 @@ jobs: - name: Upload openvino developer package if: ${{ always() }} - uses: actions/upload-artifact@65462800fd760344b1a7b4382951275a0abb4808 # v4.3.3 + uses: actions/upload-artifact@0b2256b8c012f0828dc542b3febcab082c67f72b # v4.3.4 with: name: openvino_developer_package path: ${{ env.BUILD_DIR }}/openvino_developer_package.tar.gz @@ -272,7 +287,7 @@ jobs: - name: Upload openvino js package if: fromJSON(needs.smart_ci.outputs.affected_components).JS_API - uses: actions/upload-artifact@65462800fd760344b1a7b4382951275a0abb4808 # v4.3.3 + uses: actions/upload-artifact@0b2256b8c012f0828dc542b3febcab082c67f72b # v4.3.4 with: name: openvino_js_package path: ${{ env.INSTALL_DIR_JS }} @@ -280,7 +295,7 @@ jobs: - name: Upload openvino debian packages 
if: ${{ 'false' }} - uses: actions/upload-artifact@65462800fd760344b1a7b4382951275a0abb4808 # v4.3.3 + uses: actions/upload-artifact@0b2256b8c012f0828dc542b3febcab082c67f72b # v4.3.4 with: name: openvino_debian_packages path: ${{ env.BUILD_DIR }}/*.deb @@ -288,12 +303,25 @@ jobs: - name: Upload openvino tests package if: ${{ always() }} - uses: actions/upload-artifact@65462800fd760344b1a7b4382951275a0abb4808 # v4.3.3 + uses: actions/upload-artifact@0b2256b8c012f0828dc542b3febcab082c67f72b # v4.3.4 with: name: openvino_tests path: ${{ env.BUILD_DIR }}/openvino_tests.tar.gz if-no-files-found: 'error' + - name: Store artifacts to a shared drive + id: store_artifacts + if: ${{ always() }} + uses: ./openvino/.github/actions/store_artifacts + with: + artifacts: | + ${{ env.BUILD_DIR }}/openvino_package.tar.gz + ${{ env.BUILD_DIR }}/openvino_developer_package.tar.gz + ${{ env.BUILD_DIR }}/openvino_tests.tar.gz + ${{ env.MANIFEST_PATH }} + storage_dir: ${{ env.PRODUCT_TYPE }} + storage_root: ${{ env.ARTIFACTS_SHARE }} + Debian_Packages: name: Debian Packages needs: Build diff --git a/.github/workflows/linux_conditional_compilation.yml b/.github/workflows/linux_conditional_compilation.yml index cdae9c77af9b40..cfccad5fe23e12 100644 --- a/.github/workflows/linux_conditional_compilation.yml +++ b/.github/workflows/linux_conditional_compilation.yml @@ -220,7 +220,7 @@ jobs: # Upload build artifacts and logs # - name: Upload build logs - uses: actions/upload-artifact@65462800fd760344b1a7b4382951275a0abb4808 # v4.3.3 + uses: actions/upload-artifact@0b2256b8c012f0828dc542b3febcab082c67f72b # v4.3.4 if: always() with: name: build_logs @@ -229,7 +229,7 @@ jobs: - name: Upload openvino package if: ${{ always() }} - uses: actions/upload-artifact@65462800fd760344b1a7b4382951275a0abb4808 # v4.3.3 + uses: actions/upload-artifact@0b2256b8c012f0828dc542b3febcab082c67f72b # v4.3.4 with: name: openvino_package path: ${{ env.BUILD_DIR }}/openvino_package.tar.gz @@ -237,7 +237,7 @@ jobs: - name: Upload selective build statistics package if: ${{ always() }} - uses: actions/upload-artifact@65462800fd760344b1a7b4382951275a0abb4808 # v4.3.3 + uses: actions/upload-artifact@0b2256b8c012f0828dc542b3febcab082c67f72b # v4.3.4 with: name: openvino_selective_build_stat path: ${{ env.BUILD_DIR }}/openvino_selective_build_stat.tar.gz @@ -245,7 +245,7 @@ jobs: - name: Upload OpenVINO tests package if: ${{ always() }} - uses: actions/upload-artifact@65462800fd760344b1a7b4382951275a0abb4808 # v4.3.3 + uses: actions/upload-artifact@0b2256b8c012f0828dc542b3febcab082c67f72b # v4.3.4 with: name: openvino_tests path: ${{ env.BUILD_DIR }}/openvino_tests.tar.gz @@ -293,7 +293,7 @@ jobs: ref: 'master' - name: Download selective build statistics package - uses: actions/download-artifact@65a9edc5881444af0b9093a5e628f2fe47ea3b2e # v4.1.7 + uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8 with: name: openvino_selective_build_stat path: ${{ env.SELECTIVE_BUILD_STAT_DIR }} diff --git a/.github/workflows/linux_sanitizers.yml b/.github/workflows/linux_sanitizers.yml index 6f089f205d3b1d..5227eb3eacdac9 100644 --- a/.github/workflows/linux_sanitizers.yml +++ b/.github/workflows/linux_sanitizers.yml @@ -186,7 +186,7 @@ jobs: - name: Upload openvino package if: ${{ always() }} - uses: actions/upload-artifact@65462800fd760344b1a7b4382951275a0abb4808 # v4.3.3 + uses: actions/upload-artifact@0b2256b8c012f0828dc542b3febcab082c67f72b # v4.3.4 with: name: openvino_package_${{ matrix.SANITIZER }} path: ${{ env.BUILD_DIR 
}}/openvino_package.tar.gz @@ -194,7 +194,7 @@ jobs: - name: Upload openvino tests package if: ${{ always() }} - uses: actions/upload-artifact@65462800fd760344b1a7b4382951275a0abb4808 # v4.3.3 + uses: actions/upload-artifact@0b2256b8c012f0828dc542b3febcab082c67f72b # v4.3.4 with: name: openvino_tests_${{ matrix.SANITIZER }} path: ${{ env.BUILD_DIR }}/openvino_tests.tar.gz @@ -230,13 +230,13 @@ jobs: run: echo 'Acquire::Retries "10";' > /etc/apt/apt.conf.d/80-retries - name: Download OpenVINO package - uses: actions/download-artifact@65a9edc5881444af0b9093a5e628f2fe47ea3b2e # v4.1.7 + uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8 with: name: ${{ format('openvino_package_{0}', matrix.SANITIZER) }} path: ${{ env.INSTALL_DIR }} - name: Download OpenVINO tests package - uses: actions/download-artifact@65a9edc5881444af0b9093a5e628f2fe47ea3b2e # v4.1.7 + uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8 with: name: ${{ format('openvino_tests_{0}', matrix.SANITIZER) }} path: ${{ env.INSTALL_TEST_DIR }} @@ -462,7 +462,7 @@ jobs: ${INSTALL_TEST_DIR}/ov_hetero_func_tests --gtest_print_time=1 --gtest_output=xml:${INSTALL_TEST_DIR}/TEST-OVHeteroFuncTests.xml --gtest_filter="*smoke*" - name: Upload Test Results - uses: actions/upload-artifact@65462800fd760344b1a7b4382951275a0abb4808 # v4.3.3 + uses: actions/upload-artifact@0b2256b8c012f0828dc542b3febcab082c67f72b # v4.3.4 if: ${{ !cancelled() }} with: name: test-results-cpp diff --git a/.github/workflows/mac.yml b/.github/workflows/mac.yml index 32f5474d14ce76..3880f8333f18c5 100644 --- a/.github/workflows/mac.yml +++ b/.github/workflows/mac.yml @@ -204,7 +204,7 @@ jobs: - name: Upload openvino package if: ${{ always() }} - uses: actions/upload-artifact@65462800fd760344b1a7b4382951275a0abb4808 # v4.3.3 + uses: actions/upload-artifact@0b2256b8c012f0828dc542b3febcab082c67f72b # v4.3.4 with: name: openvino_package path: ${{ env.BUILD_DIR }}/openvino_package.tar.gz @@ -212,7 +212,7 @@ jobs: - name: Upload openvino tests package if: ${{ always() }} - uses: actions/upload-artifact@65462800fd760344b1a7b4382951275a0abb4808 # v4.3.3 + uses: actions/upload-artifact@0b2256b8c012f0828dc542b3febcab082c67f72b # v4.3.4 with: name: openvino_tests path: ${{ env.BUILD_DIR }}/openvino_tests.tar.gz @@ -220,7 +220,7 @@ jobs: - name: Upload openvino js package if: fromJSON(needs.smart_ci.outputs.affected_components).JS_API - uses: actions/upload-artifact@65462800fd760344b1a7b4382951275a0abb4808 # v4.3.3 + uses: actions/upload-artifact@0b2256b8c012f0828dc542b3febcab082c67f72b # v4.3.4 with: name: openvino_js_package path: ${{ env.INSTALL_DIR_JS }} diff --git a/.github/workflows/mac_arm64.yml b/.github/workflows/mac_arm64.yml index 26eb440eb87cb2..8386f54719b02c 100644 --- a/.github/workflows/mac_arm64.yml +++ b/.github/workflows/mac_arm64.yml @@ -204,7 +204,7 @@ jobs: - name: Upload openvino package if: ${{ always() }} - uses: actions/upload-artifact@65462800fd760344b1a7b4382951275a0abb4808 # v4.3.3 + uses: actions/upload-artifact@0b2256b8c012f0828dc542b3febcab082c67f72b # v4.3.4 with: name: openvino_package path: ${{ env.BUILD_DIR }}/openvino_package.tar.gz @@ -212,7 +212,7 @@ jobs: - name: Upload openvino tests package if: ${{ always() }} - uses: actions/upload-artifact@65462800fd760344b1a7b4382951275a0abb4808 # v4.3.3 + uses: actions/upload-artifact@0b2256b8c012f0828dc542b3febcab082c67f72b # v4.3.4 with: name: openvino_tests path: ${{ env.BUILD_DIR }}/openvino_tests.tar.gz @@ -220,7 +220,7 @@ jobs: - name: 
Upload openvino js package if: fromJSON(needs.smart_ci.outputs.affected_components).JS_API - uses: actions/upload-artifact@65462800fd760344b1a7b4382951275a0abb4808 # v4.3.3 + uses: actions/upload-artifact@0b2256b8c012f0828dc542b3febcab082c67f72b # v4.3.4 with: name: openvino_js_package path: ${{ env.INSTALL_DIR_JS }} diff --git a/.github/workflows/py_checks.yml b/.github/workflows/py_checks.yml index 3a9b23ea2685db..7bd7fe3d840222 100644 --- a/.github/workflows/py_checks.yml +++ b/.github/workflows/py_checks.yml @@ -49,7 +49,7 @@ jobs: git diff > samples_diff.diff working-directory: samples/python - - uses: actions/upload-artifact@65462800fd760344b1a7b4382951275a0abb4808 # v4.3.3 + - uses: actions/upload-artifact@0b2256b8c012f0828dc542b3febcab082c67f72b # v4.3.4 if: failure() with: name: samples_diff @@ -67,7 +67,7 @@ jobs: git diff > pyopenvino_diff.diff working-directory: src/bindings/python/src/openvino - - uses: actions/upload-artifact@65462800fd760344b1a7b4382951275a0abb4808 # v4.3.3 + - uses: actions/upload-artifact@0b2256b8c012f0828dc542b3febcab082c67f72b # v4.3.4 if: failure() with: name: pyopenvino_diff @@ -85,7 +85,7 @@ jobs: git diff > wheel_diff.diff working-directory: src/bindings/python/wheel - - uses: actions/upload-artifact@65462800fd760344b1a7b4382951275a0abb4808 # v4.3.3 + - uses: actions/upload-artifact@0b2256b8c012f0828dc542b3febcab082c67f72b # v4.3.4 if: failure() with: name: wheel_diff diff --git a/.github/workflows/windows.yml b/.github/workflows/windows.yml index 0e49752fc92968..26ed3615ea973a 100644 --- a/.github/workflows/windows.yml +++ b/.github/workflows/windows.yml @@ -64,6 +64,9 @@ jobs: INSTALL_DIR_JS: "${{ github.workspace }}\\openvino_install\\js" INSTALL_TEST_DIR: "${{ github.workspace }}\\tests_install" BUILD_DIR: "${{ github.workspace }}\\openvino_build" + ARTIFACTS_SHARE: "C:\\mount\\build-artifacts" + MANIFEST_PATH: "${{ github.workspace }}\\manifest.yml" + PRODUCT_TYPE: 'public_windows_vs2019_release' # TODO: specify version of compiler here if: ${{ !needs.smart_ci.outputs.skip_workflow && github.event_name != 'merge_group' }} @@ -81,6 +84,27 @@ jobs: path: 'openvino_contrib' ref: 'master' + - name: Setup Python ${{ env.PYTHON_VERSION }} + uses: ./openvino/.github/actions/setup_python + with: + version: ${{ env.PYTHON_VERSION }} + pip-cache-path: ${{ env.PIP_CACHE_PATH }} + should-setup-pip-paths: 'true' + self-hosted-runner: 'true' + show-cache-info: 'true' + + - name: Generate product manifest and set CI_BUILD_NUMBER & CI_BUILD_DEV_TAG + id: create_manifest + uses: ./openvino/.github/actions/create_manifest + with: + repos: | + ${{ env.OPENVINO_REPO }} + ${{ env.OPENVINO_CONTRIB_REPO }} + product_type: ${{ env.PRODUCT_TYPE }} + target_arch: 'intel64' + build_type: 'release' + save_to: ${{ env.MANIFEST_PATH }} + # # Print system info # @@ -92,15 +116,6 @@ jobs: # Dependencies # - - name: Setup Python ${{ env.PYTHON_VERSION }} - uses: ./openvino/.github/actions/setup_python - with: - version: ${{ env.PYTHON_VERSION }} - pip-cache-path: ${{ env.PIP_CACHE_PATH }} - should-setup-pip-paths: 'true' - self-hosted-runner: 'true' - show-cache-info: 'true' - - name: Install python dependencies run: | # For Python API: build and wheel packaging @@ -219,14 +234,14 @@ jobs: # - name: Upload openvino package - uses: actions/upload-artifact@65462800fd760344b1a7b4382951275a0abb4808 # v4.3.3 + uses: actions/upload-artifact@0b2256b8c012f0828dc542b3febcab082c67f72b # v4.3.4 with: name: openvino_package path: ${{ env.BUILD_DIR }}/openvino_package.zip if-no-files-found: 
'error' - name: Upload openvino tests package - uses: actions/upload-artifact@65462800fd760344b1a7b4382951275a0abb4808 # v4.3.3 + uses: actions/upload-artifact@0b2256b8c012f0828dc542b3febcab082c67f72b # v4.3.4 with: name: openvino_tests path: ${{ env.BUILD_DIR }}/openvino_tests.zip @@ -234,12 +249,24 @@ jobs: - name: Upload openvino js package if: fromJSON(needs.smart_ci.outputs.affected_components).JS_API - uses: actions/upload-artifact@65462800fd760344b1a7b4382951275a0abb4808 # v4.3.3 + uses: actions/upload-artifact@0b2256b8c012f0828dc542b3febcab082c67f72b # v4.3.4 with: name: openvino_js_package path: ${{ env.INSTALL_DIR_JS }} if-no-files-found: 'error' + - name: Store artifacts to a shared drive + id: store_artifacts + if: ${{ always() }} + uses: ./openvino/.github/actions/store_artifacts + with: + artifacts: | + ${{ env.BUILD_DIR }}/openvino_package.zip + ${{ env.BUILD_DIR }}/openvino_tests.zip + ${{ env.MANIFEST_PATH }} + storage_dir: ${{ env.PRODUCT_TYPE }} + storage_root: ${{ env.ARTIFACTS_SHARE }} + Samples: needs: [ Build, Smart_CI ] if: fromJSON(needs.smart_ci.outputs.affected_components).samples @@ -257,13 +284,13 @@ jobs: steps: - name: Download OpenVINO package - uses: actions/download-artifact@65a9edc5881444af0b9093a5e628f2fe47ea3b2e # v4.1.7 + uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8 with: name: openvino_package path: ${{ env.INSTALL_DIR }} - name: Download OpenVINO tests package - uses: actions/download-artifact@65a9edc5881444af0b9093a5e628f2fe47ea3b2e # v4.1.7 + uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8 with: name: openvino_tests path: ${{ env.INSTALL_TEST_DIR }} @@ -351,7 +378,7 @@ jobs: path: 'openvino' - name: Download OpenVINO js package - uses: actions/download-artifact@65a9edc5881444af0b9093a5e628f2fe47ea3b2e # v4.1.7 + uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8 with: name: openvino_js_package path: ${{ env.OPENVINO_JS_LIBS_DIR }} @@ -422,13 +449,13 @@ jobs: steps: - name: Download OpenVINO package - uses: actions/download-artifact@65a9edc5881444af0b9093a5e628f2fe47ea3b2e # v4.1.7 + uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8 with: name: openvino_package path: ${{ env.INSTALL_DIR }} - name: Download OpenVINO tests package - uses: actions/download-artifact@65a9edc5881444af0b9093a5e628f2fe47ea3b2e # v4.1.7 + uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8 with: name: openvino_tests path: ${{ env.INSTALL_TEST_DIR }} @@ -580,7 +607,7 @@ jobs: run: python3 -m pytest -s ${{ env.INSTALL_TEST_DIR }}/ovc/unit_tests --junitxml=${{ env.INSTALL_TEST_DIR }}/TEST-OpenVinoConversion.xml - name: Upload Test Results - uses: actions/upload-artifact@65462800fd760344b1a7b4382951275a0abb4808 # v4.3.3 + uses: actions/upload-artifact@0b2256b8c012f0828dc542b3febcab082c67f72b # v4.3.4 if: ${{ !cancelled() }} with: name: test-results-python @@ -610,13 +637,13 @@ jobs: steps: - name: Download OpenVINO package - uses: actions/download-artifact@65a9edc5881444af0b9093a5e628f2fe47ea3b2e # v4.1.7 + uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8 with: name: openvino_package path: ${{ env.INSTALL_DIR }} - name: Download OpenVINO tests package - uses: actions/download-artifact@65a9edc5881444af0b9093a5e628f2fe47ea3b2e # v4.1.7 + uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8 with: name: openvino_tests path: ${{ env.INSTALL_TEST_DIR }} @@ -786,7 
+813,7 @@ jobs: ${{ env.INSTALL_TEST_DIR }}/ov_hetero_func_tests --gtest_print_time=1 --gtest_output=xml:${{ env.INSTALL_TEST_DIR }}/TEST-OVHeteroFuncTests.xml --gtest_filter="*smoke*" - name: Upload Test Results - uses: actions/upload-artifact@65462800fd760344b1a7b4382951275a0abb4808 # v4.3.3 + uses: actions/upload-artifact@0b2256b8c012f0828dc542b3febcab082c67f72b # v4.3.4 if: ${{ !cancelled() }} with: name: test-results-cpp @@ -810,13 +837,13 @@ jobs: if: fromJSON(needs.smart_ci.outputs.affected_components).CPU.test steps: - name: Download OpenVINO package - uses: actions/download-artifact@65a9edc5881444af0b9093a5e628f2fe47ea3b2e # v4.1.7 + uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8 with: name: openvino_package path: ${{ env.INSTALL_DIR }} - name: Download OpenVINO tests package - uses: actions/download-artifact@65a9edc5881444af0b9093a5e628f2fe47ea3b2e # v4.1.7 + uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8 with: name: openvino_tests path: ${{ env.INSTALL_TEST_DIR }} @@ -870,7 +897,7 @@ jobs: key: ${{ runner.os }}-tests-functional-cpu-stamp-${{ github.sha }} - name: Upload Test Results - uses: actions/upload-artifact@65462800fd760344b1a7b4382951275a0abb4808 # v4.3.3 + uses: actions/upload-artifact@0b2256b8c012f0828dc542b3febcab082c67f72b # v4.3.4 if: ${{ !cancelled() }} with: name: test-results-functional-cpu diff --git a/.github/workflows/windows_conditional_compilation.yml b/.github/workflows/windows_conditional_compilation.yml index 977f9aee91bcce..963a6edb37a56a 100644 --- a/.github/workflows/windows_conditional_compilation.yml +++ b/.github/workflows/windows_conditional_compilation.yml @@ -247,7 +247,7 @@ jobs: - name: Upload selective build statistics package if: ${{ always() }} - uses: actions/upload-artifact@65462800fd760344b1a7b4382951275a0abb4808 # v4.3.3 + uses: actions/upload-artifact@0b2256b8c012f0828dc542b3febcab082c67f72b # v4.3.4 with: name: openvino_selective_build_stat path: ${{ env.BUILD_DIR }}/openvino_selective_build_stat.zip @@ -255,7 +255,7 @@ jobs: - name: Upload OpenVINO tests package if: ${{ always() }} - uses: actions/upload-artifact@65462800fd760344b1a7b4382951275a0abb4808 # v4.3.3 + uses: actions/upload-artifact@0b2256b8c012f0828dc542b3febcab082c67f72b # v4.3.4 with: name: openvino_tests path: ${{ env.BUILD_DIR }}/openvino_tests.zip @@ -292,7 +292,7 @@ jobs: ref: 'master' - name: Download selective build statistics package - uses: actions/download-artifact@65a9edc5881444af0b9093a5e628f2fe47ea3b2e # v4.1.7 + uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8 with: name: openvino_selective_build_stat path: ${{ env.SELECTIVE_BUILD_STAT_DIR }} @@ -355,7 +355,7 @@ jobs: steps: - name: Download OpenVINO tests package - uses: actions/download-artifact@65a9edc5881444af0b9093a5e628f2fe47ea3b2e # v4.1.7 + uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8 with: name: openvino_tests path: ${{ env.INSTALL_TEST_DIR }} @@ -397,7 +397,7 @@ jobs: timeout-minutes: 60 - name: Upload Test Results - uses: actions/upload-artifact@65462800fd760344b1a7b4382951275a0abb4808 # v4.3.3 + uses: actions/upload-artifact@0b2256b8c012f0828dc542b3febcab082c67f72b # v4.3.4 if: ${{ !cancelled() }} with: name: test-results-functional-cpu diff --git a/cmake/toolchains/onecoreuap.toolchain.cmake b/cmake/toolchains/onecoreuap.toolchain.cmake index af4285ee124117..b9c71254e09c15 100644 --- a/cmake/toolchains/onecoreuap.toolchain.cmake +++ 
b/cmake/toolchains/onecoreuap.toolchain.cmake @@ -50,8 +50,12 @@ endif() unset(_onecoreuap_arch) # compile flags +if(CMAKE_GENERATOR MATCHES "Ninja") + set(includes "/I\"\$\$\(UniversalCRT_IncludePath\)\"") +else() + set(includes "/I\"\$\(UniversalCRT_IncludePath\)\"") +endif() -set(includes "/I\"\$\(UniversalCRT_IncludePath\)\"") set(CMAKE_C_FLAGS_INIT "${CMAKE_C_FLAGS_INIT} ${includes}") set(CMAKE_CXX_FLAGS_INIT "${CMAKE_CXX_FLAGS_INIT} ${includes}") unset(includes) diff --git a/docs/articles_en/about-openvino.rst b/docs/articles_en/about-openvino.rst index a9b599960d2e2b..dbe5f6d3c1061f 100644 --- a/docs/articles_en/about-openvino.rst +++ b/docs/articles_en/about-openvino.rst @@ -1,5 +1,3 @@ -.. {#about_openvino} - About OpenVINO ============== @@ -10,6 +8,7 @@ About OpenVINO about-openvino/performance-benchmarks about-openvino/compatibility-and-support + about-openvino/contributing Release Notes OpenVINO is a toolkit for simple and efficient deployment of various deep learning models. diff --git a/docs/articles_en/about-openvino/contributing.rst b/docs/articles_en/about-openvino/contributing.rst new file mode 100644 index 00000000000000..f14e5f58249259 --- /dev/null +++ b/docs/articles_en/about-openvino/contributing.rst @@ -0,0 +1,169 @@ +Contribute to OpenVINO +======================== + +.. toctree:: + :maxdepth: 1 + :hidden: + + contributing/code-contribution-guide + +OpenVINO™ is always looking for opportunities to improve and your contributions +play a big role in this process. Here are four ways you can make OpenVINO better: + +- `Provide feedback <#provide-feedback>`__ +- `Contribute code changes <#contribute-code-changes>`__ +- `Improve documentation <#improve-documentation>`__ +- `Promote and support OpenVINO <#promote-and-support-openvino>`__ + + +:fas:`comments` Provide feedback +################################ + +.. rubric:: Report bugs / issues + :name: report-bugs-issues + +If you notice unexpected behavior in OpenVINO or its components, you can +`create a new issue `__ +in the GitHub issue tracker. + +.. rubric:: Propose improvements + :name: propose-improvements + +If you want to share your ideas for improving OpenVINO: + +- Open a new `GitHub Discussion `__. +- Create a `Feature Request Issue `__ + if your idea is already well defined. + +In both cases, provide a detailed description and list potential use cases, +benefits, and challenges. Keep in mind that even if your input is not immediately +prioritized, it may be used at a later or undertaken by the community. + + +:fas:`code-branch` Contribute code changes +########################################## + +Always check if the change is still needed! Verify if +`the issue `__ or +`request `__ is still open +and nobody has started working on it. If the ticket is already work in progress, +you can always ask if you can help. + +**Address only the issues that affect the master or** +:doc:`LTS release branches <./release-notes-openvino/release-policy>`. + +**Do not start work on contributions, if a proper issue/ request has not been created.** + +.. tip:: + + If you want to start with something simple, check out + `first-time contributions `__. + + +.. rubric:: Fix bugs + :name: fix-bugs + +Choose one of the issues reported in +`GitHub Issue Tracker `__ and +`create a Pull Request `__ +(PR) addressing it. + +If you find a new bug and want to fix it, you should still +create a new issue before working on the PR. This way, it will be easier for other +developers to track changes. + +.. 
rubric:: Develop new features + :name: develop-new-features + +If you find a `Feature Request `__ +you want to work on, make sure it is clearly defined. If you have any doubts, +or the change is complex, `discuss it `__ +with OpenVINO developers first. + +If you have an idea for a new feature and want +to develop it, you should still create a Feature Request before working on the +PR. This way, it will be easier for other developers to track changes. + +.. rubric:: Develop a new device plugin + :name: develop-new-device-plugin + +If you want to run inference on a device that is currently not supported, you +can see how to develop a new plugin for it in the +`Plugin Developer Guide `__. + + +:fas:`file-alt` Improve documentation +##################################### + +OpenVINO user documentation is built from several sources, mainly the files in +the `docs/articles_en `__ +folder, using `Sphinx `__ and the +`reStructuredText `__ +markup language. + +OpenVINO `developer documentation `__ +is available only in markdown in the `docs/dev `__ +folder. + +To edit docs, consider using the Editor’s +`guide `__ +and contacting `documentation maintainers `__, +who will help you with information architecture and formatting, as well as +review, adjust, and merge the PR. + +.. rubric:: Review user documentation + :name: review-user-documentation + +In most cases, creating a PR is enough to correct a documentation mistake, improve +the language, and update or extend the information. For your convenience, the +top-right panel of most pages includes the “Edit on GitHub” button that will +take you to the source file of the given article. + +.. rubric:: Write new content + :name: write-new-content + +For more extensive changes in docs, reach out to any of the +`documentation maintainers `__ +to discuss the new content. + + +:fas:`bullhorn` Promote and support OpenVINO +############################################ + +.. rubric:: Popularize OpenVINO + :name: popularize-openvino + +Articles, tutorials, blog posts, demos, videos, and any other involvement in the +OpenVINO community is more than welcome. If you discuss or present OpenVINO on +various social platforms, you are raising awareness of the product among AI +enthusiasts and enabling other people to discover the toolkit. + +Feel free to reach out to OpenVINO developers if you need help with making a +contribution. You can also contact +`documentation maintainers `__ +, if you need help with visuals, brand materials, or content creation in general. + +.. rubric:: Help other community members + :name: help-community + +If you are an experienced OpenVINO user and want to help, you can share your +expertise with the community at any time. Check GitHub +`Discussions `__ +and `Issues `__ to see if +you can help someone. + +.. note:: + + By contributing to the OpenVINO project, you agree that your contributions + will be licensed under `the terms of the OpenVINO repository `__. + + +Additional Resources +##################### + +- :doc:`Code Contribution Guide <./contributing/code-contribution-guide>` +- Choose a `"Good First Issue" `__. +- Learn more about `OpenVINO architecture `__. +- Check out a `blog post on contributing to OpenVINO `__. +- Visit `Intel DevHub Discord server `__ to join + discussions and talk to OpenVINO developers. 
\ No newline at end of file diff --git a/docs/articles_en/about-openvino/contributing/code-contribution-guide.rst b/docs/articles_en/about-openvino/contributing/code-contribution-guide.rst new file mode 100644 index 00000000000000..a74bb586e18130 --- /dev/null +++ b/docs/articles_en/about-openvino/contributing/code-contribution-guide.rst @@ -0,0 +1,88 @@ +Code Contribution Guide +======================= + +This section will start you off with a few simple steps to begin your code contribution. +If you have any doubts, talk to +`the development team `__. +Remember, your questions help us keep improving OpenVINO. + + +1. **Choose the issue you want to work on.** + + Choose one of the existing `issues `__ / + requests. The `“Good First Issue” `__ + board is a good place to start. If you have a new idea for the contribution, + make sure to first create a proper issue, discussion, or feature request. + + Here are some of the components you may choose to work on. + + .. tab-set:: + + .. tab-item:: APIs + + - `Core C++ API `__ + - `C API `__ + - `Python API `__ + - `JavaScript (Node.js) API `__ + + .. tab-item:: Frontends + + - `IR Frontend `__ + - `ONNX Frontend `__ + - `PaddlePaddle Frontend `__ + - `PyTorch Frontend `__ + - `TensorFlow Frontend `__ + - `TensorFlow Lite Frontend `__ + + .. tab-item:: Plugins + + - `Auto plugin `__ + - `CPU plugin `__ + - `GPU plugin `__ + - `NPU plugin `__ + - `Hetero plugin `__ + - `Template plugin `__ + + .. tab-item:: Tools + + - `Benchmark Tool `__ + - `Model Conversion `__ + +2. **Assign yourself to the issue.** + + To get assigned to a task, simply leave a comment with the ``.take`` command in + the selected issue. You can always ask OpenVINO developers for guidance, + both technical and organizational: + + - assign users in the **“Contact points”** section, + - visit `Intel DevHub Discord server `__ to ask + questions in the channel dedicated to **“Good First Issue”** support, or any other. + +3. **Build OpenVINO.** + + In order to build OpenVINO, follow the + `build instructions for your specific OS `__. + + Use the local build and the information found in the issue description to + develop your contribution. + +4. **Submit a PR with your changes.** + + Follow the `guidelines `__ + and do not forget to `link your Pull Request to the issue `__ + it addresses. + +5. **Wait for a review.** + + We will make sure to review your **Pull Request** as soon as possible and provide feedback. + You can expect a merge once your changes have been validated with automatic tests and + approved by `maintainers `__. + + +Additional Resources +##################### + +- Choose a `“Good First Issue” `__. +- Learn more about `OpenVINO architecture `__. +- Check out a `blog post on contributing to OpenVINO `__. +- Visit `Intel DevHub Discord server `__ to join discussions and talk to OpenVINO developers. 
\ No newline at end of file diff --git a/docs/articles_en/assets/images/quantization_scheme.svg b/docs/articles_en/assets/images/quantization_scheme.svg new file mode 100644 index 00000000000000..b58934ec08e57d --- /dev/null +++ b/docs/articles_en/assets/images/quantization_scheme.svg @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:d197730e090d582d7ae1f68d139564b845bba5eb9aa168437c2b80f53545e706 +size 100328 diff --git a/docs/articles_en/assets/snippets/main.py b/docs/articles_en/assets/snippets/main.py index a063a1645f0ec1..4d5429cd4b7925 100644 --- a/docs/articles_en/assets/snippets/main.py +++ b/docs/articles_en/assets/snippets/main.py @@ -9,7 +9,7 @@ from contextlib import redirect_stdout, redirect_stderr -skip_snippets = ["main.py", "__init__.py", "utils.py", "ov_common.py"] +skip_snippets = ["main.py", "__init__.py", "utils.py", "ov_common.py", "ov_stateful_model_intro.py"] def import_python_modules(directory, subdirectory=""): for item in os.listdir(directory): diff --git a/docs/articles_en/assets/snippets/npu_remote_objects_creation.cpp b/docs/articles_en/assets/snippets/npu_remote_objects_creation.cpp new file mode 100644 index 00000000000000..75eb50839ca117 --- /dev/null +++ b/docs/articles_en/assets/snippets/npu_remote_objects_creation.cpp @@ -0,0 +1,67 @@ +// Copyright (C) 2018-2024 Intel Corporation +// SPDX-License-Identifier: Apache-2.0 +// + +#include +#include +#include + +int main() { + ov::Core core; + auto model = core.read_model("model.xml"); + auto input = model->get_parameters().at(0); + + auto compiled_model = core.compile_model(model, "NPU"); + auto npu_context = compiled_model.get_context().as(); + + auto in_element_type = input->get_element_type(); + auto in_shape = input->get_shape(); + + { + //! [default_context_from_core] + auto npu_context = core.get_default_context("NPU").as(); + // Extract raw level zero context handle from RemoteContext + void* context_handle = npu_context.get(); + //! [default_context_from_core] + } + + { + //! [default_context_from_model] + auto npu_context = compiled_model.get_context().as(); + // Extract raw level zero context handle from RemoteContext + void* context_handle = npu_context.get(); + //! [default_context_from_model] + } + + { + //! [wrap_nt_handle] + void* shared_buffer = nullptr; // create the NT handle + auto remote_tensor = npu_context.create_tensor(in_element_type, in_shape, shared_buffer); + //! [wrap_nt_handle] + } + + { + //! [wrap_dmabuf_fd] + int32_t fd_heap; // create the DMA-BUF System Heap file descriptor + auto remote_tensor = npu_context.create_tensor(in_element_type, in_shape, fd_heap); + //! [wrap_dmabuf_fd] + } + + { + //! [allocate_remote_level_zero_host] + auto remote_tensor = npu_context.create_l0_host_tensor(in_element_type, in_shape); + // Extract raw level zero pointer from remote tensor + void* level_zero_ptr = remote_tensor.get(); + //! [allocate_remote_level_zero_host] + } + + { + //! [allocate_level_zero_host] + auto tensor = npu_context.create_host_tensor(in_element_type, in_shape); + // Extract raw level zero pointer from remote tensor + void* level_zero_ptr = tensor.data(); + //! [allocate_level_zero_host] + } + + return 0; +} diff --git a/docs/articles_en/assets/snippets/ov_caching.cpp b/docs/articles_en/assets/snippets/ov_caching.cpp index cefb3da55c7827..891d3e9368292d 100644 --- a/docs/articles_en/assets/snippets/ov_caching.cpp +++ b/docs/articles_en/assets/snippets/ov_caching.cpp @@ -1,10 +1,10 @@ #include +//! 
[ov:caching:part0] void part0() { std::string modelPath = "/tmp/myModel.xml"; - std::string device = "GPU"; + std::string device = "GPU"; // For example: "CPU", "GPU", "NPU". ov::AnyMap config; -//! [ov:caching:part0] ov::Core core; // Step 1: create ov::Core object core.set_property(ov::cache_dir("/path/to/cache/dir")); // Step 1b: Enable caching auto model = core.read_model(modelPath); // Step 2: Read Model diff --git a/docs/articles_en/assets/snippets/ov_caching.py b/docs/articles_en/assets/snippets/ov_caching.py index c03e8b34cfe9ce..4ce0b91ccd7506 100644 --- a/docs/articles_en/assets/snippets/ov_caching.py +++ b/docs/articles_en/assets/snippets/ov_caching.py @@ -8,6 +8,7 @@ import openvino.properties as props +# For example: "CPU", "GPU", "NPU". device_name = 'CPU' model_path = get_path_to_model() path_to_cache_dir = get_temp_dir() diff --git a/docs/articles_en/assets/snippets/ov_stateful_model_intro.py b/docs/articles_en/assets/snippets/ov_stateful_model_intro.py new file mode 100644 index 00000000000000..f9e84b2891a7a3 --- /dev/null +++ b/docs/articles_en/assets/snippets/ov_stateful_model_intro.py @@ -0,0 +1,210 @@ +# Copyright (C) 2018-2024 Intel Corporation +# SPDX-License-Identifier: Apache-2.0 + +import logging as log +import numpy as np + +import openvino as ov +from openvino.runtime import opset13 as ops +from openvino.runtime.op.util import VariableInfo, Variable +from openvino.runtime.passes import LowLatency2, MakeStateful, Manager +from openvino.runtime.utils import replace_node + + +def state_model_example(): + #! [ov:stateful_model] + input = ops.parameter([1, 1], dtype=np.float32, name="data") + init_const = ops.constant([[0]], dtype=np.float32) + + # Typically ReadValue/Assign operations are presented as pairs in models. + # ReadValue operation reads information from an internal memory buffer, Assign operation writes data to this buffer. + # For each pair, its own Variable object must be created. + # Variable defines name, shape and type of the buffer. + var_info = VariableInfo() + var_info.data_shape = init_const.get_shape() + var_info.data_type = init_const.get_element_type() + var_info.variable_id = "variable0" + variable = Variable(var_info) + + # Creating Model + read = ops.read_value(init_const, variable) + add = ops.add(input, read) + assign = ops.assign(add, variable) + result = ops.result(add) + model = ov.Model(results=[result], sinks=[assign], parameters=[input], name="model") + #! [ov:stateful_model] + + return model + + +def low_latency_2_example(): + #! [ov:low_latency_2] + # Precondition for Model. + # TensorIterator and Parameter are created in body of TensorIterator with names + tensor_iterator_name = "TI_name" + body_parameter_name = "body_parameter_name" + idx = "0" # this is a first variable in the model + + # The State will be named "TI_name/param_name/variable_0" + state_name = tensor_iterator_name + "//" + body_parameter_name + "//" + "variable_" + idx + + #! [ov:get_ov_model] + core = ov.Core() + ov_model = core.read_model("path_to_the_model") + #! [ov:get_ov_model] + + # reshape input if needed + + #! [ov:reshape_ov_model] + ov_model.reshape({"X": ov.PartialShape([1, 1, 16])}) + #! [ov:reshape_ov_model] + + #! [ov:apply_low_latency_2] + manager = Manager() + manager.register_pass(LowLatency2()) + manager.run_passes(ov_model) + #! 
[ov:apply_low_latency_2] + +    compiled_model = core.compile_model(ov_model) +    # Try to find the Variable by name +    infer_request = compiled_model.create_infer_request() +    states = infer_request.query_state() +    for state in states: +        name = state.get_name() +        if name == state_name: +            # some actions +            #! [ov:low_latency_2] +            pass + +    #! [ov:low_latency_2_use_parameters] +    manager.register_pass(LowLatency2(False)) +    #! [ov:low_latency_2_use_parameters] + + +def replace_non_reshapable_const(): +    #! [ov:replace_const] +    # OpenVINO example. How to replace a Constant with hardcoded values of shapes in the model with another one with the new values. +    # Assume we know which Constant (const_with_hardcoded_shape) prevents the reshape from being applied. +    # Then we can find this Constant by name in the model and replace it with a new one with the correct shape. +    core = ov.Core() +    model = core.read_model("path_to_model") +    # Creating the new Constant with a correct shape. +    # For the example shown in the picture above, the new values of the Constant should be 1, 1, 10 instead of 1, 49, 10 +    new_const = ops.constant( """value_with_correct_shape, type""") +    for node in model.get_ops(): +        # Trying to find the problematic Constant by name. +        if node.get_friendly_name() != "name_of_non_reshapable_const": +            continue +        # Replacing the problematic Constant with a new one. Do this for all the problematic Constants in the model, then +        # you can apply the reshape feature. +        replace_node(node, new_const) + +    #! [ov:replace_const] + + +def apply_make_stateful_tensor_names(): +    #! [ov:make_stateful_tensor_names] +    core = ov.Core() +    ov_model = core.read_model("path_to_the_model") +    tensor_names = {"tensor_name_1": "tensor_name_4", +                    "tensor_name_3": "tensor_name_6"} +    manager = Manager() +    manager.register_pass(MakeStateful(tensor_names)) +    manager.run_passes(ov_model) +    #! [ov:make_stateful_tensor_names] + + +def apply_make_stateful_ov_nodes(): +    #! [ov:make_stateful_ov_nodes] +    core = ov.Core() +    ov_model = core.read_model("path_to_the_model") +    # Parameter_1, Result_1, Parameter_3, Result_3 are +    # ops.parameter/ops.result in the ov_model +    pairs = ["""(Parameter_1, Result_1), (Parameter_3, Result_3)"""] +    manager = Manager() +    manager.register_pass(MakeStateful(pairs)) +    manager.run_passes(ov_model) +    #! [ov:make_stateful_ov_nodes] + + +def main(): + +    #! [ov:state_api_usage] +    # 1. Load inference engine +    log.info("Loading OpenVINO") +    core = ov.Core() + +    # 2. Read a model +    log.info("Loading model files") +    model = core.read_model("path_to_ir_xml_from_the_previous_section") +    model.get_parameters()[0].set_layout(ov.Layout("NC")) +    ov.set_batch(model, 1) + +    # 3. Load the model to CPU +    compiled_model = core.compile_model(model, "CPU") + +    # 4. Create Infer Request +    infer_request = compiled_model.create_infer_request() + +    # 5. Reset memory states before starting +    states = infer_request.query_state() + +    if len(states) != 1: +        log.error(f"Invalid queried state number. Expected 1, but got {len(states)}") +        return -1 + +    infer_request.reset_state() + +    # 6. Inference +    input_data = np.arange(start=1, stop=13, dtype=np.float32) + +    # This example demonstrates how to work with OpenVINO State API. +    # Input_data: some array with 12 float numbers + +    # Part1: read the first four elements of the input_data array sequentially. +    # Expected output for the first utterance: +    # sum of the previously processed elements [ 1, 3, 6, 10] + +    # Part2: reset state value (set to 0) and read the next four elements.
+    # Expected output for the second utterance: +    # sum of the previously processed elements [ 5, 11, 18, 26] + +    # Part3: set state value to 5 and read the next four elements. +    # Expected output for the third utterance: +    # sum of the previously processed elements + 5 [ 14, 24, 35, 47] +    target_state = states[0] + +    # Part 1 +    log.info("Infer the first utterance") +    for next_input in range(len(input_data)//3): +        infer_request.infer({0 : input_data[next_input]}) +        state_buf = target_state.state.data +        log.info(state_buf[0]) + +    # Part 2 +    log.info("\nReset state between utterances...\n") +    target_state.reset() + +    log.info("Infer the second utterance") + +    for next_input in range(len(input_data)//3, (len(input_data)//3 * 2)): +        infer_request.infer({0 : input_data[next_input]}) +        state_buf = target_state.state.data +        log.info(state_buf[0]) + +    # Part 3 +    log.info("\nSet state value between utterances to 5...\n") +    data = np.asarray([5], dtype=np.float32) +    tensor = ov.Tensor(data, shared_memory=True) +    target_state.state = tensor + +    log.info("Infer the third utterance") +    for next_input in range((len(input_data)//3 * 2), len(input_data)): +        infer_request.infer({0 : input_data[next_input]}) + +        state_buf = target_state.state.data +        log.info(state_buf[0]) + +    log.info("Execution successful") +    #! [ov:state_api_usage] +    return 0 diff --git a/docs/articles_en/assets/snippets/ov_stateful_models_intro.cpp b/docs/articles_en/assets/snippets/ov_stateful_models_intro.cpp index 3f3cd2cb713a19..01170795dbea22 100644 --- a/docs/articles_en/assets/snippets/ov_stateful_models_intro.cpp +++ b/docs/articles_en/assets/snippets/ov_stateful_models_intro.cpp @@ -14,14 +14,16 @@ using namespace ov; void state_network_example () { -    //! [ov:state_network] +    //! [ov:stateful_model]     // ...     auto input = std::make_shared(ov::element::f32, ov::Shape{1, 1});     auto init_const = ov::opset8::Constant::create(ov::element::f32, ov::Shape{1, 1}, {0}); -    // The ReadValue/Assign operations must be used in pairs in the network. -    // For each such a pair, its own variable object must be created. +    // Typically ReadValue/Assign operations are presented as pairs in models. +    // ReadValue operation reads information from an internal memory buffer, Assign operation writes data to this buffer. +    // For each pair, its own Variable object must be created. +    // Variable defines name, shape and type of the buffer.     const std::string variable_name("variable0");     ov::op::util::VariableInfo var_info = {init_const->get_shape(),                                            init_const->get_element_type(), @@ -37,7 +39,7 @@ void state_network_example () {     auto model = std::make_shared(ov::ResultVector({result}),                                              ov::SinkVector({save}),                                              ov::ParameterVector({input})); -    //! [ov:state_network] +    //! [ov:stateful_model] } void low_latency_2_example() { diff --git a/docs/articles_en/documentation/openvino-extensibility/openvino-plugin-library/advanced-guides/low-precision-transformations.rst b/docs/articles_en/documentation/openvino-extensibility/openvino-plugin-library/advanced-guides/low-precision-transformations.rst index c8e041e5a367e9..5d922ef8bdc4e7 100644 --- a/docs/articles_en/documentation/openvino-extensibility/openvino-plugin-library/advanced-guides/low-precision-transformations.rst +++ b/docs/articles_en/documentation/openvino-extensibility/openvino-plugin-library/advanced-guides/low-precision-transformations.rst @@ -12,6 +12,7 @@ OpenVINO™ Low Precision Transformations     :caption: Low Precision Transformations     :hidden:  +   Quantization Scheme     Attributes     Step 1.
Prerequisites transformations Step 2. Markup transformations diff --git a/docs/articles_en/documentation/openvino-extensibility/openvino-plugin-library/advanced-guides/low-precision-transformations/quantization-scheme.rst b/docs/articles_en/documentation/openvino-extensibility/openvino-plugin-library/advanced-guides/low-precision-transformations/quantization-scheme.rst new file mode 100644 index 00000000000000..90d757c10668f3 --- /dev/null +++ b/docs/articles_en/documentation/openvino-extensibility/openvino-plugin-library/advanced-guides/low-precision-transformations/quantization-scheme.rst @@ -0,0 +1,27 @@ +Quantization Scheme +============================== + + +.. meta:: + :description: Learn about quantization scheme. + +.. toctree:: + :maxdepth: 1 + :caption: Low Precision Transformations + +Key steps in the quantization scheme: + +* Low Precision Transformations: ``FakeQuantize`` decomposition to Quantize with a low precision output and Dequantize. For more details, refer to the :doc:`Quantize decomposition <../low-precision-transformations>` section. +* Low Precision Transformations: move Dequantize through operations. For more details, refer to the :doc:`Main transformations <./step3-main>` section. +* Plugin: fuse operations with Quantize and inference in low precision. + +Quantization scheme features: + +* Quantization operation is expressed through the ``FakeQuantize`` operation, which involves more than scale and shift. For more details, see: :doc:`FakeQuantize-1 <../../../../openvino-ir-format/operation-sets/operation-specs/quantization/fake-quantize-1>`. If the ``FakeQuantize`` input and output intervals are the same, ``FakeQuantize`` degenerates to ``Multiply``, ``Subtract`` and ``Convert`` (scale & shift). +* Dequantization operation is expressed through element-wise ``Convert``, ``Subtract`` and ``Multiply`` operations. ``Convert`` and ``Subtract`` are optional. These operations can be handled as typical element-wise operations, for example, fused or transformed to another. +* OpenVINO plugins fuse ``Dequantize`` and ``Quantize`` operations after a low precision operation and do not fuse ``Quantize`` before it. + +Here is a quantization scheme example for int8 quantization applied to a part of a model with two ``Convolution`` operations in CPU plugin. + +.. image:: ../../../../../assets/images/quantization_scheme.svg + :alt: Quantization scheme diff --git a/docs/articles_en/learn-openvino/llm_inference_guide/genai-guide.rst b/docs/articles_en/learn-openvino/llm_inference_guide/genai-guide.rst index 79c3471f3ab783..08efa7406e42b5 100644 --- a/docs/articles_en/learn-openvino/llm_inference_guide/genai-guide.rst +++ b/docs/articles_en/learn-openvino/llm_inference_guide/genai-guide.rst @@ -44,7 +44,7 @@ will not work with these instructions, make sure to import openvino_genai as ov_genai pipe = ov_genai.LLMPipeline(model_path, "CPU") - print(pipe.generate("The Sun is yellow because")) + print(pipe.generate("The Sun is yellow because", max_new_tokens=100)) .. tab-item:: C++ :sync: cpp @@ -57,7 +57,7 @@ will not work with these instructions, make sure to int main(int argc, char* argv[]) { std::string model_path = argv[1]; ov::genai::LLMPipeline pipe(model_path, "CPU"); - std::cout << pipe.generate("The Sun is yellow because"); + std::cout << pipe.generate("The Sun is yellow because", ov::genai::max_new_tokens(100)); } The `LLMPipeline` is the main object used for decoding. 
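For reference, the Python calls shown in the updated snippets above can be combined into one minimal, self-contained sketch. The model directory name is a placeholder, not a path shipped with this change; only ``LLMPipeline``, ``generate``, ``max_new_tokens``, and the ``streamer`` argument come from the snippets themselves.

.. code-block:: python

    import openvino_genai as ov_genai

    # Placeholder: any LLM exported for OpenVINO GenAI would work here.
    model_path = "TinyLlama-1.1B-Chat-v1.0-ov"

    pipe = ov_genai.LLMPipeline(model_path, "CPU")

    # Bounding the output with max_new_tokens keeps generation time predictable.
    print(pipe.generate("The Sun is yellow because", max_new_tokens=100))

    # The same call with a streamer prints words as soon as they are produced.
    streamer = lambda x: print(x, end="", flush=True)
    pipe.generate("The Sun is yellow because", streamer=streamer, max_new_tokens=100)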
You can construct it directly from the @@ -85,7 +85,7 @@ below, where a lambda function outputs words to the console immediately upon gen pipe = ov_genai.LLMPipeline(model_path, "CPU") streamer = lambda x: print(x, end='', flush=True) - pipe.generate("The Sun is yellow because", streamer=streamer) + pipe.generate("The Sun is yellow because", streamer=streamer, max_new_tokens=100) .. tab-item:: C++ @@ -104,7 +104,7 @@ below, where a lambda function outputs words to the console immediately upon gen // false means continue generation. return false; }; - pipe.generate("The Sun is yellow because", ov::genai::streamer(streamer)); + pipe.generate("The Sun is yellow because", ov::genai::streamer(streamer), ov::genai::max_new_tokens(100)); } You can also create your custom streamer for more sophisticated processing: @@ -132,7 +132,7 @@ You can also create your custom streamer for more sophisticated processing: # Decode tokens and process them. pipe = ov_genai.LLMPipeline(model_path, "CPU") - pipe.generate("The Sun is yellow because", streamer=CustomStreamer()) + pipe.generate("The Sun is yellow because", streamer=CustomStreamer(), max_new_tokens=100) .. tab-item:: C++ @@ -164,7 +164,7 @@ You can also create your custom streamer for more sophisticated processing: std::string model_path = argv[1]; ov::genai::LLMPipeline pipe(model_path, "CPU"); - pipe.generate("The Sun is yellow because", ov::genai::streamer(custom_streamer)); + pipe.generate("The Sun is yellow because", ov::genai::streamer(custom_streamer), ov::genai::max_new_tokens(100)); } Using GenAI in Chat Scenario diff --git a/docs/articles_en/openvino-workflow/model-optimization-guide/weight-compression.rst b/docs/articles_en/openvino-workflow/model-optimization-guide/weight-compression.rst index a393a0925cba3c..da4f34b8806aea 100644 --- a/docs/articles_en/openvino-workflow/model-optimization-guide/weight-compression.rst +++ b/docs/articles_en/openvino-workflow/model-optimization-guide/weight-compression.rst @@ -52,11 +52,12 @@ Compress Model Weights **8-bit weight quantization** method offers a balance between model size reduction and maintaining accuracy, which usually leads to significant performance improvements for Transformer-based models. Models with 8-bit compressed weights are performant on the -vast majority of supported CPU and GPU platforms. +vast majority of supported CPU and GPU platforms. By default, weights are compressed +asymmetrically to "INT8_ASYM" mode. -The code snippet below shows how to do 8-bit quantization of the model weights represented -in OpenVINO IR using NNCF: +The code snippet below shows how to do asymmetrical 8-bit quantization of the model weights +represented in OpenVINO IR using NNCF: .. tab-set:: @@ -72,7 +73,7 @@ Now, the model is ready for compilation and inference. It can be also saved into a compressed format, resulting in a smaller binary file. **4-bit weight quantization** method stands for an INT4-INT8 mixed-precision weight quantization, -where INT4 is considered as the primary precision and INT8 is the backup one. +where INT4 is considered as the primary precision and asymmetric INT8 is the backup one. It usually results in a smaller model size and lower inference latency, although the accuracy degradation could be higher, depending on the model. @@ -100,7 +101,7 @@ memory reduction, speed gain, and accuracy loss. 
- Memory Reduction - Latency Improvement - Accuracy Loss - * - INT8 + * - INT8 Asymmetric - Low - Medium - Low @@ -122,8 +123,8 @@ trade-offs after optimization: **Symmetric Compression** - ``INT4_SYM`` - INT4 Symmetric mode involves quantizing weights to an unsigned 4-bit integer - symmetrically with a fixed zero point of 8. This mode is faster than the INT8, making + INT4 Symmetric mode involves quantizing weights to a signed 4-bit integer + symmetrically without zero point. This mode is faster than the INT8_ASYM, making it ideal for situations where **speed and size reduction are prioritized over accuracy**. .. code-block:: python @@ -159,15 +160,15 @@ trade-offs after optimization: `Larger Group Size`: Results in faster inference and a smaller model, but might compromise accuracy. -* ``ratio`` controls the ratio between INT4 and INT8 compressed layers in the model. +* ``ratio`` controls the ratio between INT4 and INT8_ASYM compressed layers in the model. Ratio is a decimal between 0 and 1. For example, 0.8 means that 80% of layers will be - compressed to INT4, while the rest will be compressed to INT8 precision. The default + compressed to INT4, while the rest will be compressed to INT8_ASYM precision. The default value for ratio is 1. `Higher Ratio (more INT4)`: Reduces the model size and increase inference speed but might lead to higher accuracy degradation. - `Lower Ratio (more INT8)`: Maintains better accuracy but results in a larger model size + `Lower Ratio (more INT8_ASYM)`: Maintains better accuracy but results in a larger model size and potentially slower inference. In this example, 90% of the model's layers are quantized to INT4 asymmetrically with @@ -238,7 +239,7 @@ If the model comes from `Hugging Face `__ and is by Optimum, it may be easier to use the Optimum Intel API to perform weight compression. The compression type is specified when the model is loaded using the ``load_in_8bit=True`` or ``load_in_4bit=True`` parameter. The second example uses the Weight Compression API -from Optimum Intel instead of NNCF to compress the model to INT8. +from Optimum Intel instead of NNCF to compress the model to INT8_ASYM. .. tab-set:: @@ -359,7 +360,7 @@ score indicates a lower accuracy. It is measured on the - 5.01 - 10.3 * - databricks/dolly-v2-3b - - INT8 + - INT8_ASYM - 5.07 - 2.6 * - databricks/dolly-v2-3b @@ -371,7 +372,7 @@ score indicates a lower accuracy. It is measured on the - 4.25 - 24.8 * - facebook/opt-6.7b - - INT8 + - INT8_ASYM - 4.27 - 6.2 * - facebook/opt-6.7b @@ -383,7 +384,7 @@ score indicates a lower accuracy. It is measured on the - 3.28 - 25.1 * - meta-llama/Llama-2-7b-chat-hf - - INT8 + - INT8_ASYM - 3.29 - 6.3 * - meta-llama/Llama-2-7b-chat-hf @@ -395,7 +396,7 @@ score indicates a lower accuracy. It is measured on the - 4.15 - 25.6 * - togethercomputer/RedPajama-INCITE-7B-Instruct - - INT8 + - INT8_ASYM - 4.17 - 6.4 * - togethercomputer/RedPajama-INCITE-7B-Instruct @@ -407,7 +408,7 @@ score indicates a lower accuracy. 
It is measured on the - 2.92 - 48.5 * - meta-llama/Llama-2-13b-chat-hf - - INT8 + - INT8_ASYM - 2.91 - 12.1 * - meta-llama/Llama-2-13b-chat-hf diff --git a/docs/articles_en/openvino-workflow/model-preparation.rst b/docs/articles_en/openvino-workflow/model-preparation.rst index c6c7eaeb17fb31..bea0fcdba5311b 100644 --- a/docs/articles_en/openvino-workflow/model-preparation.rst +++ b/docs/articles_en/openvino-workflow/model-preparation.rst @@ -267,6 +267,7 @@ Before saving the model to OpenVINO IR, consider :doc:`Post-training Optimization ` to achieve more efficient inference and a smaller model. +.. _convert_model_cli_ovc: Convert a Model in CLI: ``ovc`` ############################### diff --git a/docs/articles_en/openvino-workflow/running-inference/inference-devices-and-modes/npu-device.rst b/docs/articles_en/openvino-workflow/running-inference/inference-devices-and-modes/npu-device.rst index 4c262b49f6f704..7ac982e37f6716 100644 --- a/docs/articles_en/openvino-workflow/running-inference/inference-devices-and-modes/npu-device.rst +++ b/docs/articles_en/openvino-workflow/running-inference/inference-devices-and-modes/npu-device.rst @@ -6,6 +6,13 @@ NPU Device a low-power processing device dedicated to running AI inference. +.. toctree:: + :maxdepth: 1 + :hidden: + + npu-device/remote-tensor-api-npu-plugin + + The Neural Processing Unit is a low-power hardware solution, introduced with the Intel® Core™ Ultra generation of CPUs (formerly known as Meteor Lake). It enables you to offload certain neural network computation tasks from other devices, @@ -164,8 +171,8 @@ offer a limited set of supported OpenVINO features. **ov::intel_npu::compilation_mode_params** -``ov::intel_npu::compilation_mode_params`` is an NPU-specific property that allows to -control model compilation for NPU. +``ov::intel_npu::compilation_mode_params`` is an NPU-specific property that allows +control of model compilation for NPU. .. note:: @@ -176,7 +183,7 @@ Following configuration options are supported: **optimization-level** -Defines a preset of optimization passes to be applied during compilation. +Defines an optimization effort hint to the compiler. .. list-table:: :widths: 10 200 @@ -185,7 +192,7 @@ Defines a preset of optimization passes to be applied during compilation. * - **Value** - **Description** * - 0 - - Reduced subset of optimization passes. Smaller compile time. + - Reduced subset of optimization passes. May result in smaller compile time. * - 1 - **Default.** Balanced performance/compile time. * - 2 @@ -193,7 +200,7 @@ Defines a preset of optimization passes to be applied during compilation. **performance-hint-override** -An extension for LATENCY mode being specified using ``ov::hint::performance_mode`` +The LATENCY mode can be overridden by specifying ``ov::hint::performance_mode`` Has no effect for other ``ov::hint::PerformanceMode`` hints. .. list-table:: @@ -207,15 +214,31 @@ Has no effect for other ``ov::hint::PerformanceMode`` hints. * - latency - Prioritize performance over power efficiency. -.. tab-set:: +Usage example: - .. tab-item:: Usage example +.. code-block:: - .. code-block:: + map config = {ov::intel_npu::compilation_mode_params.name(), ov::Any("optimization-level=1 performance-hint-override=latency")}; + + compile_model(model, config); + +**npu_turbo** + +The turbo mode, where available, provides a hint to the system to maintain the +maximum NPU frequency and memory throughput within the platform TDP limits. 
+The turbo mode is not recommended for sustainable workloads due to higher power +consumption and potential impact on other compute resources. + +.. code-block:: + + core.set_property("NPU", ov::intel_npu::turbo(true)); + +or + +.. code-block:: - map config = {ov::intel_npu::compilation_mode_params.name(), ov::Any("optimization-level=1 performance-hint-override=latency")}; + core.compile_model(ov_model, "NPU", {ov::intel_npu::turbo(true)}); - compile_model(model, config); Limitations ############################# diff --git a/docs/articles_en/openvino-workflow/running-inference/inference-devices-and-modes/npu-device/remote-tensor-api-npu-plugin.rst b/docs/articles_en/openvino-workflow/running-inference/inference-devices-and-modes/npu-device/remote-tensor-api-npu-plugin.rst new file mode 100644 index 00000000000000..2e41f4f5616ff2 --- /dev/null +++ b/docs/articles_en/openvino-workflow/running-inference/inference-devices-and-modes/npu-device/remote-tensor-api-npu-plugin.rst @@ -0,0 +1,137 @@ +.. {#openvino_docs_OV_UG_supported_plugins_NPU_RemoteTensor_API} + +Remote Tensor API of NPU Plugin +=============================== + + +.. meta:: + :description: The Remote Tensor API of NPU plugin in OpenVINO™ supports + interoperability with existing native APIs, such as + NT handle, or DMA-BUF System Heap. + + +The NPU plugin implementation of the ``ov::RemoteContext`` and ``ov::RemoteTensor`` interface assists NPU +pipeline developers who need memory sharing with existing native APIs (for example, OpenCL, Vulkan, DirectX 12) +by exporting an NT handle on Windows, or DMA-BUF System Heap on Linux and passing that pointer as the +``shared_buffer`` member to the ``remote_tensor(..., shared_buffer)`` create function. They allow you +to avoid any memory copy overhead when plugging OpenVINO™ inference into an existing NPU pipeline. + +Supported scenario by the Remote Tensor API: + +* The NPU plugin context and memory objects can be constructed from low-level device, display, or memory handles and used to create the OpenVINO™ ``ov::CompiledModel`` or ``ov::Tensor`` objects. + +Class and function declarations for the API are defined in the following file: ``src/inference/include/openvino/runtime/intel_npu/level_zero/level_zero.hpp`` + +The most common way to enable the interaction of your application with the Remote Tensor API is to use user-side utility classes +and functions that consume or produce native handles directly. + +Context Sharing Between Application and NPU Plugin +################################################## + +NPU plugin classes that implement the ``ov::RemoteContext`` interface are responsible for context sharing. +Obtaining a context object is the first step in sharing pipeline objects. +The context object of the NPU plugin directly wraps Level Zero context, setting a scope for sharing the +``ov::RemoteTensor`` objects. The ``ov::RemoteContext`` object is retrieved from the NPU plugin. + +Once you have obtained the context, you can use it to create the ``ov::RemoteTensor`` objects. + +Getting RemoteContext from the Plugin ++++++++++++++++++++++++++++++++++++++ + +To request the current default context of the plugin, use one of the following methods: + +.. tab-set:: + + .. tab-item:: Get context from Core + :sync: get-context-core + + .. doxygensnippet:: docs/articles_en/assets/snippets/npu_remote_objects_creation.cpp + :language: cpp + :fragment: [default_context_from_core] + + .. tab-item:: Get context from compiled model + :sync: get-context-compiled-model + + .. 
doxygensnippet:: docs/articles_en/assets/snippets/npu_remote_objects_creation.cpp + :language: cpp + :fragment: [default_context_from_model] + +Memory Sharing Between Application and NPU Plugin +################################################# + +The classes that implement the ``ov::RemoteTensor`` interface are the wrappers for native API +memory handles, which can be obtained from them at any time. + +To create a shared tensor from a native memory handle, use dedicated ``create_tensor``, ``create_l0_host_tensor``, or ``create_host_tensor`` +methods of the ``ov::RemoteContext`` sub-classes. +``ov::intel_npu::level_zero::LevelZero`` has multiple overloads methods which enable wrapping pre-allocated native handles with the ``ov::RemoteTensor`` +object or requesting plugin to allocate specific device memory. +For more details, see the code snippets below: + + +.. tab-set:: + + .. tab-item:: Wrap native handle + :sync: wrap-native-handles + + .. tab-set:: + + .. tab-item:: NT handle + :sync: nthandle + + .. doxygensnippet:: docs/articles_en/assets/snippets/npu_remote_objects_creation.cpp + :language: cpp + :fragment: [wrap_nt_handle] + + .. tab-item:: DMA-BUF System Heap file descriptor + :sync: dma-buf + + .. doxygensnippet:: docs/articles_en/assets/snippets/npu_remote_objects_creation.cpp + :language: cpp + :fragment: [wrap_dmabuf_fd] + + .. tab-item:: Allocate device memory + :sync: allocate-device-memory + + .. tab-set:: + + .. tab-item:: Remote Tensor - Level Zero host memory + :sync: remote-level-zero-host-memory + + .. doxygensnippet:: docs/articles_en/assets/snippets/npu_remote_objects_creation.cpp + :language: cpp + :fragment: [allocate_remote_level_zero_host] + + .. tab-item:: Tensor - Level Zero host memory + :sync: level-zero-host-memory + + .. doxygensnippet:: docs/articles_en/assets/snippets/npu_remote_objects_creation.cpp + :language: cpp + :fragment: [allocate_level_zero_host] + + +Limitations ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ + +* Allocation of the NT handle or DMA-BUF System Heap file descriptor is done manually. + +Low-Level Methods for RemoteContext and RemoteTensor Creation +############################################################# + +The high-level wrappers mentioned above bring a direct dependency on native APIs to your program. +If you want to avoid the dependency, you still can directly use the ``ov::Core::create_context()``, +``ov::RemoteContext::create_tensor()``, and ``ov::RemoteContext::get_params()`` methods. +On this level, native handles are re-interpreted as void pointers and all arguments are passed +using ``ov::AnyMap`` containers that are filled with the ``std::string, ov::Any`` pairs. +Two types of map entries are possible: a descriptor and a container. +The descriptor sets the expected structure and possible parameter values of the map. + +For possible low-level properties and their description, refer to the header file: +`remote_properties.hpp `__. 
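+
+Below is a minimal, hypothetical sketch of this low-level path (it is not one of the
+snippets shipped with this article): the remote context is created directly from an
+``ov::AnyMap`` and a native handle is wrapped into a remote tensor without the
+``ov::intel_npu::level_zero::LevelZero`` helpers. The map entries are left as
+placeholders because the actual key names and values are defined in
+``remote_properties.hpp``; the element type and shape are illustrative only.
+
+.. code-block:: cpp
+
+   #include <openvino/openvino.hpp>
+
+   int main() {
+       ov::Core core;
+
+       // Create an NPU remote context from low-level parameters passed as an ov::AnyMap.
+       // Fill the map with the descriptor/container entries from remote_properties.hpp.
+       ov::AnyMap context_params = {};
+       ov::RemoteContext context = core.create_context("NPU", context_params);
+
+       // Wrap a native handle (re-interpreted as a void pointer) into a remote tensor.
+       // The handle itself is passed through one of the entries from remote_properties.hpp.
+       ov::AnyMap tensor_params = {};
+       ov::RemoteTensor tensor = context.create_tensor(ov::element::f32,
+                                                       ov::Shape{1, 3, 224, 224},
+                                                       tensor_params);
+
+       // The parameters actually used by the context can be queried back at any time.
+       ov::AnyMap used_params = context.get_params();
+       return 0;
+   }
+
+The trade-off of this approach is that the program stays free of a direct native-API
+dependency, but the raw parameter entries have to be assembled by hand.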
+ +Additional Resources +#################### + +* `ov::Core `__ +* `ov::RemoteTensor `__ + diff --git a/docs/articles_en/openvino-workflow/running-inference/optimize-inference/optimize-preprocessing.rst b/docs/articles_en/openvino-workflow/running-inference/optimize-inference/optimize-preprocessing.rst index 7d19e17a70f2c6..3fa01212b6d86b 100644 --- a/docs/articles_en/openvino-workflow/running-inference/optimize-inference/optimize-preprocessing.rst +++ b/docs/articles_en/openvino-workflow/running-inference/optimize-inference/optimize-preprocessing.rst @@ -10,7 +10,6 @@ Optimize Preprocessing optimize-preprocessing/preprocessing-api-details optimize-preprocessing/layout-api-overview - optimize-preprocessing/integrate-save-preprocessing-use-case Torchvision preprocessing converter .. meta:: diff --git a/docs/articles_en/openvino-workflow/running-inference/optimize-inference/optimize-preprocessing/preprocessing-api-details.rst b/docs/articles_en/openvino-workflow/running-inference/optimize-inference/optimize-preprocessing/preprocessing-api-details.rst index cb03e3b4e8129f..ef8613b84f0626 100644 --- a/docs/articles_en/openvino-workflow/running-inference/optimize-inference/optimize-preprocessing/preprocessing-api-details.rst +++ b/docs/articles_en/openvino-workflow/running-inference/optimize-inference/optimize-preprocessing/preprocessing-api-details.rst @@ -3,6 +3,11 @@ Preprocessing API - details =========================== +.. toctree:: + :maxdepth: 1 + :hidden: + + preprocessing-api-details/integrate-save-preprocessing-use-case .. meta:: :description: Learn the details on capabilities of pre-processing API and post-processing. diff --git a/docs/articles_en/openvino-workflow/running-inference/optimize-inference/optimize-preprocessing/integrate-save-preprocessing-use-case.rst b/docs/articles_en/openvino-workflow/running-inference/optimize-inference/optimize-preprocessing/preprocessing-api-details/integrate-save-preprocessing-use-case.rst similarity index 60% rename from docs/articles_en/openvino-workflow/running-inference/optimize-inference/optimize-preprocessing/integrate-save-preprocessing-use-case.rst rename to docs/articles_en/openvino-workflow/running-inference/optimize-inference/optimize-preprocessing/preprocessing-api-details/integrate-save-preprocessing-use-case.rst index aeb59c2e37a08e..2563b9270082b0 100644 --- a/docs/articles_en/openvino-workflow/running-inference/optimize-inference/optimize-preprocessing/integrate-save-preprocessing-use-case.rst +++ b/docs/articles_en/openvino-workflow/running-inference/optimize-inference/optimize-preprocessing/preprocessing-api-details/integrate-save-preprocessing-use-case.rst @@ -10,8 +10,8 @@ Use Case - Integrate and Save Preprocessing Steps Into IR OpenVINO Intermediate Representation. -Previous sections covered the topic of the :doc:`preprocessing steps ` -and the overview of :doc:`Layout ` API. +Previous sections covered the :doc:`preprocessing steps <../preprocessing-api-details>` +and the overview of :doc:`Layout API <../layout-api-overview>`. For many applications, it is also important to minimize read/load time of a model. Therefore, performing integration of preprocessing steps every time on application @@ -20,25 +20,18 @@ once pre and postprocessing steps have been added, it can be useful to store new model to OpenVINO Intermediate Representation (OpenVINO IR, `.xml` format). Most available preprocessing steps can also be performed via command-line options, -using Model Optimizer. 
For details on such command-line options, refer to the -:doc:`Optimizing Preprocessing Computation <../../../../documentation/legacy-features/transition-legacy-conversion-api/legacy-conversion-api/[legacy]-embedding-preprocessing-computation>`. +using ``ovc``. For details on such command-line options, refer to the +:ref:`Model Conversion `. Code example - Saving Model with Preprocessing to OpenVINO IR ############################################################# -When some preprocessing steps cannot be integrated into the execution graph using -Model Optimizer command-line options (for example, ``YUV``->``RGB`` color space conversion, -``Resize``, etc.), it is possible to write a simple code which: +In the following example: -* Reads the original model (OpenVINO IR, TensorFlow, TensorFlow Lite, ONNX, PaddlePaddle). -* Adds the preprocessing/postprocessing steps. -* Saves resulting model as IR (``.xml`` and ``.bin``). +* Original ONNX model takes one ``float32`` input with the ``{1, 3, 224, 224}`` shape, the ``RGB`` channel order, and mean/scale values applied. +* Application provides ``BGR`` image buffer with a non-fixed size and input images as batches of two. -Consider the example, where an original ONNX model takes one ``float32`` input with the -``{1, 3, 224, 224}`` shape, the ``RGB`` channel order, and mean/scale values applied. -In contrast, the application provides ``BGR`` image buffer with a non-fixed size and -input images as batches of two. Below is the model conversion code that can be applied -in the model preparation script for such a case. +Below is the model conversion code that can be applied in the model preparation script for this case: * Includes / Imports @@ -62,7 +55,6 @@ in the model preparation script for such a case. * Preprocessing & Saving to the OpenVINO IR code. - .. tab-set:: .. tab-item:: Python @@ -83,8 +75,8 @@ in the model preparation script for such a case. Application Code - Load Model to Target Device ############################################## -After this, the application code can load a saved file and stop preprocessing. In this case, enable -:doc:`model caching <../optimizing-latency/model-caching-overview>` to minimize load +Next, the application code can load a saved file and stop preprocessing. In this case, enable +:doc:`model caching <../../optimizing-latency/model-caching-overview>` to minimize load time when the cached model is available. @@ -108,10 +100,10 @@ time when the cached model is available. 
Additional Resources #################### -* :doc:`Preprocessing Details ` -* :doc:`Layout API overview ` -* :doc:`Model Optimizer - Optimize Preprocessing Computation <../../../../documentation/legacy-features/transition-legacy-conversion-api/legacy-conversion-api/[legacy]-embedding-preprocessing-computation>` -* :doc:`Model Caching Overview <../optimizing-latency/model-caching-overview>` +* :doc:`Preprocessing Details <../preprocessing-api-details>` +* :doc:`Layout API overview <../layout-api-overview>` +* :doc:`Model Caching Overview <../../optimizing-latency/model-caching-overview>` +* :doc:`Model Preparation <../../../../model-preparation>` * The `ov::preprocess::PrePostProcessor `__ C++ class documentation * The `ov::pass::Serialize `__ - pass to serialize model to XML/BIN * The ``ov::set_batch`` - update batch dimension for a given model diff --git a/docs/articles_en/openvino-workflow/running-inference/optimize-inference/optimizing-latency/model-caching-overview.rst b/docs/articles_en/openvino-workflow/running-inference/optimize-inference/optimizing-latency/model-caching-overview.rst index 38af00d3796d5d..09701ab97d23fd 100644 --- a/docs/articles_en/openvino-workflow/running-inference/optimize-inference/optimizing-latency/model-caching-overview.rst +++ b/docs/articles_en/openvino-workflow/running-inference/optimize-inference/optimizing-latency/model-caching-overview.rst @@ -61,7 +61,8 @@ To enable model caching, the application must specify a folder to store the cach With this code, if the device specified by ``device_name`` supports import/export model capability, -a cached blob is automatically created inside the ``/path/to/cache/dir`` folder. +a cached blob (the ``.cl_cache`` and ``.blob`` file for GPU and CPU respectively) is automatically +created inside the ``/path/to/cache/dir`` folder. If the device does not support the import/export capability, cache is not created and no error is thrown. Note that the first ``compile_model`` operation takes slightly longer, as the cache needs to be created - diff --git a/docs/articles_en/openvino-workflow/running-inference/stateful-models/obtaining-stateful-openvino-model.rst b/docs/articles_en/openvino-workflow/running-inference/stateful-models/obtaining-stateful-openvino-model.rst index a350d1bcbb5a77..a7db3317203045 100644 --- a/docs/articles_en/openvino-workflow/running-inference/stateful-models/obtaining-stateful-openvino-model.rst +++ b/docs/articles_en/openvino-workflow/running-inference/stateful-models/obtaining-stateful-openvino-model.rst @@ -60,12 +60,20 @@ Parameter/Result tensor names. If there are no tensor names, .. doxygensnippet:: docs/articles_en/assets/snippets/ov_stateful_models_intro.cpp :language: cpp :fragment: [ov:make_stateful_tensor_names] + + .. doxygensnippet:: docs/articles_en/assets/snippets/ov_stateful_models_intro.py + :language: py + :fragment: [ov:make_stateful_tensor_names] .. tab-item:: Using Parameter/Result operations .. doxygensnippet:: docs/articles_en/assets/snippets/ov_stateful_models_intro.cpp :language: cpp :fragment: [ov:make_stateful_ov_nodes] + + .. doxygensnippet:: docs/articles_en/assets/snippets/ov_stateful_models_intro.py + :language: py + :fragment: [ov:make_stateful_ov_nodes] .. tab-item:: command line @@ -114,6 +122,10 @@ To apply LowLatency2 Transformation, follow the instruction below: .. doxygensnippet:: docs/articles_en/assets/snippets/ov_stateful_models_intro.cpp :language: cpp :fragment: [ov:get_ov_model] + + .. 
doxygensnippet:: docs/articles_en/assets/snippets/ov_stateful_models_intro.py + :language: py + :fragment: [ov:get_ov_model] 2. Change the number of iterations inside TensorIterator/Loop nodes in the model using the :doc:`Reshape <../changing-input-shape>` feature. @@ -129,6 +141,10 @@ To apply LowLatency2 Transformation, follow the instruction below: .. doxygensnippet:: docs/articles_en/assets/snippets/ov_stateful_models_intro.cpp :language: cpp :fragment: [ov:reshape_ov_model] + + .. doxygensnippet:: docs/articles_en/assets/snippets/ov_stateful_models_intro.py + :language: py + :fragment: [ov:reshape_ov_model] **Unrolling**: If the LowLatency2 transformation is applied to a model containing TensorIterator/Loop nodes with exactly one iteration inside, these nodes are unrolled. @@ -143,6 +159,10 @@ To apply LowLatency2 Transformation, follow the instruction below: .. doxygensnippet:: docs/articles_en/assets/snippets/ov_stateful_models_intro.cpp :language: cpp :fragment: [ov:apply_low_latency_2] + + .. doxygensnippet:: docs/articles_en/assets/snippets/ov_stateful_models_intro.py + :language: py + :fragment: [ov:apply_low_latency_2] (Optional) Use Const Initializer argument: @@ -159,6 +179,10 @@ To apply LowLatency2 Transformation, follow the instruction below: .. doxygensnippet:: docs/articles_en/assets/snippets/ov_stateful_models_intro.cpp :language: cpp :fragment: [ov:low_latency_2_use_parameters] + + .. doxygensnippet:: docs/articles_en/assets/snippets/ov_stateful_models_intro.py + :language: py + :fragment: [ov:low_latency_2_use_parameters] .. image:: ../../../assets/images/llt2_use_const_initializer.svg @@ -178,6 +202,10 @@ To apply LowLatency2 Transformation, follow the instruction below: .. doxygensnippet:: docs/articles_en/assets/snippets/ov_stateful_models_intro.cpp :language: cpp :fragment: [ov:low_latency_2] + + .. doxygensnippet:: docs/articles_en/assets/snippets/ov_stateful_models_intro.py + :language: py + :fragment: [ov:low_latency_2] 4. Use state API. See sections :doc:`OpenVINO State API <../stateful-models>`, @@ -208,6 +236,10 @@ To apply LowLatency2 Transformation, follow the instruction below: .. doxygensnippet:: docs/articles_en/assets/snippets/ov_stateful_models_intro.cpp :language: cpp :fragment: [ov:replace_const] + + .. doxygensnippet:: docs/articles_en/assets/snippets/ov_stateful_models_intro.py + :language: py + :fragment: [ov:replace_const] Stateful Model from Scratch ################################## @@ -228,7 +260,11 @@ a sink from `ov::Model` after deleting the node from the graph with the `delete_ .. doxygensnippet:: docs/articles_en/assets/snippets/ov_stateful_models_intro.cpp :language: cpp - :fragment: [ov:state_network] + :fragment: [ov:stateful_model] + + .. doxygensnippet:: docs/articles_en/assets/snippets/ov_stateful_models_intro.py + :language: py + :fragment: [ov:stateful_model] .. 
note:: diff --git a/samples/js/node/README.md b/samples/js/node/README.md index 59fb381f460abc..7375219ccf2c0a 100644 --- a/samples/js/node/README.md +++ b/samples/js/node/README.md @@ -26,6 +26,7 @@ VSCode extension to run these notebook samples - hello-detection.nnb - question-answering.nnb - pose-estimation.nnb + - optical-character-recognition.nnb ## Live Sample diff --git a/samples/js/node/notebooks/optical-character-recognition.nnb b/samples/js/node/notebooks/optical-character-recognition.nnb new file mode 100644 index 00000000000000..b7e8e109ff857f --- /dev/null +++ b/samples/js/node/notebooks/optical-character-recognition.nnb @@ -0,0 +1,314 @@ +{ + "cells": [ + { + "language": "markdown", + "source": [ + "# Optical Character Recognition with OpenVINO™" + ], + "outputs": [] + }, + { + "language": "markdown", + "source": [ + "#### This tutorial demonstrates how to perform optical character recognition (OCR) with OpenVINO models" + ], + "outputs": [] + }, + { + "language": "markdown", + "source": [ + "# Imports" + ], + "outputs": [] + }, + { + "language": "typescript", + "source": [ + "const fs = require(\"node:fs\");\nconst path = require(\"node:path\");\nconst { createCanvas, Image, ImageData } = require(\"canvas\");\nconst { addon: ov } = require(\"openvino-node\");\nconst { display } = require(\"node-kernel\");\nconst { cv } = require(\"opencv-wasm\");\nconst {\n transform,\n getImageData,\n displayArrayAsImage,\n downloadFile,\n arrayToImageData,\n getImageBuffer,\n argMax,\n setShape,\n} = require(\"../helpers.js\");\n" + ], + "outputs": [] + }, + { + "language": "markdown", + "source": [ + "# Download Models" + ], + "outputs": [] + }, + { + "language": "typescript", + "source": [ + "// Intializing Images, Models\nconst baseArtifactsDir = '../../assets/models';\nconst detBaseURL = 'https://storage.openvinotoolkit.org/repositories/open_model_zoo/2022.3/models_bin/1/horizontal-text-detection-0001/FP32/';\nconst recBaseURL = 'https://storage.openvinotoolkit.org/repositories/open_model_zoo/public/text-recognition-resnet-fc/';\nconst detectionModelName = 'horizontal-text-detection-0001';\nconst textRecModelName = 'text-recognition-resnet-fc';\n\nconst detModelXMLName = `${detectionModelName}.xml`;\nconst detModelBINName = `${detectionModelName}.bin`;\n\nconst detModelXMLPath = `${baseArtifactsDir}/${detModelXMLName}`;\nconst detModelBINPath = `${baseArtifactsDir}/${detModelBINName}`;\n\nconst recModelXMLName = `${textRecModelName}.xml`;\nconst recModelBINName = `${textRecModelName}.bin`;\n\nconst recModelXMLPath = `${baseArtifactsDir}/${textRecModelName}.xml`;\nconst recModelBINPath = `${baseArtifactsDir}/${textRecModelName}.bin`;\n\nawait downloadFile(\n detBaseURL + detModelXMLName,\n detModelXMLName,\n baseArtifactsDir\n);\n\nawait downloadFile(\n detBaseURL + detModelBINName,\n detModelBINName,\n baseArtifactsDir\n);\n\nawait downloadFile(\n recBaseURL + recModelXMLName,\n recModelXMLName,\n baseArtifactsDir\n);\n\nawait downloadFile(\n recBaseURL + recModelBINName,\n recModelBINName,\n baseArtifactsDir\n);\n" + ], + "outputs": [ + { + "items": [ + { + "mime": "application/vnd.code.notebook.stdout", + "value": [ + "File successfully stored at '/home/prakash/OpenSource-Repos/ocr-node-sample/assets/models/horizontal-text-detection-0001.xml'", + "File successfully stored at '/home/prakash/OpenSource-Repos/ocr-node-sample/assets/models/horizontal-text-detection-0001.bin'", + "File successfully stored at 
'/home/prakash/OpenSource-Repos/ocr-node-sample/assets/models/text-recognition-resnet-fc.xml'", + "File successfully stored at '/home/prakash/OpenSource-Repos/ocr-node-sample/assets/models/text-recognition-resnet-fc.bin'", + "" + ] + } + ] + } + ] + }, + { + "language": "markdown", + "source": [ + "# Dowload Image" + ], + "outputs": [] + }, + { + "language": "typescript", + "source": [ + "const baseImagesDir = '../../assets/images';\nconst imgUrl = 'https://storage.openvinotoolkit.org/repositories/openvino_notebooks/data/data/image/intel_rnb.jpg';\nconst imgName = 'intel_rnb.jpg';\nawait downloadFile(imgUrl, imgName, baseImagesDir);\n" + ], + "outputs": [ + { + "items": [ + { + "mime": "application/vnd.code.notebook.stdout", + "value": [ + "File successfully stored at '/home/prakash/OpenSource-Repos/ocr-node-sample/assets/images/intel_rnb.jpg'", + "" + ] + } + ] + } + ] + }, + { + "language": "markdown", + "source": [ + "# Load a Detection Model" + ], + "outputs": [] + }, + { + "language": "typescript", + "source": [ + "// Initialize OpenVINO core and load the detection model\nconst core = new ov.Core();\nconst detModel = await core.readModel(detModelXMLPath);\nconst detCompiledModel = await core.compileModel(detModel, 'AUTO');\nconst detInputLayer = detCompiledModel.input(0);\nconst detOutputLayer = detCompiledModel.output('boxes');\n" + ], + "outputs": [] + }, + { + "language": "markdown", + "source": [ + "# Prepare Image for Inference" + ], + "outputs": [] + }, + { + "language": "typescript", + "source": [ + "const imageData = await getImageData(`${baseImagesDir}/intel_rnb.jpg`);\nconst inputImageMat = cv.matFromImageData(imageData);\nconst displayImageMat = inputImageMat.clone();\n\n// Resize the image to meet network input size\nconst [B, C, H, W] = detInputLayer.shape;\nconst resizedImage = new cv.Mat();\ncv.cvtColor(inputImageMat, inputImageMat, cv.COLOR_RGBA2RGB);\ncv.cvtColor(inputImageMat, inputImageMat, cv.COLOR_BGR2RGB);\ncv.resize(inputImageMat, resizedImage, new cv.Size(W, H));\n\n// Prepare input tensor\nconst inputImage = transform(resizedImage.data,\n { width: W, height: H },\n [0, 1, 2]);\nconst tensorData = new Float32Array(inputImage);\nconst tensor = new ov.Tensor(\n ov.element.f32,\n detInputLayer.shape,\n tensorData\n);\n" + ], + "outputs": [] + }, + { + "language": "markdown", + "source": [ + "## Define Post-Processing Functions" + ], + "outputs": [] + }, + { + "language": "typescript", + "source": [ + "// Function to extract bounding boxes from the model output\nfunction extractBoundingBoxes(output) {\n console.log(`Output shape: ${output.getData()}`);\n const { data: boxes } = output;\n const foldingCoefficient = 5;\n const numberOfBoxes = boxes.length / foldingCoefficient;\n\n return setShape(boxes, [numberOfBoxes, foldingCoefficient]);\n}\n" + ], + "outputs": [] + }, + { + "language": "markdown", + "source": [ + "# Do Inference" + ], + "outputs": [] + }, + { + "language": "typescript", + "source": [ + "// Create infer request\nconst detInferRequest = detCompiledModel.createInferRequest();\n\nconst detResult = await detInferRequest.inferAsync([tensor]);\nconst boundingBoxesArray = extractBoundingBoxes(detResult[detOutputLayer]);\n\n// Show original image\ndisplayArrayAsImage(\n displayImageMat.data,\n displayImageMat.cols,\n displayImageMat.rows,\n display\n);\n" + ], + "outputs": [ + { + "items": [ + { + "mime": "image/jpeg", + "value": 
"data:image/jpeg;base64,/9j/4AAQSkZJRgABAQAAAQABAAD/2wBDAAgGBgcGBQgHBwcJCQgKDBQNDAsLDBkSEw8UHRofHh0aHBwgJC4nICIsIxwcKDcpLDAxNDQ0Hyc5PTgyPC4zNDL/2wBDAQkJCQwLDBgNDRgyIRwhMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjL/wAARCAIFArIDASIAAhEBAxEB/8QAHwAAAQUBAQEBAQEAAAAAAAAAAAECAwQFBgcICQoL/8QAtRAAAgEDAwIEAwUFBAQAAAF9AQIDAAQRBRIhMUEGE1FhByJxFDKBkaEII0KxwRVS0fAkM2JyggkKFhcYGRolJicoKSo0NTY3ODk6Q0RFRkdISUpTVFVWV1hZWmNkZWZnaGlqc3R1dnd4eXqDhIWGh4iJipKTlJWWl5iZmqKjpKWmp6ipqrKztLW2t7i5usLDxMXGx8jJytLT1NXW19jZ2uHi4+Tl5ufo6erx8vP09fb3+Pn6/8QAHwEAAwEBAQEBAQEBAQAAAAAAAAECAwQFBgcICQoL/8QAtREAAgECBAQDBAcFBAQAAQJ3AAECAxEEBSExBhJBUQdhcRMiMoEIFEKRobHBCSMzUvAVYnLRChYkNOEl8RcYGRomJygpKjU2Nzg5OkNERUZHSElKU1RVVldYWVpjZGVmZ2hpanN0dXZ3eHl6goOEhYaHiImKkpOUlZaXmJmaoqOkpaanqKmqsrO0tba3uLm6wsPExcbHyMnK0tPU1dbX2Nna4uPk5ebn6Onq8vP09fb3+Pn6/9oADAMBAAIRAxEAPwD1SXSkkG1ihGQehHQ59a429+D3h+9klkY3SNKxZjHcY5Jz/Epr0RvvfjSgVEcPSg/djYp1pyWrPJpvgbpjKVh1G/j/AN5kf+grPufgXMYysGtvj/btR/R69rFLVciFzM8Cn+CetpjytRtHx/fSRc/kpqhL8JfE0RYqLSX/AHJtv/oQFfRtLS9mh8x8yP8ADjxXGrE6cGPolzE38nqtJ4M8TQld+i6gRnkrAzD81zX1IUVuqg/hTDbQHrEn/fNL2Ycx8py6VqVqx+0Wd3F/11gZcfmKgG4ZBbnmvrMWsI6KV/3SRUU2mWlwCJYhIPR/m/nS9mw5j5W+fAzxTwW3AjJAFfS03hHQpx8+l2Z9zbRn/wBlqjJ8PfDshz/Z1uPopX/0Eip9mylJHzZEzYI3H7x6/Wp0ZiseM/d6173L8KfDjA+XZhDnOUmkH82NUZfg/pDABJLpAOBtuAR+sdSqbTLc00eJbjuY8kdOKYWbHPPocV7DN8Gockw6lcL7NGj/ANVrOn+Dd8ABDqStj/npBj+TGqsyLnmJPI5FKCSSNxrvpvhJr0Zys9k49MyD/wBkqhN8MvE0IbZaW8mf7twgH/jxFFmByO9gn3j+PanAuGAyfxrem8DeJIcZ0edsd49sn/oJNUpPD2tW5Jm0nUEAHVrZwP5UrDuZ4JYnp+VLuPHofSnNBNEAJUdGzjDLjrQFJYZ5xUlCgnOOfes+d3XU4scHA/rWiAc5x7Vm3fF3EQM57596mQ0aMUm6PrnnAp2/AznNRwHbFv754pxOAFweaUdimBftkY9qCxAz+tAIJPtTSV25I6+tUSOJbOc03cc8nHelbHTn3o25Ht069qkYmfu5yD2oyScnj60u05xyTTgpB49KVxgCRTWO5Dj09afg5HAye1KVODnnIzUsZl2TsplHPbI/OtLYjYbbgHms2Bis0q4/WteHJgTjgipe41sQ+SpxlTSGAf3m9OtXAoAzwKUqMcLmi4WKX2f+EMffNM+ynPOCO2RWhsB4oCZPFLmHYoCyLAEqhP5UNZnHCd+gNagj9BSFc5qeYfKYxtW7Ix9/Wo2tGByQ6jPcVulMDnjHrTGjOwnB6Ucwcpzo3K/r2qdYtyAnAPrSTrtkPHetSwVXtACoPJB4zQ2CRlPZAIWaJD34ArF1C12xl0hC8/wjFdfdJHgoFGO+KxL6P/RZMdAc/jW1KTTM5o5bawPANHmOP4m/OrxT3PPrUezII6/hXZzHPYq+Y/8AePFHmHvU5jGM4H5Unlgj7uPei6EQbzS7+MU8xjHIIpjIAuRmnoGopJ4JzzTd555609uY09gR+v8A9eo9poAXfil8w+lNwaTB9KYXJFcA8jinmYYwM49ahwfQ0lKwXJd4IxRuFRUUWAc/NNoopiHI5SRXBwQcg19uaPffb9EsLwHInt45fzUGviGvrv4X3pv/AIa6HKTkrAYj/wAAYr/SuLGLRM2o7nXlzSbzSUhrhOiyF3GjcaSkqh2FViHYZ9DT9x9ajHDj3Bp1ddPWJEh24+tG4+tJRViF3H1o3NTaKYh28+tQzMd8bZ74qWopwfLyOxzQxoXc3rSb29aTtSGkAu808OcVHTl6VLQ0O3tVS+lmjsriUT+XsQsCEzjAz+NWGOB0yT0FVLyyF3aTQzOxEileOAv0Hf8AGpavoPY46TxpbC2huJLdpZJNrBSqgpn+LO44+nB9ap6ze6fKJQ11cm7JCNGMny/baOAT6MQcVn3ekwXlhd2YmZLzT2MfmNgkqBz379f0riFvtkUNs06tPBJhcAFVABODkc4PvjNY1KV4tGTqNM6z7bpwHK3ue+JUH6eXRVJfFjBQHS4LAckAdfzorx/Y1OxtzRPdG+9Sjmkb71KK+wZ56HClpBxSikMWlpOtLQAtFFFABS0lLQAUtJS0hhS0lFAC0UlFAC0daSloAaUQ9VU/UU3yIv8Anmn4CpKM0ARNbRMMFePTJqnNoOlXBzLYW0me7xK38xWhmilZDuYM3gvQJs7tKsx9LdB/ICsy5+GHha5YM+mRhh0KvIv8mrsaM1PKg5mcE/wm8P8APlC4i9kmOP8Ax4GqM/we09jmC/uo+ONzK3/sor0uijkQ+ZnkkvwclCsI9WYk+tuP/i6ozfCHVhjy722YL03q4/kDXtNLS9mh87PBp/hb4hiDlBaSn/ZlIz/30BWfL8P/ABFABnTVZRyds8bfpur6JpCARyAfrUukhqoz5sfwrryKXOjXzDtshZv5Cqcuk39vgT2V1F674WX+Yr6aWGIoAY0/75FH2aH+4B9Dip9iUqh8ulcM249PejYdpwck9K+n5LG3l4ZC31YmqUvhvSJ/9bYWzH1aBG/mtQ6LGqiPlfaUu2XuRxWxaZNqgPQZ9s179N8PvDM7bn0m13eqx7f/AEHFVW+Gnh/btithGvoryZ/VjUujIpVInig+7nH6U4RbgF5+tevy/CzSWOY5bpcdB5q4/wDQDVRvhZEhJivpRngZQNj9RWbpTLVSJ5gIULHIGKcsKjr1Poa9Gl+GNyMeXfBsf34sfyY1Sm+HeqpuKyWzHtw//wATWbpz7FqUe5w/lgtxkbR1PemmI468n1FdbJ4E1tMYto3A6kSqP5kVTfwrrEWSdNnJ7BBu/lmo5Zdh3RzhhOSBj3OKYyN5fbn3rcl0TUYiA9heIAOS0LD+lUJbd4j86upHQEYNLUehyd8hVyC
BnOTg1a06YrasOpDcUmooVDjHzNyah075oXHuK0SuiHuWXOWz3PrWbeoDDKD0A61pEHGe9Urpcq4xwVPvWkCJHOsPTnHvmoOTxjg1YJwABn+dQHBFdaOdjW4zSMBxnGfrS56kHnofemknjI4NMBDjpj3pj/dbFOOcd6aeQetUIQH/AEccfdfr9R/9al4OB1zSIMwSD0INIMkUwDC0YwcGjNHX+tABjNBUDijnOCOKPrQAmP8AOKQrTs+2fxpD7UCGkYptOI4pKYhK+mfgNffafh89uWybW8kQD0BAb+ZNfM1e7fs73vya7Yk94plH/fQP9K58Ur0zSk/ePcTRmg0leYdaFopBS1QCHgqfenU1/u/TmnV1UtiJC0UUVqSFJS0lAC01xlSPUU6kNOwEKcoPpSmmowO5RjKsQfanUgEpy96bS5xnHWpY0OAJOAOaazwhXDSKMe/Q1Onkqu+V05H8R4rAvNathqzad5roWXeFTjI756n3wB9cVlKryK493Y4DxmZYfE32/TpI0VodkrSKYwTyMFiAOmfeuJtNAxOupahf29naysfKUtjzeecd8e4HX1r1XxBq0ElosEttJNG2GSJISiJ6HLLtyOvXFeWXfh+91L95d3iR7mLCWc+X8o+p549K5Z13JWbsS4JPa52McPhJIkUMjAKACUbJ/wDHxRXMx2Nokao19bFlABO1+f0org9k/wCdl83kfQjfepRQ33qUV9azgQZqB3xubLAD0NTmqsoysi9MgimhM5rR/iJoOtXkVrbXM8c8pwiTxFdx9M8jNdYsrfxVyFl4Ksra4gnJV2iZXDBcHI5Fav8Aawl1k2UaSjyk3SM6FQSTwBnr0PI4qrCub4bIpjTbTgDOOtNQ/JWL4mjefw5eQRsVaZRHkHnDMAf0JpWHc2I71JRlGR1BwSjA4/KpxICK878H+HH0LVhLC7LDMhSSPPB4yD9cj9a7rzNsuz2zQ4gmXCwUZPSm+cn979Kikb93+IrivHGoa9YT6edFuFjDLIZUaJWD4246jjv09aXKO53gdW6MDTs1kafcyS6da3NwoSWSJGdRxgkDI/M1oq2RS5Q5iajNY+p3rWOm3l6kLTNBG8giU4L7QTgH8K5Sz+IbXLBToWoqx4wh3Y/QUcoJnoeaM1BHKXUHkcd6jedgXO7Cr7e2aVh3LeaXNctpXjfQ9ZuUt7DVI5Zn+7G0bIW78bgK6JJtw5FFguTZoqrcXiQFAxA3ZxlsU0XoI+4SPY5osFy5miq8dysjbQrD61MDmkMdmikprSKgy7BR6k4pAPJozTQ6sMggj1Bpc0ALmjNJRQAi9KdmmL3HvTu1AC5opKBQAtGaSigY7NGaaKKkB2aM0lFAC5oIB4IBptLmgaG+TEefLT/vmka3iYYK8fU0+ilZFXZRn0LS7nPn2FtJnr5kStn8xWe/gbw1Jn/iT2a567IVX/0ECt+ilyod2clP8M/DE2SLDYfVJZB/7NWZc/B7w/OSVlvIif7kw/qpr0HNGaXJEOZnkNx8A9MbP2fVr2Pjjfsf+i1j3P7PtwB/o+u5P+3a4H6Oa93BpQafKI+b7j4CeJEyYdQ0+QDpkyqf/QDWRc/BfxlADts7ab0KXKr/AOhYr6ozQDRqGh8gXHwy8Y2+d+g3D/8AXFkk/wDQSazbjwb4mtgTN4e1VAO5s5MfyxX2iQrdQD9aZ5EP/PJP++RRdhZHw01ldW4lWe2miO3pJGV7+9VsHFfdjWsLjDLkem44qjceG9Gus+fplpLnr5kCv/MGjmYcp8RYPegn2r7IuPhx4RuQfM0CwGf7luif+ggVk3HwY8E3GT/ZCRk/3JpV/wDZqOfyFynybRgnpX1BcfAXwlL/AKoXkP8AuXH/AMUGrJuf2etJYk22p30fpvKP/wCyrR7RBynzqBxRtHrXulx+zzOMm31sn/rpaj+kn9KyLn4B+Ios+TfWEo7BhIp/RSKPaIfKzyEgd6bjmvSbr4KeMof9Xa2s3+5chf8A0PFY9x8LfGtqcvoUzjP/ACykjk/9BY01JdxcrONr1T4CXnkeO57fOBcWTrj1IZW/oa4m58FeKLQnzvDuqKB3+yOR+YFb3wuF3pHxL0d7m3mgDyNCfMjK/eUqOvuRU1bODCCakj6rNJRmivKO0KUUlFUgHEZBHrQpyoPtQOaRD8pHoSK6KREh1FFFbkBRRRQAUhpaKoRABiVwB15pTSP/AK5T6jFKeaQXCpYIfMfcQMCohy6oOp6CtHb5UOEGSBx7moaQN2KVxFDcMftTKtspA2lsBmPr/LFYWvaTpcNnNIq/Yw/ytMkrRFs9RwCWPTjHNdO22PBIzjpxk5qte2EeoweVeIGhYg+Vng49cdfp0rmnTTTTV2wUtTztS06Yim1S6hQZBkUspwD91SCQePY4471xt61ld6sls9/fM9zKUaJ48FRxk4GcDj0H0Net61cvJbxw6bGQkbKA6LkEZwVXHt3OB2z6ec+JdRG26ewnghuWg2yrb4Z1IBXBYf7wIxjPNebOnyysnc6FrG5lyrp6Sui+C9YKqxAy0o4+gTiiuf8AsuunmJ7jy/4NzDOO2eaKfso9/wAWY83kfSzfeopWHzUCvqGcaEqhfc2dyPWNv5VoEVy2sf2+uoPHavbNZS4GHU7lBGDz+dNNLclps47w14XvdG1Ozube9m8oOokjL8MvQg/hXos4Xz4iRzg4/SmLZPHZsYwDKFyoPTPasnTtR1a/vhDe6WluiAnzFkzz6Y/z0qrruKzOoT/V/hVLURv0/jn5kP6ir0akJiufvNch0mZrXU4pVhOfLnRCylfQ45BFCYmjRt1CtGf89DSu+dQwOyD+ZrGbxj4fgj3LemRv4USNsn25FXNGlmvw17JE0QmbcqN1Vegz74FDYJM2W/1Y+o/nXLeLvFn/AAjU9kraW16k4csyvgx7dvbBznP6V013LHbWrTTOqRpgszHAArDvjo+ryRtJqFq3lghQJFPX8fakMsI0et2VlqcE0yxFRKkecBsj+IeorXgY7BmsJtS0/Tra3061lSSRiI40jYEgdycdBjNbdvkoCaAK19JbxaddS3jFbZI3aVsE4QZJ6c9K56y17wk7KLXUItzEBQVYZP4iuiu4Rc2NzbkZ8xHTB75yKw7DwpZWsyytaxBgQQdoNSyonURdOOlQNz5wPc/0FTxgBcZqEj5ph6n+lNCZydh4LtbHULa7GxZIHDgqOTiuit9QS4u5Io1cCI7WLoVyfbPbpzXn2ieH9S0fVbO4h1C4a28wB4mclSp4r0bA81DjnHWhgirr2h22t2qR3Me4oSUOcYJqroelJpmjS2SZCq8mOfXn+tdCvK1UAw8w/wBr+goA5jwnoUulXDStd3MoKbQskhIH+cV2ydKoWaAIvHar4FQWPzWVrhJswO24GtSszWRm0/EU0Jmdpl55TNESdrcj2NQWVy39qiQsxBds8/WsGTUJLfxX9gfiOS0WaFv9oMwYfltP51paezGVWcEHzW/9COP0pdB9TtI23Cn1XgOVqehgC9T9aWmjqfrS0gKV7qP2N0UR7yRk84xTkv
0a0+0EEAAlhnOMVn6x/wAfCf7tZUl6p0+9Eb5CKwYZ6MBUt2nZ7FqN4XR01pqEd3uCoykAHnFWi3FYGnMY5MA9Vq/qFy0FhJIv3sYH1NODuhTSTJpNTtInKPOu4dQATj8qnhuIp03RSK49j0rzjWINSuYEGm3PkOCWY7c7vQVc8L392UtpLrInJ8uUevOP8DUptq5XKtjvy2BntQGBGR0rO1SF7vS54I2wzoQOcc1zWhagYZRu+XPySA9iP8/rVuNlchb2O2zzS1yj38tzqAKzSLHuAARsZFdIJQULZ496lO70KasTbqXNcjq3iRrWGS5VpBAnQRJuZqseH/Ea6pDDKrl4ZvulhhlPTBxU8w+VnT5ozVa5ult7aSZhkIM49ayz4gjTHmoqZ4GZMZ/SnJqO4RTlsbuaM1RsdSjvSyopBUZ6g1dBoTuNqw7NGabmqz6hbRyGN50Vh1BPSgRczS5qtDdQznEcqOfRWBqbPFG4EmaM1GGyKN4JxSC5JmjNNzgUA5HFJjH5o3UzNGaQyTdS7qjzRmgB+aSm5ozSAcaYQD1AP1ozSGiwxhijPPlr+VNMEZ7Y+hNSE0manlQ0yI2644LD/gVM8g9pX/HH+FTmk71Ps4voVzMgMcg6SL+K/wD16TbKP7h/MVOabS9lEfMyPMg/gH4NUMDzlpC0SqS2cbv8+lWqjHEp96FTS2DmuKGfun5Gjfj+Bvyp9Ga0sTcZ5i/7Q/4CaTzo8/fFSUUrMY3ep6MD+NLSFFPVQfwphgiJPyLz7U9RDLhtgVjnhhSb95wn4nsKiurQNbuI2ZWxwckgfhmpowBGoHTFGtw0Lttb+UN5+8euetWSQFLHPHoM1WS6UIA2S1Zlxrl7Zbml0tpY92FaCUNgf7QIBH4ZrKpUjBXkTyuT0Ndl/wCWjZHHTPSud13ULu6YWGnouBgyvISA690AHJHqR24zzVLVPE89xILNVS2YkHaWzIw9h2/I1nXGqSWtm4SK2V5mw5lO4tgdSTkn8a8ytipSaVPbudEKDSvIbeLMLSSK906TLZ8yaIrMPrsyFHAAA5xwPSuH1a10ay8iWzknMMpCtlcMG6gsDxjnjAHWuluNVuhF/o8toZGBACQqQD2OcYwPrXM3V3G9zuv2e9uRwCuUh/IDJz+B46HFYOTbLcUkc9Jq2rRSNGlpeuiEqrHaSQO+cUV1hksSTts/l7bYcj8Cc5/Oil7eP8pnys9qkHz0gp0v36QV9UcAYpjRhjzUgFLQBEEAGKYLdQ5YDmrGKMUANC4FV7i0juFKuoIPqKtUuKQGGug2aSb1gQHPUKK04YBGoAGBVjFGKSSRV2yC5to7m2eGVFdHGCpGQa5ibwXpjtn7Mg+gxXX0hANDVxJ2Ob0zwzZadL5kMCq/rjJ/Ot9EwMYqXaPSjFCVgepyWsaFqU9/LPZ6vdW6Pg+WrfKDjsKoLpPiSM8a7MR/tRA/zruygPUU3yl9KHfuNW7Fe1RxCokYs4ABbHX3rF1ldbjvwdOktxbuBuWRCSD3P8q6QLikaMN1qrvoTZGNJYuNPfywDKFygPTI6VQ0i81a8uil9ZxQIgyGV8kn0x6V1HljGKYIVVsgChyYWQiLha5/U9bj0zUWt5LS7kMgDK8UeV9MZz1rpAMVDNbJKwZhkii9h2RBZqwjUkdquimogXin4pXADWfqxAsyzEKARkk4ArQqrf2UV/ZyW06B43HINNMVjhtfsftaRXtqytcWmHQqc5x1H4g1csZRIIZV+62CK3NO0O102BoYIgqkkn3NZy+EbWPVReI0ow+8IGO3P0pNqw0nc3VmWGLe+ccCrUUyyLuXP41n6jYvdae8MMrQycFXXqCKbo1rc2tr5d1cvPJnJdgB+HFJtBY1R9406mj734UtAGPq4/exfQ1w0StY+LNQtpiWtNTUSJ6B1UKw/EYrtPEv2xLWOWythcSBsFCccVjT6Rc6lpEcssQivIyJUUHO1h2z9OKHG8tSoytE07D5WjGc4XGTVvVxnS5T6YP61h6Je3d1e+VLp00CoDudzxn6V0t3bm4sJouMshA+tKkrKzHVd3dGFpqKzsD6VRVBDfzKvAE2QPyNVbTWYrWdorlhb3MfyukvANWba4hvb/McqO7tuIQ5x0/wpS92PKwiry5jsYvmUA965XVrI2OsCSNf3NzksB2YV1cCkIM1m+IUUWSSHjbICfatY6qxD0dzO0uHzJQ2OF4rfuFIspcddh/lWToTLJHIVIOG7Vuuu6Jh6jFZU1Yuo7s5G3t0ukSJwCrDpUel+GZrDU1MV5ItmsnmLCqjHrjP1qXT5hG4z96JijD0PSrcl9cteP5T7YlwFwAc+tJrlvcpXlaxr6ou7Srgf9Mya5K/0u21e1ijukDKpDj2OMf1rsbkebZyr13If5Vx1zYnU9MjgEssJO074m2tx70VfhTCl8TRteGNMsdKieK0QK7HLnByfSukBrm/DOjtpiSM9zcXDvgZmkLYHtXSClHYJbinpmuK1BZC96sb7ZNzhWxnB7Gu0PSuSvlK6ldL/tA/mKqS/dsUH76Oe8J6hqkiTjUTm5tpsK4TbkY9v8816TLIfs7lDhtpI/KvNdE1m8utVuLG9tY4pYAHVkJwwz716HbSedaq/wDeXms4aS1Ln8JhaFcubiJ2dm3qQcnPP/6xUN1qt0ut3KiaRVikUBQ2BjHpTNJzHMsZ6xzMn61W1RfL8Q3I/wCeiI/6Y/pV2+NE9Ys7a4uPKtZZQM7ULY9eKydE1S4u5QJnUqybgAoHPFXoj5+mrn+OIfqK5zw/Jia2B9Cp/WlL4Uxx+Kx0OsamdPto3Cb2dwgBOAPepdMvjfWnmMgRg20gHPpWV4qUnSkkH/LOZTx+X9ak8PN+6mT0YH8//wBVE1ZJijrc381mtq0KzvGN7FDg4HGa0M/LXJSIDfXkJPBdge3WpnpByXQcdZJM6KDUYZHCbiGPQMMVdzXmemW13pHiCSxluppreSIPH5rFtv5/jXoMUpltA2fmK4J96mLd7SKaVrodFfW80zQpcRNKvVFcFh+FTO4UZrzfQJfJ8RW7HqWKnPuCK6HxrI40iMKSFaTDYPXg1t7P33Az5vd5jpt4K5oVgwryzTdfnsNIvbQSld67o2B5VsjOPqP5Vs+BL+aW7u4Zp5ZMoGUO5YDB7Z+oqHH3blX1sd2SAOabuB4zWB4o15tGslMSB55M7d3Qdsn864aDxlrcV3GJbjcJBlQ8ICt9MD+tS9ClqesUlUNG1RNW0yO6UbScq65+6w6j/PrV+gYlNI+cU6mv2PoaQDqKKKYBRRRQAlFHeloENYZBFV4STEPUcGrOKrRArLKuMfNkfjTAkpyp5iOnHTIzTaVZFiJdzhQDk1FSKcWmCdmczrFjbTTtDPDHKGw2xgDjtu59KwpdMmkVPsWozwjn5pFEowTnjdyB6YI/rXTyW7zT3N1Jvj3ttRHUZUADH884/Os2+SO006Rp1PlkEZfPzn0z71xWjK7asjVt2Ocmsv7Tu2ie4i+zxDcAifM6nuzZPU59B149eC1e3uG1C4gvH8i1DmOOR5yS0
YPy5AywXgH0r0e3SG3sy8rwNcTN5kuWACH6e3QCvM/HumXt3rDraM0sD7JF2PldxGCT6nj9amFOCemhMm7XNmO+tUiRF12dVVQAqwMQB6A9xRXBR6VqKxqPtiLgAbc9Paio+rU/5yfavsfYUo+emAVhHxnpMp6zr9Y8/wAjUq+KtHPW4dfrC/8AhXte0h3MPZy7G1S4rLXxFpD4xfIM/wB5SP5ip11jTG6X9t+MgFHPHuLll2LtFQJf2Un3Lu3b6SKf61Mro33XU/Q1XMhWYuKWil7UBYSiloxQA2lxRS0AJRilooATFFLRQA3FFLRQAlFLiigBMUlLRQAlFLRSASiiimAm0UhUU6ikA3aKTYBT6KAGfx/hTqP4h9KWgBjIGHNMEQAxipqKAK4t1ViwAzUm3ipKTtQO5l32jWd9zcW8ch9WUGm2Wi2tif3ECRj/AGVArWoxSauNNoYq4FV7+xh1Cze3nTdG/UVbxRigRl6XpVvpcBht4wiZyfc1o44p+KMUkDZyuseGpZrxr3T7pradvvjbuVvwqPTdJ1FJ83t0sijoEj211u0dxSbF9KUlzblRlykAi/dlfbFcRLa+ILWRo4oLSSJThSSc4rv9tMaJW6im7tWCLSdzk9D/ALWfUUN1bRRQqCSVckn2xXXCmLCqnIFSgYqYqxTdw7Vx/iG5Sw1Eu8UzCRQcxoW6etdjioJrdJhhlB+tVfRoSWtzgLW7sp74SRRSm5Zdn+qYcZzz2/Ou4sozHaonoKEsIlbIQZq2se0YrOMWndmkpJqxwt3crpeu3ME58tZG82Jj0OaZdyLd6it0GA/deWQDnJzmuw1LR7TU4wtzCkmOhI5H0NZtn4V0+ynWWKAB16EknH51VSV7tdSYJdehpWCMlhEjfeCjIPauY0/9xqLJnGy5Yfhurs1TauKxm8O251Y3xaU5bfs3kLn1xSv7nKwXxXH+IY/M0O5Hoob8iDWfot0kDsXcBXUHJ4FdDdWwubOWA8CRCp/EVx7eG9WiOI9S+UDgGEGiclyoILVnZw3Ec4ISRGI7K2a5q+/d69MvZ1Vv0xVzQNOu7NpJLy5ErsNoCoFAFV/ENnffbY7qyijk+TYwdiO9CknB3DltJWKWqup1nSnUcsJFP5D/AOvXSWBzagelcvbLq0kyLNYxqP7/AJmcfpXW2sXlwqvoKxUnKafZGjjaLPO2/wBD8RnsI7on8N2a63xVF5ugyNjJjZW/XH9a5/xBp9ydfkEFvK5lYMpVeOnr9RXW6nC8+h3MZHzmE8D1xXZzL2ql6GCT5LHjt1J5NxDG33JdyH69R/I10ngqQw65GhP342T9M/0rPj0l9ZNwkKFmjXzEbHAcHpn35FS+HneHXbIqpLGQAqByM8H+tRNb2HF9zofHsZK2zdijD6YINc3O1ncaJpsTyulygc7kA4G4jv8AhXaeNLcyaSkwHEb/ADH0BGP54rzia1t7qxNvNH+9Rw0bdsZGRRJK0WNdUel+DXs1014LXzsowaRpcZYkdePpXS1w/gqdTeXMa9CgOPocf1rt6wi9DWQU1+VNOpD0IqibgDlQfalpqcoKdzQAUdqWjpTEJ3ooooASq7fLcj0Zf5VZqvOCJImGMbsH6GmxIfUKhnut54jjI2+7dz+HT86mpMVMlcaY7UYwVHpiuW1CFC0KswcbtwiTlnYDIA/me1dDqM94Y1W3sfNBX77ShRn07ntWBqaXVtaTzJEd/lgvtBfn0B7LnH6nmvOxNZJcsXZmsFpqc5cxNc6n5eoXKwwuP3cSnooHO5+3XOBj3NV7iKG8N55dhBcEhi08eCIsYAwO5PJ7dce9XdQgllVUuJTFBjmNdu8npgDJHvkHiua1BD9oZYdVEdogBEaPtJJGenU4GPXnp7eZUlOd1I1suhfSHSlRVaCYsAASZohn9KKpJ4USRFeTVpEdgCy5JwfSisLx/mFyPsPEjK3X9asx3Gep5qk+d2R1pyEe4r6B6gjUWQN0NShuKzVcqcfhVlJSO/HrUtFFnCE8ov5Uo2g8KAfpUav+NPDY6fzqRk6TyJysjr/usRUy6jeJ928uB/21b/GqeeKdk9setO7CyL6axqSdL24/GQmpU8QaqowLx/xVT/MVl5pe1Pnl3Fyx7GyvibVF6zq31jX/AAqZfFmojqIG+qH+hrB/xoyaftZ9yfZx7HSJ4vvMfNbwH6ZH9asL4ul/jskP0lI/pXKKec9qm6Ue3qLqHsYPodUvi1P4rJh9JM/0qVfFlr/FbTj6bT/WuRGcZpaf1mp3D6vA7FfFNgesdwPqo/xqRfEmmN1lkX6xn+lcVR1p/Wpk/VoHdDXtMb/l6A+qMP6VIur6c3S8h/FsVwOfejOKpYuXYX1aPc9DW/sm+7dwH6SCpFmif7sqN9GFecemT+dGPoaf1t9hfVV3PSuDS4rzUMVPBI/GpFuJ0+7PIPTDEVX1vyF9V8z0XFGK8+GoXq4xd3H/AH8P+NTLrGor0vJfxOf50/rcewvqz7nd4oxXELr+pr/y85+qL/hUq+JdRXq0TfVP8KpYqBP1aZ2WKK5JfFN8OsVuR/usP61IviycfetIz9HI/wAapYmmL6vM6cj5lpcVzB8WsWX/AEEH1/ff/Y1OviyH+O0kH+64P+FP6xT7kujPsdBRWIviqyY8wXA/BT/Wpl8S6cw5aVfrGf6VSrQ7i9lPsatFZy+INLJ/4+cfWNh/SpF1nTX6XkX4nH86ftIdxckuxdoquuo2LH5by3P/AG1H+NSrcQP92aM/RxT5l3DlY+jFAIPQg/Q0uKLisJRS4ooEJRS0YoASjFLRQNISilxRSKEoxTqSgAxS0UUAGKTFOooAbRilooATFNKA9afRSAYEAoZA3UU+ilYoh8lc5xTwuKdRilZBcieJWYEjkUuwbcVJSGiw7lZLVIydqgZ9KiTT4IpzNHEiu3Ugcmr1JS5UO7IJoEmhaKRQyMMMpGQRXNyeDNP84siOq/3A5xXVdqTFDV1YE7GJovh+z0YH7NHgnjJJJxWzTsUmKSVgbuJRS0VQhkf8Q9DT6YvErD1p9ABRRRQAUlFFNCYtQzjMRI7HNS0xhuUj1FOwhOvSkpIzmNfpS0hkkm46c+z74zt9q5zUkeSBAkpaISK7KuP3mPcmt+EGSJ/NH+0qen196yNQVVkPHI5rgq0HObkawkkrHP6pbi7vLXfbr5WC0mWAbjGATyQOvTr7DmsvV2mW2lng0jTxLEhMcjMG2bQcHbt9ulXri9XlLaMzSDJZYiAFPU5J4H8/aqoSRlZ7psDPEUXOc9jnr+lS8LC3NLVh7R3sjzseKYgAH1e93j7224bGfaitN9ABdi/iLT42JOUEcQCn044/KiuPlo/1/wAMV7xqMvzHjjNKq8D2qWUKjfMQp9+KRMEfeH4Gu1O5tawBcds08D2pwX0PPtQFHai4xVYrwf0qVG6ZFRjHY5oXOD29c0gLKv8ApT8gjqeKqhj1qQMRQBY68j60dOpqMMP608HjFIB3b1pcc00EZxml/KgBR1xmp1quODU65xUtlIdTuKTtSfQVJQppM+1LgikI
+nFFwDr/APXoA4IxQB+VLxn1ouFgB/lR68UdOv8AKii4Bx1o+tL+dJQAAUdaWk6jigLC5pD796XHSkI9utAWA4oz9KM80h64/OmFgAzS0DrSY4oFYXg5ozikx15ozQOwuaAfek70Z4pALn0pcAj/AOtTc460E0wsO4AyB+VPWaVfuyuv0Yioh7UvGKLsVkWBf3aH5bqcD/rq3+NTLq+oJ0vJvxbP86o+9B47U+eXcThF9DTXXtTX/l6J+qL/AIVKviPUV6yRt9Yx/SsjNGfXFP2s+4vZw7G4vie/HWO3P/AD/jUi+Krkfet4T9CRXPg0ucVXtqncXsYdjpB4rb+KzH4S/wD1qlTxVCfvWrj6MDXLZPFGfen9Yqdxewg+h1y+J7M9Yp1/Af41MviLTz1aRfqn+FcYDS7gQaf1mYvq8Dt117TW/wCXjH1Rv8KlXVrBul3H+JxXB55pQ1P63PsL6rHud8NQsj0vIP8Av4KkW4gf7s0bfRhXn27HNLuprFvsL6qu56ICD0IP0pcV50GA6cfpUi3MqfdmkX6Mar655C+q+Z6DRXBpqV2vS6m4/wBs1MusXwHF0/480/rcewvqsu521Fccmu3463Gfqq/4VKPEV6OpjP1Wn9agL6vM6uiuYXxJcjrHEfwP+NSr4lk/it1P0OKpYmmL2EzocUVhL4lU9bb8n/8ArVIviOA9YJPwINH1in3J9jPsbFJWWPENmeqyj/gI/wAaeNcsCf8AWMPqpqvawfUXs5roaNJVIavYN0uB/wB8n/CnjU7JulzH+JxT9pHuHJLsWaKgF/Zv926hP0kFSCaJvuyIfowp8yFZjqKOvTmiqEMPEoPqMU+o5DgoffFSUAFJRRTEFFFIaYhTTaWkpiIk/iHoxp1J0lb3ANKMA5IzUybSutRixnD/AFrDvpLa+kePzFfaNssOcEH39q2toLlskMfeuW8Q+D4tZnF3a3j2t0DnzFBKntxgjB/OvHr46pTnapCyNVFWuiu0UMPyRRiKIHCoOiiucurvUILhrgaUWmU+UCLjeqjqTjAI9z06c9M9RFo93pdr+/upNSYAkcJGemMZOc8/z9gKqpbz3N7uNobdowMx4QjHqWA6fQjt61jPHqs+Wm7JFxhbVnFyWdh5jeZOd+Tuz5fXv/HRXXNpeiFiZbW8aQn5j9mfk9+9FcXtX3NOVHJ/GFiPD1hjOTdZwP8AcP8AjXm+gs7MnzNjcDjcfWvRPjGcaPpyc8zk/kv/ANevP/DwysLHHLf1r38vV6KuRjH+8Z1S7xjDuD7NTw8+B+9k/wC+jT0Xr3qYKMZP1r0uVdjhUmMFxdL/AMvEv4nNOF3eD/lu2fcCn7AOMUu3Bpckew+eXcFvrw/8tAT7qKkGo3ag8ofwpNn+c0nlg0vZQ7B7WfclGr3IHKRn8/8AGpF1qbIHkr+Bqt5YPB/lR5Yo9hT7D9tPuXhrj9Db9uzf/Wp415R1t3yPcVn7MCmtEP8AJqfq1PsP6xPuWpvF9lbShJoLnkZyqqR/OtGLxDaPGrbZhkZGVH+Nefa6Nl6mOPkGPzNdBaR7raLPdB/Ko+qU2y/rM0jphr1l6yf98U9dbsT/AMtWHsUP+Fc95IPAFO8n/wCtxR9Sph9bmdENWsW6XCj6jFPGpWbEYuovxYVzPkgcAUnkj86n6jHuNYyXY6tby2I4uIz77xUgliPSRf8AvoVyH2cE5Iqvd2w8jkAc1DwK7lrFvsdyCCOoNKO3tXk/2chbwKxHOTg47Ukf2q2W2Y3U65yWxK3fn1rP6m+5f1pdj1rjjtRjmvJYLvVQsxOoXgLJvUee3HJ96emp61HpzuNUujIVLZaQnHtzS+py7j+tR7Hq9GPWvGx4x8QRGMf2m7fL0KIcn8q0pvGWtrKscVyCAwDFolOfyFS8JMr6zA9S7c0Y+ua4JfFWrqPmMB9zGR/I1XXxzrPnmP7NZtlyB8rDA9T81J4WoNYmB6LTRXFSeM71Uz9kgOOT8xGaLTx3NPEXfTFTnGBPn/2Wl9Wqdh/WKb6na44pOc1yEXj2OW4aP+zZQq9WWUH+gqSDx5ZSytE1ldqy8kjaQf1qfYVOw/bQ7nWGk/SueXxnpxODDdL/AMAU/wDs1IfHGiK215JlwcZMJP8AKk6U10H7WD6nRdTQSeawIvGugTPtW9bcemYH/wAKl/4SvQhw2pwqf9oEfzFL2c+xXPHubXOeKO1ZqeIdGkj3rqdptHUmUAVLHq+myjdHqNqwPcTKf60uWXYfNHuXcn9aUVXF5ayHCXELewkBqVXVujA/Q1NmO6Hg0Umc8nNGcA0WGLnJozmm5zS5Hf8AWgBc8Z70tNzkf40E9qQxaBSE0ZoAcDg0pJxTM/Wkz7GgCTJyaN3FR7uODSk0ASE88Um7mo91NLf407CJt3/1qaXqIuKaxAosFydX5qQPVMPUgfiiwXLO/rQH75/Wq5fApC5pWC5a3+9AfjFVfMwO9HmHHNFguWw9L5nHPNUxL604S4o5R3LRkpnmc+9QGTjmmGUZosFy6JPfip45PlPSs5JM+tTROefTFKwFvzKaWX0B/CoN/uaQygDPX1ouFiwHA5AA+lI1y6naJZsnoqOwJ/I1Wjd5z8gwv989Pw9auwW6J7serHqaLt7CsluXdMW6ku1kluZiq5xGZCynPrmuoByBWHaLsGa2YjmNfbivSoRajqcNZ3kPpM0UldFjEXNGaSigQUtJRRa4GVJb31tcloJfNhYnCNjK+w9RUcuqXFs4S4te3LDI/ofbvWrL0U+hpc15ssFVhJujUa8nqjRTT3RiHxTZIxVo3Axyw5OfpVlvFWjzCILOVZhwNvtnHHetBrK1mRmnt4nDdQUGTUcmi6dJZ7ltEUGTzCF4+bkZ4+prmksSk+azt1KTjdWMo6vpu8oL1JWZSfLQ5475J44p8enw3YaZIVWZjtIB2jA6fXHPbvVS88M6WINzvLDsAJkD4Ax356VlXFjfW1havpFzdCMF1YOCrTbjhccccA85Hr714VWHNJNKx0XaRdmtrOKaSP8AtdItrFfLEiAJg9MbeMUVCuhakVGLfTU4+60SMR7E45PvRWXspdwuzz/4xOBZ6UvTMsn/AKCtcRoCfLBj/np/n+ddh8ZWITSFz/FMf/QK5Pw8Plt88gsD9Oa+wwC/cxMsX/EZ1qIOp65qUDnPr1oQZ6j61IBxmvTOEZg5NOCj0p23HWnY9RwaAEx/OjHGM89aUDIz7U4cCgBmOeKMdqfjJNOA5NFxWI8YphXA9PepSMnpxTSOOfpVCZyniIf6dFx/yz5H4muisx/osPGfkXn8BWB4jGL2L3j7/U10ViM2kByceWvf2pLcHsWQvGMc08qenNIBke/c1IopiGbevrS7e3FSY70bQe1AyPaB25qC7X9wenUVbxnkVDdg+QenUUmNHM3H35kXq7DP4ClnjEltAo6nAGfcVO8Ya5nbsqY/SoBmSSH0QqvHrWVywmVYpmyBjy8Co4kBsAMdjVi4jEl2i9gMn3xVYOUs1RR87ZA/OmmI5G4jZZSxwNzE4/u84A/St+O3WO3t1HU
kEnHU1i3Z2uoPTn+fFdAozBbn/d/lSGaL4jjLnGAKq2cQeWWQjk1PJm4nEIPyjlj60tqu2aYAd+cU7gLJEu08DNUI1CWhUD5mY44rXZeOelVbeESPv/gThRSYIjhsxFEBgbjyTjvVeGAC/kOOueK1mXv61UVcXrk9x/SkwIp4/LjPy4PbNZr2obPr1+tacxLsWI46CmLC3lu5JGBx71Ldykita2KLGH43NWRrEWGb6CumhA+zr+P86wddGCT221fQkj02ITaU2eRlu1XYLcR2qrt5PQVFoLA2Mm4Zw54/AVrWqCRzIcEDoPSkMqxaaqKSQN7dTT/smDxxWoVHbOKbt9v/AK1OyFdmPdwyiEFZJBznhiKfbm8WBNt1cAgYz5rA/wA6vXKZgPHTmmwIPIA9M4qeVXHzMiW61IdL+6+nmt/jTxqmsIOL+cn3fP8AOpRGB2PFQy9lUZJ9qHCPYfPLuNk8R6+p2Rag+7PdFP8AMVNfa/4r0/w++qG4VohII9zwLhiew4q3pGn2b3BN7cLEijcc9WPoKd44vY73QpLaN0ZFUFVjPyoB2x0rJwjzWSLU5WvcwLf4ja+0UbN9lbd1JiP9DXUW/irU5I0Z47clgDwpH9a8qtjttozxlW/LmvRbFQ1pEePu1MqUL7Fxqy7myPFF5jm2hP0JFV7nxvcWrqDpquGGciYjH6VEIxjP61Q1G3DbDj1Gaj2MOxftZdy+vxCOPn0lwfacH/2WpB8Q7X/lpp10p9mU1zjW64+70phtVIIAHFHsIB7aZ1I+IOmkfNaXq4/2V/8AiqkXx/oxIDi5T6xf4GuOa0Tg7ec+lRtZoaPYRD28juh450HHN3IvoDA/+FO/4TXw+2M6mi/7yMP5ivPXslP8qztSsNthLID0APQdjTWGiH1iR6unizQXwRq1qM+sgH86tp4g0mQALqdmf+2y/wCNfPmP3HX7rfz/AP1VuW9orwo/qoODSlhorqCxL7HuC6jaSD5LqBvTEin+tSCeM/dcMPUGvExp6sp4HJ6mo308AfKMVH1ddyvrHke4h8nj60hfGM5rw4wToMRyyIe2HIogk1guFhv7xfpMw/rR9W8x/WPI9x8w+tL5nPWvFn1DWrQPnV75mCZ5uGOPzP1r2T4SSrr3hSaTUh9puYrtozJLy2NqkD9TWVWl7OPMXCspO1ibeOgpm/nj9a7g6HprHm1X8GP+NRP4c0xh/wAe5H0dv8a5vaI1ucekgPepklwf8K6M+F9PySDOv0f/AOtUE/heDyX8m6uFfB25KkZ/KpdRFJmG0x3BVyWPQDmporVnIaY9OiDp+PrToIFgXGMt3J6mrK81W5WxIijgYq3CvNV415q7CvIrelT1MZyLsIworTgbMVZydKu2rZUivQgrHDJliikNFaEBRRSUALRRmkqkJiPyh+lIDnHvSnkGmRn5B6ik0CJpXwAgOB3qe1O63kXuOapn9as2LfvGXuV6ViqSjCw76mPq8DOQ+AyKufLPQn1Pr9P/AK2MGae7nLy+cwVG+YnoBzx9c4rqNThaWJlLGNOQSvX/AOtWNaxRXFpLaNiMCRANvfJ4rwqvuuT5dE0dyfuopCe8KgjkEcHc3P6UVea21gsTGyKmflUSAADsMUUvaw7IV2eQ/GRiZ9JUdQsv/slc5oAwlt6bv61t/F2bzrrSDn70Lvj1yVrG8NjEdr3A6V7GC/gxMsV/EkdavHtj9KmAHH6imooxT1GDn+leicKFxxRjHvTufWgZx9KBjQBmnBR6daXHNGOKQxCBnqetLjOD607b2HWgLzTQhCM0w9s9DUuPpTGHpTJZyniMYvIjx/q/6mujsB/ocHT/AFa9PpXNeJm23sJ/6Z/1NdPpw3WFuf8Apkv8qS3B7Foenan4+uaaFI4z+VPA49cUwsHIzR1FOx1PB9KNuRx0pXHYTAbtUN0B9mP4VY9sfhUN2ubcnGOQf1pPYZgthZLkngAD+VRlRFbwE4GW3GlnDNLKnY/MeOwHSnTKrRW8fQtj8sVkaWK8YLTl2z86k49B2pkEQETS46qQvsKtTMI7hiSMLHio0AGnnjHymmhHH6iux075zmt2ED+z4GPQKOv0rC1RHW4V2ztYjaPbHX8a1rMvLZL1CRoB+NDEjatIike5vvvyaLcf6VMKmhJMSNzyBUO8Q3UhPHy5/lSbGLOxdxCnU/eIHakswQsg9GqS2jJBlcfM9JaH55R70rjJdueDVOfakjAck9fYVclk8tCf4jWexyx9TSkxpDVjMjgD+VW5IwkBHoKfBD5aZI+c0s+PKY9OOKEJlW3H+jDI9axdcXKnjOUrbg/1HJ9awdek2ywxryWyD9Ku+hNiHQAziVBwobcfy/8ArVvaeflbjnisTw02XuUz2U/zrcseZH60uoy6BznFKRk04Dj2/nQeOcc1VySvKuY2+maitV+U9OuatOMofU1BGDDHlgcnnHpUNlJBI20YHJPTFIsQU5Y5Y1IkZ5dx82Pyp5AJzjNNAyFhVLUY99hcL2KH+VaJHyjPSq1wu+J1xwQRVXEefWozanHUPnp7V6Lo7B9OhbjG3v2rzm2zsmRuxH9a7/w44fSo+nHFZSNYG0Bxz0qrfp+6BHUNzirQ/wAiq96pNufYg1BZmFecnp3ppUZ44OKk56/1pvOOvJpk2I2GevSmFOPXJqU/mBTT/nvTuBXZevb6VVu4t9lOg/ijbH5Veb6VEVBOMcHiqTJZwQ5gkHuD/n866PS/nsYWxggY/XFc8g2iZOny/wAq3dEbNmAeqsRjNOWwkagTI6Ch09PrzUsalzgDmr0Vrt5cc9ayuWkUIrJpW+YYX1q2IFjXCgCrm0AYH6VG64BPb1ouVaxganHiXp95P/rV6N8B7rNnrVqTyrxSgZ9QwP8AIVwGpphYyB1DD+VdR8D7gxeLdQtScCa1bA9Srr/QmssQr0mODtNHvZOcGg0g6Y9KWvIudYzvSHkGg8GkNQ2Uc3ewmO6ce+R+NRqvNampxZKSf8BNUAuCPfmuyjrFBJkkYq7CMCq0a1bjGBXfTics5Fhat2xw2KqLU8JxIK6EYMu5opKKskWikzRmmJhmlpKKYgzUa8Fh71JUfSQ+4oaEKaltW23Ke/FRGhW2uG9DmpktBk9+uRIvfrXD3MLPe7SSo7kdQPb1Nd7fKqgzEgLs5J9K4q5R5LlpJI9kefkQjlh6sD/L8/Q+XVdp27nZSd4Dc3X/ACz09Cn8JMRYkdue/wBaKYwYsSZHJJ9aKy9kyrM8i+Lv7vVdOjBO1YG4x/tHH8qy9EuVggtWMbMFQZx3rQ+MDA+IrJT2tv8A2dqy9MAFnBnBOzr+Jrqw8uWhFk11erI6hNahxkwzDv0H+NTDWbYgfJL/AN8//XrDXnp9KlH6YzW31iZl7GJuDV7U9RIP+AUv9qWmc72BH+yaxDx3pVPbNH1iQexibn9p2ZH+t59wRTxqFpx++A/AisIdMjrR37U/rEg9ijoBfWp/5eEH1NPN5anj7RF9N1c7wB0ppAzjGKf1l9ifYo6Q3MH/AD2i+u8U0zxEYEiH/gQrmjg88flULg
Dnj8qpYh9hOiu5F4slRb6Ahs/uzyOe9dZpbqdNtiGGTEv8hXm2suEuY+3y1vaaF+xwHGPkH8qp17K9iVSvodyF9P51IFPrXJqfQkfialUuD/rJPwY0vrK7FKh5nUgHrjn3pCMgY9ea5pZJs/6+X/vs1ILi4B/18v4tR9ZXYPYvudFg96iu1/cH2IrGW7uf+fiT86Gurhkw07EehxQ8RHsHsGRsuXu2I5AA/SoEBeaGQ/dBCqPoKny5LfPy3B4600bxtAbhORxWftUV7NjLiPzLph1Crk1FKfL00Kf4kIHPepxu3M+RuYYJx1qtPGSgU4wowOKaqoXs2c3ri7Xt8enX8K1bBAdLTAHK8/WsfxA7i4jDFdoUYGK09JkdrCMDG3BGMVbmrEqGtjZsyDbRn29abIA90sfQdz602BniiCKFwvrSpGUkWQHJBzz3qXURSgy5joB17VWtyFuZgT71J5rcZQc+9REEs7AYLe9J1ECgyKeUu2c8dqbCB56cf55pxhJOccd6eR+9V1XAUYxUqauPlLYHI7daimJ8p8eh6UpnGMbWpjSqQRtbnPatOdEcrKqy+Xa+jZwPesrUYCsPmONzkH8K1QF8zc4JUD5RiqGs3MSwAnPXHK+1OMkJxZleGW/0ycZPKf1/+vXQWX/HzIoJ/H61y/h6QR6s+ehUjp7g109uwW5LfwnPNNtIFE1OR/hSZ4xmmCVDyD+GKPMQDr09KOZC5WKx2g889KrwtvnO7qBT12u5Z3AC9BUUOFuskYXkZJqeYqxsWOnvdAuwKxDuB94+grVGnIBvZOeylD0p9vLarFGxki2gYVcjn681R1HVFkDRW7jn7zjj8B7VnzOTsXZJFPUGgRzHCEb+84HH4VlSLuUgg1YbDDsKhZeuea3jZGLPO9nl3t3GR0JA46c12XhZy2nnnO1v8a5K/URa7dj1JP8AI10nhCTMMq56H/D/ABqZFwOr71Fcgm2fnoM09TikmBMDjGeCOKg0MnnB9qQ9fYUdevSgn8qLiGkGmHvwSKcf/rUh75P9KAImHH+NRnjnP5VMRxjH5VAeuRxTTFY4yaLZqdxH0+ZwM1qeHYmkjlQZ4YH86i1K0K6rPIeFBDAeuQM1a8NNtubiPIyVB59v/wBdVKWgorU6aCJYx8o59anGAP61GuAakHB96wubLQXtUcigjrz3qXIxg9PrTGOeKaYmY+qL+5BGeG5/KrfwvuPsfxNshnCzb4z/AMCQ/wBQKh1Bc2zjv161n+H7j7F440W4yFAuoST7bwD+lOSvBonZo+qDw31pe1EnYijORXiHZ0Iz1pD0pW600nAznArNlIhuo/Mt3HcciskCtW1vba8lmjgkEnl43EDjnPQ9+lUHj8uVk9DXdhdrE1NBI16cVaSokFSpXpQRyTJl7VKhw1QipVPIrYyL1FNQ/IPpS5qkSLmikoqhC0UUU0Ji1G/31980/NMl4UH0IoYhTTTTj0pppDNFsSWsTEZwP/rVh3cCtdAFFYsQBuOK2rZt1lj+6cVlaomORXmYuN2deHfQQ6hZIdggiIXjOOv6UVjhJMD5M/nRXF9XX8xryI8L+LrhvFEC9harx/wJ6z9NnhSzhUyICEGcsKufFaT/AIq9RkjFqgOPQlq4piRI2DwAOa9GjG9GK8jOs7VZHcpNCQMzR/8AfYqZJoiOJFI+tcADjjP4U7cT3xjtVeyJ9oegBl45H509WGe1ee+a27KknHpTvPk6iR1z6MaXsg9oehD6Zpe4rz77VOowJpPYhjTxe3IU4nlx1++aPZsPaHf5Hemnt6muDGo3isCLmYen7w07+1L0Di6mI/3jR7Nhzo7Yn/IqJzlcAfhXH/2rfZH+lSdu9KdXvQeblz9QP8Kagxc6J9eP+kxeu3ofrXRaXzYwdT8g/lXF3N1JclWmfc44yRV2DWbu3hRI3G1RgAqDgVUotpIUXZncJUy+mOO3NcSviO/Gf3sZ4/uCnDxPqC/MTF+K/wD16z9my1NHa9sU4+o/UdK4seKb4Y3LB+Kn/GnjxbejgxQH2wf8aPZyHzo7IHBHH6U7P41x48XXRwfIhPr1/wAad/wls2ObWMn2Y1PJIOdHWkgDGaM5A6+tcp/wl8ne0Q/Rz/hTh4t6k2g44/1n/wBanySByR0+7I9aic+lc6PFy4/49D/38/8ArUp8Uwtw1tIM+4NPlkLmRV8RsVnjY88Vd0Ns6dF17/zNYer6kl+VaNCMDByas6Xq0VtbCOQMSCeg96tp8pKa5jrY2AGKlz3rCTXrUcNv/Kpl160PV2HsVrOzLujYyM8A0bvSsga7ZMB+8Iz6qaX+3LM/8tfzBpWYXRqlunIxRkdulZY1qzJ/1w/EGnf2vZ8ZmUfnRZhoaWRznA/rTSe1Z/8Aa1mQR568fhQdVszj/SE/OjULouOeKydYG6yPHRgastqVoc/v4/8AvqqGo3cMltIFlRiOwIqo3uSzF0ZtusLweQw/SuziI4HT61w2mPt1iJmYbdx5J9q7OKQHHI6+tXPcmGxfB7nNBHHPFRq3HBBz75pc547+lTcsdn+VJwM8A0mc0h6UrisOJX0FNIB7Um4delIDTTGIQmTx9ajZVHf24NSMT1/SoZG46002TY5HWkSLVJSAQSAePcVZ8NyNicKzLgjkHHr/AIVH4hGL1CehQfzNReG3xdzoTjKg/kf/AK9a/ZIW510csuR++kxjPJqUSysMeaxB681VjP8AKpwc9xnNZ3NACAcZORR5QHHSnL+eKM/jSuFiMxZH3qQxlT94496mOcDPNIev+TRcLFfyuPf6U6O3GeeakJA6/jTkyM880OQJHPa9Ftutw6tF/iKz9Ck26p7OrD+Rra19QTA5GRyv8v8AGue0tymqQMT3x19aqOqJa1OzBx0P5VKv0/Oq6tx7VODzzjmszQeDjp+NMZuOn60pYZHb8ajY8HP40XCxVuzuikX1Q1y1zMY57aVW5Q5B/KuslUE7T0PFcffLtiQtn5Wwf8/561pF30M5I+vYJlubOKdPuyorg+xGach4IrmvC2t2kfw+0fULy5jhiFpGjO7fxKNpHucg1h6j8RbKR3is5Wjj/wCehQ7j9PT+f0rxnCXM0kdkNUdlqGq29iMMd8vaNOv4+lcpqWszXmRI22M9Il6fj6/yrmJfEto5OJTycknOSfeqkmu278+bx3pqjLsdEeSPU7rwjd+Zq08JOQ8W78iP8a6O+jxMrjow/UV534N1eB/FFrEsgJlDJ/46T/SvT72PfBu7qc100Vyy1MKzTehQQdKkApiipBXoxOKQ5etSCo8U4VqZsuRHKfjUlQwH5CKe7rGpZ2CqOSTwBVrYgfR3qKK4hnz5Uiv/ALppzSohAZ1Unpk4pgPozSe/agMD0IP0NMQ6mSjMTfSnUHkY9aYhqnKAjuKaabF/qgO44pxqRl7TmyJE9QDUVzGpXc/IXkg96LBttyAf4gRUt0H2soQH1+bFcONXu3NaT1MVruTccPIBns2P6UU4xS5+8B7Bun6UV43NDzO/lifNnxQfd40cdlgjH9f61xzDErZwK6z4ltnxrc9OI
4x/44K5UfMzA859K9uhpSj6HLW/iMASDQfqadtBIpcdBjr3PetTMbuPt0pDnOfWnlcc5xmjHzZ7mkAwHHr+PFGOOAak2Z+7/KgLz60XAjz0xgZ9aXtjnFPVSc85BOMYpAvTIPv7UXAjJ5OOtGecgAZ7etP24GD+RFIVA5yOO1O4rEbcc5HXHXpUoPyj2GOtLb2st7dQ2ttHvnlcIiA9WPQV06/DvxPgZ0zBHX98n/xVKU4x3Y4xb2OW3YHH5005J6V1q/DjxQyknTlx6+fH/wDFUH4a+KWP/IPUD1M8f+NR7WHcv2c+xyeec8AijP4jtXXp8L/Fbgj7BGD/ANd0/wAaU/DDxWQB9gT/AL/p/jR7an3D2c+xyKybCSQD9RTS5Jz0rqn+HHieJyp0/JPYOpx+tIPh34oYhRpp3HuZFx/On7SHcXJLscvu57fhQXH4d66d/h74nViraa3B7Op/rTD4A8TL/wAwyT/vpf8AGj2kO4ckuxzSt2z+NAcZBx165rov+EC8TAZOlSe/zL6/WkbwJ4kGf+JVJzjoV/xp88O4uWXY5tiQPb+dPjY7SQeK328CeJW+7pUv/fS/40+PwF4oyd2lTcejL/jT9pC24ckr7GBvxznjuKBIQO4xx1/z610f/CCeJSD/AMSmbp/eH+NNPgPxJk/8Sqbkccr/AI1HtIdx8kuxz289z9OKUSFV684rfbwJ4kI50qbP1H+NJ/wgnib/AKBMx98j/Gnzw7i5ZdjA83pwc4xilEpC56Gt8eA/E/I/suX/AL6X/GlHgLxNg/8AErk/FlP9aOeHcfLLsc9vOeaaZDjnHPJxXSp8P/E7kgaVIOOcuo/rUTeBPEm7H9lzf99D/Gjnh3Dkl2Of80sfx+lIZRwM8jv610B8CeJhx/ZUxweuR/jSf8IJ4mOP+JVNz05H+NP2lPuLkl2OZVyHHrmnGQ9Mmug/4QPxP0/sqYY9x/jTv+EB8TY/5BUn5j/Gn7Sn3FyT7HPCZxyHI/HFO+1TqQPNfj/ardPgLxN/0CpRx6j/ABo/4QLxMTj+y5c9eo/xp+0p9w5J9jFXULoLxczc+jn/ABpw1O9A/wCPufA7eYa2G8B+J++lS/gw/wAaT/hAfE+f+QVJ+Y/xo56fdByT7GQNVvVb/j7uM56GQ1IusXwGPtcvt85rT/4QLxPnH9ly/XI/xpf+EB8TD/mGSf8Afa/40c9Pug5Z9jN/tvUOn2uT8e9OOt3/AEN05x64P9K0l+H/AIpbONLf/vtf8ac3w+8UI206Y+R6Op/XNLmp90Pln2MC6vri5IM8hZhwCQOn4U20vJrOcyRNhiCDxnit9vh94nBUf2Y6hjgEuMfzqVfht4oLAfYBz/01Uf1p+0p9xezn2M2PxHeoP+WTY65X/wCvUw8UXg48uEn/AHT/AI1bb4ceKR/zDh9fNT/Goj8P/E69dN4/66p/jS5qb6j5anYaPFVwOsEP05/xqRfFUmebVOP9o0f8K+8UZx/Z/PtKn+NQ3/gvxBpllJe3enmO3iALv5iNgZA7Glem+o7TXQtL4qYfK1shPs//ANan/wDCTqRzafk//wBauTDds9qcJD2xmn7NE87OsXxNEcZgcZHZs1NDr8DSY8mQenIrjw5zz+VWbaU+cOvPH6VMoaFKR0Gq6hFdQoqo4YHOD9KwI5PLvUfHCyZOOe9W5GyvTPpms2UkSv8AUdKVMJM7BNSgx1YfhUyapbYA3N/3zXNo3yZGee9TbuAeMVm0UmdF/aVtgHf39DSG/t848zr7GueD4XuD/Oq09/FBxnc3oMUKLew3Kx0sl3b7c+aAPWuS1S9haaWKI7gz/eHTr2qlcX01wTlgqk52jpVU9a6YU7bmM532N/S7jXtVRLG0u2aK1Q+XE7DagLZOAfc1pPo/ilQSTE3/AABf8K5W3u57KdpLeRo2PGVPatFPFGsIu37a5H+0Af6Upwlf3bFQnC3vXLstr4kjPzRr9Qif4VXLa2n3ox+KrUDeJNTcfNcZ/wCACoW1q8cYMg/75FJRn1SByj0bOj8Jalqdp4x0aWZNsYvIlc4/hLAH9DX1Yyh1KnoRivjCHWLmOeOXeco4YY9jX2bbTrdWsNwnKyorr9CM1z14tNOxpSle6MwLglT1FPFS3SbZyex5qKt4O6uRLcUCnCkHWlrUzZPAe1Vtatp7rT2jtwDJuBwTjIzU8R+cVLPJ5MDyYztUnH0q1sR1OehgOn6hCqOxdgpkQtu2k9ce1T3llcT3Vwzs8SAFllGCOnAwar6NLFNK95dSr5hOQDV/V7gsI4I2+V/mLZ4x2pK1inuQJcyto8nzEbXUZ9jjiq9u0lpdQzeaWSX+HA4wcEe9afkRDSpYIZEdthJ2kEk1kuwksoSDkrIR+BGf6VRJYvvtE97OkcgV05QEZHAzitTSblri0+dizKep7is+V9uqQSdBIik/yP8ASptFJjmmhPbgfgcUxGoOGce+aDyaDxMfcUGgCSBtlwjdgwrVlTLH3FYtbhO9Eb1GawrxvFlRdmZTRfMfrRV0xjJ4orxeQ6faHyP8RufG97jP3Yxj/gC1zeCXbgn5utdT4/iabxpevEu9D5eGXkcIvpWLiGMvvQh93rXqUnanFeRNTWbKew4zjPtSgZ4P5Y9qurdQINoTGfcUCWy67G/w/Wq1JKYQnHXigIQQcE1cEtmflUNycDk00SWhIxkA8UaiK2w5G0/pSgZyOg71o2tvHdIzp0BxknvU/wDZygHp+JqeYqxjleTxz0pPL7dxW0umqD8wwM4+8KeNLhPBzjt8wo5h8pglQeg6DpSMhXqD/Sug/s2DByzc+4xTG0q2Yf61h+VCmHIyt4TbHjHQ845vY/8A0IV9WBI2wSi5x1xXy9otutr420NFYsPtkRycdd4r6eD4Ue1ZV1zNM0pOyY7yogf9Wn/fNKY4j/Av5CmtOpG3bz/eppc1j7M15x3lxZ/1a/lS+VEesa/lUfnBOSCfpTRLnmj2Yc4phj37gi5+lKY0/uj8qTfSbqagHMSCOPH3R+VOEcf91fyqIOPWnb6OQOYf5af3R+VN8qM/wj8qN+aTzKOQOYcIowfuj8qcET0H5VHvpd9HIHMPCIB90flQUT+6PypnmUhejkDmHbFB6D8qCqf3R+VMLn1NJvp+zFzD/Lj/ALg/KmlE/uj8qbvpN9HIHOO2J/dX8qjkRNv3V6elBemSOMelKVPQamORU3H5R0pCqbVG1fyqNHGR7ikD8is/Zl85J5abD8q/lTPKTZnYMn2pDL8nPrQz/u/ej2Yc5II48fcX8qYY0JPyjJ9qTzPlzTRJl+e1P2Yc4540AHyj8qUxRkfcX8qZI+QPrS+YCAaapicyNoo/MHyDp2FMeJGOAq/lTnk+bPHApitxk5yTT9mLnI3hjUgBVx9Kq6leWenWpknwgPyqQmT+FWLqQQxGRs4VSxx7Vy2k66dS1BJ5OoXft3cKD0FXGlcTqGsLyW+YjT9Jnu2XpJKxRF9hk8/gMU28fXbKDedDWQdT5bq2B9MZrsbOePylVFHTt3qW5mKJyACe1WqMbD523Y5H
T9QtdSjO2IJMo+eNgAR+FTPbxscCNffjpVm9trcM12sCpLjJlTqcdjTIvmQOP4uazUE3Yc20QrCgfBVevpWf4rtY7jwrqsWxf+PSQgbR1Ckj+Vajy4YqPp265zn19qi1FBPp1zFjPmROv5qRVxp2Zm53R8sYGf8AClGO5xjilKEZ/XNLtLNwO3YV3HGABzwRx+tSwfLKp9xyKaqnrj86eAByADzmpY0aDkYwep55FUbknzgfb061oOBtGOh68Vn3vRSPT/P9azp7ly2LsBJiQj0HINNmnSJNznHp6ms8XpihVFUZHc1VdmdtzHcferVO71Icy1cag8o2x/Iv61Sz70UVqklsQ22FIaWlamIQ0lL2BoIxQAlFFFABX2D4Avf7Q8AaFcZyTZohPug2n9Vr4+r6f+CN79q+G8ER62txLD+u/wD9nrnxKvFM1ovU7y8TKBvQ4qlWpKm+Jl9RWZ3qaL0NJrUWlBpKWt0YjlOGzVllDoVYZVhgiq3selWIzmJCepUZqkQzOXw/YJ9xZV/3ZW/xqe40uC4WNS0i+WNoKsRxV2iqshXZStNMjtJDIskjEjGGOagbQofOLxzSIhOfLGCB9MjIrUpadkF2UrzTI7uKNQ7RvH9x19PQ+opbDT/se5mk8xzxnGKuUoNOwrkcgxIh+opSKJfug+hBpTSAbWvaNvs09uKyDWlpzZgdfQ5qJq6GibFFPxRXD7JGlz4cRzGqgMR9DQXbcQwIIJzwOKigDMiDPPtV2HRNUup2EMeWAzy4rsUbsnmsVxKf9rnpTllx/CRz3FXf+EV1wD/UAD/rov8AjUi+FdbJI8lOn/PRar2b7C513M4S5A+X/wAdpfMzwFOf92r3/CLa4Bxbrz6Ov+NIPDeuKP8Aj34/31/xpezYc6KqzOBsUuB1wq9fU8fhThcz5wHm9OCa3NM8NX7wubm1y4bj5x6exq+3hmcEFLMj2D/T3qHFp7Fp36nKrdXLOMPLyMd6mWS+I+UTfgp5rpE8N3SsCbV15/vj/GpRpF7GSEtW46dDUO/RFxt1ZzAGqsOI7kj/AHSf6UjnVYhl4bkZ6kxnn9K6wW1+gINoT/wH/Comh1DdhreQZ5+4QKlOXY0tDuY/hqS4fxdpAlDgi8iHzLj+MV9Ng8V86WEMkXjHR2ljZCbuLqMfxivoodBxzRNXsKDtcUmk8wCmnOOBUZyTSURORJlSc0uVxUADMeBk0bucGq5Bc5PlaXK/SoA1Juo5A5yxvWjzF9arbutG7mnyIXOWi60b1qru/wDr0bsfSjkDnLW9aN61V3cUu+jkDnZZ3rSF1qvu96Tfk0ciDnZZ3rmk3r6GoN31pC9HIHOyxvX0pN61XLe9BanyC5yxuX0qOV129Kj3n1qOV8gUOA1MkEibl4/hpodNucd6r78BW/2TQG/cn61HIh87J/MX0pTInl9O1Vt2dx/Cjd+7x6ijkQ+dlhpUCYxjNIkibuh5FVWYk+3rTgwWUfTFCgHOyzNIu3pThIpA44qpK3yU7fhc+gp8iJ52SPIpIXb1pwZBjjgdqps/II6Z9am30+RBzsfOUcAFRg8GvNoraeDxBb6bDBNCPNJZnTaSgPBGRzmvRJG4BAHHY1Unso4pEvowi28IwEUcuzH5iT6jg/hUy0RrS956l+eOYWojg34bhismw4/3u3865rRtOnOtSHZDGm75zHcPMSvuzd66h71LS3eQsGCLnav8R7ViPrRSN3j1CwinzxAyhlA+gIOfesU0dai9ypqkN5FrckWZmVDgGOfbhTyCV6HniugjISBEGPlUCueXWZr2fzrqO1LghcRHOPfNdCqkoG74FOjZtmeKTikV5Tlzjil6jB78VIYGfc6cqo55/wA+lJj5PwrblORM8Qm0vQ4ruaJ2td6SMrAyAEEEj1pV07QO32UjrxL/APXrP8S+GJm8T6myMQr3MjKNhPU5/rWSfDFznhuv+yahxj/MaqUv5Tqho+glRxBtPTE3X9alTQtCckAR8df3xrj/APhGLofxj/vk/wCFMbw5dr/EPyNLkX84+d/yHdnSNExgAE4xxKTXmk3myXLx5LFSQAPb/wDVVx9AvFGcrj8f8KzJY3gnaJ/vKcGtaNNRvZ3Mq03K3u2NW1sraXTpPNt7gXe/CNn5NvuOvrQdKjwRmQH6j/CqdhY3N95nkMP3YyctirH9i33H7xf++jVt2e5mtVsbumaDod7LHbsb4TMvJyoXIHOODW2fh/oythrm5U/9dF4/8drio9J1RX3RS7W9RIRU62GujG28kHGP9eRisZKTekzeEopawOw/4V5o7Di8uh/wJP8ACsjxR4LsdF0Y3lrdTSOrqpVypGD9B9Ky/sniQdL6fj/p5PH60k1h4iuYDBNdyywsclGuCRn6E0oqaabnoVJwcWlDU5zHy/jXqOn/AAu0++0+2uxq0w8+JZNojXjIBx1rhh4Z1TBHlrjP98VrQQeLreBIYb6dY0G1EE/AHoParrScl7k0jOjFRb543Omk+EluPuapMT/1xB/rUDfCOTkjUZAPU23/ANesR7nxvZwyTfb7oIiZZhN0A61mt458TM2W1i4Y+5B/pWMYYh/DNM1lPDreDR05+Etx/DqOc+tuf8a9Y+Emg3HhrSdQ0+abzVadZlOwrjK4PX/dFeNab4g8f6lb/aLG6nli3FdwjjxkduRXp3wo1DxbNrl5D4iMjW7W+YiyIMOGH90ehP5VEvbXtOaY/wB01eEWj1ys2dNkzDtmtKql2n3W/A1rSdmZTWhVxmnCiiupGItTxnKfiagqWI/KRVIhktFFANWSLRSUUxDqUU3pS0wElGY29cUincit6inmo4+I9voSKTADV3TGxK6+ozVM1PYttul9+KmWw0aeDmipNtFc/sx3Phi2X5ohkfeGB+Nd3og33koGANmefqK4vyHtrxYpUaOWNwrKy4IOe/pXcaEo8+ViRwoGMe9dNPciextFCo+8CDxgGmjhs9Oc1Lgc5YfjRtHJ3D866DAiIOcY/WjrjipWVeDuH+NMKj1H4UAWbPGw+zCrJIJ/lUFooKMM55FWShGOKzluax2Gd8/570hyDkYzTyMgDFNOAen+f8ipKIznGaRueOMDtT9pK8g9cc0yRxHGzuflAyaQzm9ROPGvh4cZ+0xk4/3xXuykEV4HOr3XjTQyTsaS5jG4/wAPzjFe4rHcAf67r/s1zVHdm8NEXTKxjMYxtPWmFeCKr+VOf+W36U/ypsH98fyqUDHrK0GSuNx45FVtxJJJ5NMkhn3czHGfSomik/57tWiM2WN/vRvyODXKeJ9UutGa0ZLlwshYMMDnGPb3rBPjG9z/AK4kd+n+FFxpHo5fk0F686Xxfc952z9B/hS/8Jddf893x9BSuB6Jvo3jHWvO/wDhLLnj/SJfTjFH/CU3P/PxKKOYLHonmYo8z6V53/wlN13uJfzpyeJLp/8Al5l+u7pRzofKz0LfSF689fxPPk4uJv8AvqmjxJcHn7RN+LGlzoOU9F38UheuHg1yR8bppv8Avsirg1MuARPL/wB/DUuqkUqbZ1u/PSkL1zEl6yx
[base64-encoded JPEG image data omitted]" + } + ] + } + ] + }, + { + "language": "markdown", + "source": [ + "# 
Load Text Recognition Model" + ], + "outputs": [] + }, + { + "language": "typescript", + "source": [ + "// Loading the text recognition model\nconst recModel = await core.readModel(recModelXMLPath);\nconst recModelCompiled = await core.compileModel(recModel, 'AUTO');\nconst recInputLayer = recModelCompiled.input(0);\nconst recOutputLayer = recModelCompiled.output(0);\n" + ], + "outputs": [] + }, + { + "language": "markdown", + "source": [ + "# Define Post-Processing Functions" + ], + "outputs": [] + }, + { + "language": "typescript", + "source": [ + "// Function to calculate the ratios for the image\nfunction calculateRatios(originalImage, resizedImage) {\n const realY = originalImage.rows;\n const realX = originalImage.cols;\n const resizedY = resizedImage.rows;\n const resizedX = resizedImage.cols;\n const ratioX = realX / resizedX;\n const ratioY = realY / resizedY;\n\n return { ratioX, ratioY };\n}\n\n// Function to convert the image to grayscale\nfunction convertToGrayscale(originalImage) {\n const grayscaleImage = new cv.Mat();\n cv.cvtColor(originalImage, grayscaleImage, cv.COLOR_BGR2GRAY);\n\n return grayscaleImage;\n}\n\n\n// Function to adjust bounding box coordinates by a given ratio\nfunction multiplyByRatio(ratioX, ratioY, box) {\n const scaleShape = (shape, idx) => idx % 2\n ? Math.max(shape * ratioY, 10)\n : shape * ratioX;\n\n return box.map(scaleShape);\n}\n\n\n// Function to resize and convert a crop to the recognition model input format\nfunction resizeAndConvertCropToModelInput(crop, netShape) {\n const [netWidth, netHeight] = netShape;\n\n // Resize the crop to the network's input shape\n const tempImg = new cv.Mat();\n cv.resize(crop, tempImg, new cv.Size(netWidth, netHeight));\n\n // Create the reshaped buffer\n const reshapedBuffer = new Uint8Array(netHeight * netWidth);\n let index = 0;\n\n for (let i = 0; i < netHeight; i++) {\n for (let j = 0; j < netWidth; j++) {\n reshapedBuffer[index++] = tempImg.ucharPtr(i, j)[0];\n }\n }\n\n // Clean up\n tempImg.delete();\n\n return reshapedBuffer;\n}\n\n// Function to extract recognition results from the model output\nfunction extractRecognitionResults(output) {\n const outputData = output.getData();\n const outputShape = output.getShape();\n const [batchSize, height, width] = outputShape;\n\n return setShape(outputData, [height, width]);\n}\n\n// Function to parse annotations from the recognition results\nfunction parseAnnotations(recognitionResults) {\n const letters = \"~0123456789abcdefghijklmnopqrstuvwxyz\";\n const annotation = [];\n\n for (const row of recognitionResults) {\n const letterIndex = argMax(row);\n const parsedLetter = letters[letterIndex];\n\n // Stop if end character is encountered\n if (parsedLetter === letters[0]) break;\n annotation.push(parsedLetter);\n }\n\n return annotation.join('');\n}\n\n// Function to crop the image based on the bounding box coordinates\nfunction cropImage(originalImage, xMin, yMin, xMax, yMax) {\n xMin = Math.max(0, xMin);\n yMin = Math.max(0, yMin);\n xMax = Math.min(originalImage.cols, xMax);\n yMax = Math.min(originalImage.rows, yMax);\n if (xMin >= xMax || yMin >= yMax) {\n throw new Error('Invalid crop coordinates');\n }\n const roi = originalImage.roi(\n new cv.Rect(xMin, yMin, xMax - xMin, yMax - yMin)\n );\n const cropped = new cv.Mat();\n roi.copyTo(cropped);\n roi.delete();\n\n return cropped;\n}\n\n// Function to log the bounding boxes with annotations\nfunction printSortedAnnotations(boxesWithAnnotations) {\n /* Sort the boxes with annotations based\n on their 
position in the input image */\n const sortedAnnotations = boxesWithAnnotations\n .sort((a, b) => {\n const [aXMin, aYMin] = a.box;\n const [bXMin, bYMin] = b.box;\n\n return (aYMin - bYMin) || (aXMin - bXMin);\n })\n .map(item => item.annotation);\n\n console.log('Sorted Annotations:', sortedAnnotations);\n}\n\n// Get Text size\nfunction getTextSize(text, fontFace, fontScale) {\n const canvas = createCanvas(200, 200);\n const ctx = canvas.getContext('2d');\n const adjustedFontScale = fontScale * 35;\n ctx.font = `${adjustedFontScale}px ${fontFace}`;\n const metrics = ctx.measureText(text);\n const width = metrics.width;\n const height =\n metrics.actualBoundingBoxAscent +\n metrics.actualBoundingBoxDescent;\n\n return { width, height };\n}\n\n/* The convertResultToImage function visualizes object detection\n results on an image by drawing bounding boxes around detected\n objects and optionally adding labels to them. */\nfunction convertResultToImage(\n bgrImage,\n resizedImage,\n boxesWithAnnotations,\n options,\n) {\n const defaultOptions = { threshold: 0.3, confLabels: true };\n const { threshold, confLabels } = Object.assign(defaultOptions, options);\n\n const colors = {\n red: [255, 0, 0, 255],\n green: [0, 255, 0, 255],\n white: [255, 255, 255, 255]\n };\n const [realY, realX] = [bgrImage.rows, bgrImage.cols];\n const [resizedY, resizedX] = [resizedImage.rows, resizedImage.cols];\n const [ratioX, ratioY] = [realX / resizedX, realY / resizedY];\n\n const rgbImage = new cv.Mat();\n cv.cvtColor(bgrImage, rgbImage, cv.COLOR_BGR2RGB);\n\n boxesWithAnnotations.forEach(({ box, annotation }) => {\n const conf = box[box.length - 1];\n\n if (conf < threshold) return;\n\n const [xMin, yMin, xMax, yMax] = multiplyByRatio(ratioX, ratioY, box);\n\n cv.rectangle(\n rgbImage,\n new cv.Point(xMin, yMin),\n new cv.Point(xMax, yMax),\n colors.green,\n 3\n );\n\n if (!confLabels) return;\n\n const text = `${annotation}`;\n const fontScale = 0.8;\n const thickness = 1;\n const { width: textW, height: textH } = getTextSize(text, 'Arial', fontScale);\n const imageCopy = rgbImage.clone();\n\n cv.rectangle(\n imageCopy,\n new cv.Point(xMin, yMin - textH - 10),\n new cv.Point(xMin + textW, yMin - 10),\n colors.white,\n cv.FILLED\n );\n cv.addWeighted(imageCopy, 0.4, rgbImage, 0.6, 0, rgbImage);\n cv.putText(\n rgbImage,\n text,\n new cv.Point(xMin, yMin - 10),\n cv.FONT_HERSHEY_SIMPLEX,\n fontScale,\n colors.red,\n thickness,\n cv.LINE_AA\n );\n\n imageCopy.delete();\n\n });\n\n return rgbImage;\n}\n" + ], + "outputs": [] + }, + { + "language": "markdown", + "source": [ + "# Async Inference Helper Function" + ], + "outputs": [] + }, + { + "language": "typescript", + "source": [ + "async function inferAsyncProcess(\n tensor,\n recModelCompiled,\n recOutputLayer,\n i,\n annotations,\n) {\n // Create infer request\n const inferRequest = recModelCompiled.createInferRequest();\n\n // Define the completion callback function\n function completionCallback(outputTensor, i, annotations) {\n const recognitionResults = extractRecognitionResults(outputTensor);\n const annotation = parseAnnotations(recognitionResults);\n annotations.push(annotation);\n }\n\n // Start inference in asynchronous mode\n try {\n const result = await inferRequest.inferAsync([tensor]);\n completionCallback(result[recOutputLayer], i, annotations);\n }catch (error) {\n console.error('Error during inference:', error);\n }\n}\n" + ], + "outputs": [] + }, + { + "language": "markdown", + "source": [ + "### Do Inference and Show Detected Text Boxes and 
OCR Results for the Image\n" + ], + "outputs": [] + }, + { + "language": "typescript", + "source": [ + "// Process each bounding box and run inference on the recognition model\nconst [batchSize, channels, height, width] = recInputLayer.shape;\n// Calculate ratios\nconst {\n ratioX,\n ratioY,\n} = calculateRatios(inputImageMat, resizedImage);\n\n// Convert image to grayscale\nconst grayscaleImage = convertToGrayscale(inputImageMat);\n\nconst annotations = [];\nconst croppedImages = [];\n\n\nfor (let i = 0; i < boundingBoxesArray.length; i++) {\n const crop = boundingBoxesArray[i];\n const [xMin, yMin, xMax, yMax] = multiplyByRatio(ratioX, ratioY, crop).map(Math.floor);\n const cropRect = new cv.Rect(xMin, yMin, xMax - xMin, yMax - yMin);\n const croppedImage = grayscaleImage.roi(cropRect);\n\n try {\n const preprocessedCrop = resizeAndConvertCropToModelInput(croppedImage, [width, height]);\n const tensorData = new Float32Array(preprocessedCrop);\n const tensor = new ov.Tensor(\n ov.element.f32,\n Int32Array.from(recInputLayer.shape),\n tensorData\n );\n\n await inferAsyncProcess(\n tensor,\n recModelCompiled,\n recOutputLayer,\n i,\n annotations\n );\n\n croppedImages.push(\n cropImage(inputImageMat, xMin, yMin, xMax, yMax)\n );\n } catch (error) {\n console.error('Error during preprocessing:', error);\n }\n\n croppedImage.delete();\n}\n\ngrayscaleImage.delete();\n\nconst boxesWithAnnotations = boundingBoxesArray.map((box, index) => ({\n box,\n annotation: annotations[index]\n}));\n\nconst resultImage = convertResultToImage(\n inputImageMat,\n resizedImage,\n boxesWithAnnotations,\n { threshold: 0.3, confLabels: true }\n);\n\ndisplayArrayAsImage(\n resultImage.data,\n resultImage.cols,\n resultImage.rows,\n display\n);\n\ncroppedImages.forEach((croppedImage) => {\n displayArrayAsImage(\n croppedImage.data,\n croppedImage.cols,\n croppedImage.rows,\n display\n );\n});\n" + ], + "outputs": [ + { + "items": [ + { + "mime": "application/vnd.code.notebook.stdout", + "value": [ + "Annotation for box 0: building", + "Cropped Image Size: 159 x 40", + "Annotation for box 1: noyce", + "Original Image Size: 690 x 517", + "Cropping Coordinates: (256, 50) to (377, 88)", + "Cropped Image Size: 121 x 38", + "Cropping Coordinates: (604, 205) to (653, 228)", + "Cropped Image Size: 49 x 23", + "Cropped Image Size: 26 x 32", + "Cropped Image Size: 31 x 23", + "Text: noyce, Width: 74.716796875, Height: 21", + "Text: 2200, Width: 62.2890625, Height: 19", + "Text: robert, Width: 73.14453125, Height: 20", + "" + ] + } + ] + }, + { + "items": [ + { + "mime": "image/jpeg", + "value": 
"data:image/jpeg;base64,/9j/4AAQSkZJRgABAQAAAQABAAD/2wBDAAgGBgcGBQgHBwcJCQgKDBQNDAsLDBkSEw8UHRofHh0aHBwgJC4nICIsIxwcKDcpLDAxNDQ0Hyc5PTgyPC4zNDL/2wBDAQkJCQwLDBgNDRgyIRwhMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjL/wAARCAIFArIDASIAAhEBAxEB/8QAHwAAAQUBAQEBAQEAAAAAAAAAAAECAwQFBgcICQoL/8QAtRAAAgEDAwIEAwUFBAQAAAF9AQIDAAQRBRIhMUEGE1FhByJxFDKBkaEII0KxwRVS0fAkM2JyggkKFhcYGRolJicoKSo0NTY3ODk6Q0RFRkdISUpTVFVWV1hZWmNkZWZnaGlqc3R1dnd4eXqDhIWGh4iJipKTlJWWl5iZmqKjpKWmp6ipqrKztLW2t7i5usLDxMXGx8jJytLT1NXW19jZ2uHi4+Tl5ufo6erx8vP09fb3+Pn6/8QAHwEAAwEBAQEBAQEBAQAAAAAAAAECAwQFBgcICQoL/8QAtREAAgECBAQDBAcFBAQAAQJ3AAECAxEEBSExBhJBUQdhcRMiMoEIFEKRobHBCSMzUvAVYnLRChYkNOEl8RcYGRomJygpKjU2Nzg5OkNERUZHSElKU1RVVldYWVpjZGVmZ2hpanN0dXZ3eHl6goOEhYaHiImKkpOUlZaXmJmaoqOkpaanqKmqsrO0tba3uLm6wsPExcbHyMnK0tPU1dbX2Nna4uPk5ebn6Onq8vP09fb3+Pn6/9oADAMBAAIRAxEAPwD1SXSkkG1ihGQehHQ59a429+D3h+9klkY3SNKxZjHcY5Jz/Epr0RvvfjSgVEcPSg/djYp1pyWrPKx8C9HkJUahqKLjj50b/wBlFMm+BFkUKR6veY940r1uM7cn0BNYmla9qOu28F3p+nxJaNjfJcSkFjn5ggAOcdMnGT+dEuWLsa06NSpFzWy3bdt9vyPNZPgZtxs1F2x/eOP5IarP8GJ0JImd8/3ZlH81Fev3eqXT3sljpNtHcXEIBneZykcWRkAkAksfQDjvUunapLNdPYX8C29+ieZtVtySJnG5T9eoPI/Wl7t7DdCooc/ztfW3e29v+H2PEI/hZLIZhGLl2jco4W4iO1uv8jQ3wynjI3W2pn1IUH+SmvUtNuNTOqa5FptlBKov2Z5riQqmdiDaAAST39P6bum3yXrzW1zai3vrfHnQk7hg9GVu6nB9/XFTBpmuIw0qeq1Vk91dXS3XzPCm8CxwMfNGoJ/vqB/NajHhOyHBuLnP+8v+Fe2/2hf38jtotlbyWsbFPPuJmQSsDghAATgEYyevbjmpP7RtbjRLy+ltWWSzWTz7eQ5KOgyVz0I9D3BovFkPDVV0+V1dX7roeJf8IpZ8fvrn/vpf8KVfDFgZCq3FwWQDI3Lx+leuWEJ1sW93FollFp0gB3zhfNcEfeVQp49MnJ68VladoUs3i7VIZdP0/wAhPJ37ZHBVSDgqABknv0qG1pbqbRwkkp87ScVfdd0rf11Pn6JmwRuP3j1+tTozFY8Z+71r3uX4U+HGB8uzCHOcpNIP5saoy/B/SGACSXSAcDbcAj9Y6aptM5nNNHiW47mPJHTimFmxzz6HFewzfBqHJMOpXC+zRo/9VrOn+Dd8ABDqStj/AJ6QY/kxqrMi55iTyORSgkkjca76b4Sa9GcrPZOPTMg/9kqhN8MvE0IbZaW8mf7twgH/AI8RRZgcjvYJ94/j2pwLhgMn8a3pvA3iSHGdHnbHePbJ/wCgk1Sk8Pa1bkmbSdQQAdWtnA/lSsO5nglien5Uu48eh9Kc0E0QAlR0bOMMuOtAUlhnnFSUKCc4596z53ddTixwcD+taIBznHtWbd8XcRAznvn3qZDRoxSbo+uecCnb8DOc1HAdsW/vninE4AXB5pR2KYF+2Rj2oLEDP60Agk+1NJXbkjr61RI4ls5zTdxzycd6VsdOfejbke3Tr2qRiZ+7nIPajJJyePrS7TnHJNOCkHj0pXGAJFNY7kOPT1p+DkcDJ7UpU4OecjNSxmXZOymUc9sj860tiNhtuAeazYGKzSrj9a14cmBOOCKl7jWxD5KnGVNIYB/eb061cCgDPApSoxwuaLhYpfZ/4Qx980z7Kc84I7ZFaGwHigJk8UuYdigLIsASqE/lQ1mccJ36A1qCP0FIVzmp5h8pjG1bsjH39aja0YHJDqM9xW6UwOeMetMaM7CcHpRzBynOjcr+vap1i3ICcA+tJOu2Q8d61LBVe0AKg8kHjNDYJGU9kAhZokPfgCsXULXbGXSELz/CMV190keCgUY74rEvo/8ARZMdAc/jW1KTTM5o5bawPANHmOP4m/OrxT3PPrUezII6/hXZzHPYq+Y/948UeYe9TmMYzgflSeWCPu496LoRBvNLv4xTzGMcgimMgC5GaegaikngnPNN3nnnrT25jT2BH6//AF6j2mgBd+KXzD6U3BpMH0phckVwDyOKeZhjAzj1qHB9DSUrBcl3gjFG4VFRRYBz802iimIcjlJFcHBByDX25o999v0SwvAcie3jl/NQa+Ia+u/hfem/+Guhyk5KwGI/8AYr/SuLGLRM2o7nXlzSbzSUhrhOiyF3GjcaSkqh2FViHYZ9DT9x9ajHDj3Bp1ddPWJEh24+tG4+tJRViF3H1o3NTaKYh28+tQzMd8bZ74qWopwfLyOxzQxoXc3rSb29aTtSGkAu808OcVHTl6VLQ0O3tVS+lmjsriUT+XsQsCEzjAz+NWGOB0yT0FVLyyF3aTQzOxEileOAv0Hf8alq+g9jjpPGlsLaG4kt2lkk2sFKqCmf4s7jj6cH1qnrN7p8olDXVybskI0YyfL9to4BPoxBxWfd6TBeWF3ZiZkvNPYx+Y2CSoHPfv1/SuIW+2RQ2zTq08EmFwAVUAE4ORzg++M1jUpXi0ZOo0zrPtunAcre574lQfp5dFUl8WMFAdLgsByQB1/OivH9jU7G3NE90b71KOaRvvUor7Bnnoeoyrj/AGTWb4Qi8nwlpibNhEIypGOcmrstxDbIHnlSJScAuwAz+NRDVNP/AOf61/7/AC/41jKcIyvKSTKeKhCm6Umldp79r/5lCK7h0DVtRGpOILe8mE8Fy/3GOwBkJ7EbeM9QeOhp9tMus+I4L+0+extIJI/tAGBK7EZC+oG3r0z9DVttR01xte9tGHoZVP8AWnLqemqoVb21AHQCVf8AGs+entzqxq8xw9ua65mrXurbW2728zG0rWLLTNW1uDUZRaGS+Z43mBVHGxBwx4J9verumk6nrt1q0Kstk1stvDIRjzvmLFgOuOQAe/OOKttqOmSLte8tGHo0qn+tOGqacOBfWv8A3+X/ABpKdNaOasOpmWGldxaUmkn7ytpbb7u7ORsNL8OabbfYtcX7LewEhjJcSIsq5OHQ7sEEdhyD1ArZFraJ4N1VdPsZre3mglZBKW3y5TG7BJIzgYzz7CtN9R0uTG+8s2xyMyq
cfrT/AO1dO/5/7X/v8v8AjSj7FaKS/A1q5zCo1Jzu73d5afJdPxsS6Onl6LYJt27beMbcYx8o4rJS5g0vxfqBvZVhW9ihMDvwrlcqVB/vcjj3rR/tXTv+f+1/7/L/AI0janpjY3XtocHIzKvH61TqU9LSWnmc0MfQUpuUlaXmu6f5ou0tUv7V07/n/tf+/wAv+NL/AGrp3/P/AGv/AH+X/Gr9vS/mX3nP9Yo/zr70XKKp/wBq6d/z/wBr/wB/l/xo/tXTv+f+1/7/AC/40e3pfzL7w+sUf5196LlHWqf9q6d/z/2v/f5f8aP7V07/AJ/7X/v8v+NHtqX8y+8PrFH+dfei0UQ9VU/UU3yIv+eafgKr/wBq6d/z/wBr/wB/l/xo/tXTv+f+1/7/AC/40e2pfzL7w+s0f5196J2tomGCvHpk1Tm0HSrg5lsLaTPd4lb+YqX+1dO/5/7X/v8AL/jR/aunf8/9r/3+X/Gl7al/MvvH9Zpfzr70Zk3gvQJs7tKsx9LdB/ICsy5+GHha5YM+mRhh0KvIv8mrpv7V07/n/tf+/wAv+NWldZEV0YMrAFWU5BFOLpz+Fplwrxn8Ekzg3+E3h/nyhcReyTHH/jwNU5fg5YOSYL+6j7Dcyt/7KK9JrH8VT3Nv4ekks5fKn86FVfsMyKOfUc0SjGKbsdNGMqtSNNPdpfeefyfBeXawj1nJPrb/AP2dULj4Pasm3y761cDpuVx/IGvQtX0ZdL0a61WO+vH1K3jMxuHmb5yOSCgO3aemMcCumD+ZFG+MblBxUqKbs0XVgoRU4Sum2traq34a/wDAPBp/hb4hiDlBaSn/AGZSM/8AfQFZ8vw/8RQAZ01WUcnbPG36bq+iaQgEcgH603SRiqjPmx/CuvIpc6NfMO2yFm/kKpy6Tf2+BPZXUXrvhZf5ivppYYigBjT/AL5FH2aH+4B9Dip9iUqh4UfCtjuOZrjj/aH+FH/CKWW0/vrnJ/2l/wDia9j1LWbGz1H+zvs89xetGskcEQyZASR3OABgkk8Co7J7O/vHsr3Svsl4sfmiOZY33JnGQRnv1qOWN7G3saqhztab/Lvbe3meIf8ACIWC3DfvrnJH95f8K0rfwvZC3RfOuMDP8Q5/SvUtSsdIjuza2ujfb7xRukjgVU8tT03NkAE9h1NJYaPpF+ssIs5LS6i/1ttIzB19CPmIKnsRxQ4JuwezqKHO1p/Wtt7eex5r/wAIzZY/1k/5j/Cj/hFbIgDzbj/vof4V6TY+HdM1Syju7V7hVbIKswBVgSCCNvUEEVTi0fT5NPuNQW9eK0id082RRg7TtJHIyMggetRyIOWd2rap2+fY4b/hFbDPLzf99D/CnL4WsR/y0nyf9of4V1DWFxHtmns7u3sieLiWIAKOxdQxZR6kjjvirGo6PLp0Ild45GkO2KKMMXlb0UY5P9OanlW5o6VSLUbavtr+Rx58MWRPEtwMD+8Of0rK1nSYtP8AJ8mSQmTdnfjtjHTHrXbLZ363MNvd6fJavMSIjI6lXPXGQcA98HrVDxF4e1OQQFLCdyu7/Vjdjp6VEo+7oglGUJJS/wA/xRwRhOSBj3OKYyN5fbn3rcl0TUYiA9heIAOS0LD+lUJbd4j86upHQEYNY6j0OTvkKuQQM5ycGrWnTFbVh1Ibik1FCocY+ZuTUOnfNC49xWiV0Q9yy5y2e59azb1AYZQegHWtIg4z3qldLlXGOCp960gRI54RtI4SNWdycKBySe3FSf2TqR+X+zrvB6/uW/wqxpRxrFgBn/Xx+/8AEK9N4IrrRzs8pOk6lz/xL7v/AL8t/hSNpGpcZ066z/1xb/CvVvoeaTJ79Kqwjyg6RqPT+zbv/vy3+FMfSNS2nGnXf/flv8K9ZNNyGUkHI+tMLaHk40jU/s6/8S674Y/8sG7j6e1L/ZGpHA/s67/78t/hXqy/6th6EGgc0xHlH9j6j/0Drv8A78t/hR/Y+pDrp13/AN+W/wAK9WJGQCRk0tA9jyf+x9SP/MOu/wDvy3+FB0bUhx/Z13/34b/CvWPwooEeTf2Pqf8A0Drv/vw3+FVHiaORo5FKupIKtwQfQ17JXk+sf8hq/wAf8/En/oRoAoEYptOI4pKYhK+mfgNffafh89uWybW8kQD0BAb+ZNfM1e7fs73vya7Yk94plH/fQP8ASufFK9M0pP3j3E0ZoNJXmHWhaKQUtUAh4Kn3p1Nf7v05p1dVLYiQtFFFakhSUtJQAtNcZUj1FOpDTsBCnKD6UppqMDuUYyrEH2p1IBKcvem0ucZx1qWNDgCTgDmms8IVw0ijHv0NTp5KrvldOR/EeKwLzWrYas2nea6Fl3hU4yO+ep98AfXFZSq8iuPd2OA8ZmWHxN9v06SNFaHZK0imME8jBYgDpn3ribTQMTrqWoX9vZ2srHylLY83nnHfHuB19a9V8QatBJaLBLbSTRthkiSEoiehyy7cjr1xXll34fvdS/eXd4ke5iwlnPl/KPqeePSuWddyVm7EuCT2udjHD4SSJFDIwCgAlGyf/HxRXMx2Nokao19bFlABO1+f0org9k/52XzeR9CN96lFDfepRX1rOBHGeL2Y6nChY7RCCBngEscn9B+VSt4SChib7hf+mP8A9lUPi/8A5C0X/XAf+hNXVSjKyL0yCK+ew+CoYrGV/bRvZq2r8+x8tQwNDF47E+3jezVtX59n5Hmej6p4f1q7itLfVZo55ThEmtCuT6Z3EZrqR4Ryeb7H/bL/AOyqKy8FWVtcQTkq7RMrhguDkcitX+1hLrJso0lHlJukZ0KgkngDPXoeRxXof2Lgf5Pxf+Z3/wBhZf8A8+/xl/mUx4NyP+P/AP8AIP8A9lTG8IYOBfZ9f3P/ANlXUofkrF8TRvP4cvII2KtMojyDzhmAP6E0v7FwP8n4v/Mf9hZf/wA+/wAZf5mXH4XjlBMeoo6g4JRA2Pyapx4OB/5iH/kH/wCyrH8H+HH0LVhLC7LDMhSSPPB4yD9cj9a7rzNsuz2zT/sXA/yfi/8AMFkWX/8APv8AGX+Zz58GBRk6hx/1x/8Asqb/AMIhH/0Ev/IH/wBlXTyN+7/EVxXjjUNesJ9POi3CxhlkMqNErB8bcdRx36etL+xcF/J+L/zH/YWX/wDPv8Zf5mgPBit01IH/ALY//ZUv/CFf9RD/AMg//ZVrafcyS6da3NwoSWSJGdRxgkDI/M1oq2RS/sXBfyfi/wDMP7Cy/wD59/jL/M5j/hCv+oh/5B/+yo/4Qn/qIf8AkH/7KtXU71rHTby9SFpmgjeQRKcF9oJwD+FcpZ/ENrlgp0LUVY8YQ7sfoKP7FwX8n4v/ADH/AGFl/wDz7/GX+Zrf8IT/ANRD/wAg/wD2VL/whP8A1EP/ACD/APZV0scpdQeRx3qN52Bc7sKvt7Zpf2Lgv5Pxf+Yf2Fl3/Pv8Zf5nO/8ACE/9RD/yD/8AZUv/AAhP/UQ/8g//AGVSaV430PWblLew1SOWZ/uxtGyFu/G4CuiSbcORR/Y2C/k/F/5h/YWXf8+/xl/mcx/whX/UQ/8AIP8A9lR/whX/AFEP/I
P/ANlXR3F4kBQMQN2cZbFNF6CPuEj2OaP7GwX8n4v/ADH/AGDl3/Pv8Zf5nJ6n4X/s7T5bv7Z5nl4+XysZyQOufetjwe7No7hmJCzMFBPQYU8fiTT/ABDcrLoN0oVgfl6/761F4O/5BEv/AF3P/oK1x08NSw+ZRhSVk4+fd9zho4SjhM3jToKycL7vu+/odFmqOs2L6nphtY3VGMsb5YcYV1Y/yq7XO6h4sis72S3itjN5Z2sxfb8w6joa9XE4ijQhes7J6f1Y9+tj6WB5a1SXLrp119DZ1m1bUtGvbGNgjzwtGrN0BIxVlPlhjTP3VAzXJ/8ACa/9Q/8A8jf/AGNH/Ca/9Q//AMjf/Y1x/wBr4K9+f8H/AJHC+IsA4KHtNN9pf5eR1uaM1y0HjOJ50We0MUZOGcSbtvvjFdRXZh8XRxKbpSvb+up1YTHYfFpuhK9t91+Yi9KdmmL3HvTu1dB1mTCB/wAJ5dtgZ/s6IZx/00eluz/xWum/9ec//oUdam1RIZAo8wjaWxyR6Zo2qZBIVBdQQGI5APUZ/AVHJpbzudX1lc3Nb7PL+FjI0iWOy1nVrW6YR3E1ybiIvx5sZVQNp77cYOOn40sUiX3jET2mHitbVoZ5k5UuWBCZ7kYJPpn3q/e2FpqMQivbaK4RTkLIoYA06KKGwtBFbQiOGNSVjiXHvwBU8j26GjxEHeaT5mreW1r/APA6HNapdXGhajf6faAhtYw9ljok7EJJ+QIf860NZ05bHw5Y29tE8lvYzQPJGq7i0aMMnA6n+L8KW1huNV1eLVbuB7e3tkZbSGX75ZusjD+HjgDryelbe4+pqIwvf8DarifZumktVZy83svuX4tkEus6dFZpdG7iaGTAjMbbzIT0CgZLE+grC8S2wfVdIu57i5sbNFkjaaFgnkuwG3ccEAHBXPqR61rQ6PptveteQ2NvHckkmVYwGyevNXHVZY2jkAdGGGVhkEehFVKLkrMwpV6dCop079b3t1VtN9u/Xsc++l2MWoWST63qV7N5qyxW7TB8lTncQBwB6nA/OulkAOAQD9aoWOl2GmlzZWcNuX+8Y0C5q5knqc04xsTiK/tWrNtLvZfghvkxHny0/wC+aRreJhgrx9TT6KqyMLsoz6Fpdznz7C2kz18yJWz+YrPfwN4akz/xJ7Nc9dkKr/6CBW/RS5UO7OSn+GfhibJFhsPqksg/9mrMufg94fnJKy3kRP8AcmH9VNeg5ozS5IhzM8nT4E6ZBeQz2+qXi+W6uA+x+hz2C1Vn8MRW2rahZfbppJ4ZFCxRWrSMylFJbCngDOK9kU8isPQmi/4STxIFK+Z9oiLD+LHlDH4damad1bv+h14Xk5Krmrrl/wDbonn9p4GvNRgaayvbaRVYo6sHR0YdVYbeD7Gq0fgzV51lNvHDMIpGiYrKBhh1HzYr0yMqfHUv2ftYAXWOm7f+7z743fhR4X+VNWU/eGpzkjuMkEfoQaFJ3sKph4KDmuydvXueSnw1q7SzwrYySPCQsixkNtJGR0PoRWdJpV7HpslwljcC3CsfMETbRjg84x2r2fw/cQ3WveInhdZUFzGN6nIyI1BGfYgisq3Rf+FV3p2jPkXPOP8AbeolO6+TOmlQ9nNK7+Knp6q+q8uh5UYpIY2M0bxjb1ZSKpyzAx4G5SSMZGM817nq8cQuvDpuB/onn/Pk/L5nlny8/j+uKj8aWNgmgF5IIQxuIFBKAk5kXIHfpmnOUnF26CwtKnGtSck25P7tbdte78jxV1TzELAbu1DyqpIJ59BXsOs+HNIPiLQ4RYW6xzPMHVYwu7EeRnFVU8FaHP4tvrZrMCOO0hdQrsMFmcHv7Cm5tNpLr+hlGjFwjKo20o3tf+81ZdkeUpIHJAyCOxGDQ8gQ45J9AMmvTbnwHpJ8VW9lGJkiezklO2TnIdQOoPqap6d8PrO7utTgN3NFcW1wVKnaTsIBRsYHBH8jT9o9hPD0lepZ2sna/d23tt8uy8zz5JFkztPI6j0ryrV1H9tX/P8Ay8Sf+hGvopfABl8R3mnw34LwQRyMTEP4i3Bw3HQH8a4zV/gT4hk1C6uLe9sJFklZ1VxIDySeykU41LrUxxFGMJpQ2aT181c8aIHem45r0m6+CnjKH/V2trN/uXIX/wBDxWPcfC3xranL6FM4z/yykjk/9BY1aku5z8rONr1T4CXnkeO57fOBcWTrj1IZW/oa4m58FeKLQnzvDuqKB3+yOR+YFb3wuF3pHxL0d7m3mgDyNCfMjK/eUqOvuRU1bODCCakj6rNJRmivKO0KUUlFUgHEZBHrQpyoPtQOaRD8pHoSK6KREh1FFFbkBRRRQAUhpaKoRABiVwB15pTSP/rlPqMUp5pBcKlgh8x9xAwKiHLqg6noK0dvlQ4QZIHHuahpA3YpXEUNwx+1Mq2ykDaWwGY+v8sVha9pOlw2c0ir9jD/ACtMkrRFs9RwCWPTjHNdO22PBIzjpxk5qte2EeoweVeIGhYg+Vng49cdfp0rmnTTTTV2wUtTztS06Yim1S6hQZBkUspwD91SCQePY4471xt61ld6sls9/fM9zKUaJ48FRxk4GcDj0H0Net61cvJbxw6bGQkbKA6LkEZwVXHt3OB2z6ec+JdRG26ewnghuWg2yrb4Z1IBXBYf7wIxjPNebOnyysnc6FrG5lyrp6Sui+C9YKqxAy0o4+gTiiuf+y66eYnuPL/g3MM47Z5op+yj3/FmPN5H0s33qKVh81Ar6hnGjivF3/IWi/64D/0Jq6W+5s7kesbfyrm/F/8AyFov+uA/9Caresf2+uoPHavbNZS4GHU7lBGDz+deRlzSxmJv3X6ngZam8divWP6nHeGvC97o2p2dzb3s3lB1EkZfhl6EH8K9FnC+fESOcHH6UxbJ47NjGAZQuVB6Z7Vk6dqOrX98Ib3S0t0QE+YsmefTH+ele1ddz3LM6hP9X+FUtRG/T+OfmQ/qKvRqQmK5+81yHSZmtdTilWE58udELKV9DjkEUJiaNG3UK0Z/z0NK751DA7IP5msZvGPh+CPct6ZG/hRI2yfbkVc0aWa/DXskTRCZtyo3VV6DPvgUNgkzZb/Vj6j+dct4u8Wf8I1PZK2ltepOHLMr4Me3b2wc5z+ldNdyx21q00zqkaYLMxwAKw746Pq8kbSahat5YIUCRT1/H2pDLCNHrdlZanBNMsRUSpHnAbI/iHqK14GOwZrCbUtP062t9OtZUkkYiONI2BIHcnHQYzW3b5KAmgCtfSW8WnXUt4xW2SN2lbBOEGSenPSueste8JOyi11CLcxAUFWGT+IroruEXNjc25GfMR0we+cisOw8KWVrMsrWsQYEEHaDUsqJ1EXTjpUDc+cD3P8AQVPGAFxmoSPmmHqf6U0JnJ2Hgu1sdQtrsbFkgcOCo5OK6K31BLi7kijVwIjtYuhXJ9s9unNefaJ4f1LR9Vs7iHULhrbzAHiZyVKnivRsDzUOOcdaGCKuvaHba3apHcx7ihJQ5xgmquh6UmmaNLZJkKryY59ef610K8rVQDDzD/a/oKAOFtNCl0vS7uV7u5mBR
VCySEgfOvb8K6rwf/yCZf8Aruf/AEFaj1ZQPD1wR6J/6EKk8Hj/AIlMv/Xc/wDoK14lT/kaR/wfqzxKv/I6h/g/VnRZrzm4UP4iuVYAgzycEZ7mvRa88k58SXP/AF3l/maM1SdSgn/Mv0FnSTq4ZP8AnX5o19PSzDtHLawPu6Fo1ODUNpDbNqQLW8TJvb5Sgx37VkSahJb+K/sD8RyWizQt/tBmDD8tp/OtLT2Yyqzgg+a3/oRx+len7Clb4V9x7P1ejf4F9yIfE8MMOpRLBEkSmEEhFCjOW9K72uD8UHOpQ/8AXAf+hNXd15eXpLF4hLuv1PGyqKjjcUl3j+oL1P1rP16eW20S5lhcpIAoDDqMsAf0NXx1P1rL8S/8i/df8A/9DFehjG1h6jW/K/yPUx8nHCVWnqoy/JnEW9hLcw+YjIBuK4JOeg9vero8O3RtjP51uEAJI3NkY/CpdJ/48D/11b+Qq1JeqdPvRG+QisGGejAV4mCyrDVaUJzT1Xc8PLsgweIwcK00+ZruZ9r4eu7stskgGAD8zH/CrX/CIah/z2tv++m/+JrX05jHJgHqtX9QuWgsJJF+9jA+prrhkmEavZ/edk+G8Ans/vOPk0GSJyj3toGHBAZzj8lqaLwtdzruiurNx7O3H/jtZ+sQalcwINNufIcEsx253egq54Xv7spbSXWROT5co9ecf4GpWS4W17P7xvhrAbWf3lg+ENQAyZrXH+83/wATQPCF+RkT2p/4G3/xNdNqkL3elzwRthnQgc45rmtC1Awyjd8ufkkB7Ef5/WreRYRK9n95C4cwN7Wf3if8IfqB/wCW1r/303/xNL/wh2of89rX/vpv/iauPfy3OoArNIse4ABGxkV0glBQtnj3qVkuEb0T+8b4awK6P7zjv+EP1D/nta/99N/8TS/8IdqH/Pa1/wC+m/8Aias6t4ka1hkuVaQQJ0ESbmarHh/xGuqQwyq5eGb7pYYZT0wcUv7GwnZ/eV/qzgez+8zv+EO1D/nta/8AfTf/ABNH/CHah/z2tf8Avpv/AImuvubpbe2kmYZCDOPWss+II0x5qKmeBmTGf0pyyXCR3T+8UeGsDLZP7zAuPCl9b20s7y2xWNC5AZs4Az6VqeC7iV4LqBnJjjKlFP8ADnOcflV+bUo73Sb9UUgrbueoOflNZfgr/l+/7Z/+zVywwtPDY+lGlezTv9zOOOApYDNaMKN0pKV9fJnXA1mXegadeXL3LxPHcuctPDI0bngDGQemAOPaoPE19PY6SGt32PJIIyw6gEE8eh4rk4ptYnjDx3s5U9CbrH82ruxWPjTq+xVNydr6HrV87nhMT7GhCUpWu+Xt8jv7Cyt9Nt/ItIxGhJZjnJZj1ZieSfc1TvdA0+/unuJVmSSQAS+TM0YkAGPmCkZ44rlIrfxDMcRXM7n0W8B/9mqX+zvFH/PS6/8AAof/ABVY/wBozat7CX3f8AyjxBi4zc1h6ib62f8AkdnZ2VrYeZ9kgjhD7dwQYBwAo49gAKRdPs1059PECi0dWVouxDEk/nk1xo0/xQekl1/4FD/4qj+z/FBOPMus/wDX0P8A4qj+0an/AD4l93/AJ/t/E3u8PU+59NunQ7iWGGe2e2mjSSBl2NGwyCPSsqPwtpKOrvDLOUx5f2iZ5BHgg/LuPHQdPSud/s7xT/z0uv8AwLH/AMVQNO8UnpJdf+BY/wDiqTzCb3oS+7/gFw4hxdNNQw9RX7J/5HbS28M1xBPJGGlgJMTHquRg/pSrBCl3JdqgE8iKjP3KjJA/U1xH9neKf+el1/4Fj/4qj+zvFP8Az0uv/Asf/FUf2jU/58S+4j+3a9rfVqn3P17HcGCE3i3ZjBnVDGH7hSQSPzArI8Q/2MHgm1Rpbd8FY7mIuhA7qXTsfQ1z39neKf8Anpdf+BY/+KoOm+KSOXusf9fQ/wDiqUsxqNW9hL7jSln9eE1J4app2un99mbHhmzgTUr6/sYGisZY444XcHdOQWLSHdyclup64rpSa4L+zvFP/PS6/wDAsf8AxVH9neKf+el1/wCBY/8AiqUcwqRVvYy+4K/EFetU53hanTo3srdjvDTCAeoB+tcN/Z3in/npdf8AgWP/AIqj+zvFP/PS6/8AAsf/ABVP+0an/PiX3GX9t1v+gWf3P/I7UxRnny1/KmmCM9sfQmuL/s7xR/z0uv8AwKH/AMVR/Z3ij/npdf8AgUP/AIql/aE/+fEvuH/blf8A6Ban3P8AyOyNuuOCw/4FTPIPaV/xx/hXIf2d4n/56XX/AIFD/wCKpP7O8T/37r/wKH/xVT9el/z4l9xX9u1/+gWp9z/yOvMcg6SL+K//AF6TbKP7h/MVxF2+uaY8TXNxcIWOUzPuBxjtk+o6121nM1xY287gBpI1cgdMkZrbC4iGIlKDg4tdzsy/N1i6k6Tg4SjbR+Y7Mg/gH4NUMDzlpC0SqS2cbv8APpVqoxxKfeu5U0tj1ea4oZ+6fkaN+P4G/Kn0ZrSxNxnmL/tD/gJpPOjz98VJRSsxjd6nowP40tIUU9VB/CmGCIk/IvPtT1EMuG2BWOeGFJv3nCfiewqK6tA1u4jZlbHBySB+GamjAEagdMUa3DQu21v5Q3n7x6561ZJAUsc8egzVZLpQgDZLVmXGuXtluaXS2lj3YVoJQ2B/tAgEfhmsqlSMFeRPK5PQ12X/AJaNkcdM9K53XdQu7phYaei4GDK8hIDr3QAckepHbjPNUtU8Tz3Egs1VLZiQdpbMjD2Hb8jWdcapJa2bhIrZXmbDmU7i2B1JOSfxrzK2KlJpU9u50QoNK8ht4swtJIr3TpMtnzJoisw+uzIUcAADnHA9K4fVrXRrLyJbOScwykK2VwwbqCwPGOeMAda6W41W6EX+jy2hkYEAJCpAPY5xjA+tczdXcb3O6/Z725HAK5SH8gMnP4HjocVg5NstxSRz0mratFI0aWl66ISqsdpJA75xRXWGSxJO2z+XtthyPwJzn86KXt4/ymfKz2qQfPSCnS/fpBX1RwHE+MP+QtF/1wH/AKE1dm0YY81z3iTRLu/uIrm1USEJ5bR5AI5Jzk/Ws7+zvFH9+6/8Ch/8VXz8atbCYqtL2UpKTVrL+u58xCtXwWMry9jKSm1Zpdr/AOZ2QQAYpgt1DlgOa5D+zfFH9+6/8Ch/8VR/Zvij+/df+BQ/+Kro/tSr/wA+J/d/wDq/tmt/0DT+5/5HahcCq9xaR3ClXUEH1Fcl/Zvij+/df+BQ/wDiqP7N8Uf37r/wKH/xVH9qVf8AnxP7v+AH9s1v+gaf3P8AyNxdBs0k3rAgOeoUVpwwCNQAMCuQ/s3xR/fuv/Aof/FUf2b4o/v3X/gUP/iqSzOov+XE/u/4A/7arP8A5hp/c/8AI7C5to7m2eGVFdHGCpGQa5ibwXpjtn7Mg+gxVb+zfFH9+6/8Ch/8VR/Zvij+/df+BQ/+KoeZ1X/y4n9zBZzWX/MNP7n/AJGnpnhmy06XzIYFV/XGT+db6JgYxXG/2Z4n/v3P/gUP/iqP7N8Uf37r/wACh/8AFULM6q/5cT+5/wCQPOaz
/wCYaf3P/ItaxoWpT38s9nq91bo+D5at8oOOwqguk+JIzxrsxH+1ED/Opf7M8Tnq1z/4FD/4qk/svxN/euf/AAJH/wAVQ80rf8+J/d/wAWc1f+gWf3P/ACOttUcQqJGLOAAWx196xdZXW478HTpLcW7gblkQkg9z/Ks3+zfE4/juf/Aof/FUh0zxMerXJ/7eh/8AFU/7Vq/8+J/d/wAAX9sVf+gaf3P/ACN2Sxcae/lgGULlAemR0qhpF5q15dFL6zigRBkMr5JPpj0qj/Znib+9c/8AgUP/AIqkGleJQcg3AP8A18j/AOKoea1v+fEvuYf2xV/6Bp/c/wDI7NFwtc/qetx6ZqLW8lpdyGQBleKPK+mM561m/wBm+J/79z/4FD/4qmtpPiNzl/PYj1uQf/ZqP7Vq/wDPiX3f8AP7Yq/9A0/uf+RrawpHhy4JHZP/AEIU7wd/yCZf+u5/9BWsaTSPEUsZjkE7oeqtcAg/hurpvD+my6ZpvlTEGR3MjKP4cgDGe/SsaE6uIx6rOm4pRtqv67mOGnWxWZRxDpShFRtqrdX/AJmoa89Yqvia4LMFAnlyScD+KvQq831ERx65eC6tzJGZpMxt8uQScH9Qa0zecYSozlspK5eezjTnh6ktEppsl1+x+1pFe2rK1xaYdCpznHUfiDVyxlEghlX7rYIqpp+qWemQNDb6aApOT+97/lVBDZR6kL1baUEPvEfn/Ln8q3ec4Fq3P+D/AMjp/t7L7/xPwl/kanib/kIw/wDXAf8AoTV3EUyyLuXP4155qN6dVu43SAo20RhAdxJyfb3rsdGtbm1tfLurl55M5LsAPw4rlyyrGriK9SGqbVvxObJasK2KxNWm7xbjb8TVH3jWX4l/5F+6/wCAf+hitQfe/CsvxL/yL91/wD/0MV6WN/3ap/hf5Hq5j/udX/DL8mc5pA/4lw/67N/6CtY0StY+LNQtpiWtNTUSJ6B1UKw/EYrUtftieHjLZWwnkFywKE442rU8+kXOpaRHLLEIryMiVFBztYds/TisctjfC079iMmlbL6Xoadh8rRjOcLjJq3q4zpcp9MH9aw9Evbu6vfKl06aBUB3O54z9K6W7tzcWE0XGWQgfWu6krKzPSqu7ujC01FZ2B9Koqghv5lXgCbIH5GqtprMVrO0Vywt7mP5XSXgGrNtcQ3t/mOVHd23EIc46f4Upe7HlYRV5cx2MXzKAe9crq1kbHWBJGv7m5yWA7MK6uBSEGazfEKKLJJDxtkBPtWsdVYh6O5naXD5kobHC8Vv3CkWUuOuw/yrJ0JlkjkKkHDdq3XXdEw9RisqasXUd2cjb26XSJE4BVh0qPS/DM1hqamK8kWzWTzFhVRj1xn61Lp8wjcZ+9ExRh6HpVuS+uWvH8p9sS4C4AOfWk1y3uUrytY19UXdpVwP+mZNclf6XbavaxR3SBlUhx7HGP612NyPNs5V67kP8q465sTqemRwCWWEnad8TbW496KvwphS+Jo0NM0yx0vRL+K0QK7QuXODk/KcU/wV/wAv3/bP/wBmqPTNHbTNG1Fnubi4d4HGZpC2BtPSpPBX/L9/2z/9mrx6/wDyMKPo/wAmeBjf+Rxh/SX5MueMf+QRF/13H/oLVztuJG0thE22TawVsZwexrofGP8AyCIv+u4/9BasHTs/Zcf5700v+FGf+D9UPDv/AIXZf4P1Rn+E9Q1SRJxqJzc202FcJtyMe3+ea9JlkP2dyhw20kflXmuiazeXWq3Fje2scUsADqyE4YZ969DtpPOtVf8AvLzXqQ0lqfQT+EwtCuXNxE7Ozb1IOTnn/wDWKhutVul1u5UTSKsUigKGwMY9KZpOY5ljPWOZk/Wq2qL5fiG5H/PREf8ATH9Ku3xonrFnbXFx5VrLKBnahbHrxWTomqXF3KBM6lWTcAFA54q9EfP01c/xxD9RXOeH5MTWwPoVP60pfCmOPxWOh1jUzp9tG4TezuEAJwB71Lpl8b608xkCMG2kA59KyvFSk6Ukg/5ZzKePy/rUnh5v3UyejA/n/wDqomrJMUdbm/ms1tWhWd4xvYocHA4zWhn5a5KRAb68hJ4LsD261M9IOS6DjrJJnRQajDI4TcQx6Bhirua8z0y2u9I8QSWMt1NNbyRB4/NYtt/P8a9BilMtoGz8xXBPvUxbvaRTStdDor63mmaFLiJpV6orgsPwqZ3CjNeb6BL5PiK3Y9SxU59wRXQ+NZHGkRhSQrSYbB68GtvZ++4GfN7vMdNvBXNCsGFeWabr89hpF7aCUrvXdGwPKtkZx9R/KtnwJfzS3d3DNPLJlAyh3LAYPbP1FQ4+7cq+tjuyQBzTdwPGawPFGvNo1kpiQPPJnbu6Dtk/nXDQeMtbiu4xLcbhIMqHhAVvpgf1qXoUtTrvGn/Ll/20/wDZa3tM/wCQTZ/9cE/9BFcjrWppq2m6bdKNpPmB1/usNuR/n1rrtM/5BNn/ANcE/wDQRXk4f/kYVvRfkj5/B/8AI5xPpH8kWqaR84p1NfsfQ16p9COooopgFFFFACUUd6WgQ1hkEVXhJMQ9Rwas4qtECssq4x82R+NMCSnKnmI6cdMjNNpVkWIl3OFAOTUVIpxaYJ2ZzOsWNtNO0M8McobDbGAOO27n0rCl0yaRU+xajPCOfmkUSjBOeN3IHpgj+tdPJbvNPc3Um+Pe21EdRlQAMfzzj86zb5I7TTpGnU+WQRl8/OfTPvXFaMrtqyNW3Y5yay/tO7aJ7iL7PENwCJ8zqe7Nk9Tn0HXj14LV7e4bULiC8fyLUOY45HnJLRg/LkDLBeAfSvR7dIbezLyvA1xM3mS5YAIfp7dAK8z8e6Ze3esOtozSwPskXY+V3EYJPqeP1qYU4J6aEybtc2Y761SJEXXZ1VVACrAxAHoD3FFcFHpWorGo+2IuABtz09qKj6tT/nJ9q+x9hSj56YBWEfGekynrOv1jz/I1KvirRz1uHX6wv/hXte0h3MPZy7G1S4rLXxFpD4xfIM/3lI/mKnXWNMbpf234yAUc8e4uWXYu0VAl/ZSfcu7dvpIp/rUyujfddT9DVcyFZi4paKXtQFhKKWjFADaXFFLQAlGKWigBMUUtFADcUUtFACUUuKKAExSUtFACUUtFIBKgmsrW5cPPbQysBgM8YY49OanopSipK0lcmUIzVpK5T/srT/8Anxtv+/K/4UHStP8A+fG2/wC/S/4VcorP2NL+VfcZ/V6P8i+5FWPTrOGQSRWkCOOjLGAR+OKn2AU+irjCMVaKsaQhGCtFWGfx/hWX4l/5F+6/4B/6GK1v4h9KZPBFcwPDOgeNxhlPes8RTdWlKmuqa+9GWKpOtQnSW8k196OS8O6rYWWmyQ3c+xzKWA2MeMD0HtWqPEGjgYF1/wCQ3/wqL/hDtO/57XX/AH0v/wATR/wh2n/89rr/AL6X/wCJryKEcyo01TjGNl6/5nhYaGcYelGjCELLvf8AzHjXdGVtwuhn/rk/+FS/8JHpGP8Aj7/8hv8A4VX/AOEO0/8A57XX/fS//E0f8Idp/wDz2uv++l/+JrXnzT+WH4/5m/tc6/k
h+P8AmVr678O3/M7xyH1aFj/Sm2V14fsT+4kjjH+zCw/pVv8A4Q7Tv+e11/30v/xNH/CHaf8A89rr/vpf/iaTlmj+zH8f8xqtna+xD8f8ywviPSAP+Pv/AMhv/hVa/wBX0PULR7ee4DI/UeW/+FL/AMIdp/8Az2uv++l/+Jo/4Q7T/wDntdf99L/8TT9pmn8sfx/zF7XOv5Ifj/mV9MvtB0uDybecImcn92/J/KtD/hI9I/5+/wDyG/8AhVf/AIQ7T/8Antdf99L/APE0f8Idp/8Az2uv++l/+Jo9pmn8sfx/zB1c6/kh+P8AmYOrLp1xeNe6dqrW07/fHlMVb8MU3TrhYpt17qiSKOgSBl/Piug/4Q7T/wDntdf99L/8TR/wh2n/APPa6/76X/4mlKeaS3jH8f8AMca+dR2hD8f8x41/SfLK/a+ox/q3/wAK419Q1CBykLWckSnCkh84rrv+EO0//ntdf99L/wDE0f8ACHWH/Pa5/wC+l/8AiabqZo1Zxh+P+YKtnSd1CH4/5nM2urXEgmS5WGJDC4BUsSxKkAdPetvwX/y/f9s//Zqt/wDCHWH/AD2uf++l/wDia1dN0y20uAxW6n5jlnblm+tZ0cPi54qFauklG+3mhUcNmFfHU8TilFKCe3mmvPuZfjH/AJBEX/Xcf+gtXNrcpYWVvI8UzCRScxoW6MetdJ4x/wCQRD/13H/oLVJoFuk/h613KD9//wBDNXD/AJGc1/c/VFUf+R5N/wBz9Ucta3dlPfCSKKU3LLs/1TDjOee3513FlGY7VE9BQlhErZCDNW1j2jFenGLTuz6OUk1Y4W7uV0vXbmCc+WsjebEx6HNMu5Fu9RW6DAfuvLIBzk5zXYalo9pqcYW5hSTHQkcj6Gs2z8K6fZTrLFAA69CSTj86qpK92upMEuvQ0rBGSwiRvvBRkHtXMaf+41FkzjZcsPw3V2aptXFYzeHbc6sb4tKctv2byFz64pX9zlYL4rj/ABDH5mh3I9FDfkQaz9Fukgdi7gK6g5PArobq2FzZywHgSIVP4iuPbw3q0RxHqXygcAwg0TkuVBBas7OG4jnBCSIxHZWzXNX37vXpl7Oqt+mKuaBp13ZtJJeXIldhtAVAoAqv4hs777bHdWUUcnybGDsR3oUk4O4ctpKxS1V1Os6U6jlhIp/If/XrpLA5tQPSuXtl1aSZFmsY1H9/zM4/SuttYvLhVfQVipOU0+yNHG0Wedt/ofiM9hHdE/huzXW+KovN0GRsZMbK364/rXP+INPuTr8ggt5XMrBlKrx09fqK63U4Xn0O5jI+cwngeuK7OZe1UvQwSfJY8dupPJuIY2+5LuQ/XqP5Guk8FSGHXI0J+/Gyfpn+lZ8ekvrJuEhQs0a+YjY4Dg9M+/IqXw87w67ZFVJYyAFQORng/wBaia3sOL7nQ+PYyVtm7FGH0wQa5udrO40TTYnldLlA53IBwNxHf8K7TxpbmTSUmA4jf5j6AjH88V5xNa291Ym3mj/eo4aNu2MjIoklaLGuqOi3Wi6ZbQWvnZRnaRpcZYkLzx9K9D0z/kE2f/XBP/QRXmEDKwbb0HFen6Z/yCbP/rgn/oIrxMN/v9b0X5I8PCf8jnE+kfyRapr8qadSHoRXrHv3AHKg+1LTU5QU7mgAo7UtHSmITvRRRQAlV2+W5Hoy/wAqs1XnBEkTDGN2D9DTYkPqFQz3W88Rxkbfdu5/Dp+dTUmKmSuNMdqMYKj0xXLahChaFWYON24RJyzsBkAfzPauh1Ge8Marb2Pmgr99pQoz6dz2rA1NLq2tJ5kiO/ywX2gvz6A9lzj9TzXnYmskuWLszWC01OcuYmudT8vULlYYXH7uJT0UDnc/brnAx7mq9xFDeG88uwguCQxaePBEWMAYHcnk9uuPeruoQSyqqXEpigxzGu3eT0wBkj3yDxXNagh+0MsOqiO0QAiNH2kkjPTqcDHrz09vMqSnO6ka2XQvpDpSoqtBMWAAJM0Qz+lFUk8KJIivJq0iOwBZck4PpRWF4/zC5H2HiRlbr+tWY7jPU81SfO7I605CPcV9A9QRqLIG6GpQ3FZquVOPwqykpHfj1qWiizhCeUX8qUbQeFAP0qNX/GnhsdP51IydJ5E5WR1/3WIqZdRvE+7eXA/7at/jVPPFOye2PWndhZF9NY1JOl7cfjITUqeINVUYF4/4qp/mKy80vanzy7i5Y9jZXxNqi9Z1b6xr/hUy+LNRHUQN9UP9DWD/AI0ZNP2s+5Ps49jpE8X3mPmt4D9Mj+tWF8XS/wAdkh+kpH9K5RTzntU3Sj29RdQ9jB9Dql8Wp/FZMPpJn+lSr4stf4racfTaf61yIzjNLT+s1O4fV4HYr4psD1juB9VH+NSL4k0xussi/WM/0riqOtP61Mn6tA7oa9pjf8vQH1Rh/SpF1fTm6XkP4tiuBz70ZxVLFy7C+rR7noa39k33buA/SQVIs0T/AHZUb6MK849Mn86MfQ0/rb7C+qruelcGlxXmoYqeCR+NSLcTp92eQemGIqvrfkL6r5nouKMV58NQvVxi7uP+/h/xqZdY1Fel5L+Jz/On9bj2F9Wfc7vFGK4hdf1Nf+XnP1Rf8KlXxLqK9Wib6p/hVLFQJ+rTOyxRXJL4pvh1ityP91h/WpF8WTj71pGfo5H+NUsTTF9XmdOR8y0uK5g+LWLL/oIPr++/+xqdfFkP8dpIP91wf8Kf1in3JdGfY6CisRfFVkx5guB+Cn+tTL4l05hy0q/WM/0qlWh3F7KfY1aKzl8QaWT/AMfOPrGw/pUi6zpr9LyL8Tj+dP2kO4uSXYu0VXXUbFj8t5bn/tqP8alW4gf7s0Z+jinzLuHKx9GKAQehB+hpcUXFYSilxRQISiloxQAlGKWigaQlFLiikUZXiDTZdT0zyoSPMRxIqn+LAIxnt1rmotI8SQRiOLz40HRUuQAPw3V3dJXn4nLaVep7Rtp7aM8nGZPQxVb20pSjK1tHb9GcP/Zvij+/df8AgUP/AIql/s3xR/fuv/Aof/FV3FFY/wBj0/8An5L7/wDgHN/q9R/5+z/8CX+Rw/8AZvij+/df+BQ/+KpP7N8Uf37r/wACh/8AFV3VFH9j0/8An5L7/wDgB/q9R/5+z/8AAl/kcL/Zvij+/df+BQ/+Ko/s3xR/fuv/AAKH/wAVXc0Uf2PT/wCfkvv/AOAH+r1H/n7P/wACX+Rw39m+KP791/4FD/4qkOmeJ+7XP/gUP/iq7qil/Y9P/n5L7/8AgB/q9R/5+z/8CX+Rw39meJx0a5/8Ch/8VSHTPE56tcn/ALeh/wDFV3VFL+x6X88vv/4A/wDV6j/z9n/4Ev8AI4T+yvEvXNx/4Ej/AOKpw03xOOj3P/gUP/iq7ijFH9j0v55ff/wA/wBXqP8Az9n/AOBL/I4U6V4lLZJuCfU3I/8AiqX+zPE2MbrnH/X0P/iq7mkNH9kU/wCeX3/8Af8Aq9R/5+z/APAl/kcIukeIk+6J1z6XA/8AiqYmia9HIZEikVz1ZZ1BP45rvqSl/ZFP+eX3/wDAD/V2j/z9n/4Ev8jhZNJ8Ry
xsknnujDBVrgEEfTdVP/hF9RUY+wKB/vJ/jXovakxR/ZFP/n5L7/8AgB/q7R/5+z/8CX+R57D4X1FSsaWiRIT13qAPfg13dtD9mtIYN27y41TdjGcDFT4pMV04XA08NJyi22+53YDKqOClKcG25dW7/ohKKWiu09IZH/EPQ0+mLxKw9afQAUUUUAFJRRTQmLUM4zESOxzUtMYblI9RTsITr0pKSM5jX6UtIZJJuOnPs++M7fauc1JHkgQJKWiEiuyrj95j3JrfhBkifzR/tKnp9fesjUFVZDxyOa4KtBzm5GsJJKxz+qW4u7y1326+VgtJlgG4xgE8kDr06+w5rL1dpltpZ4NI08SxITHIzBtm0HB27fbpV64vV5S2jM0gyWWIgBT1OSeB/P2qqEkZWe6bAzxFFznPY56/pUvCwtzS1Ye0d7I87HimIAB9Xvd4+9tuGxn2orTfQAXYv4i0+NiTlBHEAp9OOPyorj5aP9f8MV7xqMvzHjjNKq8D2qWUKjfMQp9+KRMEfeH4Gu1O5tawBcds08D2pwX0PPtQFHai4xVYrwf0qVG6ZFRjHY5oXOD29c0gLKv+lPyCOp4qqGPWpAxFAFjryPrR06moww/rTweMUgHdvWlxzTQRnGaX8qAFHXGanWq44NTrnFS2Uh1O4pO1J9BUlCmkz7UuCKQj6cUXAOv/ANegDgjFAH5UvGfWi4WAH+VHrxR06/yoouAcdaPrS/nSUAAFHWlpOo4oCwuaQ+/elx0pCPbrQFgOKM/SjPNIeuPzphYAM0tA60mOKBWF4OaM4pMdeaM0DsLmgH3pO9GeKQC59KXAI/8ArU3OOtBNMLDuAMgflT1mlX7srr9GIqIe1Lxii7FZFgX92h+W6nA/66t/jUy6vqCdLyb8Wz/OqPvQeO1Pnl3E4RfQ0117U1/5eifqi/4VKviPUV6yRt9Yx/SsjNGfXFP2s+4vZw7G4vie/HWO3P8AwA/41Iviq5H3reE/QkVz4NLnFV7ap3F7GHY6QeK2/isx+Ev/ANapU8VQn71q4+jA1y2TxRn3p/WKncXsIPodcviezPWKdfwH+NTL4i089WkX6p/hXGA0u4EGn9ZmL6vA7dde01v+XjH1Rv8ACpV1awbpdx/icVweeaUNT+tz7C+qx7nfDULI9LyD/v4KkW4gf7s0bfRhXn27HNLuprFvsL6qu56ICD0IP0pcV50GA6cfpUi3MqfdmkX6Mar655C+q+Z6DRXBpqV2vS6m4/2zUy6xfAcXT/jzT+tx7C+qy7nbUVxya7fjrcZ+qr/hUo8RXo6mM/Vaf1qAvq8zq6K5hfElyOscR/A/41KviWT+K3U/Q4qliaYvYTOhxRWEviVT1tvyf/61SL4jgPWCT8CDR9Yp9yfYz7GxSVljxDZnqso/4CP8aeNcsCf9Yw+qmq9rB9Rezmuho0lUhq9g3S4H/fJ/wp41Oybpcx/icU/aR7hyS7FmioBf2b/duoT9JBUgmib7siH6MKfMhWY6ijr05oqhDDxKD6jFPqOQ4KH3xUlABSUUUxBRRSGmIU02lpKYiJP4h6MadSdJW9wDSjAOSM1Mm0rrUYsZw/1rDvpLa+kePzFfaNssOcEH39q2toLlskMfeuW8Q+D4tZnF3a3j2t0DnzFBKntxgjB/OvHr46pTnapCyNVFWuiu0UMPyRRiKIHCoOiiucurvUILhrgaUWmU+UCLjeqjqTjAI9z06c9M9RFo93pdr+/upNSYAkcJGemMZOc8/wA/YCqqW89ze7jaG3aMDMeEIx6lgOn0I7etYzx6rPlpuyRcYW1ZxclnYeY3mTnfk7s+X17/AMdFdc2l6IWJltbxpCfmP2Z+T370Vxe1fc05Ucn8YWI8PWGM5N1nA/3D/jXm+gs7MnzNjcDjcfWvRPjGcaPpyc8zk/kv/wBevP8Aw8MrCxxy39a9/L1eirkYx/vGdUu8Yw7g+zU8PPgfvZP++jT0Xr3qYKMZP1r0uVdjhUmMFxdL/wAvEv4nNOF3eD/lu2fcCn7AOMUu3Bpckew+eXcFvrw/8tAT7qKkGo3ag8ofwpNn+c0nlg0vZQ7B7WfclGr3IHKRn8/8akXWpsgeSv4Gq3lg8H+VHlij2FPsP20+5eGuP0Nv27N/9anjXlHW3fI9xWfswKa0Q/yan6tT7D+sT7lqbxfZW0oSaC55GcqqkfzrRi8Q2jxq22YZGRlR/jXn2ujZepjj5Bj8zXQWke62iz3QfyqPqlNsv6zNI6Ya9Zesn/fFPXW7E/8ALVh7FD/hXPeSDwBTvJ/+txR9Sph9bmdENWsW6XCj6jFPGpWbEYuovxYVzPkgcAUnkj86n6jHuNYyXY6tby2I4uIz77xUgliPSRf++hXIfZwTkiq93bDyOQBzUPAruWsW+x3III6g0o7e1eT/AGchbwKxHOTg47Ukf2q2W2Y3U65yWxK3fn1rP6m+5f1pdj1rjjtRjmvJYLvVQsxOoXgLJvUee3HJ96emp61HpzuNUujIVLZaQnHtzS+py7j+tR7Hq9GPWvGx4x8QRGMf2m7fL0KIcn8q0pvGWtrKscVyCAwDFolOfyFS8JMr6zA9S7c0Y+ua4JfFWrqPmMB9zGR/I1XXxzrPnmP7NZtlyB8rDA9T81J4WoNYmB6LTRXFSeM71Uz9kgOOT8xGaLTx3NPEXfTFTnGBPn/2Wl9Wqdh/WKb6na44pOc1yEXj2OW4aP8As2UKvVllB/oKkg8eWUsrRNZXasvJI2kH9an2FTsP20O51hpP0rnl8Z6cTgw3S/8AAFP/ALNSHxxoitteSZcHGTCT/Kk6U10H7WD6nRdTQSeawIvGugTPtW9bcemYH/wqX/hK9CHDanCp/wBoEfzFL2c+xXPHubXOeKO1ZqeIdGkj3rqdptHUmUAVLHq+myjdHqNqwPcTKf60uWXYfNHuXcn9aUVXF5ayHCXELewkBqVXVujA/Q1NmO6Hg0Umc8nNGcA0WGLnJozmm5zS5Hf9aAFzxnvS03OR/jQT2pDFoFITRmgBwODSknFMz9aTPsaAJMnJo3cVHu44NKTQBITzxSbuaj3U0t/jTsIm3f8A1qaXqIuKaxAosFydX5qQPVMPUgfiiwXLO/rQH75/Wq5fApC5pWC5a3+9AfjFVfMwO9HmHHNFguWw9L5nHPNUxL604S4o5R3LRkpnmc+9QGTjmmGUZosFy6JPfip45PlPSs5JM+tTROefTFKwFvzKaWX0B/CoN/uaQygDPX1ouFiwHA5AA+lI1y6naJZsnoqOwJ/I1Wjd5z8gwv8AfPT8PWrsFuie7Hqx6mi7ewrJbl3TFupLtZJbmYqucRmQspz65rqAcgVh2i7BmtmI5jX24r0qEWo6nDWd5D6TNFJXRYxFzRmkooEFLSUUWuBlSW99bXJaCXzYWJwjYyvsPUVHLqlxbOEuLXtywyP6H271qy9FPoaXNebLBVYSbo1GvJ6o0U090Yh8U2SMVaNwMcsOTn6VZbxVo8wiCzlWYcDb7Zxx3rQaytZkZp7eJw3UFBk1HJounSWe5bRFBk8whePm5GePqa5pLEpPm
s7dSk43VjKOr6bvKC9SVmUny0OeO+SeOKfHp8N2GmSFVmY7SAdowOn1xz271UvPDOliDc7yw7ACZA+AMd+elZVxY31tYWr6Rc3QjBdWDgq0244XHHHAPOR6+9eFVhzSTSsdF2kXZrazimkj/tdItrFfLEiAJg9MbeMUVCuhakVGLfTU4+60SMR7E45PvRWXspdwuzz/AOMTgWelL0zLJ/6CtcRoCfLBj/np/n+ddh8ZWITSFz/FMf8A0CuT8PD5bfPILA/TmvsMAv3MTLF/xGdaiDqeualA5z69aEGeo+tSAcZr0zhGYOTTgo9Kdtx1p2PUcGgBMfzoxxjPPWlAyM+1OHAoAZjnijHan4yTTgOTRcViPGKYVwPT3qUjJ6cU0jjn6VQmcp4iH+nRcf8ALPkfia6KzH+iw8Z+RefwFYHiMYvYvePv9TXRWIzaQHJx5a9/aktwexZC8YxzTyp6c0gGR79zUiimIZt6+tLt7cVJjvRtB7UDI9oHbmoLtf3B6dRVvGeRUN2D5B6dRSY0czcffmRersM/gKWeMSW0CjqcAZ9xU7xhrmduypj9KgGZJIfRCq8etZXLCZVimbIGPLwKjiQGwAx2NWLiMSXaL2AyffFVg5SzVFHztkD86aYjkbiNllLHA3MTj+7zgD9K347dY7e3UdSQScdTWLdna6g9Of58V0CjMFuf93+VIZoviOMucYAqrZxB5ZZCOTU8mbicQg/KOWPrS2q7ZpgB35xTuAskS7TwM1QjUJaFQPmZjjitdl456VVt4RI+/wDgThRSYIjhsxFEBgbjyTjvVeGAC/kOOueK1mXv61UVcXrk9x/SkwIp4/LjPy4PbNZr2obPr1+tacxLsWI46CmLC3lu5JGBx71Ldykita2KLGH43NWRrEWGb6CumhA+zr+P86wddGCT221fQkj02ITaU2eRlu1XYLcR2qrt5PQVFoLA2Mm4Zw54/AVrWqCRzIcEDoPSkMqxaaqKSQN7dTT/ALJg8cVqFR2zim7fb/61OyFdmPdwyiEFZJBznhiKfbm8WBNt1cAgYz5rA/zq9cpmA8dOabAg8gD0zip5VcfMyJbrUh0v7r6ea3+NPGqawg4v5yfd8/zqURgdjxUMvZVGSfahwj2Hzy7jZPEevqdkWoPuz3RT/MVNfa/4r0/w++qG4VohII9zwLhiew4q3pGn2b3BN7cLEijcc9WPoKd44vY73QpLaN0ZFUFVjPyoB2x0rJwjzWSLU5WvcwLf4ja+0UbN9lbd1JiP9DXUW/irU5I0Z47clgDwpH9a8qtjttozxlW/LmvRbFQ1pEePu1MqUL7Fxqy7myPFF5jm2hP0JFV7nxvcWrqDpquGGciYjH6VEIxjP61Q1G3DbDj1Gaj2MOxftZdy+vxCOPn0lwfacH/2WpB8Q7X/AJaaddKfZlNc41uuPu9KYbVSCABxR7CAe2mdSPiDppHzWl6uP9lf/iqkXx/oxIDi5T6xf4GuOa0Tg7ec+lRtZoaPYRD28juh450HHN3IvoDA/wDhTv8AhNfD7YzqaL/vIw/mK89eyU/yrO1Kw22EsgPQA9B2NNYaIfWJHq6eLNBfBGrWoz6yAfzq2niDSZAAup2Z/wC2y/418+Y/cdfut/P/APVW5b2ivCj+qg4NKWGiuoLEvse4LqNpIPkuoG9MSKf61IJ4z91ww9Qa8TGnqyngcnqajfTwB8oxUfV13K+seR7iHyePrSF8YzmvDjBOgxHLIh7YciiCTWC4WG/vF+kzD+tH1bzH9Y8j3HzD60vmc9a8WfUNatA+dXvmYJnm4Y4/M/WvZPhJKuveFJpNSH2m5iu2jMkvLY2qQP1NZVaXs48xcKyk7WJt46Cmb+eP1ruDoemsebVfwY/41E/hzTGH/HuR9Hb/ABrm9ojW5x6SA96mSXB/wroz4X0/JIM6/R//AK1QT+F4PJfybq4V8HbkqRn8ql1EUmYbTHcFXJY9AOamitWchpj06IOn4+tOggWBcYy3cnqasrzVblbEiKOBircK81XjXmrsK8it6VPUxnIuwjCitOBsxVnJ0q7atlSK9CCscMmWKKQ0VoQFFFJQAtFGaSqQmI/KH6UgOce9KeQaZGfkHqKTQImlfACA4Hep7U7reRe45qmf1qzYt+8Ze5XpWKpKMLDvqY+rwM5D4DIq58s9CfU+v0/+tjBmnu5y8vnMFRvmJ6Ac8fXOK6jU4WliZSxjTkEr1/8ArVjWsUVxaS2jYjAkQDb3yeK8Kr7rk+XRNHcn7qKQnvCoI5BHB3Nz+lFXmttYLExsipn5VEgAA7DFFL2sOyFdnkPxkYmfSVHULL/7JXOaAMJbem7+tbfxdm8660g5+9C749claxvDYxHa9wOlexgv4MTLFfxJHWrx7Y/SpgBx+opqKMU9Rg5/pXonChccUYx707n1oGcfSgY0AZpwUenWlxzRjikMQgZ6nrS4zg+tO29h1oC800IQjNMPbPQ1Lj6Uxh6UyWcp4jGLyI8f6v8Aqa6OwH+hwdP9WvT6VzXiZtt7Cf8Apn/U10+nDdYW5/6ZL/KktwexaHp2p+PrmmhSOM/lTwOPXFMLByM0dRTsdTwfSjbkcdKVx2EwG7VDdAfZj+FWPbH4VDdrm3JxjkH9aT2GYLYWS5J4AA/lUZURW8BOBltxpZwzSyp2PzHjsB0p0yq0VvH0LY/LFZGlivGC05ds/OpOPQdqZBEBE0uOqkL7CrUzCO4YkjCx4qNABp54x8ppoRx+orsdO+c5rdhA/s+Bj0Cjr9KwtUR1uFds7WI2j2x1/GtazLy2S9QkaAfjQxI2rSIpHub778mi3H+lTCpoSTEjc8gVDvEN1ITx8uf5UmxizsXcQp1P3iB2pLMELIPRqktoyQZXHzPSWh+eUe9K4yXbng1Tn2pIwHJPX2FXJZPLQn+I1nscsfU0pMaQ1YzI4A/lVuSMJAR6CnwQ+WmSPnNLPjymPTjihCZVtx/owyPWsXXFyp4zlK24P9RyfWsHXpNssMa8lsg/SrvoTYh0AM4lQcKG3H8v/rVvaeflbjnisTw02XuUz2U/zrcseZH60uoy6BznFKRk04Dj2/nQeOcc1VySvKuY2+maitV+U9OuatOMofU1BGDDHlgcnnHpUNlJBI20YHJPTFIsQU5Y5Y1IkZ5dx82Pyp5AJzjNNAyFhVLUY99hcL2KH+VaJHyjPSq1wu+J1xwQRVXEefWozanHUPnp7V6Lo7B9OhbjG3v2rzm2zsmRuxH9a7/w44fSo+nHFZSNYG0Bxz0qrfp+6BHUNzirQ/yKr3qk259iDUFmYV5yenemlRnjg4qTnr/Wm8468mmTYjYZ69KYU49cmpT+YFNP+e9O4Fdl69vpVW7i32U6D+KNsflV5vpURUE4xweKpMlnBDmCQe4P+fzro9L+exhbGCBj9cVzyDaJk6fL/Kt3RGzZgHqrEYzTlsJGoEyOgodPT681LGpc4A5q9Fa7eXHPWsrlpFCKyaVvmGF9atiBY1woAq5tAGB+lRuuAT29aLlWsYGpx4l6feT/AOtXo3wHus2etWpPKvFKBn1DA/yF
cBqaYWMgdQw/lXUfA+4MXi3ULUnAmtWwPUq6/wBCayxCvSY4O00e9k5waDSDpj0pa8i51jO9IeQaDwaQ1DZRzd7CY7px75H41Gq81qanFkpJ/wABNUAuCPfmuyjrFBJkkYq7CMCq0a1bjGBXfTics5Fhat2xw2KqLU8JxIK6EYMu5opKKskWikzRmmJhmlpKKYgzUa8Fh71JUfSQ+4oaEKaltW23Ke/FRGhW2uG9DmpktBk9+uRIvfrXD3MLPe7SSo7kdQPb1Nd7fKqgzEgLs5J9K4q5R5LlpJI9kefkQjlh6sD/AC/P0Pl1Xadu52UneA3N1/yz09Cn8JMRYkdue/1opjBixJkckn1orL2TKszyL4u/u9V06ME7VgbjH+0cfyrL0S5WCC1YxswVBnHetD4wMD4islPa2/8AZ2rL0wAWcGcE7Ov4murDy5aEWTXV6sjqE1qHGTDMO/Qf41MNZtiB8kv/AHz/APXrDXnp9KlH6YzW31iZl7GJuDV7U9RIP+AUv9qWmc72BH+yaxDx3pVPbNH1iQexibn9p2ZH+t59wRTxqFpx++A/AisIdMjrR37U/rEg9ijoBfWp/wCXhB9TTzeWp4+0RfTdXO8AdKaQM4xin9ZfYn2KOkNzB/z2i+u8U0zxEYEiH/gQrmjg88flULgDnj8qpYh9hOiu5F4slRb6Ahs/uzyOe9dZpbqdNtiGGTEv8hXm2suEuY+3y1vaaF+xwHGPkH8qp17K9iVSvodyF9P51IFPrXJqfQkfialUuD/rJPwY0vrK7FKh5nUgHrjn3pCMgY9ea5pZJs/6+X/vs1ILi4B/18v4tR9ZXYPYvudFg96iu1/cH2IrGW7uf+fiT86Gurhkw07EehxQ8RHsHsGRsuXu2I5AA/SoEBeaGQ/dBCqPoKny5LfPy3B4600bxtAbhORxWftUV7NjLiPzLph1Crk1FKfL00Kf4kIHPepxu3M+RuYYJx1qtPGSgU4wowOKaqoXs2c3ri7Xt8enX8K1bBAdLTAHK8/WsfxA7i4jDFdoUYGK09JkdrCMDG3BGMVbmrEqGtjZsyDbRn29abIA90sfQdz602BniiCKFwvrSpGUkWQHJBzz3qXURSgy5joB17VWtyFuZgT71J5rcZQc+9REEs7AYLe9J1ECgyKeUu2c8dqbCB56cf55pxhJOccd6eR+9V1XAUYxUqauPlLYHI7daimJ8p8eh6UpnGMbWpjSqQRtbnPatOdEcrKqy+Xa+jZwPesrUYCsPmONzkH8K1QF8zc4JUD5RiqGs3MSwAnPXHK+1OMkJxZleGW/0ycZPKf1/wDr10Fl/wAfMign8frXL+HpBHqz56FSOnuDXT27Bbkt/Cc8020gUTU5H+FJnjGaYJUPIP4Yo8xAOvT0o5kLlYrHaDzz0qvC2+c7uoFPXa7lncAL0FRQ4W6yRheRkmp5irGxY6e90C7ArEO4H3j6CtUacgG9k57KUPSn28tqsUbGSLaBhVyOfrzVHUdUWQNFbuOfvOOPwHtWfM5OxdkkU9QaBHMcIRv7zgcfhWVIu5SCDVhsMOwqFl655reNkYs872eXe3cZHQkDjpzXZeFnLaeec7W/xrkr9RFrt2PUk/yNdJ4QkzDKueh/w/xqZFwOr71Fcgm2fnoM09TikmBMDjGeCOKg0MnnB9qQ9fYUdevSgn8qLiGkGmHvwSKcf/rUh75P9KAImHH+NRnjnP5VMRxjH5VAeuRxTTFY4yaLZqdxH0+ZwM1qeHYmkjlQZ4YH86i1K0K6rPIeFBDAeuQM1a8NNtubiPIyVB59v/11UpaCitTpoIljHyjn1qcYA/rUa4BqQcH3rC5stBe1RyKCOvPepcjGD0+tMY54ppiZj6ov7kEZ4bn8qt/C+4+x/E2yGcLNvjP/AAJD/UCodQXNs479etZ/h+4+xeONFuMhQLqEk+28A/pTkrwaJ2aPqg8N9aXtRJ2IozkV4h2dCM9aQ9KVutNJwM5wKzZSIbqPzLdx3HIrJArVtb22vJZo4JBJ5eNxA45z0PfpVB4/LlZPQ13YXaxNTQSNenFWkqJBUqV6UEckyZe1SocNUIqVTyK2Mi9SvMbe1uJljaUxoWEadWwDwKYh+QfSp4P4vwqiU0nqZCWOu3Ea3T6x9nncB/sywK0SH+6c/MfQnI/CrVjqz3egnUDbN5qK+6GM5y6EghfqRxUGp3891cvo+lt/pRX/AEi4H3bVD3Pq5/hX8TgddSztIbCzhtLddsMShVHtURWun9M7K0v3adSKu2mkklp5276WvrpfrrkJY67cRrdPq/2eZwH+zLArRJ/snPzH0JyPwqe21xG8OPq1zGY/JRzKinPzISCAe/I4qPU7+a6uX0fS2/0or/pFwPu2qHufVyPur+J46weIrOKx8F3FrbJiC3jT5euUVgT9eAaV+W7j0X4mqiqrhCqknKStZJWj5276Wvrpfrq6K08QXMC3barHbzuA62ywBokz/CT94/UEVPBrsbaA+qXEZjMCuJ41OdroSGA9eQcVrqwZAV6EZFcTdAv4T8QTKD5Jv5JAMdVV13fnhqJXhquzFRUcS7TSXvRWiS0btb/h+xrx2niC5t1u21WO3ncB1tlgDRJ/sk/ePuQRVjT9QOp2Mdw0flygmOWPOdrqSGAPcZFaykMgK9CMiub0L5hqUi8RvfzFPoDg/qDTtyySuZSn7WlKTSVmrWSW99PP59jqGxJaxMRnA/8ArVh3cCtdAFFYsQBuOK2rZt1lj+6cVlaomORXDi43ZOHfQQ6hZIdggiIXjOOv6UVjhJMD5M/nRXF9XX8xryI8L+LrhvFEC9harx/wJ6z9NnhSzhUyICEGcsKufFaT/ir1GSMWqA49CWrimJEjYPAA5r0aMb0YryM6ztVkdyk0JAzNH/32KmSaIjiRSPrXAA44z+FO3E98Y7VXsifaHoAZeOR+dPVhntXnvmtuypJx6U7z5Ookdc+jGl7IPaHoQ+maXuK8++1TqMCaT2IY08XtyFOJ5cdfvmj2bD2h3+R3pp7eprgxqN4rAi5mHp+8NO/tS9A4upiP940ezYc6O2J/yKic5XAH4Vx/9q32R/pUnbvSnV70Hm5c/UD/AApqDFzon14/6TF67eh+tdFpfNjB1PyD+VcXc3UlyVaZ9zjjJFXYNZu7eFEjcbVGACoOBVSi2khRdmdwlTL6Y47c1xK+I78Z/exnj+4KcPE+oL8xMX4r/wDXrP2bLU0dr2xTj6j9R0rix4pvhjcsH4qf8aePFt6ODFAfbB/xo9nIfOjsgcEcfpTs/jXHjxddHB8iE+vX/Gnf8JbNjm1jJ9mNTySDnR1pIAxmjOQOvrXKf8JfJ3tEP0c/4U4eLepNoOOP9Z/9anySByR0+7I9aic+lc6PFy4/49D/AN/P/rUp8Uwtw1tIM+4NPlkLmRV8RsVnjY88Vd0Ns6dF17/zNYer6kl+VaNCMDByas6Xq0VtbCOQMSCeg96tp8pKa5jrY2AGKlz3rCTXrUcNv/Kpl160PV2HsVrOzLujYyM8A0bvSsga7ZMB+8Iz6qaX+3LM/wDLX8w
aVmF0apbpyMUZHbpWWNasyf8AXD8Qad/a9nxmZR+dFmGhpZHOcD+tNJ7Vn/2tZkEeevH4UHVbM4/0hPzo1C6LjnisnWBusjx0YGrLalaHP7+P/vqqGo3cMltIFlRiOwIqo3uSzF0ZtusLweQw/SuziI4HT61w2mPt1iJmYbdx5J9q7OKQHHI6+tXPcmGxfB7nNBHHPFRq3HBBz75pc547+lTcsdn+VJwM8A0mc0h6UrisOJX0FNIB7Um4delIDTTGIQmTx9ajZVHf24NSMT1/SoZG46002TY5HWkSLVJSAQSAePcVZ8NyNicKzLgjkHHr/hUfiEYvUJ6FB/M1F4bfF3OhOMqD+R/+vWv2SFuddHLLkfvpMYzyalEsrDHmsQevNVYz/Kpwc9xnNZ3NACAcZORR5QHHSnL+eKM/jSuFiMxZH3qQxlT94496mOcDPNIev+TRcLFfyuPf6U6O3GeeakJA6/jTkyM880OQJHPa9Ftutw6tF/iKz9Ck26p7OrD+Rra19QTA5GRyv8v8a57S3KapAxPfHX1qo6olrU7MHHQ/lUq/T86rq3HtU4PPOOazNB4OOn40xm46frSlhkdvxqNjwc/jRcLFW7O6KRfVDXLXMxjntpVblDkH8q6yVQTtPQ8Vx98u2JC2flbB/wA/561pF30M5I+vYJlubOKdPuyorg+xGach4IrmvC2t2kfw+0fULy5jhiFpGjO7fxKNpHucg1h6j8RbKR3is5Wjj/56FDuP09P5/SvGcJczSR2Q1R2Woarb2Iwx3y9o06/j6VympazNeZEjbYz0iXp+Pr/KuYl8S2jk4lPJySc5J96qSa7bvz5vHemqMux0R5I9TuvCN35mrTwk5DxbvyI/xro76PEyuOjD9RXnfg3V4H8UWsSyAmUMn/jpP9K9PvY98G7upzXTRXLLUwrNN6FBB0qQCmKKkFejE4pDl61IKjxThWpmy5Ecp+NJdfaWspo7SZYLh1wkrJuCH1x3/wA9aSA/IRWJq/iCWyvTbW8SEoBvaQE5JGeMGssTiaeHp89V6bHHisdSwUFWqvS/a+voO0/TNc0y0FvbX9gFyWZ2tXZpGPVmJfJJ9a1m/tN9KkhN3Al8wIW4SH5V99pJ5/GuY/4Sq+/55W//AHy3+NH/AAlV9/zyt/8Avlv8a86OcYOOib/E4qnF2EqS5pb3v8K/pmjp+ma5ploLe2v7ALku7NauzSMerMd+ST61twefJYtBqRhuGcFX2IVVge2CTXJ/8JVff88rf/vlv8aP+Eqvv+eVv/3y3+NOOc4KOzYq3FuFqu8nrvdRSf3o2E07V7eEWltrAW0HyozwbpkX0DZwfqRmtKK0tbXSfsEcAa3WMoVc53A9c+pPOfrXLf8ACV33/PK3/wC+W/xp8Piu581RPBCYs/NsBDY9smnHOMFe139zCXFeFqtKUmtf5ba93Y0o7DV4bcWtrrAW0AwjPBvmRf7obODx3IzWjbWsFlaRW1uhWONdoyck9ySe5J5NPi/1QHccU416ygo6o9ypiKlRWl+CSu/O25e05siRPUA1FcxqV3PyF5IPeiwbbcgH+IEVLdB9rKEB9fmxXHjV7txUnqYrXcm44eQDPZsf0opxilz94D2DdP0orxuaHmd/LE+bPig+7xo47LBGP6/1rjmGJWzgV1nxLbPjW56cRxj/AMcFcqPmZgec+le3Q0pR9DlrfxGAJBoP1NO2gkUuOgx17nvWpmN3H26UhznPrTyuOc4zRj5s9zSAYDj1/HijHHANSbM/d/lQF59aLgR56YwM+tL2xzinqpOecgnGMUgXpkH39qLgRk8nHWjPOQAM9vWn7cDB/IikKgc5HHancViNuOcjrjr0qUH5R7DHWlt7WW9uobW2j3zyuERAerHoK6dfh34nwM6Zgjr++T/4qlKcY7scYt7HLbsDj86ack9K61fhx4oZSTpy49fPj/8AiqD8NfFLH/kHqB6meP8AxqPaw7l+zn2OTzzngEUZ/Edq69Phf4rcEfYIwf8Arun+NKfhh4rIA+wJ/wB/0/xo9tT7h7OfY5FZNhJIB+oppck56V1T/DjxPE5U6fknsHU4/WkHw78UMQo007j3Mi4/nT9pDuLkl2OX3c9vwoLj8O9dO/w98TqxVtNbg9nU/wBaYfAHiZf+YZJ/30v+NHtIdw5Jdjmlbtn8aA4yDjr1zXRf8IF4mAydKk9/mX1+tI3gTxIM/wDEqk5x0K/40+eHcXLLsc2xIHt/OnxsdpIPFb7eBPErfd0qX/vpf8afH4C8UZO7SpuPRl/xp+0hbcOSV9jA345zx3FAkIHcY46/59a6P/hBPEpB/wCJTN0/vD/GmnwH4kyf+JVNyOOV/wAaj2kO4+SXY57ee5+nFKJCq9ecVvt4E8SEc6VNn6j/ABpP+EE8Tf8AQJmPvkf40+eHcXLLsYHm9ODnGMUolIXPQ1vjwH4n5H9ly/8AfS/40o8BeJsH/iVyfiyn+tHPDuPll2Oe3nPNNMhxzjnk4rpU+H/idyQNKkHHOXUf1qJvAniTdj+y5v8Avof40c8O4ckuxz/mlj+P0pDKOBnkd/WugPgTxMOP7KmOD1yP8aT/AIQTxMcf8SqbnpyP8aftKfcXJLscyrkOPXNOMh6ZNdB/wgfifp/ZUwx7j/Gnf8ID4mx/yCpPzH+NP2lPuLkn2OeEzjkOR+OKd9qnUgea/H+1W6fAXib/AKBUo49R/jR/wgXiYnH9ly569R/jT9pT7hyT7GKuoXQXi5m59HP+NOGp3oH/AB9z4HbzDWw3gPxP30qX8GH+NJ/wgPifP/IKk/Mf40c9Pug5J9jIGq3qt/x93Gc9DIakXWL4DH2uX2+c1p/8IF4nzj+y5frkf40v/CA+Jh/zDJP++1/xo56fdByz7Gb/AG3qHT7XJ+PenHW7/obpzj1wf6VpL8P/ABS2caW//fa/405vh94oRtp0x8j0dT+uaXNT7ofLPsYF1fXFyQZ5CzDgEgdPwptpeTWc5kibDEEHjPFb7fD7xOCo/sx1DHAJcY/nUq/DbxQWA+wDn/pqo/rT9pT7i9nPsZsfiO9Qf8smx1yv/wBeph4ovBx5cJP+6f8AGrbfDjxSP+YcPr5qf41Efh/4nXrpvH/XVP8AGlzU31Hy1Ow0eKrgdYIfpz/jUi+KpM82qcf7Ro/4V94ozj+z+faVP8ahv/BfiDTLKS9u9PMdvEAXfzEbAyB2NK9N9R2muhaXxUw+VrZCfZ//AK1P/wCEnUjm0/J//rVyYbtntThIe2M0/ZonnZ1i+JojjMDjI7NmpodfgaTHkyD05FceHOefyqzbSnzh154/SplDQpSOg1XUIrqFFVHDA5wfpWBHJ5d6j44WTJxz3q3I2V6Z9M1mykiV/qOlKmEmdgmpQY6sPwqZNUtsAbm/75rm0b5MjPPept3APGKzaKTOi/tK2wDv7+hpDf2+ceZ19jXPB8L3B/nVae/ig4zub0GKFFvYblY6WS7t9ufNAHrXJapewtNLFEdwZ/vDp17VSuL6a4JywVSc7R0qqetdMKdtzGc77G/pdxr2qoljaX
bNFaofLidhtQFsnAPua0n0fxSoJJib/gC/4Vytvdz2U7SW8jRseMqe1aKeKNYRdv21yP8AaAP9KU4Sv7tioThb3rl2W18SRn5o1+oRP8KrltbT70Y/FVqBvEmpuPmuM/8AABULa1eOMGQf98ikoz6pA5R6NnR+EtS1O08Y6NLMm2MXkSucfwlgD+hr6sZQ6lT0IxXxhDrFzHPHLvOUcMMexr7Ntp1urWG4TlZUV1+hGa568WmnY0pSvdGYFwSp6inipbpNs5PY81FW8HdXIluKBThSDrS1qZsngPauR1tHk8QyxoMuzIFHTJKiusiPziuW1STyvFBkxnZJG2PoFrys4/hU/wDGv1PnuIf4VL/HH8mXYYDp+oQqjsXYKZELbtpPXHtU95ZXE91cM7PEgBZZRgjpwMGq+jSxTSveXUq+YTkA1f1e4LCOCNvlf5i2eMdq9dWsfQPcgS5lbR5PmI2uoz7HHFV7dpLS6hm80skv8OBxg4I960/IiGlSwQyI7bCTtIJJrJdhJZQkHJWQj8CM/wBKoksX32ie9nSOQK6coCMjgZxWLqs7zzws7FiIgMnuMn/Gt6V9uqQSdBIik/yP9K53UV2XIQ/wjb+RIryM8/3R+qPA4j/3F+qO8HDOPfNB5NB4mPuKDXsHvEkDbLhG7BhWrKmWPuKxa3Cd6I3qM1hXjeLKi7Mymi+Y/WirpjGTxRXi8h0+0Pkf4jc+N73GfuxjH/AFrm8Eu3BPzda6nx/E03jS9eJd6Hy8MvI4RfSsXEMZfehD7vWvUpO1OK8iams2U9hxnGfalAzwfyx7VdW6gQbQmM+4oEtl12N/h+tVqSUwhOOvFAQgg4Jq4JbM/Kobk4HJpoktCRjIB4o1EVthyNp/SlAzkdB3rRtbeO6RnToDjJPep/7OUA9PxNTzFWMcryeOelJ5fbuK2l01QfmGBnH3hTxpcJ4OcdvmFHMPlMEqD0HQdKRkK9Qf6V0H9mwYOWbn3GKY2lWzD/WsPyoUw5GVvCbY8Y6HnHN7H/6EK+rAkbYJRc464r5e0W3W18baGisWH2yI5OOu8V9PB8KPasq65mmaUnZMd5UQP+rT/vmlMcR/gX8hTWnUjbt5/vU0uax9ma847y4s/wCrX8qXyoj1jX8qj84JyQT9KaJc80ezDnFMMe/cEXP0pTGn90flSb6TdTUA5iQRx4+6Pypwjj/ur+VRBx607fRyBzD/AC0/uj8qb5UZ/hH5Ub80nmUcgcw4RRg/dH5U4InoPyqPfS76OQOYeEQD7o/Kgon90flTPMpC9HIHMO2KD0H5UFU/uj8qYXPqaTfT9mLmH+XH/cH5U0on90flTd9Jvo5A5x2xP7q/lUciJt+6vT0oL0yRxj0pSp6DUxyKm4/KOlIVTao2r+VRo4yPcUgfkVn7MvnJPLTYflX8qZ5SbM7Bk+1IZfk59aGf9370ezDnJBHHj7i/lTDGhJ+UZPtSeZ8uaaJMvz2p+zDnHPGgA+UflSmKMj7i/lTJHyB9aXzAQDTVMTmRtFH5g+QdOwpjxIxwFX8qc8nzZ44FMVuMnOSafsxc5G8MakAKuPpVXUryz061Mk+EB+VSEyfwqxdSCGIyNnCqWOPauW0nXTqWoJPJ1C79u7hQegq40ridQ1heS3zEafpM92y9JJWKIvsMnn8Bim3j67ZQbzoayDqfLdWwPpjNdjZzx+Uqoo6du9S3MxROQAT2q1RjYfO27HI6fqFrqUZ2xBJlHzxsACPwqZ7eNjgRr78dKs3ttbhmu1gVJcZMqdTjsaZF8yBx/FzWagm7Dm2iFYUD4Kr19Kz/ABXax3HhXVYti/8AHpIQNo6hSR/KtR5cMVH07dc5z6+1Raign065ixnzInX81Iq407Mzc7o+WMDP+FKMdzjHFKUIz+uaXaWbgduwruOMADngjj9alg+WVT7jkU1VPXH508ADkAHnNSxo0HIxg9TzyKo3JPnA+3p1rQcDaMdD14rPveiken+f61nT3LlsXYCTEhHoOQabNOkSbnOPT1NZ4vTFCqKoyO5qq7M7bmO4+9Wqd3qQ5lq41B5Rtj+Rf1qln3oorVJLYhtsKQ0tK1MQhpKXsDQRigBKKKKACvsHwBe/2h4A0K4zkmzRCfdBtP6rXx9X0/8ABG9+1fDeCI9bW4lh/Xf/AOz1z4lXima0Xqd5eJlA3ocVSrUlTfEy+orM71NF6Gk1qLSg0lLW6MRynDZrn9c0+8/tVrmGF5FfBUxruwQAOR+FdB7HpViM5iQnqVGa58XhI4qnySdrO+h52Y4GONpKnJtWd013ODSw1CP7ltdr9EYVYkXV5ggeG6+QbVxERx+Art6K5P7Lqf8AP+X3/wDBPO/sat/0Ez+9/wCZxMH9s27l4obkEjHMJP8AMVGsGqLnFtcAZJx5J/wruqWj+yqn/P8Al94f2PW/6CZ/e/8AM4mU6zP5fmQXB8sYXEGMD8BUa6bqV5cIr20oY8bnjKqB7nFd1Sg0pZO6itUrSa7NkTyJ1Fy1a85R7NkcgxIh+opSKJfug+hBpTXsn0A2te0bfZp7cVkGtLTmzA6+hzUTV0NE2KKfiiuH2SNLnw4jmNVAYj6Ggu24hgQQTngcVFAGZEGefarsOiapdTsIY8sBnlxXYo3ZPNYriU/7XPSnLLj+EjnuKu/8IrrgH+oAH/XRf8akXwrrZJHkp0/56LVezfYXOu5nCXIHy/8AjtL5meApz/u1e/4RbXAOLdefR1/xpB4b1xR/x78f76/40vZsOdFVZnA2KXA64VevqePwpwuZ84DzenBNbmmeGr94XNza5cNx849PY1fbwzOCClmR7B/p71Di09i079TlVurlnGHl5GO9TLJfEfKJvwU810ieG7pWBNq68/3x/jUo0i9jJCWrcdOhqHfoi426s5gDVWHEdyR/uk/0pHOqxDLw3Iz1JjPP6V1gtr9AQbQn/gP+FRNDqG7DW8gzz9wgVKcuxpaHcx/DUlw/i7SBKHBF5EPmXH8Yr6bB4r50sIZIvGOjtLGyE3cXUY/jFfRQ6DjmiavYUHa4pNJ5gFNOccCozkmkoiciTKk5pcrioAGY8DJo3c4NVyC5yfK0uV+lQBqTdRyBzljetHmL61W3daN3NPkQuctF1o3rVXd/9ejdj6Ucgc5a3rRvWqu7il30cgc7LO9aQutV93vSb8mjkQc7LO9c0m9fQ1Bu+tIXo5A52WN6+lJvWq5b3oLU+QXOWNy+lRyuu3pUe8+tRyvkChwGpkgkTcvH8NNDptzjvVffgK3+yaA37k/Wo5EPnZP5i+lKZE8vp2qtuzuP4Ubv3ePUUciHzssNKgTGMZpEkTd0PIqqzEn29acGCyj6YoUA52WZpF29KcJFIHHFVJW+Snb8Ln0FPkRPOyR5FJC7etODIMccDtVNn5BHTPrU2+nyIOdj5yjgAqMHg15tFbTweILfTYYJoR5pLM6bSUB4IyOc16JI3AIA47Gqk9lHFIl9GEW3hGAijl2Y/MSfUcH8KmWiNaXvPUvzxzC1EcG/DcMVk2HH+92/nXNaNp051qQ7IY03fOY7h5iV92bvX
UPepaW7yFgwRc7V/iPasR9aKRu8eoWEU+eIGUMoH0BBz71imjrUXuVNUhvItbkizMyocAxz7cKeQSvQ88V0EZCQIgx8qgVzy6zNez+ddR2pcELiI5x75roVUlA3fAp0bNszxScUivKcuccUvUYPfipDAz7nTlVHPP8An0pMfJ+FbcpyJniE2l6HFdzRO1rvSRlYGQAggketKunaB2+ykdeJf/r1n+JfDEzeJ9TZGIV7mRlGwnqc/wBayT4Yuc8N1/2TUOMf5jVSl/KdUNH0EqOINp6Ym6/rUqaFoTkgCPjr++Ncf/wjF0P4x/3yf8KY3hy7X+IfkaXIv5x87/kO7OkaJjAAJxjiUmvNJvNkuXjyWKkgAe3/AOqrj6BeKM5XH4/4VmSxvBO0T/eU4Na0aajezuZVpuVvdsatrZW0unSebb3Au9+EbPybfcdfWg6VHgjMgP1H+FU7Cxub7zPIYfuxk5bFWP7FvuP3i/8AfRq27PczWq2N3TNB0O9ljt2N8JmXk5ULkDnHBrbPw/0ZWw1zcqf+ui8f+O1xUek6or7opdreokIqdbDXRjbeSDjH+vIxWMlJvSZvCUUtYHYf8K80dhxeXQ/4En+FZHijwXY6LoxvLW6mkdXVSrlSMH6D6Vl/ZPEg6X0/H/TyeP1pJrDxFcwGCa7llhY5KNcEjP0JpRU003PQqTg4tKGpzmPl/GvUdP8Ahdp99p9tdjVph58SybRGvGQDjrXDDwzqmCPLXGf74rWgg8XW8CQw306xoNqIJ+APQe1XWk5L3JpGdGKi3zxudNJ8JLcfc1SYn/riD/WoG+EcnJGoyAeptv8A69Yj3Pjezhkm+33QREyzCboB1rNbxz4mZstrFwx9yD/SsYwxD+GaZrKeHW8Gjpz8Jbj+HUc59bc/416x8JNBuPDWk6hp803mq06zKdhXGVwev+6K8a03xB4/1K3+0WN1PLFuK7hHHjI7civTvhRqHi2bXLyHxEZGt2t8xFkQYcMP7o9CfyqJe2vac0x/umrwi0euVmzpsmYds1pVUu0+634GtaTszKa0KuM04UUV1IxFqeM5T8TUFSxH5SKpEMloooBqyRaKSimIdSim9KWmAkozG3rikU7kVvUU81HHxHt9CRSYAau6Y2JXX1Gapmp7Ftt0vvxUy2GjTwc0VJtorn9mO58MWy/NEMj7wwPxru9EG+8lAwBszz9RXF+Q9teLFKjRyxuFZWXBBz39K7jQlHnysSOFAxj3rpp7kT2NooVH3gQeMA00cNnpzmpcDnLD8aNo5O4fnXQYERBzjH60dccVKyrwdw/xphUeo/CgCzZ42H2YVZJBP8qgtFBRhnPIqyUIxxWctzWOwzvn/PekOQcjGaeRkAYppwD0/wA/5FSURnOM0jc8cYHan7SV5B645pkjiONnc/KBk0hnN6iceNfDw4z9pjJx/vivdlIIrwOdXuvGmhknY0lzGNx/h+cYr3FY7gD/AF3X/ZrmqO7N4aIumVjGYxjaetMK8EVX8qc/8tv0p/lTYP74/lUoGPWVoMlcbjxyKrbiSSTyaZJDPu5mOM+lRNFJ/wA92rRGbLG/3o35HBrlPE+qXWjNaMly4WQsGGBzjHt71gnxje5/1xI79P8ACi40j0cvyaC9edL4vue87Z+g/wAKX/hLrr/nu+PoKVwPRN9G8Y6153/wllzx/pEvpxij/hKbn/n4lFHMFj0TzMUeZ9K87/4Sm673Ev505PEl0/8Ay8y/Xd0o50PlZ6FvpC9eev4nnycXE3/fVNHiS4PP2ib8WNLnQcp6Lv4pC9cPBrkj43TTf99kVcGplwCJ5f8Av4al1UilTbOt356UheuYkvWWNT50nI/56GoxdsysfPn4/wCmhqfbor2LOq384xTZG+XNcm1y78LdSqccZkYj+dMgupd2GuZXI/22x+WaTxC7DVB9zpS5KL+VLvPlt161zy3T4z5jcnGS5plzfMjECVuvZzUfWI9ivYM6RXJ3detCs3ljg1yf9pMD/rJD/wADNPXUyV4LH33Gn9YXYXsWdOpYvyOB1pWciUEA4rkptTIGCzH6NVQ6k2eXb6ljxQq67CdJncSyHZ3/AAppkLbUA7dq4g6m/wDeb/vuozqbYzuJ/wCBVftUL2Z3UjkbRUu8+h/KvPTqbZH17Go/7V6jK9f73f8ACmqgvZnoUz/KBg/SsLxdfahY+HJL3TvNE8ZVWCn5SjcHcvQgdea5j+1SwGwpgHk5zmq2naqZ/E9lBcR5t2d0+YnBco2D7YP9D9Naa55JEv3eps6N40hSVNO1OXcMiOG6lQRiXA6MoJCt+JB9jxXZJBbyQPLbi2WRslnQ7GJ+o6mvHfEWgPZL9oCyXVhM33HwHhbJzlzx+J6+vrlvrlxBpEsumaibQQyeX9lnuQ7kdMhSp4+jcY6U6mHjK8oP5GsMU4WjNHpniHULDwxazak6QyXsh2xhmLNI3YEk5xXZ6bcyXmk21xPALeWaJXeInOwkZxmvnPTb95tai1LWmm1OC0lUuDKcEZ4xkdM4yOK9u0zx34avwqNM1mx6C6QKp/4EMjH1IrKFLkQVq/tX5HQSSMmFjZhkYOO9R7jsxT2tI5ow0Ko6MNwK4wR656YqstogJBQAg4IIqmZI4vXQseuXOADkgj2OBVDBHzFRgnjJra8R2wj1cbQFDRKew9R/SspcDou7sCegrzamkmenSV4IRdoToQcdKlVWb5nBAHAJP8qRYi3rz69KnEDNhQS59hxWTZukR74wVOxiF9Tx9cV5T4uTHia6brv2tn1+UV7NBayQ/OFQMOdzgNj8+K8w+JMb/wDCQwztyZIFycYyQSP8K6cHP95Y5sZD93cPhynn6vfWecCW2Pf0I/xrv/7IdZ45UkQlWDYC8Z2gEfzrzn4ftt8ZwR9fOR1/8d3f0r2+CxLsPlyB1rPHS5ag8HBTp6mNYaeI97TxR+Y8hbcozgYAxzWnBpdtIjboUHHLsox+HrWoloT8qgBT2Xof8atpZogG7JP90V5sqx3KEY6GMmi2kh4gQjPLGMVOmiWSNg20DnHGYRW2sJOAoIx0z1qzFaEex/U1l7WTE5RW5jRaHYZLSWUJJ7GNatRaHYHpY22T1JhX/Ct+20t5Dnbx69Klnn03S/8AXyh5e0a8mtY0aklduyOWeIje0VdmBJ4Ssr6znt/sMB82Nk4jA6g96+VdX0ifR9VnsrkBZInMbAHOCDg19i/aNW1NNtnALOA8b5OD+A//AFV8o+Pbaaw8Z6ja3EnmSxXDAuf4ueD+Ir1sBHkbSu0cVeTkrytc9R+EGoXms+E7jRbTSbUi3mHmXG0AkNz8x5yeD+lepaP4SbSpGvZ7pnmwfkQbUGfbvXlH7OWpCK517TyR8yxTL+BZT/MV7tNcblK0Vo04VG3uTGrPl5Y7Fao513RN7c1JSda1iUzNpaV12uQexpvWutHOxR0qWI84qOnRn5hVIhk9GaTNGa0JFzS03NKDQAtKDTaUGmIdmooziSVfcH8xUmaj6XPsy/yP/wBehgOanwNtmU+hzTDSxffpMDoKKhSUbFz6UVmM
+I5JZJQJpZGeQgMzOck49/pXZ+GnEkTyj+JFNcY6/wCijGM7Ofyrp/Bsm/TGOeQQn+fzrSnuTPY6ncfXFIW9uM/So8nOB1pCcjPpxj1roMLEytlSPQ56U3PXHeowSpPHXig8N3NIZfsZMOyY68j8P/11dLDp3A61iwyeVKjeh9K1x0wDkevr0rORpDYerAHpgUw4OcD3peCo/lQevU81JYg681Wv8/ZCMfxA/rVgjGOeKr3mDat6cfzpPYpbnMl9vjPQW9LqP/0Na98UV8/zf8jbofP/AC8x9v8AbFfQSg4rnkaIYflwAuT6+lLzg08rQRwaEJkSNGG/fKWX0FVSOTgcZ4qcjk0witCGzg/iLbvPFpyqxH7x84+griBpU5xl5P0r1rWLBb+5s426KWI/IUkXhmIsG28ewo5Li5rHlQ0qU/xyfhj/AAqUaXIB96T2/wA4r1k+F4eMKePanr4YiH8FP2Yc55KNLk65k/HH+FO/suX0k/P/AOtXrq+GoAchKd/wjdv/AHKXsmP2h5LFpUjMM+b+dWZtGkiYrGshXHdutepr4cgH8FWm8P25+YoOlS6TuUqiPF20yYdUk/OpYtJkLD5H/OvW30CDJ/d0LoEI/gp+xYvaI4Cw8PxyAF1l/wC+yK2k8N2ZwPKk9/3zf412NvpEadFxVr+zkz939KwlQlfc1VZHHtoFiioPKZu/Mz/4019Ds1iYiCQEcY818fzrrZ9NYgbV7YyK8N1Oy+KJ1K6W1XUDbiVthGzGMnGOM1n9Xl3L9sjujosO8lUZRj/no3+NV/7HjLEgHPs7f41wq6V8SpWXzU1U7fm+8g/z3qd9N8dtdwOttqtuisN++53Bue+e1S8M+5arrsdimmRLj5eh4G48frUd5pqhsbcj/eNdIkBMZ3KM9Kbd24GcqABWPsXc19orHJLpwZseXn/gRq0mkwlB+5zj3Nbi2w3HAGQtWktx5Y49qUqUgVRHHXOlKOBEAfes/wDs1dwBiXk/3a7a5tgZPwqitoJLrI5C89aqFKRLmrmENFh2jdChJ/2aP7Eh/wCeKZHfFdUttumGRgAcVS1yX+zNIurpcbo4zg4/iPA/WtFTYnJWONvJNLsXMbPD5gPKAAnPfmsa51e0GTFDHxxkIDiuUvbthKzFiSOOT1z/APrpEfzIyTn6dcfSu2FBLc5JVmzVuNYlZ1wUKgkEHg/n2plreumrWc+QpjnTB7YDc/z/AFFZqs8bcHj1p4XYC2SGxkEcEen9K6IJRaaMnJs9i1GXT9Pu5kvLy1SOVQ0kMrru+oXrz9K4q58J6PdrLqFomYMnCwy5jJHXGP5ZFadjotrc2SOqOxk+Z2cAMSeuecn8acmlXWlFxpzLGbjiRRhV4/i2/dyO2AOveuytgZTl7RaXKhWXLyvU5a1tbPTIrm1u7W4aS7Ta0cMYIiU8Ak56jrgDmuXMs0AeFZN21iudpB478jP516lDo5hJMuSc5bc2SSe5J+led+IbI2Ws3KANtc+Yu7nrz/Ss69BwgmTzXZY0TxXq+gy7rG7eNOjRdY2+qHj9K9N8MfFC11e+istSiEN3O+1JYzlGY+ueVyfrXh7nCDn61d0Dc/iPTQMkm6i4H+8K4mikz6A8TQI17byORzGRj1IP/wBesdbbIGSqp0GP88103iGBnFu4XOGYZJ47f4VixW5Y4A3H0FeNiHabPawyvTRHFbr0jALY7jJq3FbmP5DtJ65JOAKsRWzBeXwem1en41ft7MuckEj2Nccp2OtRKS2gY43Bj03Nn9BXnHxb0/yH0q5HKuskecehB/rXsUdnuIAXGPSuE+MmnA+GrK5ySYrnaSOwZT/gK0wdX9+jDFpOk0eWeC5PJ8daOzHh51X/AL6+X+tfTEdqNoA6deTXy/o7G28Q6Xc4P7q4jc/QMDX13BYs+ODit8zi5TjynFhavs4O5nJag9Cce1WYrHOAAMVspYJGu6Vgqj1NILuNTss4TM/97sPxrkjhGtajsOWKb+Erw6WcZYBV9TTmubO1by4ENzN/dQZqc2Nxd83cxC/8804H41bht4LVNsSBRXbTw1tYq3m9/uOaVW/xO5mNa6nqPE0wtYP7kfLH8as2ej2GnnfHEDKesj8sfxq28wHeqslx71cpUqeu78yeaclZaItPMFHFfJnxotvs/wAStRcD5ZRHKPxQf1Br6gkn96+d/jzb48UWN0B8stoFJ91Zv6EU8NiXOtZjlS5YNlD4JX4s/iJ5WcC6tZI/5P8A+y19GyT89a+TPAF6bDx9ok44zcLGfo3y/wBa+pHk461z5o3GomuqN8JHmibKncqt6jNLUFo++1jPtipq66bvFMmSs7FS5TD7vWofarlwu6L6GqddkHoYTWoU5Tg5pKD7VoZsmzS0wHIBpasgdRSZopgOzzS5pmacDTEOqOQ7XiPvg/lTs1HccQlu6kN+RoYEppYvvdaaeaWL79AFzdjvRSbveiiwHyD9ildxaIFMpOzGeM9K3/DOlXWmQXEVyFAZwVw2e1Z0U4i1JLhg2BJuIHXrW6NettufLlx+H+NEbLUmVzS2n3pAOeelUBrcA+7HKfy/xpTrkDHlJc/h/jWnMiOVl3AxR3x/I1Q/tm3P/LKXp7f403+2oM8Ryc+wo5kHKzRxWrbSmS3TJOV4Ncv/AG1B/wA8pfwxVm28RQRBt0MpBOeMVMmi4qx0wO3mmnHByawz4otATiCfA9h/jTT4ptT1t5v0/wAazNNDeznnjgcVXvzm1bgjOBwPesj/AISm1HH2eY8+3+NVbvxZaPCUFvPk9+MfzpPYaaKt0wHirRCOB9qj4Pf51r6EUV81x6pHqfinSDHHIgS6j4bGT84/wr6VXGKykWhaaelO60wmhCZCRzTSKe2KYaszMPXtattANrd3S7o2do/vAYJXPf6VSi+KGhxj5sH/ALbLXRTaVYatiLULOC6iT51SaMOAfXB7806Pwd4cXkaFpo5/59U/wrSKfQltHPt8WNCA4CfjOP8ACmj4u6HjpF/3/P8A8TXVp4U8Pr00TTR/26p/hVhPDmhr93R7Af8Abqn+FP3u4tDij8YNEX/n3/Gdv/iKb/wuDRznaLXH/Xdv/iK71dD0kdNLsh/27p/hUi6Vpy9LC1H0hX/Cj3u4aHnx+L2lAZxaAf8AXd//AIirulfEhdfvPsWmWcFxcbC+xZmHyjvkqBXcLp9mp4tIB9I1qteafapf2VwPLtyhcFlCrnK9ORStLuPmXYyRqfiJxuXQoiM4/wCPkUo1DxJ/0BIBj/p4H+NbMEthaQpB9ujYLnDSTgscnPJz70jahpqEhr62H1mX/Gn8xfIy0v8AxH1OkwAf9dAf/ZqsC+19v+YfAPxB/wDZ6sNqukDg6nZAe9wv+NRtrehAgnVtPH1uE/xqXHzGn5Dftmv4H+hQD8B/8XQbjXiv/HtbA+4/+zpreIvD4yDrGnD/ALbp/jUR8TeG1IzrOnj/ALbr/jS5F3HzeRXGoa29/PabLRXiRHO7gENuxj5v9k0ySLW5D8z2WP8AeqmvijwzF4oubhtUs/Lks4l3q2QSryc
cd/mq83jjwovP9r234Kx/pU8ie7K52uhB9i1XjMll1/56f/Wpk2m6rLjE1kv/AG0J/wDZambx94UX/mKxfhG/+FNPxC8KDH/EzBPtC/8AhU+xh3K9rLsVRpOrhv8Aj5s89OG/+xqaPSdX24+1Wq/jn/2Wkb4i+Fh/y/ufpA/+FRt8SPC4PF1MfpA1HsYMPazCfRtTDhDqNsGcEgAZJx1/hrjoJ/F7AvF4fvmU8hg0WGHqM11kvxH8LSAeY00mOgNvnH511Wnuk2nW80fEckasoIxgEZFL2Mege1l1PLw3jYnI0C+U57y29Yfi258TRaMV1bTprW3kkCh5JYWyw5A+Xnt+le5EA9ya8r+NU+zTtLtgx+d5JDj2AH/sxpqkkwdRtHh9zLvkePqOlJaSArhgMr1559KZNGzPs3fOBx7/AEqvblluiGBBwTgnHNbGZpj7wOT16d8f5/rUp2lBtAwDngd6rMy7Dn0wAOv+elWEAZNwA+bB9+f8gVQj07wxd+d4etpIVR5kjCsT2I4/pVmSWZ7uIyquW3LgD2P+FYngmQyaOwVgzRysmc++R/OuhupFhhUSSqZGYKhxnJ6fpXvU5c1NMytZjCqIoxy7HOCSOM1wvju3HnW92AcNuQ549xXfReY0a7wm7ZhmXHJx9BXL+NrfzdGaQdYmDgj9f0NZ4iHNSY09TzCeMqM9Vz6V03wz0xtS8d6cuRthY3DZPZRn+eKwJot8b4y2Oa6P4WXS23xA0/ecCQPGPqVNeBLY3jufQOswiW2TIHDg889qz7fTwUBBx9f8KteIdXg0fRLjUbiKSSGAKzLGBuwWA4yfeuKi+MXhxAD9h1Ec9o0/+KrxsXQqzneCPXw1eEKdpM7aOwuPNjVSnk/xZBz+ArUitQQAOnevPB8bPDqnDWWpf9+0/wDiqnT44+FVA/0LVCf+uaf/ABVcTweIe8S5YuPRno8dqMjB/Sua+KGkSXngK7MSbmhdJRjsA2CfyJpujfGHwffJI0jXdtsGQJoh83sNpP64rF8U/GvR7nR73T7G2ulM0LRhyikcjHPNb0MJKDUnucs6zlp0PFrLybe8t2uATGsmXVfTOeD/AJ619haff/b7G3nsYW8qaNXWSTjgjIr4rnv1mb5VKnOa9+8PfHfwzp3hnTLK7t9Q+029rHDL5cKlSyqASDu6cV6s6cm0zlbVrHsH2ESNuuZDKfQ9PyqceXEuFUAe1eTN+0J4Txxbap/35T/4uqz/ALQXhZv+XTVf+/Sf/F1m04fBDUmze7PXnuAB1qtJc+9eSN8ffDBHFpqn/fpP/i6gb48eGm6Wmp/9+k/+KrkqRxMvss1hGC3Z6y9x71We4rypvjp4bP8Ay56l/wB+0/8AiqYfjj4cP/LnqX/ftP8A4quSWFxL+yzojKkup6i8pNeO/HO282y0q5A+40qE/XaR/I1ePxv8Of8APpqX/ftP/iq5bx18R9D8UaLHaW1terMku8GRFAxgjsx9RWuEw1enWjKUdB1alJ02kzzXT7k2epWl0v3oZlcfgQa+tgS4BXJDDIxXyBkY4znPFeywfGSwj0y1t3jvTKkKJJ5aKi7goB/i3EZz3Fd2Y4adbl5EZYOtGF0z3HTci1KN1ViMZ59auVwHww8XWfieHUktY3jNuyMysgX72emCc/d7131FKDhBRluObUpNoRhuUj1rPbgkelaJqjOu2U4rppmMxmaM03NLW5iSIflHtxTqjQ9R70/PvVogWlzTc0uaYC5pRTaAaYh+aRxuQqe4xSZpc0wEjbdEp9qki+/UMX3SvoxFSx/epAXe3f8AKimfjRQB8jX7NBau6HDqAc498Vpp4c1RkB+3wDIz/q//AK1Z+qqWsJRjrj+Yr03R9EvNXOyzgaVkXLBQOBRFJ7kzbWxxA8Narkf6fBj/AK51IvhfUzkfboc/9c67OW0ktrgwzKVdTtYMMEGuw8PeEBqtqZzJsUcDIzmrcYrUhTkeNHwxqpG0X0RbHQRGkk8N6qnD30akjjMNem6/oraTdNFu6dxXNy5Y8kkkY560KCYc7OSPh3UjjGpRdP8AnlR/wjuothTqcQwf+eI/xrqKaenA5p8iDnZiDwZqzcf2tFz/ANMaVfBepHJOsR4/64f/AF66+1fzLZWzyPlNWVUA8/jWTVjdao4j/hCL8nnWk/C3/wDr1Bc+BrtIi7awG6Aj7Njv9a70jIOTVe//AOPR2OOCvT6ipexSSPLtPs3sPF9pbPN5piu4fmC7c5IPSvqNPu180SHHj2IA/wDL3AeP+A19LoMLispDW4tMapPwpjUkDIWqM1KwzUZ61qjNlS81UaQiTNC0u9tmFIGOCc/pVX/hNhnH2B+D/wA9R/hVfxQf9AgOcfvx/wCgtXMZwa2hsZTep2I8c4/5hzfjN/8AWpT48IHGnfnN/wDY1xm79KM81ViOZnZf8J5L/wBA1B/22P8A8TTT49uD00+L/v8AH/CuQz2pM4zilYOZnYf8J5dfw2EOf+uh/wAKwvE+t3Xii0t9LlhigilmyzoxLDAJ7+4rNDHrikDf6ZaHPST+hptBdmNN4Pt4HCm5lbPQ4H+FNHhS0/57TD8V/wAK6jUvvpnuOKosDxtOKlwiUpyMYeFbEdZZyM92H+FSp4U00g5ac+vz1oHcOvap4Q5Oc/KKHFApMzV8J6aP+e3/AH8p3/CKaZ3ST/v4a1g3tT9341PKiuZmB/wjunJqjJ5chj+yh9hkOM7yM0y80TTYod8dvjB/vnn9a13/AOQonX5rRx+Ui/41FdgNbuMdBQoqwOTOPeG1i1QRvGBFsLYLHj9arGJZUnkhPyR5IOeSf8KsatgzwxKRulcL+A61DAAqalHzwTx7YqbFXEuRGumQzKuGJGTU1/Ci2yNCgHzAnHp3qnK+7wzGccgDvjvTtTuR/Zsa5+ZlBwDziiyC5biiSeaOUxhYS4CgfxD6VsL4u1+DWJbBNVuFtokAjRSAFA464/zis1pAtrER/snI6dKijiW68T6j1DRMcbT2yam6RS1NXUfFfiCKW12a1eKrPhgJOvIrD8UatealNEl1dzXBiUkebIW25POM1sS6dbzBHd2+Q5Xnp/niuZ8Qxpa38iqflEascnJJP+f5UozTY3Fow51gVN8pbPXA79aqQNvlLHIUDgenNOEMly+5jtUngZ/z/nFJEdl6ykg/LjjtWhJYk4BJIGfT/P8An8altSnkupA+Q89Oh6f1qu7ZGSeOlJbyhLnGRhhgnHQ/5/nQB2/gq5VDcxMxUvIGyTnqv65xXS3f764UJnEbAjA9+1cj4PljS4u1ZQzBVZSBxnJ/LoK7FXS1tRJKPncjg+uf8/lXsYV81NIzloydmnLYUYVc9+vTn/PrWP4kjkfSZI3UBnQnnNdBGZovklxtBPzDt/nmuV8Tvd6iZYYHMaIDyP8AP61vV0gyep5xGx2BuTjOc9q1fB1tJL420qOAHet0jgjuoOT+gNY0X7pnjP3lJB/Cuo+Hryr4+0to4/MyxVvYbTk/gK+flobo971fSF1zSLrTGk8sXMfl79u7bz1x3rkrP4AQNg
trEjL6m3AH869CtJPLuY2ATIP8f3fxrojY3N2c3t2BH/zyh4B+p6mvPrOfMuVnTFpLU8j1P4M+HksDa22oXU2o5AXykUj3yO351VsP2dVmG+91uSIH+BIQT+ea9whjtbNNkEaoB6Cla5HrWaq+z1lL5ITfNokeQt+zxpKj5Nevlb1MSGqN5+z3Yx20rp4gumdUJUGBcEgcd69le596rS3GQQTWFTHW+EqNOT3Pii8tDaPtJ3ZyOmOaqVv+JoGh1a6hK48qeRP1/wDrVgV69OXNFMwmrOwUVcstKv8AUiwsLOe6ZeWWCMuVHqQOlX18H+JH+7oGpnHX/RH/AMKpyS3YlFvoYlFao8M66YfO/sbUPK6b/sz4/PFSQ+EvEVyMw6HqLj1W2f8Awpc8e4ckuxi0V0H/AAg3in/oX9S/8Bm/wpD4H8UD/mX9S/8AAdv8KPaQ7oPZz7GBRW+PBHig/wDMA1D/AL8N/hVTUfDmr6RHHJqOn3FqkhIQyoV3EDOBQqkG7Jg4SSu0ZvG0evevQfBHw6tPFmhPqEmpS27pO0JjWIMOApznPvXnteu/Bq/xY6rZkj5ZI5VH1BB/kKyxUpRpuUNzbCxjKolI7z4beCofB+q3hg1CS4W6hAKvGFAKnIPX3NelGuR0e4/4nEIzwcj9DXXGuOjUlNXludVanGErR2EqvdJ8oYduKsU2QboiK6IOzMJaozqXtSkYOKSuk5xVOHPuP8/zqSoR/rFPrkf5/KpM1aJYpPBP8qi+0jbkRydcYxzSuHJGxwB3GOtIqzZ+aXP0WqEJ9q/6ZSflTxP1wjHAyPemhJdp/e/N2OO1GyXj98QO+B1p6CJo33rnaV7YNPz71WEcv/Pc/lU4oAReJXHqAamj+9UBOJl9wRViL71AFnn3/OilwaKAPkzU8C2Y8csMD3zXqXh7xJeaCWktCmHUB1dcg15Vqp/cdP4wOR716LpkYlu4IWYhJHWNyPQmlDZ3IqO2pcu9Sm1DUJbu4IMsjbmwMCui0LxfPpUBhQI6H+961yuqKYNSuIGfeYZGiB9lOKr+YVP0rXRoyT6mzrGsSapO0srAsx6isZznJzn175o3luM80hQ4poCI8ZyRimkYzyPensu3PPIqM7fzNMZb0+TbMYzyG/nWrszkjtXPqxVwwOCDmughcSQrIOjAH0rGouptSd9A2nv0qteKfsr55AIP61dYZHoBVW8wLOXg8DP61i3obpanm023/hO4iP8An7h/mtfTC9K+Z5wV8dxFgATcw9P+A/8A66+mVGazYLdgaYfapCKY1NCZEwqI1MwqNq0RmzB8UD/iXREdph/6C1cmQe5rsPEib9MHtKD/ADrlNnHFbQehlPciwR1P4UduT71J5eTineTznjBqrkWICPelwe9S+UScdam/s28PS0n57+WaLhaxU7jmjH+kW/J/1q/rxV0add97aUD3UimSWjxSW/mRlC0qYz/vCh3SuKLTdixqKhmQ5PQ9KptHGo+ZsZ9+9bGpxogj+h/pWS8aSnB6Cpvdl2sitmLd94fgaswtCUwX+cngVCbaMEc/rVmC3gI385HI5oYIf5KkZ5pRCh4/ringgCgtyeeKjUvQqrCja5YI33HSVDj6oa7T/hDdMkRlZ7jng5Yf4VxpcLq+nOO0kgyf9wH+lesJg4I3ZPXimh2Rx/8Awrnw5LMkjxzySqflYysMfkK4s+GdPt/irNorRE2NxGjbN5ycoe/XqK9kG0ShQx5Nea+IGW3+NulOTjzraNfqdzChjNk/DvwukH2ddPzEOqmZyP51cj+H3hV0AbSIGB67yzH8Of0rcCAE/Lu57jNTxkLKFGAoPtRYZ8961bw2upXttAMLBM8ar6BWIH6VRhlFt4m1CXu0Sv8AXKj/ABrd8W7R4z1ODblvPY5AxnPP9a5rVH8jXWb5QJLePv1+Uf4Vyyd7ouKtqarXpZduzHriuP8AFN3uviqv8xVd+Pxx/OunVsDg8Vx+qSW8F7M8il52cnBGT16+1KjHW5dR6GfFASvmzyERgZwDUVr+8uGK8FjgDr3qK4uZZjhjheu0dKmsVU7mfhRweK6jAsygN0Oc9efrTIkUMCCc98YPFK7mRiE6HoMZ4qxbwnaGbO4jI7+lAHQeD2RdUnUjG6JWHIIJB/8Ar12bhLm4VGB8tBzjvXnuhnydSwpQb04z9R09/wDPFeg6ViTl+EXrnv8A/Wr1sDrGxnM2POSWBGkAGQCf8elYd3co26NUDMSecVaWG4ny65WKQDaoGCo/nVbU3ttFsDLJzIRtROMk47f410zk36CsrHlmsQCy1ueMH+LceehPOK9A+EmlWd/rc9/Jv86xjDRKpwCWyCT64A6dOa80v7iS41CaSYjezc4HFeg/BvUFh8UT2jMALi2IUerKQf5ZrwaurdjaB7iR2zit2HUPMt43B+8oP6Vh9KisrkiDyyfuO6fgGIH8q8TMW4xTR34aCm2mdF9qz3pjXHvWasx9aUSMenNeK6kjs9gkXTOT3qJpM55qndXltYxeZeXEcCertjP0Heubl8fadJdC00yNrmcg4Z/kTgE/Xt7U406k9kFoo8Z+IVv9n8S6qnpcs4GP73P9a4Y9a7TxlqEuq6tPfTxoj3Cq+1BgcDH9P51xjDDke9fU4a6ppM8qv8bO3+GPiGbQNdujDCspuLcpg9iGBz1+te+aTfahrOlW1xIYo1lBDArkqynB46c9enevmbwizDxLaqrYL7kz9VNfQXhKWW2geBpmeMHcoI4BPXH5CuLMKavzLc68JqrHVLp8AcSTFp3HRpDnH0HQVOXVRhQAPaq5n4zmoXmHrXl3bO5U77lhpc1E0vXntVR7gD8aiNyNwHPWhRZoopFlpa86+Lsfn+GLaUAkxXa5x6FWH+Fdo9z7Vyfj8favB1+v8UYWQe21gf5Zrpw141IszrpOm0fP4++M+td18Krow+JbiEnia2YY9SCD/jXETDbO4963/A1z9m8Y2L5wGZk/NSK9ysuam0eJRfLUTPfbOfytRtpCekq5+ma9ENeSm6KkEdRyCK9XglE9rHIOjoGH4jNedh1a6PRxGtmLQaXtTa6UczKci7XIxTKnuhhgfaq+eK6Y7HPJWYHjB9xTz+tRscocelPzn8atEMWik6miqQhaWm0UwH5oFNopiEkbGxvRhmrMY+aq0vMTDvirEJ3bSO/NIC3x6n86KTmimI+TL5S3lIOczIM/iK76NlguEZWJ2MGzjHSuEuBm5tRx81yn867bK7up45pQ1TJqK5f1ieK71m8ubYlopZC6nGOvJ/XNUs4HtTkZFPbGeuKaT852/ga0WisZJWRds442mXeQB3zXZeKdF0bT9ItpbKcNO+PlDA7hj73tXArLg5BwfWpHuHcAE0NXY0RSYyQKiINPLZxwKa3JNUAw5z1rX0ubdC0Z/hOQPrWSeevb3qxYzCK6Uk4U/KfxqZq6Lg7M3SCRmql8w+xSkjPynoKvheelNnsop7aYvC8hCknEhXIx0rkk7I7FG7PLb4BfHMXP/LxDn/x2vplPu183660X/CwY/JgEC+dB8gZm/
u55Jr6QQfLU3ukLZscRUbCpaYwpxJZAy45qI5qdhxUZFaIzZjeIf+QW3HIdTXJE8e9dvqlpJeWbQRY3sRjJx3rFXwxqDd4QP941UWS4N7GGrNz047Y96OTye3tW+vhO+7yQj/gR/wAKePCN7zmeEfif8Ku6D2cuxzoOCOfxxXapHeS6gEjT90Y93+rOOnXd657VnDwdc97mIf8AATXaQqYoYkOSQgGQfQVLSkxODtqctNZX+/aUcL/u1k63G8ZsjJGyt5vRgf7y16IDnoD+dYnijTo7yKCR2dTCGYY/A8/lXROtzQ5bGNPDcs+ZM5bXBtWM4z1rCMnPArt9R0mK4Cb3cD2NZw8M2XBM7rnnDOB/SsVZLU1lFnMk5GaQOQMV07+HbQFSshK5+bLnp+VSQeHLSZ9iIzHr94itVTbV+hndXtc5Te3J4o8xuhPFdp/whyH/AJYqPrKacvg+IDHkRn/toaXKu5XKzhpH/wBJ08k9Lgj843/wr1qJ5mUFVTGBjJPpXB6tptvZ3mnKkaqReoGAPsw/rXXwvqbiIxR2xhKjBdjnGPQVk1qapaFh2vWkyPKA7ferznx25tfij4VuCg3uIkY9uJOf516DJ/awICG0VQMn5Wrm/E9iLnUtNu9QitpZ4m/0aVVYbDkHofwqWrLccY3Z1rttkcY70+FVPzvgDPp1rOa11ckEXtsD3/dZ/rUqRaoEKSahCQRgBYBkE9+tJza6FcvmeRePownj2/cRsAwU5I65QVyPiFG+1WjRg7fs6nk+hYda9g8SaUpvXkuo457jaMymIAnjiuMNqktpAWijcbSp3jn7x9frXJze8zZU9EcvcytBYyzR43quV9M1xz/aTK0s8iZJyzFQT+deo+IbIDw7OFjRDsVtyjkDIJrzi5tWd0Qqdi89OproopWM6qszJcxyuzJCcdc5qa1UGEgryO1TXBjiwqphm4A6Z/wqraSDdz0z2rUyLqRb2y3CjtzVk8n2xn0qBZDkcgevPNSjoF6L2Hb8PypiLuj2sNxqUrXDhUVQVIbBzk13trqFlGPLT94vGc9CK8tupjFcRYlkQMD8yDJx+P0/nUi3khI/0u7bGGPIXHPt/iK6qOL9jGyQnDm3PX4NTjMTCNQXUEKg6nngVy+p2EsrNe6vfWtop+6smTgemPXmsWC6W7ivoooYYpo7RpoJI5JixZWHTc7KeCewrl5o7ueYtcCWTd8odjnnGRzTeY+00aLdLlWho6zaaYyzTWlzNJKhHLRhFI+mSareGNX/ALE8R2GoYO2GZWY8/c6Nx9CazpbO6t0O9SB3AOarKcGuWclN3sJaH18rK4Do2VYZUg9RVSGF/PuOyCXOTwOQD/Wue+G2sHWPBFkztultc2shz/dxj/x0rXRSaYby5LvO6wjHyKe9eTj4J09T0MHK09B0up2VpgM5lcfwxj+tY+t6xq/9ly3FnCbSKPktjJIzjqfr2rpLXTrK0/1cK7v7xGTS3/lT2skMiKyOuCpGQa8iPJF6I9B3keL3LXF+zNPNJNK3OWJNFlps1vcJcD5fLOfzrpJrWO3nZVQKAewqvKn+jyDodh/lXpRtbQwa1OB8Z6d/Z9zAgJYPDkZ+priJPvnHT65r07x4gmsdKuMZLIVJ/L/GvM5Rhq78O7wPPxCtMveH5vI8Q6fJ0xcID9CcGvojSn8sn36180wyGKeOQHBRgw/A19HWUoZUZc4IzWOMV0jowL3OhFz8v6U12kKb9rFfWsyR52gc220ygZUNyD7VXmudQinYoyvF/FI+1SBkFtvPI29Pr7V5nIenc0XmxUXn4OR65rnPtmpuhG5VKsQ7lkwg3dT6HGPzPFVmvtTI3JNDt2gnc6YzxnHPsfzrRUyeY6aaTDt0xk1la2n2rRb63Iz5lu6j6lTRaTTvEWnbcSx2NxyvGPu8d6ez8EHGD1NVFWZMndWPnm4H7zP94A1b0if7JrdhcH7qTIx+gbmor+EwXDRHrGzIfwY1XycI3px/n869rdHhbSPeXfBYcV6r4cn+0eHrGTOf3QX/AL54/pXjVtd/abKCftJGr/mM16l4FufP8OKmf9VM6f8As3/s1ebT0lY9Ko7xTOlJpvelPWkzXQjnZHcDdGfaqRq+eRiqLDa5FbQfQxmhP5Uif6tc9cYo6UL0I9D/APXrVGTHA0ZzSdKgvpZILG4lhCmVImZA3QkAkZqkhFnrRXlumfEbVr7VLS1kjs445pkjZhG2QGYAkfN70mo/ETWLTULiCFLZokkIRngZSy54JGeK29hK9jL2sbXPUwaXNYPhPWptd0GO8nEYm3sjhBgZB4/QitzNQ1Z2LTurof2p9nyij+7x+VRZp9nxJIvo2fzFSM0OPU/nRRmimI+UJjm+suuDcpx+NegWmjX97YXF7BbFra2GZZCygLxnueT7D2rz/ltTsB/08L1/Guy+0S+V5XmOIidxQN8pPrj1op7EzHiQA85/A095IsfKv4mq+fc05QGxlsc+laEIXcOcn6CgHJP6U94oRGWWYFsjj1qLHTvmi4NWNSLRL640eTVYrffaRMVkdWBK/Vc5A561mMxPXp6GpI3IjKeYyo33gDwceo71ATyeKEAue9NB5zQTnPv04oHT1pgdZYt59rHN1yozx371alhcWsnBJ2nisvwzKHMtu2M/fXPf1rrDas1u577See9eZXlyNo9WhHnimeJeIVYeP49/JEsPHp92vpKP7or538YRhPiMvXmSAnP/AAGvohOVFOLvFMxmrTY40xhUlMarRmyFhUZqVqjI5rQzYRLunQe9XfIB6VBaKrXSBsnr/KtURr6c1nKfKzensUhBTvJxzmrrQhWxik8oE9KXtDTQp+UOehqTb0HPAqfyj2FYOpm8TVYYYLZ5InbDuMgL0/AU41XeyVzKqla5sgDPJA/KqWsgPZtyCQp6H2rFvbHUCD5AkJPuf8aksLa7jjuFuYXUeX8rMOvWu6VP91z3OKnWbqqHKXblcxocdaa1jPcQ27QIx5wSCBjnqc9QPSrbrvgizzwOv0q9ZACxT6kVyq09GddSNkZF3o90TmMkjB6N/SjTbK7tbkmaIiMrjeT3zXQADGcE0k4/cDHGGB/WuxVmochxqhHn5jPDHJy2fx/+vSK37374wPeomjfzWCxZHbmlWMh+YgPfrWVzc5HxI27UV4HyXsLD8XA/rXWWRYWcLBR9wDmuW8QJv1KdBnIkgf8A8iJ/9euoshutIlOflLA/gSKwhUvNo2nC0EyxIxK/NtXt1rnvFYX7PYSDJ2zdR2ropgNqjcQevFYficbdGR8E7JlOTRX/AIbFR+NGxIOVbPUUqkdySfpTUkzDEwP3kBxjPapFkY44P5VotiHuYviK3zcqR0ZBnNcPp9p5sJjyqbZpFJ7DmvTNShEwjYoeBiuQ0/Sil5ewSK/E7MpA7EA15Fary1ZI9CkrwiZhsXntZIyispjOVbADDHQ5rxNSyZJ3bV+6oGa+j30tTkFXPHpXz/f2D2WtXenqjlop3iC7SSTuwMCunB1lNtGWKhazObvomb94eGHPHaqcGNwGB9a7O+8E
+JHtjIdCv8Bd3MDA/ljNcWqFXZX3KwOCCMEV23TOOzW5qxEBRx15xnPWpTIEQMx46N/KqsUhZMA7s4461YTDRnJPvxj/AD/+uqJESGG9lVnV5ABt4OM/5zWtHpGnZVjFgdcE9qo2CIDwMZydox0/wrbWPzUIDFTjgD1/Cs2zRI05L27VPs73d28WMeW1y7R/98k4/SqEzyOCiqGc8HIFWTEr5JQknnBf8f8AD8quW1qsX7xlCnHGM8AVklqaX0Ob1TTv3aQtIzyyNuYkfdA5x+ormtU082b5H3T7Y/z3rtbu7/0iQQJ51wwx7IP6VjXmiXB3yzTZZhllA4FapmbR0XwY1k2uu3eku37u8i3xg/8APROf1Ut+Qr3BX2HH418t+Fr/APsfxhpt2WwsN0oc/wCyThv0Jr6glVtw2gnHWuTGxvTZ04N2qImM1U7uf5SKVWy2zv6DrVPUQ0eMgjI7ivFjG7PY0RgX6/6QxHfms5mXJUkYIqj46hnuNPgNusjsJASEBPGDXBnT79jzaXJOCM+Wa9SlTTjds4K1RxlZI6bxUom8J2Ug58qYrx6c/wCFeYXAIc565Ir0mSCX/hX88MiMjwT52sCD1H+Nec3a4lbjv6V04fS6OTEatMrjrXvugTedomnybslrePJ/4CK8NsdLvNRWY2kBl8ld8mCBgevNev8Agu5EnhPTySSVUqc9sMR/IUsUrxLwjtJnXwyqDgttyOGx0rNudLs5Mp5rupLEqAAo3EEjp6DH0qQyncRx3qFpCRjPFefY9LmEt7aO0LFXZtwCjdjgZJ/mxqt9ggRlKvICq7RyO6qvp6KKlLjpn8agku4I875U+XGefWqsyXJE8ZEEEcSZKooUZ9PwprSE9qzbzWYbYbQJJHVtpVR3xmq9rq5uLwwNGUGTtOc5I60+Vkua2PNPFMAh1++UdPPLfg3zf1rHz+6x6H/P8q6rx3B5etPJj/WxI+fplf6CuUXO1h+NepTd4I8iqrTZ6n4auDN4csmzkhNp/Akf0r1X4cXG631C3J5V0cfiCP6CvHPBMnm+Hyuf9XMy/wAj/WvUfh7L5et3EX/PSAn8iP8AE1xSVqjO6LvTR6UaaaU+9HatTFjaqXC4fPrVuobhcpn0q4PUiSuipnpR0c++DRQ3DDryK6EYMXNIQGUg8gjBpKUc1SEeH6ZAbHVtUiiRXvbaOQWwZc/OrgEgHvsDEVZuUbxPZ3GpeTHb3UcuNz3B2Sg5ZgPMY4K9cDjB6dKv67LY6d40vUu7ZEXeJY5o1Yybmw394DuenoKiu/EOk6jsN4biYouFDxbsHPPVu/HavQTbszjaWx0Pwuud2m31r18uVZAP94Y/9lrvhXmngTU7d/EtxbW0SR28sGUxGFYkY64+pr0oGuWt8ZvT+EdT7ckXZHZk/kf/AK9R0qHFxC2eMkH8ayNDSz7Gil/H9KKYj5Qj/wCQnYAHrMDXX4O3jnPeub0a3S88UaVbv915+cH2NdpqWnS6bMELCSNslWPBqYTSfKOcG1zGfgjtSgE8YpRkf560fN7elbGNhxjdSoI5YZGDR5Tlgvc+lJluueKUs3Bz+dIAMQVjk5A9BVmKxVkkkFxD+7UPsYkM2ew46iq+5sYzS5OM5oGhmwk9COfSlWJifunrS5b19qkQtnqaTY0jQ0SKeLU7eVYmYBwGx3B4NeuJpDGLleo7CvM/D8LPdKWnKYIJx3r3CPBjUj0rgr0vbTtc7IVJUoadT5m+I1i1p8TFwpUFrc5656f5/CvdEHy4ryj4yOg+IGngfe8qHP8A321esIPlojHljy9gb5tWLTWp/amN1rREsiYUw1I1MNWZMs6dGXvE46ZNby2yKdwHzduayNIbF5j1U1vUuRN3Y+Zoh8nJzmjyRU1FHsohzMhMHOc1RuoykwHrWpWbqILSBQccDkVUIKLuhOTZEFb1/Wm3CqYCHYDPGTTlA9c/Wqertt09mPQMDx9adTWLKh8SJJIljiVGI+UDP5U6C4hS3AM8agHuwrLvb8Cx831C9/auV+3tLKUwQpz3rCine5pVlpY9GiuYJn2xzRuw5IVgaq3Ws6bGrxte24kRgGUyAEEHmuF0HWzb6lcFo1CqhUtnryDXBeJL4S6/qKYOftEqnpz8xq3Vam4kKHu3PbTrmmhd39o2gB6ZkAqA+KNEHB1ezJ9pAf5V85Txbn4qxZFo2ChRhumf6UOs0Cgj1ObU4dS8bX3kXImtPsiuhX7u5WUn+VdZbanbQ3Etq0+JBcyrgqT1YkdsV5H4elMGpXGOP9EmHrn5Sa9a0aXeuoSg8tds35qtc9Jt1bo3ml7PUJvFGlxuwM87YHGy2c/0rD8V+I7Ofw/JDFFfGUujAtbOg6jqSMV1+RnOTu+tZfiyETeEb4jJYKGyeeQwNdkk3FpnPF2d0RWmryrZQLJpGpl0iVW/cgDIHuatx6rM4BXSL3Ge5jGP/HqvqS8aHsVB4p23K425HuaST7g2jS00edZCSWBo2JOUcgkfkSKjt4bdtUu/kXcdrdPbFRpdeSmzpxnHpWRZ6gY9emLNw8S8fQmvNxFeCqKLW25vTpycW0dSbWHn5BXDyeGNOtfH93rIt1NxKkewnoh2kMwHqcAf/rNdQdUFY2t6gIgl2gxIBs3ZGB3Gf1/OmsTRckoKxVOlNP3iLULqS70+7Wx+e6j3KqqQCG7D5sD09q4Pxp8L59e8NyapHo4i8QxxB3McijzyPvBgCQTjOD1zjmu4huVu4EW3eQzE71www6nqoJ6HPIroYLyQ2kYljPmbRkMw611xcVqFVPY+KEJWTDDBBwQw5H4VajkLYjA5J6AYq942uUuPHWtzJsCveynCAgZ3e/fOfxzWdZKZZQFGQBn/AD+VdF9LnJbWxt2cSqo3sNvoD37/AOfatVHjSMucBcBcf3qzbdCccnGcMDjAFTCUzXohUEpGNxHvUFmpHuAVhwVCjHTFSXcwEJ82byov+WsuecDsPxqipmVT5Vu8pJIVVXvz37f55rNm33N5sucXlwDlLOEkonu5H8hSSG2XIppbtCdOQWVmDhrqUZdz7f4+9V9R+0W9jPHZQg5B3TSMHZx32/lWiINSjCrNLcnAwIbWFQq+2WwKeyEhpJrZIoTjcFYyH8QoIFD0Banmu4792fmznNfYPg29ivrCzup1VhcWccvIz8xVSf5mvkjU4Uh1GZYo2ji3ZRW9K+kPA10y+BdFmBPy2qrn6ZH9Kiv/AA20FJXlynpUdnpUNy1wsKCQ+pyB9B0FYHjaSA2VsYlQHzCCQO2KpnVG/vVk61eG4tVBP3XzXjxqyl7tkenHD8r5mzFlbcgJ9KqMMHkZqbdujxzwxqJhkgnp7V0RKkY+pR79K1qHH8Ak/TP9K8jvAQx68gGvXr+5t7aW7iuZNn2m0ZVGCdxGRjj615FdDP1xXXhzixPQ3fAcUtzrclpFII2mixk9DyODXXeAXP8AYk9uesF06fyNeaaXfnTb3z9m8bSpXOOvvXe/Dq7886qACoMqyhc5xuz/AICtK8fdbIw8vfSO5nl8qF5cZ2gnA+ma546zOys
TEI2Az+o/oa6CdN9swH8SkUvh3Q9O1Sw8263CQoy/KwHIUY7e1cKR11qnIrnFySXdzA0k1xJsUhsDjgtgj8KhuUMb3ADEtvdGJPUABhXeXnhnT1FxDDLP5TqSBuHB6+lU5vDNlJKxMtxl2RvvL3XB7VsotmKqJq5xovN0xuW5w8bn8sGrtvIp1hMAgmQuM8cMo4rf/wCEQsIoVdZrk713feXjBB/u+1W38O2U85uI3uv4W6jscelDgxqaPPfiDHme0kAxujdSfoQf6muEXqfpXuni3wpYXtrbmSW4yjkcMucEfT2FcSngnS/N2me7yOT8y/8AxNdVHSCTOasnKbcSDwBNmC/hz0ZGA/MH+Qr1DwbP5Pim0ycBwyH8VOP5CuV8J+FbCy1CYRzXJ8yPBDMvYj2rt9K0q2t9Rs7pHnUpKr7WIGOeh4rCtB890b0pr2dj0ukzVgwL6mq7DDEds1VrE3uJ3pjjcpFOoNNCZnkYNMY8r9ammXDn3qCXhM+nNdCOeQ7NGeabmlzVEmdf+HtJ1O5+0XtjFNMAF3tnOKgHhPw+p/5BNr+KZrYpc1XM+4rIoWmh6VYTLPaafbwyqCA6IAQO9aOaaKM0O7Cw/PNDHam7+6Q35GkFOVd6smeoIoA1Ovaiq0VynlJkjO0ZooEfN3hNPM8caSOcb5CPwRq7/wAVjBg6jJPSuA8I3ENr4306adgqDzcn/gBFd/4tnSSS1EZ3oys3mD7tcyT9qjpuvZM5oZ6ClxzzzSd/agkggAcdzmu25xsUc+uK0NL0XUNXkZLG3MrJywBAx+ZrPXOR9K6nwd4gg0O9ka5jdo5EILKRkHtwetZ1JNK6IldbFO+8Jaxptobm6tDHCv3mLL/LNYpUivQPFXjSx1fS2tbaOUMSDliMVwDMcDGCM81MJt7ji27iDPtUkfGKh6il3FZkXI2kHPrVs0RtWlyYFyDyRXtlheq9hAxPWNT+lfPa3iB8Z4xnOa9csbmcWds4lXyRbgGPbyWwMHOfrxXn4qThaUTsow9orHlHxcufM+KEAB4EduP1Jr2tPuivnz4lTGX4kI3B5gGSfpX0HH90VUHzQTZMlyyaHUw0+mmtESyJqjNSt9aiNaIzZa05tt9F9cfpXR765e0bF3F/vCug3D1rOc+VjSuT76N49a5rWo9cluU/s24ijix827r/ACrNWw8Vsfn1WEewH/1qweId9ivZnb7/AHqKVlyCQCScZxXJxaV4i8wtJrClP7oz/hUMui60u1pNcdhnGBnqal4iVthqB2JZMfdH5Vx2pXDPPcRs7bQ5wueOtS2Wk6hb3iTzatJKi9UwcH8zWVqJd7q68uNydx7GnTquV7lKFitqOqQw2CxS5XCrk9hWBLP5beYozjnPrVnVLA3tqiNA0iuoRwQee1ZN1p99Gfs0PlKgACZfaqADGPXrWlOsti5UebUrLq6y3lykJHyxt5nQ8gZ59K57xBLGPE18nmJ5jTyMFzzjcc1fOmXa/aJI7ZVfaTK6TMMnHPb09aZqmhX73CubIPKHDs6yuxcdxkr34/Koc05XNFStGxgz3CQxhmcKMgfUmrdtZTsrT5IjHPJz09MVoRaDqFwwii0RncHcW3noPqOK3bHw1c2EUN9L+7aMEtBuPTIHzevXNKUr6LchwSV2ylo9pIWS6XmGWB9pIw2GQ9q9Q8LljpkzkZ3uj8e6LXBW7zS3KkkbCTnj2ruvBj/8So+nlwkZ/wBwf4VWHvzakT+E3uABkfrVLxA6jwrqe0jCwMx3elXow0jAbWqPU7b7ToN/E6kB4XGR24rvexzLcjs2M2n2kiZIeBGyD7VN5Mh9aqaIyDRrLMoJWBQcnrgYrSE0X/PRP++hUKS7lNMztXnnt4ENrCs1xsGImfbkZAJz9DmuflvAmpqe4XBNdBqnkSiKdHSR49yBlIOM4z/IVyOonFwJB1weSe/FeHiknXZ6VH+EjRuda+z2zTFXbaeQnJrMm8RSTxvGtleHj72zj86qPO3UNgAjgDiopZ2KDkAE9BWMaLTujSUn0NfQxp6awrTXRtnc7sN8i5HPVux9sV3iXEFwqPDPHIhyN0Z3D8xxXkgOXUtxzzn/ADzXdeDsNoSd9kjqR9cH+tenSldWZzVVfU+V/EL+d4m1SVQ2HvJmAI5++etSaUo2tJjvgU7XrKYeJdTjCMQt5MvT/bNWtNj8iIBsLg56dv15ruvocXUvGQW8JckZ6jPpUFrL5VuZHUbnJJ4xiqt1M0zqinjg0XbrHZZ5GFxwvH5j/PNSUPtYLrUIjMZHSI/LtiBy4ye/T2rWs7LUEBgts2UTHnbyx7dBwPxJPqawNAhe4IHnTqozxFIQf0/z1rqINOt3O1p33L1LXJzn8/6VdiTQttIgQK9w0kzDndK5YD8M4FSyfZkct5CvIeV2A4HH+fypsOlhRmO9ugCD92Yn/ParDWskShUuZCxb7zAEgc8cChrQcXqcN4xs7tJLe6m5jYFB7Hr/AJ+le0+C0MXw60odP9F3fqTWP4bsLG+mvdMvoBdw3MQLCbn7p7HqOp5HpXbraQ2mmC0toxFBFF5caD+EAcVlLWNi46TTMSX97JFIZHBjYkBWIDcEYI7j2qO4cshHam7ifb61HIQUPPbNeWlqeu3oUrWFYVlRNx3SFzlieT169PpRPDHMFEqghWDjJ6EdDT1PzNjuOlMkOQMY/GtUZs5bxijfaLB1BJO9eBmvO72Mq75BDbiCDXsUmV1CwfOFEhU8+orzLxZCYNe1CMdBOW4755/rXRRlrY5K8dLnMV2/w1l26tdxf3oN2Pow/wAa4k9TXT+AZvK8UxJ/z1jdOntn+ldNVXgzmpO00eu/wDnINVdJaSy064DDLC42RLkfOTjH06/pVoIxXAB/AVWtrUC2vCrBbiK6E8audu4BVJx+tefF9j1VGEpxU9rl0NdwyiR54pxty8SKV4xzgnr+OM+1JdX8cbWqKoErL8rynCqB3NQf2hHIqJFE5lKqCHG0L1ByemPpmmag8Un2e4ZY5PLQxyIcE9jkA1rdW91lQherFV4pb7WV9NNNt+vXuSxajPHJFE8sEyOCFKKAVPPbJ44p095dz3ptICFUqjbz0XOc8d+lQWt1Y/bVEC20agndJLtTAz2zg5pkNzbx6jI7Xdrt2JgmdMZBPHWknfqVUUIybcdVHrbe66Ly6fh3WczYmiuiHlCLKjAY4yAQR+NYZVxeP844x29zW5qeoaa+oPLJf2qRi1ddxnTG7IwOtc1Jq2nC6Lfb7VgyjlZlP9a1jsr9GZc2suRK8obWW9+i6aK5r2Nw0WpxpHJHGzZG+Too/rXRW17Il0lvLPDOZASkkXGCOcEZP51xMeraUl/DK95ZyISoK+arHr6Z5rr7W905rgNbx28KKOZXCoT7Adfx/nU1HLn1ZdKNL6v7sbq2r039d1bt1Xqd5DrGqatqd7p9h5ECW3ls1zIpbAdAwAXjJznuMD1zWtGsqRhZ5FklH3nVdoJ+nasfwgd4v7r5fKmePy3BBDhY1UkHvyCK23PzsevNEb2uznxMo3UIJJJL77K+vr
0G5o4oozVo5ivcLwCKquNyketaEqgxmqB44raD0MZojU7kU+op2ajTIXHoSP1p2a0RmO7ZpSabnijpTAWlBpuaXtTQD81JEeagzUsX3qYiKSCTzGxjGTiitDj3opAfL2kEt4hts54WTv7V1czk4BPTpmuR0bP9vQMOBsk/lXUyEk896ICkya3IL7WHarYSHuCao2+TJgEk+1XAj46H2ok9Rw2HhYegUZpcw9Qg61H5Z/yKBExJyKm5Vh4aMc7eBSechU4UD2xV/SX0+3uJG1Oye6hKEKqSFCG7HispIQzO0ayAFyQGbIC9h0+tSpa2BoV5woJwB+lYWuXZTy8SFA3zBlbkkdq2LiLELbgCMdDzXOakgktzHcE/umOw9wMcVdxWKqahJMqj5yowSw67a9z0+63aRB1H7hPw4FfO+nSOb6FNyqqjvnDY5Fe62EjtpkB3YzEu4Y9hXDjNYo7cGtWeX+PJN/j1TngvD/IV9Hxn5R9K+bPG5H/Cag5B+eH6dBX0ipwox6VUNIIzn8bJSeKYTmkZzioyxA4NWmZtDmqI5pWkIHNMzmtEZskgOLiM/wC0K2d9YcZxKn+8Knvb2+hnCW1iZlIzv3ADNYVk20bUrWNJ35pFbnrWDNqGt5+TSR07yD/GmxX+vNIobTFRe5Jxj9a5/Zyua8ysdEWIFV5g7xbgCQrDOPrVa3nv2+W4tY1U/wAYfp+HeluWPk4B/iXv7iolo7FxV9iUNkVLbRBgM45rLu7ie1t2miha5YkARLxt9T61jR+IvEMMsnk+HpZkz8mZQoH51lUTtcqW1kdNHZRtE4wOJW/9CNedJjUxcy3G0MJWiIXgEAgg9625de8TpE7WuiozOxLrJMP3Z7jqM1n+G9Nv44Ll9WtFhma4LoiOCCCBnoT3qsLCLcl1exzV3Uik1sUbiUWVvJGtqTFIMOWJOOMdhVmw09dfv4Zb2HZDt2jYzAlQDz1rqUsra6kjinhUoTjoeP0rdttMtIJGeKBVZVCkc9uf617MY04Q0Xvdzzm6s5ay0MXTNFigjlWwhWISRABnye+Tk556VDrsMRntEe2V97nO4dcha6uIqqyAYAWUjAHbiqs4XdYvgZJ67cntWdGKg+Z6suabVkzyqVEg1Hyo1VU3A7QOxra8LrG9nH5kzqsdpGcJIVzglecfSsHxPBOnjCQxXAjVZF+XaOAQMgGt7wko+yyEkZ+zsucekrCs5L39Tqh8Gh0cQsi3MsmO/wC9f/GkewsLiF4o2lIIbCmV8nj61WKttDmc4P8AdFXbDYxx57E8gqR14q013RFp9jO8P2Vs+lQymGEyMuHLNgnBNaxt4o48iK3bA6ZGf5VleFPs2meHnEj+VDDJIWaQ9Pmz1/Gp/E+ri10dI7Jw91fkRW+w5zu6sPz/AFFLlpqN2jaKnOVkTzW8QtdyQpGQ4+6Mcc+wrktZXayHjGCMfnXUixfSvDUFrJM0skUa75HbOWzzj2549q5PWWzEG3Z+bgY4/wA815mLjasmlY7KL/dtXuZ74jRjuHr16Cs19TtQOJSefQ81f6q+QMccCqbQxL0ij/ACnDzHK/Q2fBmo6fNr5tbryXiuImAWZRtLDBHX8a9EuJdO0i0YiS2s7cHJwVjX8+K8YvQn2OXaoAC5HHvXI3ajzo8IOntXbSs42OSpfmuQ6w0dxreoTo28PcyOGzncC5Oc/jWPd5jk2eo7++f8K13jw24Dmsi/OL84OTtH8v8AP5VukYsqtzKMduw4rc8N2UOq+KtOs5oPMjLl5I3GVZVUsQfqRWIxO7rkdBzXZ/C61M/iO7vCPkt7baOOjO3H6K1AjD1bw+PDHiSWzZS1pJ+8gdxkOhPT3I6H6Z6Gt20gtZCBshLcDbgcfh/ntXdeLtAOv6KYoVzewHzLbsSe65/2hx6ZA9K880u7geCMXOEcYUOwxn0zx1/wqosRtxWtuhGyBcqO4q04IXCnjH3T69v5UyMEDcGyPUc9qkd9qgswx796sQ/SLr7JrVtIOcuEI9m4/wA/SvRyOMY+teXw7o7iF8FdsgJY8AYP/wCqvTuDg561ianKt8hYZ6ZFMY5BP61NcjF1MBx85x+dV8/l2rzmrM9VO6uQZw3cjHSmHBUenTFOP3sYHfrTTnviqRDKOoRg2qk9FkU5rz3xlCY9auBnOUQjP0HP6GvSL4ZsZeegziuG8dxf6fFJt/1luB+IJrSk/fMK6vE4RgQx7Vf0NpF1uz8p9jtKqhs4xnj+tUXGG5qWymNvfW84/wCWcisPwOa7nqjhjoz1v+y9Tk+9dpj/AHzUEvh/VpgTG+9QMg7gNx9OTXTo2QRheBwR3+tP1G7ksNIe5iVWaMLw3TGQD+ma8DF4ipR5VTSbk7amGdY+tg40o4eKcpysr7fhY42PwtrEkgWVSi4wW8xTj9e/9KjXwrq5YboH+9gnzE6c89fpWqvijUgiyPFaCN3ZFIDZyAcd/WmnxTqYiZ/KtOIRKBsb1xj71LmzD+WP4/5nD7XPv+fdP8f8zJPhbWvK4tW3ccGZPTnv605/DF1CVE+Yyd38QPHbp+tbE/im+huFTyrYoQjZ2t0Jwf4q6DVQDaqD/fH8jUfWcVTqwhWirS7E08xzKji6VDGQjao2tL30+b7nAN4XWeEJNKQSTkL+leeXKFHAbqPl/LivayAeh/SvI/EEAt9Xu4/Sdz+B5H869rDybuj6DEwSSaM3ONhz0/xr1m2lE9rDN2kjVvzANeS5/dj2NeneH5DNoNmxwf3e38iR/SrxC0TIwz1aPZfh/P5vhry+P3M7p+eG/qa6g81wnw2nzDqEGejI/wCYIP8AIV3VZR2NZ7hSZooNWQBORiqUg2sRV2q1wMHNaQZEykOJHHvmnUjcTfVf5f8A66dWtzGwUd6KKYgpe9JR3pgO7VLD96oalhPzU7gW8+9FL+FFAj5b0Yg65DjsjfyrppDn865GzmMWpI43ZweldL9pLMmSMEYIPrSgxTRes3VJCxPAHSr32yIZAf8ASspXyDg8Y61ANQtHufs4uIzKDjbnv6VbimTFtG39rj7Mfpij7ZEBwW/Ks0nnOajaBCGPzZ9mNTyofOyx/bMW5gA5wSTjginwahCqksSN3IGfwrnZEaGZj8yPvB55AGf/AK9RSPtmJZiAEJ2j8z/n3qeVF8zNu51m2kcQNFJmQYDZHX/Irl765ma+eJV2EKVOe/pUck4a9EpI8sZON3I/wqbUVhusPDIPNC5fJOce1TYdzPRWikSWMbgi5yB36V7jpsiHSbYc7jAn54FeO+FHZfElpGxby3cowPQggjGPSvW9PXZYW6josagfTFcOLlsj0MHHdnmvjY48ZBuTiSLH5D/GvpKP/Vr9K+bfGpb/AIS9gBkAxH9BX0jEf3a/QVpH4EYT+NjzTDTz0phqkQyNqQEYpWqEnmtYmTHOcLnuK2ElVERmOQR+fFYU7HymAznBxj6VPBKjRRRXEjRyJ0BON2f/ANdRVsa0jUkvgCoRSMkDGMcGo2vjtJQ89RnvzUH2aBiD575AAGG9KQmCDMKLLKW/hHOKytFmmqLJl
zO6gDG0E/WoLj/UHPqD+tWVREBwMZ5P1qC6ZBbSfMOnrXJUfM9DeGgxSdhz60REsCB6muL8Xanf2F80dtdyRoUB2qcYrE8P65qL6wsE1/O6mJiQWOMgZzSlhZVIp3CVVR0O6aUh7oZ6Sn+QqbdlWrhNIvbqe4tmkuGbdI4cySEZ/wAa6jUdVXShHugkuPOO0CHkg/8A16ihRdLExTYq01Og7I37G1NxIgYYBJOSPT0reRNplHuP5CuV0HX2uL2OzlsJ7Z1VnUSnkjFblxfyRXCNHAWRl+Y54ByMf1/KvXdWLdrnmcjSvYtxt+8kj/iL7uB9KrNljbKCQdrYyOhzWdJq0guZop1iiuBP5CMXJUlk3J0H0FV73Vb6LRdOkS3i89pwjsMlV5I+vUUKpFu1w5Xa5yPimBh4ikbndhSeKt+Dm/1qFc4Wccj0lz/Wq+vzyT6lOZivmqxX5RjODVXRdWOkiWYQiT9/MhUtjrtNYynzSOmEeWJ3csalF2JHj1CD8qlslUXCnaoPrtFcqfHmxTjTU+U939PwqNfiJJvJGmwjJ4zIc0KCve5Tm7WsbejRQXOnXdtcwrNC1xKjIRkEZFV9K8Kw6brQvFnlmt4kK2tvLz5JPXB/l9axbDxBNaxBo4YsXN2xIbPy5GeKvy+KbyBj5cUPpyCf61H1qitJbotQqpPlejOm1WTzNMnACnC5OGBPUVwWpKvle24YyOPStQeI7u6geJ44ArqVbauCM1jaq5EJbndxjHbmufEVY1ZpxNaMJQg0ygCOfdRUbjiljKh26fMuRzSN+mO9NIq5VuVLWso/2T2rkLhf3sZI6L255rs5BlGXPb1rjrkESqDkcGumic1YjKZGAM56Vg6mSNSZRgkqua6PGY856egrnNQB/tSQ88AfTgV0GBRnIC9dvFeofCa326NqNwVw0twqjPoqnH6sa8zkGCMfKD2x/hXr3w+j8nw3Cp4LqWJ9TvP9CKVwsdgpwy9uRXn/AI00ZdI8Rfa4UVrbUsvsPRZR94fjnd07t7V3pbdk1neP7E3vhVp1GXs5FnAI/h6N+hz+FVElnA2u0N/o072755Rxx+XT8q0Q8qKWkUE45eLG4fgfxrNgIniXeACRnOMgmrKNOrhAiEKcc5xitBDXaOWTehlfbzuznH1Hb616hZ3Ud5ZxTxMSjL+IPoa8785VHyxp5iDP0/Gun8I3Pn212DhWWQZTGMZHX8f6Vm1Y0WqDUF26hMM9Tn9Kp5x0NX9VGL8nn5lB4qgeD/hXnT0kz1KbvBELdTnpnoDTpZpLiQvK258YJCge3amkgnPtmkwcnJHFJAyKcbreVepKH+Vcb42jEljpc+Mgoy8/Qf4V2xAZSPXiuR8UJv8AC1rJ3Sbbnv8AxCrhpJGVVe6zzJx81N9KknG2RvqajFeieb1PfNMkFxptpPgZlhV/zUVZ1NVOkyLIAVIUEH6isjwjP5/hbTnPJEQT/vkkf0ra1C2lu9MeCEoJGClS+ccEHnH0r5rMWoVKbeykebxHUjCWEqTdkppv00ODnjkaG4VCvkJcAx468/8A6xRAvmW9v/twSRHn05raTwzqccbRLPbbGCk8t94fhVlPDUyMn7yLakrOBz91h06V1fX8P/Ojt/tvLv8An8jk5oWWCzkDs37sscnrgg4r0TU8G2XPTeP5GsGHwjMmBJdK6jO0c4AIII/Wt/Uf+Pdf97+hrixGIp1sRS9m72b/AEPHxmNw+KzLCewmpWcr287GP+Oea8z8ZweXrcxHR1R/02/0r07GTx+tcD49hK31vLjiSFl/75Of617VB2mfWYhe4cUPukV6L4Pk3+HkH/PORl/r/WvOh3HtXc+BZd1hdxf3JA2PqP8A61dNde4cuHfvnrHw7l8vW7iI9JIDj6hh/ia9KryXwdN5Hiq05wH3IefVT/XFesmuam9DpqLUSiikzWpkLmophlM1JTSMiqW5LM2biSJvcj9KUmi7+WPOPusD+tJn1rZGLHUmaTNANUIXPvS5pmfegGmIkBqWH79QA1LCfm4oAuc+hopRnFFAHyhCSbxSp5x9e9bZmVVAz83c1hxuyyhgw5XH6mp0bfKN74UH7o7+1ZxdmVJaG7HLGVkkeQRqq7dxPHSuRtIjJqsUKFT+8A3DuB3/AK1om7lvNTMNsodEjwUYfL78VTv0khnN2MRy5+6Bjacdqpzu7EqNlc7npyaryySZaOMruxkZ9K5/TfEMly4iuiAw/iA+8Pp61duLyOOZZISXK9eOorTmRnykOpaqRJ5JA3Ac4+lVbe9jMCb+vzJuPoelZ125nupJsbi397qBUOW25xnHQnrWbZokW7cK8o3Hcdwx71ZvjbRSKpYI4GAQP1zVLciWyyLjfnB56VUnYvJ8zKx6Ag0rhY1fDUoXxPYAc5lA/WvZLQbII19FxxXimhOYvEFhnODOmRntkV7bEOg968/F7o9PBfCzzDxuWPiuTCkgGLPtwK+j4v8AVL9K+dfGeT4nuz2URf0/rX0VAf3Kf7o/lW0fgRzT+NjzTDTzTTxVIhkTdDULdamaoW4NaoyY1jhc1pKsc2BIisOwIzWROcQsR2FU4ddmiVVKoxAHzHvXPXjJtOJ00GrO50j28MRBjjVSeuKntnVkIB5Bwa5qTXpJf4UHvTRrtwnzIEHGOlRKMmjXSx1E8XmW8iZxuUj9K4H+zxEwkMrnaQx9Mg1r/wBv3zAgunPoorMvZWe3l5IyhyPwqqKcE0yJK+oviHQ7nXNQFzYyRyW/l7GIIPzAn396wtP8K3q6xIEb94sbocrgcjHB6HrW/aXcsEJiicpHn7q9KmgkdrhRvbJPXNQueKsi3GMtWc5H4furS5jRpV3W025/cHB4rb1ViYbU+k8Z/WqhmmubqddrbwQDjOTjirM8dxNawxLbTFw6ktt44wa5MSpNqTN6XKk0jYQqnjS1VOjQMP0Fb24SJKI1xu28D6kf0rDNpcHxTp90qZiRCrHIGDtrVtJyhQEZDlhnI4wzH+tKlK0TCqrswdUtrs3PmRwu5F7aSdugXBP4Vr3cEsmmRReWQ6XKvjPbexpup3MqiGSKJWRmXeS2MY6VK+pwsZQ7AZAAxz3P+NJ17MI0W7NHFeJrmO01W5J5DysQV+tc7JeGCFwIy4kuj3xjMamtLxSHmuyLckqGzzWNcRs9tt25YTxk+2Y8f0rqoyco3InHldileahIiP5ahTjIzz2p3h67fUnmSYgNFAZFYDGWBH6U24snlJxG+QMcNx/Km+ErGcXwJOxJImXPU4/H6VtUuqbaM4fGkzrrmFIdK050z80ysxz3NF5wSc1LcxCTQIVyw8puCPUGqV3C5JJuJT+IrxoPmep6LVkTWshLcelN1KZjCwPIx2HQ1VtITvGXlwOnzGpdQfMQzngHp3/GuunExk9DJW4c3G3zGwM459O1Pa4dsjODUJ/4+uSMY4pDxx6etdyijm5mOMzc5bH0Nc/qKbJVZR1Tp71ud+v0rH1YHEX0I46da1grMibuismBGoBNc/qAB1KXrggcg4ro1yqrnoRxXPajgag/QcDmtWZIqMp3
fd5/+t9P8+9eveCFK6LCpGMRkf8AjqH+ea8lWCSadYoxmR22qB3Y8D+Yr27R7AWEstqBkRTvH6ZGGH/sorJzSkkaKN02agxjseK0xGl1YNDIuUkTa34is0D0A9K07Y5t0+mK2iYs8cigk06+u7Fwd0Em3Dd17H8qsm9hVcSAgZ6+lbvjjTIotWt75lxHcjypCOzDkGuYeDyiEuCSOit0H41YjQheORD5ZByOvfmrfhG5eDX/ACGbIlVom+o+YH9DXP8AlPaN5sTBoz1xnHX/APXWno86w+JLOccJI65x78VEy4nZ60NtxEx4ymPyP/16zG6571s62uVgb0JH8qx8ccE151bSbPToa00Rt9Oc03HrmnsOOvNGMeo+tSmaDAOn5ZrmtejB8J3yYz5U+QP+BD/GunxyOBWJqUHmaTrcPUgb+fpn+lVF6kTWh5BdjErdeeeagq1eL8wPHSqvSvSjseU9z134eT+b4VROvlTOv8m/rXXs7KSATjrXA/C2Xdp+oQf3JUf8wR/7LXfspyO/FeZiIRlJxkrneqFLEUVGrFSXZpNfiQGeYZ+bj6VGbqcfx/oKey4OOaYVIHNc/wBWofyL7kY/2TgP+fEP/AV/kBup/wDnp/46KglkklwXYsQOlPK9vTtSMMg8Y/CqjRpQd4xSfoaUsvwlGXPSpRi+6ik/wRBs+bn17VyHjy2zY2svJ2SlMgf3h/8AY12eBnNYPjS387w9O/8AFE6OPzx/ImuilK00bVY3gzyNeG+vFdZ4DkxfXcWfvRhseuD/APXrlH+WZsdmroPBcnl+JET/AJ6Iy/pn+ld9XWDPOpO00en6VMLbWLKbP3JlJ+mRXtJrw7aR069q9rt5RPaQyjpIit+YzXFSZ21US0neig1uYCUUGkpoRTvEzG49Qarhtyg4681dnGVzWdF/qlGfu8flW0djGS1JAaXOKbmkJqyR2aQH0pufWlBpiH5qaE/MKr5qWE/NQBezRTN3uaKAPlNB82R1x+nNQTSFJjtJ56VKcK/YnA7/AFqrPkScDr2rBblvYu+FSDqU565j9M96u+KrZIoIZQW3yOcgnI6VH4RjIvLo/wAOwDP41P4yf91Zjp8zE/pWTb9tY1SXsrnLRErIpGeozWoZtpAOQOo7ZrJRix2g5HSrIDvjBwK6DAmkbcd3c9xxUZ9uR2pQUWMgsdwHBIqIkgkn0xSHYfyIGwTgt0pgVQvyvkjHapVcCzmB67lwBV/w3ZQX97KlwhZBHuABI5yKG7IErsj02M/25Y4wcTocg+4r3FB8+PevP4NIsraQSQQbWUhgdxPP51tpqd4X/wBceT1wK468HUasd+HmqaaZyvjg7fE1yM8FYuPXpX0Xbn9xH/uivmTxZNJLrweR95JQH34r6Ztjm3iP+wP5VqlaKRzyd5tkppp4FOphpolkbmoHNTP0qu55rVGbIZz+6ce1c2G4ArfvJRHCTjPIFc734qJmtIlJI5pysSvWmqpIB7GrbWojtY5/NibecbFbLr9RWdzdEAOB1z+NJPloJf8AdP8AKpdgyaSQAwyAcfKaVxjLY9fTg1dtx/pKEcYOao2pG3rngVZViuGB5HepbGiRWEWrXTdBhT+prQe7RIVkVgVCcgfSsKck3RY8koMmpgf+Ja3rsNcWK+G5rSjqbkuqRRmAOjM2SODjqpFYmk6nctazxmQ4SdwvrinXZz9mI/vrn9aoaScfbVOeLhv6VyKT5GaOKTNZpWf77Ej3NKjfI3HcVApyDjFSIf3b/h/OsJGsTC1cj7UxPqMflWXNuBl3DB3wsf8Ax4cVf1rP2rjptFZVz/x5yN14iOQf9s/417OH/hI8+svfZO63BncW7qpOM5I96r+Hi8d/CjnlS6/hk0fOJSdpI2jPFR6JJnVFIGP3rD8a2m06bRlGL50zq350eYdg7/1qldf6sHjp1q2Du027XPSQ8fhWfO+YEPByorx4LU9GWxFAxDrzTr/m33HHvUEZ/e8c1NqLYsumSPSu6ktTmlsZJ/4+0AB6AfpSE8jvQpH22A5+8vf8RQRg12o5xuPzNZ2qLuto8cnzMfzrRI6f4VVvIw8Q4GBJyPqP/wBVXHclmbKwSaNQeg6VzeqZF8w9gc+3Stu6cnUSOCF4rH1hP9M99oOBnPOe9avYzRt/D6G0m8baabsqIY98vzkABkUsufxUV6/Cq/2jdMGVl+1ggg8fNj/4uvFPCiebqUTdGjjY4PfjH/s1epaZfbjNGcAq8bjA6fdH9K46lN86nc3pyVuU3JU24IHOecmrVo2YBzyCc0y6Q+SSMDvTbJvkYdwxrqpO5jVVin4ssP7Q8P3CquZIV81Pw5/lXn0Mhmt1fILYwQ3evWCQRzjB7GvLb2xOk6rc2nQRtujbHVDyPrx+orYxKhmjQlXiK88nGcVWeKVJY3t+YQd6sD90/wCFaG5JcjK+Z2HWs6azMbF1ymeu3pSkrlJ2PR7qYXmkW1wMfPhuvqKzdpAxg9aXQ7gXPhhYurwPtbn3qRlGfb0rzMTpM9XC6wISuRgUm3FTmPrQE7dM1imbtEO056Vmyw+ZdahARxLb/wBCK19v0zVGRdmtRekkBXn2P/16q5LieHXq7dvUkZHIqiOtbGtwmG8uI+PkmZcfQkVj16sHeJ481aR6D8K5calqEH9+JX/I4/8AZq9SK5UV8+aVqs+kzPNbyzRSMu3dE+049Oh9BWm/jLU363d6R/19MP5AVzVsPKc7o66GJjCHKz2p0Oc4596jZdvJwP0rw9/E1+5y0szH/auJD/7NVaTWJ5fvojf7xZv5moWFl3LeMj2PcXnt0ALTxJ/vOBVeTU9NjGHv7Ve3Myj+teLyalEbRES2VbgH5pCFIP4bf61B/aNz2ZB9I1H9KpYTzIeM8j2Zte0dcZ1O1z3/AHoP8qydd1vSLrRry2jv43kkhYIqgnLY47V5d/aN3jAncfQ4ppvro9biX/vs1ccMk73Ili21axHOMSng+vNaXh2UReJLF+xlC/nx/WspmZzliST3Jqe1mNvd284PKOrfka6JK8bHLF2kme1MmQa9T8OTi48PWTZziPZ/3zx/SvNPLDDOcjHUeld94NkLaGYz/wAs5WGPrz/U15tJ+9Y9StH3bnQe9BozSV1HIFJ3pab05poQjjK1kj5ZZV6YbP51r/pWTcDZesP7yg/lmtYETQmaM/rTM8+9GeelamI/NGaZnrRnikBIG5qWEjOar55qSI/NTAu8eg/Kio93vRQB8tAAsOnAH51RuOH5I6ntWpasPMO6AP8AKMZ5xxWbcbWujjAOe/FYLct7EcVzJAG8tyMgZ59Kjubye6I86Vn29AWJxU/2fZA0jMG47cjFQ2kayzEODgDoKvTcnXYZDgHkc9sVZBz/ABAYBqCMKtztk+6M5H51GzHv1oAlEueAeBxT/KmcFlQkeuKhwPJJ29elWtPdlkC7uGU8delIYscL/ZLguhRhjBNa/gsMfEttBjcs58t156Z7VUkWaWJo+AQAAemMVueAYHtvGWnM5XaJVzj61nXdqbZpR+NHqM3hiKF9rxSxHH3W44q
r/wAI6suoWVrAfLa4fZubJAOK9C1Ux6hfebFkrtAGRXO6yH0ybT71BlopwwX14PFfPUsTP2/IpXR7cuSVH4UpHjfxG0ebQ/F8ljK6yOnltlRwQVBr6ItD/osX+4v8q+ffinqx1rxlLeGIRDbGuz7xwBXv9mf9Dh6/cX+Ve/8AZR4tmm7lhjTT+tITTSaaBiOeKrOeanY1XfvWiMmVLtN8X4g81gqAev8AKtu9k2RjgHLAVjICp3YzWdTQ2o7EqdAo9etSqjbwnGSRj5qiXJOelSRnafvVm2bpMnuraaznMUu0sADlTkcjNQOGdWABzg1J5jFSqqc+wp3kTMpKo+cdlNRzIqzK1pEwRQRj5RWhbWkczsslwsIC5DMM59qrWtncNGhKS/dHOw09jFExDSxqRwQ0gB/WldMaTsVZyBOvP8J5/Gnr81g4P901Wuru3julY3EG1UIYiVTjp6H9KZ/bGkLYyltUtlk2sRGWySe1c+Ii3HQ0pySerLl02be2bnqn86pabk3F8v8A02z9OBV+6wbC0eBhIjojBsY4NZsc00N1deVsJLgnP0rlhTvFo1k9TTAYdjipY4meOU5xhc4/EVz+p3up2DAOyjcNwChTVO28TXFs0r3TGSMoV2qVBzx71X1STV0Z+3inZl/VYibpCe4FZl9+4t58Y4jU5+jj/GoLnxTDNOGe2k24xjeAc/lStdpqFm8iRtGrQyrjcG5BU13UqbjBJnPOcZSdgvtVNteCPk+aoT86r6P8t4zEYb7TwB2HGf51dAhBQYzlc8ke31qvYskd7OSAp80EEuB2HtW3KuVmfM7nRo4FveqSB84Izx1FUGBktkCkE7RxkdatPc6eiTI01uFZsjfKO4x7VWGsabBbxobq1TjAIk5/nXmqlK+iO1zjbcrRxSCbkY9TVy8gMloVJHIqm+u6exz9uRl65BJFPn8QWzxLBHcE5wANpGa6YQknsYylG25nsm237FgMAgdOTSMjtwEc/QGo7i9eGAlZHUNnoT/nvVOXUXCbv3zjHAGST9BmupXOe6NDyJSP9RJx/sGo3hkEblkOFwfpUU9nrsMSStoV2UkfYvzR/ewTz83HA6nHb1GY47XW97M+kCKFf9Y73kOVHrtDZPXoKtRkS5ROckbN+4yOvT8ao6mN12CTj92uf1qyj/6TcNnjOMe1Vr8/6WCBxsUdOnFaS2M1uXPCnGsCPcE+R8lv6/lXdWcqx30qiVctEMYzg43H+n6VwWivs1hGVsZVsflXT2UxGswAnPmIy8+pB/xqLXRXNZno9xqOV8kIMYAz36ClsHBLkdQRWTvDCNvVFOc+wq9p0gzIMY4/CtYxS2M5TctzWDd647xvbBJbO+GBkGFzj8V/m3511m/rg96xfFUP2jQZ8DmMiQEe1WQeeTwrIRtJR/Ud6h+1X1kv79RcQdyPvgf1q5EFfAI+bqKdNExj2+WzA8jAzQM1/DF5E8k0UbZjnj3LjsRiuj8kkZ/HmvMtPlm0/URKhKmN8snOCDXQXPxU0y0keJtPu2kX7wJRRn8zXnYqnOck4o9LCVYQi1JnXeT2xWbql21lIgVM7hnJrlZPjFEufK0j/vu4x/JTWXf/ABTlu3U/2ZafKCBudm/9lFYRw1bsdEsVR7nUnW5t2FRaal/NcahbMsabwGA3OEHPqTxXES/Ee5cD/iXWXHTKE4/WoT8Rb7IK2FgOf+eJ/wAa0+rVX0MniqXczvEx36vfHABMzEgHOCTXPnir2p6rJql7LdTRojSYysY2qMDHA/CqLHJr0KcXGKTPNqSTk2htFLShSegNaEDaKsJY3cuPLtpnz/dQmrSeHtZlGY9KvWHqIG/wpcyHyvsZtFbS+Etfb/mFXC/767f51YTwN4gfraRJ/v3MS/zapdSC6jVOb6HO0V1afD7WGIDy2Mef71yp/lmp1+Hd4eX1TTQP9lpG/wDZKXtqfcr2NTscbTuw+tdbP4CnjUeVqNvK2eR5bgY+uKr3HhC6RI/LmEjYwwEZGPTk9aXtoPqHsZrodLbfEKwjsYI3tJ2kWNVY71GSAM969G+FnimDxAupwxQtEYDG2GbOc7h/QV4gvg2/ZiDJEo7bs5P6V6H8KrM+FtcunvbyDyLmDYcbuGDAjt9aw5KSd47nQp1XpLY9ypPrVEaxYH/l4A/4Cf8ACl/tWxx/x8KPwP8AhTEXKQ/pVM6tY8j7Sn60f2rYn/l5SqQi3WZqOEuYG/vBl/kam/tewPH2qPNZuq6xp5hiYXkOVkA5bHtVRepEloP3UFuKzTrOm9r63P0kBph13Txx9oz3+VGP8hW9zGxqbuKNxrHbxDZAZHnt9IW/wqNvEkGCVtbyT2WMf1NF0FjcDd81JE3Nc2fEeMFdPuQP9squP1po8TSBgFtEU/7c4B/QGi6FY67efSiuS/4Sx+8EQP8A11P/AMTRRdDseKWdwkMp8xgPu8fhVK8EJkdhED1P3jyaswWq3O4uXBGOnPapDZpvO4k4HNZJFPYzUbOnscEY3AYFVLd3VmKDn1roRZRG3CKp+Yk4GRVOytEW4YbSGA7/AFp3FYyVDSTdDuNGGXIYYPQj3rZS2j/tEjYD82ee/f1qGWONZWDIu7djkd6XMOxlhn28ntUtpv8AtEeDwDV8AKAFRP8AvkVbt7hoSfuEHr2PfilzD5SNblScbWyT2Wtvw5qEGna7ZX0wlaJH3MFTJxVeGWCRvmYgn3xmux8NarPDe29tbR2qbmA8w26lsfU81hiJvkasdFCF5LU9t0O2i1LTIL6NyscyB1VlwQD607W/Cy6taxxCcRbH3Ftmc8fWn6Pe7bVS4lkkxywTg/lxWk2oNt+W1lbjtgfzNeZhI4XlTa1HVnVVR6nyn8RrX7F4jdQ24HacgfWvoKzYfY4f+ua/yFfP/wAUJQ3ix9qbBtTAJGRjNe+WjD7HAc/8sl/kK9Ond04tk1LKoy3u96YWpm73pCwFaIyYMetQOacz1Cz5zzWqM2UNSbEIxnO5f51jHXY0BXzLQHpjykzn8a1tQO6EYHRgf1rz99Hv3uZWWW3VWckbpO2fQCsqtOMtzSjNx2Oo/wCEkWEHN1EuB1WNB/Sqb+NX+z/aI31CWL5cPHauAc424OwDnIx9ayV8OXMoxJd2q/8AAm/+Jq7NpWqz6RJp0uu28ccihS8Nud3BzkHcPf8AM+2Mo0aXU2dWp0HP4v1F2wuma63uYig/UioW8Sa/OSINFuT2/e30aEk+xaov+EZuSSX8V3oJ5PlW6J7djUi+E7aQf6R4i1qXHf7QQPp1rT2dFdCPaVWWbJvE93p32mG305FVjEQ92ZPmBx1UY6+/Y+2eMaaa4mlld9plbeVVlIBPXB5rtofDGk21r5CajqTRlixXzlwSeST8vJOBQnhnQIlwEuCOgDS4/LAFNSpr4UJqct2clGzCGcBiTlDk49/QVj3Clrh2LANjByf/AK1ep6VpfhZNRmW+jZbbyQRukc5YH2NazWvw2Ry7WSyN1OVlP8zWkUmrmUrp2JfA+m22t+C7SS5mCXMI8t9jbgAOV/Qitt/B2mM5K3kyM3
3irDn8xWTYeI/BmhwtFpljJBG5yyxQgAn15NSyfEHRFGUsJ2+oUf1rklhfeumbKtKwl18NdPvZg8+tXZA4UAIMfjiqr/CbR9oC6pet774x/wCyU9viTYg/utLcjHeQD+lQt8S8/wCr01R/vSZ/pVqg0rcxDk272Jrj4TaP5CmO8uvMA5JZev021T1LwRDp95p+mae8rxXMU6tLINwRioxnA46fpSr8R7qSUIba3jBP3iGbH6iqs3jm/uLttogDWu6RCE6/KQep96fsrfaYKUuwJ8JLpY1YavBnHQxNx+tQD4YzwxzRG2s7mVkYCc5+8e/Iz/8Ar9hTrj4gaqdPSWG+j84ttMYhX8+lZr+OfEUh/wCP9lH+zGoz+laqNupOo8fDXW/LSMabpQ28hmcYOevGw1ZX4deIDs/daJHtAGMNxj6KKy38VeIZM51G7/A4/lVdtc1yb719eHjp5rVS5RWZ1dt8PtcjtDBJf6eMuXJWNzjJzjt+ue3oKZF8L2iKtNq6DbjpD/8AZVx0uoagx/e3Up/3pT/WohqUaN+/vIlwOjTjr+JobXYFHzPRR4E0+QiOXVcgZ4VF/rmkbwDoqId+pyA9nREUj3z61xdneARiWK8tWDDI2XKFiP8AdBz+lWk1gyDG4nH4VyVKdSUrxdkdEHFKzOr1Hw/pd7bpBdeIdUeOKRZUEUsaEOM4OQmR19axp/C/h1EdvtmsXEiglfNvnIz1HHGeaz/7UiVfm3hvp/8AXqE6vHuxuU+xbH9KcadTrJjvDseeRALcSjcS2efc1XvXD3rRt90oDn8KnusW+rTLt6sep61Q1QSNcKyH5vKDemOtdclocy3Og8JtFFrsXmxJI4V9odQwPyk9DXaXFxiWNlhgQK44RAv9K8z0W+c38DptEofad+SPyFdhLdXci7VktgeuPLIz26/56VHKXzHVW19iGNWONqAdeDj8KswapGkhDEkAc1xaajfRgb7cSdv3bA4/PFTQam6s3n29yBjgCIt/LPNF5ISUWds2t24zgN7cVXutZjlgli8pmDKR17Vy39pMwOywvmz0xEB/Mimtf3WPl0q8PPU+WM/+PUc8h8kTMkLxnEYO5eBgZzT49WkjTyriIo54DckUeY4nPmQGFm5EbEE/pU6r5kWx4xjoAetbLVGT3MRS6agxkfezrnIrM1Hw7canehrQxA7fn3naBW3qf2a1aNI87xyOc4H9Kl0x2+1IAVG/16fSs5trY0ik9Gc0ngPUm63FqD6byf6VYT4fXZOHvYQc4+VSa9GWxcjJkUZ54Gak+wrnlj+Cmuf20zo9jA88T4ergF9Rb/gMP/16nXwBabRm6uGPsqiu+FmoGSzdc9BSC2Azwef0pe1n3H7KHY4EeBLRTnfcMPQ4/pU0fhS0Tbm0Q7fUnn6+td0LZM/cOR71ILaMZ+Xv3GaPaT7h7OBy0OmWi4/0C1H0t0J/9BrWt4oEGANnsihf5CtX7OgPTt6dacsC+1Zyuy4pIpqtv/fnP/bQ1IYLVh8ySt9STVry0zkNijCjuOveocWaqSKotLMcfZ//AB0Cni2gH3YAB9Km3RgffUeuMc00vD180D6Cl7NhzoaY0UbVQAdBgZphQHG1fbpUhniH8ZI/Hio2uoxxhuKpU2S6iGNBuyQDmo2tScYH6083gQ5CMf8AgVNOoNjAVfzqvZMn2iIzYMejAe1C6U28MHj4PTBpTfydemenFNe8lI+8fyq1SZDqI1/7R1UHPmwkD/ZPFB1jVQBiWLPrtrFN1L0MrY9jTDMxH38+1aKLM+ZG3/bmqqR/x74/2lpT4lvlxvWDv3x/SsFpMk+tRscHp0q1ElyN+TxVdbRtSLPc9f6VQufEN5PBJHiPcRxlAVzn0xWYd3Ixj8ab0yOozVKJDY86vqWF2i3Q+giFRtquqk8TqpPpGKaffPPvTDgdyR2qyCT7dqJ63sn/AAEAVG810+N17dYHOFkxn8qTngdqTBDHrn2HWgBpaTOTNcMenzSsf60RxRkkmPNBJxxnNKjMDnJ/xoEWCiA48tPzopN3+0n45ooGchYRuN4ELtk8YUnsOake3umYlbeXHXiM/wCFVGupshY7hwv9xHOOPoaasd1OSAk7/RGP8qzuVYurb3anJikH1XH86rPaSoxkURqck4MijNTxaHqc7jytMvJDg/dgY/0q6nhLX3+5ot98uM5gI/nS549w5W9kZT2rM+8TxK/Gcyr/AENLJbpKo3XUAbuQSc/kK2Y/AviiYYXRLoAdNwC4/Mir9t8OPFbx7DpjRg8ndOgB/DNJ1IdxqEuxyb2USjDXsZ7nAb/Co2SBOPPc9jhPeu6Pwm8TTEEm2UDHDzfn0BqzH8GdZfHmX9nGPYsf6UvbU11H7OfY87DxqoxJLx0+Uf41ct9ensbhJYXlDqcqdoxn15zmvQ0+C84wZdWiB/2Yyf51Bd/CK3soTJca6evCJb8n25Y/yqXXpPRsuNOqtUcz/wALQ8SIu1L6cJ327Bj/AMcqnd/ELxDdDbJf3LAE4/e7f/QQK6e18C6LAM3C3N0e4d9qn8FFWbbwXoMGXFgWBP3ZHLflWa+rx2iXyVnuzzZft3ibVLa2jQS3c8ixxlnPzE8AEsenNfTttZ3iW0MZt23IiqTuHUD61wWi6Ho+n61Z3wsoLcQzLIZSSoQA9euMfWvRG8Y+HV66zZf8BlB/lXTTcai7GFRSg9RPsl3n/U8/7w/xoNheEfcT8WqNvG3h7+HUkfPTZG7fyFMPjXRz9yS5k/3bWT+q1pywXUyvJ9CU6XeN/wA8h/wI/wCFL/Ydw3WaMfgTVY+M7Ij5LPUG+kGP5kU0+NIsZTSr856FvLGf/Hqd4LqHLN9CeXw28y7Xux68R/8A164bU9Mn07Vp7ZN8wQgh8AZyM/1rrm8ZTEDy9IkOf79wq/yzXnviXSb/AF3W7jVRFHbrPtVohKWwVAAOcDOcVE5QfUuEZroaAiuOP9Wp9DKo/rUi2kzjLXFuvsZgf5Vzy+H9QQAG5bjgYkNTL4cu3iLtOGK9RvJ49az9zuae/wBjb+xEH5ry3A9mY/yFO+zWy8tqcS/8Ab/CsD/hHWb70gB+nNVrrw/MGJheWSTHKqcA/T3p3gK0jqAulqCG1dST2VP8TTWuNGjB3X0zEf3UX/4quGNpg9HwDg5JyKetkCxLJj1yenpT90NTorrVNKWTdFcygEYO+Pr7YArHm123BISO4cDjOEGfzYVGltED/qx754pTaxHG1RjuO4ouhcrIf7ZVvu2s5Yf3njH8mNMbWXI4tI/+B3BGPyU1bW3QKSFTPY5wR9acscYyGWLHfAxTug5WZ51a8PKWtqPbzXb/ANkFNGqai+Ni2oxzxE7H9WFagii6jaQPanGMZAO1snqKLoOVmW19qbjImjU+q2vb8XNNSfV1+ZbqQEgg4ijHB6jkGtbAA+V8jOOtBEXzDawAHXGRRcOUycai4IN1cehwY1/UJTTbXn/Pa6PPU3DD+RFbBCkABgf0FMJRD1246dqLi5UZTWk7cGSXb/tTyHn/AL6qMaSPvOsbAf38tn8ya2TLH
tyxU8dSMU0XNvu9B/tDii7CyMoaMp/5Z23XtCuf5U9dOEX3QijvtjUfyFaX2iM8htwz6f5/pUZuACCPMB6c07sLIqCzZBjz5eewfrTTY9N0kxzzy5qwbpvvY3foab9o6kq6kehzijUWhXbToyQWyT9T1oFjGMgJjrxjtU5mYgkAMuMYqMyOq4XaTjgEU7MLoyNZXZOsigcgcjiq1zKf9HcE4KlSfx/+vWjqCm4WPAyQSCQMdqh/seS6hxGwUqwNVbQm+pn6Zi28QW0hQMm8NhuMgV6BHq9kOXhKZ6fICP0Oa4CzVmu4XBKvGSCQcHoa0y8pY7Cxxzjdzio5blKVjvINTsZANtzEi+jDZ/6EKvJ5c3MciOMdFw1eYszFMuQA3TcMdPbH+f5AQnoMls4K9e3/ANapdLzKVXyPUBB04Geh4x/KlMQ7uRk9j7//AK683jl1NQTFPccDkCXgfhmrSatq0BCs8RGesi4z+Q9/88UvZj9qjptXtFco6MN49SCazV3DAyS3QgDpVWPW5JiFkiRsn76yqMc9duauq4ckrjDelbQVlYyk7u5lXFs53SuuSWznGTgUkLmJ0cLkqR+daDxyzI3I8vPUmsxh84HUA459+DUtFpndWd9DJbLlSSo9vzqc3ic4Un64rltIuT5fl547Dv7Vq+Z6gfXHFZezRr7Rmgb0dPKH50z7UzchVFUgCc8Nj1FKCccN+lHIg52WjdSZPAHvTDcS55fH/AagBYDkA5pTu6YHsQafIhc7HmZzkFjzTTI5wcyelNwc45A9xS49iR9aOVBzMbvYg53E+5puSwz2+tSbVPGPyNIU9v60cocxCcnq/PsaAWAGM/iKkKkHr+femFD1x+tOwrjcnOdx/Kk3Mc808fLzs56UhKt1Bz9KLCuMJyOnNIZD6Z7fhUoCeoGaQKo6enpTsFxnnMANy8Uhkx1zmpCRkDjPrTDjOBjPc4osK4wv9cCjzBtyadgZ4x+ApCOhBp2FcQspHQflSHYR/wDWpGBPORjr1pjRtnIwDTsK47CnI/rTSFYHH4ZNG1wSSw+opuGxyRQAbQO4pMY79O1O+fkYXIpp4HKj3GKAGEnPXNN6cZp5dRwy8UgZTjhjx1AouKxGT16nj+7TlPPQVIFB+7ux6kdakSAkYAwPei4+UZweSoz3oqyIBjon50UrhynuS6baIoCW8KgdAIxUq26KOAPyoorwzruwMYz6UGEdyaKKpId2MKKvbNIQBziiinYpMawA7Ckz8ucCiikWivcSmKFpAAcDpXJXfmXkvmTSE+gAwBRRVQLKotELgEnmka3QnJzx0GelFFaCEFvHJlWG4Hg7uc05raCKRkjiRdpx90dqKKuLJaFVeOMDj0pzrlgvoBRRVXJsNIwcdef/AK/9KcFDQqfRiBRRTQhgQc/TPSpY4wxZSeNpP5DP9KKKYEbQqDmiNFWUMowR685oopiJprONHUrwrAMBjpntUX2JGLAnHuByKKKAMLWtNh+xm/GRIrbXH9/DYz7H/PNc8QoQfKPmFFFbQ2MZ7kTSqqL+7yGbHJ6d89PaohdssYIHV9vPWiitDO48XLsyjAzgc49aikuZE2HIw3oMf56UUVVhNjGupd4w3zA9SBTlkl8zy3kZgRu9KKKdibitlGDFi3zAfnTXLB1+dvm6c9BRRTsIYPmjU9MilWQgKjcljjI4oooAcFBbbjBx1HHShY8fxHrRRSExTCNwwSPcUzPcE9e5oopgICgbaVzu4zmo5ZvLYkAk56k0UUAQi4EhB8sA+uf8+tX4oBMuchfoooopiHz2Bgi81pA4MgGNuOv40qw7OAx4IwaKKaEzmvs4jvkIPRwDx15rtI9Gs5H2BWB+ox0z2AoorNFg+l2sEgUK5I7iRhn8M0q2NqVJMKt7P8386KKoBBa2SkD7HFknHCj/AAqw1pHBGHVI+fRMUUUhiKSwHPSs9iTOccZOOlFFVEmRnajq8ttcfY40DEgtvkYnuewx6VjnVyzyLLArFOMo23P4HNFFTLcqOxc0bWfOvPJWDZwDnfn+ldpgEDOegPBoopMaF8sEbgSOM/pRgjox/GiigBQD83P0oVNx4JHP1oooADEVwd36VEWIbk55xzRRQAhcggHBpyNuKr3PeiigCSaMp82/PPp9f8KgBJJ59B0oopgByWIzQQP1oooADCQCd34YqPb8x6flRRSAaF3AnOMUuzgc0UUANIK5weBTkBIzu+mRRRQBKIc9WB/Cl+zr3J/Ciikxirao4JBYY9TmpfsaIcZzgUUUmxpCC3Ut/DknGdtNZAoyMD0oopXHYYBwGBxnNOSPe2CR6dKKKAQqxK2RgevSnLCg5IP4UUUhjjEmT8tFFFAH/9k=" + } + ] + }, + { + "items": [ + { + "mime": "image/jpeg", + "value": 
"data:image/jpeg;base64,/9j/4AAQSkZJRgABAQAAAQABAAD/2wBDAAgGBgcGBQgHBwcJCQgKDBQNDAsLDBkSEw8UHRofHh0aHBwgJC4nICIsIxwcKDcpLDAxNDQ0Hyc5PTgyPC4zNDL/2wBDAQkJCQwLDBgNDRgyIRwhMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjL/wAARCAAoAJ8DASIAAhEBAxEB/8QAHwAAAQUBAQEBAQEAAAAAAAAAAAECAwQFBgcICQoL/8QAtRAAAgEDAwIEAwUFBAQAAAF9AQIDAAQRBRIhMUEGE1FhByJxFDKBkaEII0KxwRVS0fAkM2JyggkKFhcYGRolJicoKSo0NTY3ODk6Q0RFRkdISUpTVFVWV1hZWmNkZWZnaGlqc3R1dnd4eXqDhIWGh4iJipKTlJWWl5iZmqKjpKWmp6ipqrKztLW2t7i5usLDxMXGx8jJytLT1NXW19jZ2uHi4+Tl5ufo6erx8vP09fb3+Pn6/8QAHwEAAwEBAQEBAQEBAQAAAAAAAAECAwQFBgcICQoL/8QAtREAAgECBAQDBAcFBAQAAQJ3AAECAxEEBSExBhJBUQdhcRMiMoEIFEKRobHBCSMzUvAVYnLRChYkNOEl8RcYGRomJygpKjU2Nzg5OkNERUZHSElKU1RVVldYWVpjZGVmZ2hpanN0dXZ3eHl6goOEhYaHiImKkpOUlZaXmJmaoqOkpaanqKmqsrO0tba3uLm6wsPExcbHyMnK0tPU1dbX2Nna4uPk5ebn6Onq8vP09fb3+Pn6/9oADAMBAAIRAxEAPwD1PUNYe0umiSIMFxkk4yaqWPi22umyux0BwzRSB9v1qHWlxqEpx1Cn9K5PS9DXStRu7mJgIbgA7AOQc5roprmitDOo+V7nqquGAIOQeQaoxaxayXf2YMxcsVBxxke9PsSRZwqSDtQDP4Vy0RMWqMf+edyR+tSlefKN/DzHVXOp2tpMI55gpIzjBP8AKrYZWUEHIPIritfY/wBsHPR4VP5Eiun06YyadA2efLAz+FSvhuNqzsWUuYnkMfmJvH8IYZqUnAzXF6XIU1OJ2OW81lOfXJFb+uzPFos7xsVbjkHnGRVPSKkJK8uU1QcjNLmuc8OTuTcRs7MPlI3HOOtdCDkZptWJWopcZxmgNk1gahqNxHfvFGwVVwMbRzxmsqw8YRXN61skwaZM7o3jKnjrz0qoxclcmUlF2Z2tJmooZlmiWRTwwzWE3idE1tbH7MxVpPL8zf3zjpj+tS9HYpK6ujojTc1Wvr2Oys3uJSdi9gMk+1Z+leIrLVVn8ovGYV3MJQBx69aLoLM2KSsrTvEGn6jdva28zPKoLYKEcDvn8a1GbC5ppit0DFJVD+2tO87yTfWvmZxs81c5/OrwYMMg5pgYOvLi9U+sY/nXFRaXNZ+J1u4XkNvcB/MQEkA9c13niKwu7uON7OVY3XIJZN2RXOxaXrfmBXkt9vc+Wc/zqaS0s+5VR638jqNFcmxAP8LECsO9Hlazdr0+dXH4iuk0+38i3VD2FYfiWwvFuUv7KPzTt2SR+o7GlJ2nzIcVeDiyt4hZTeWUi/xxsv8AWtzQn3aVGM/dyP1rjVuLu7kijewuQykhd68LnrzXcaTbG3sUjPJ6n60+XlixOV5I5s/uNXmHTZck/gTmuj1dPN0a6Uf88yfy5/pXNaw4ttdnj2OWmKsgVCc8AV1roZbNkYfeQgj8Kn/l3Yvapc57w6+Lsj+9H/hXVr0rz6x1OGykjYzRB1BBUsAa6bS9fivblYFCEsDgrID09qvVxTRm9JNMq6wNuqH/AGkVv6Vi6jp0cGoafqCKFLsY3I756f1rd8QDbe2z/wB5WH5f/rrLv3a6tLaKMjMUyuc+gzn+dKHM4NLuOfLGak+x0WkSE2hX+62K4vVj9m1+R+6T7v1Brr9Hz5LH1Ncv4riMerSsP4wrD8sf0pVtKiYUNaTR0/iAGTQpyOwDfqK80gupLC8cocFg0ZGfvAjkV6dj7ZoGP+ett/Na8m1eNxiWMbmjcSY9u/6ZqJaTfoarWC9Tb8IXRXxFbvziRWQ/kf6iuu8YXk1vopWF2UyEgleuMGuC0WQQaxZyjp5ynPsTzXe+LYfN0cH+7IM/iCKp6Q0Jirz1PMRYyrYR6ijJ5Ql8t1x09816F4E1GS60+eBmLxQuBFJ2wf4c+2P1riItTnstDmgSEvEsxeXaoJAKjHXoMiut8F6ncTT/AGeUkRtDvRCoBXkensa1vdsxtZI74qGpnkrnpRRWRqPCAUNGrDBFFFICv9jj3ZCj8qnWMKMCiiq5mxWSIpLVHkDkfMOhqQJgYooouxWMW58N2E8hdraIknJOwVLpui2umyM8ECIzDBIUA4ooquZ2Ja1H6xpMWq2yxybgVOQVYr/KsGPwikcgInuNueR5p5oopxdhSV9zprS2EEQQDAAxWP4g0OXU54pIphGANr/JnPOfWiij4nqNPlWhrWdqtrZR265KxoFG7riuQtvC9wdVL3Jj8hMqAMkuMYwfTrRRSUU3cfM0rIzj4RvYtQWGLaIVb5JSei/T1rudSszfaXNbgjey/KT/AHhyKKKcoK1hKbvc8umtLy0uJomgkQygo8ZQkNn0rQ8O3X9n37Xl7izs7cFJJrgiJFBwBlmwBliAKKKqKtG4pO8j/9k=" + } + ] + }, + { + "items": [ + { + "mime": "image/jpeg", + "value": 
"data:image/jpeg;base64,/9j/4AAQSkZJRgABAQAAAQABAAD/2wBDAAgGBgcGBQgHBwcJCQgKDBQNDAsLDBkSEw8UHRofHh0aHBwgJC4nICIsIxwcKDcpLDAxNDQ0Hyc5PTgyPC4zNDL/2wBDAQkJCQwLDBgNDRgyIRwhMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjL/wAARCAAmAHkDASIAAhEBAxEB/8QAHwAAAQUBAQEBAQEAAAAAAAAAAAECAwQFBgcICQoL/8QAtRAAAgEDAwIEAwUFBAQAAAF9AQIDAAQRBRIhMUEGE1FhByJxFDKBkaEII0KxwRVS0fAkM2JyggkKFhcYGRolJicoKSo0NTY3ODk6Q0RFRkdISUpTVFVWV1hZWmNkZWZnaGlqc3R1dnd4eXqDhIWGh4iJipKTlJWWl5iZmqKjpKWmp6ipqrKztLW2t7i5usLDxMXGx8jJytLT1NXW19jZ2uHi4+Tl5ufo6erx8vP09fb3+Pn6/8QAHwEAAwEBAQEBAQEBAQAAAAAAAAECAwQFBgcICQoL/8QAtREAAgECBAQDBAcFBAQAAQJ3AAECAxEEBSExBhJBUQdhcRMiMoEIFEKRobHBCSMzUvAVYnLRChYkNOEl8RcYGRomJygpKjU2Nzg5OkNERUZHSElKU1RVVldYWVpjZGVmZ2hpanN0dXZ3eHl6goOEhYaHiImKkpOUlZaXmJmaoqOkpaanqKmqsrO0tba3uLm6wsPExcbHyMnK0tPU1dbX2Nna4uPk5ebn6Onq8vP09fb3+Pn6/9oADAMBAAIRAxEAPwD1bV7ye3kjWF9nGTwOaWHU2OmvK5BkjU5yOvpUGuD95Gfaucj1SOeDVIYziS23RSL77dw/MEVolzQ8xN2n5HV6XqM10zrNt4AIwMVps4C5JwPWub0xsTgr0Zc1parKyaZKVOCRj8zSqJJhDVFafxLBE5CxZQHG9nCg1dsdWgvW2LlJMZ2t3+lcJf6FHrsaxSbsR5IwcYJ71L4fjl04W0Ej7ngk2Fj3AP8AgabSvYSva56BcXMVtbtPM+2NBliaitdRtbwsIJdzKMkYI/nTbqFbyylgY8OpBxXH6fcS2F4VfIeElHHqKzW9i3HS520l5bxSBJJ40YjOGYCpsjFcSkzXepLMwzlgwB7DtXXCXEBduwyauUeVIiL5mTGQL94gfU0oYMMg5rgdZ1C8kgluYI1km48tHJ2j8vapPCuuyXEcEkoETyNsliB4BzjgVXKK53fSjdVS9uGt7GaVfvKpI+tcnf8AiqTSo0kuppNjttBWINg4zzgUormHLQ7jNH51z3hzxANbErRsXjTHzGMrye3PWt7NNqwJ3MnWx/qifeuHltzpviU3yjFvqAEUvoJAMKfyyK7nxDp9xqFmqWtwYJVbO4DOR6VQTQjLo/2W6kMsmMmQjHzdQcfWoTajZFNK92M07CyoBxgYxWpqozpU3sAaw9G07V4b8m+niaJQQoRME/jXTywCa2kibo6lT+NOq+Z3FTXKc7pDKJ3U9SM1RnwupzAf89f6Cqc76jo8xjntJpVXhJoBnI9xU2mSS6jeBza3CDOWeZduTTiry5gk7LlO3g5QfSue1+x8u9ivI+A4KSe/pXRwLtQA9azfEbLFpZmYHEbBjgZNZMuLMvRoN8hc9jiujnTNpKo7oR+lYHhiZbm3kkQNs38EqRmulIymD3rSo7siCschZRiSeNWHBHf6VZt/DmnwasL0Qx+bu3AnsfUCqEVwtrfvbsds0LkbW43D1H4VJMxub15WJw20KpPTFVG8rCl7rZ0+oLu024Hfy2/lXGXUtrDAst5IiRcfNIcDJrtX/e2rj1Qj9K417WK7to4rhFeMhchlyOKKPxMKvwI2PDl9Z3AaGzkhcINx8tw2K6Ld7VgaDBYWAMNsI42c5IVMZ/Sug49acr3JjsP2g0mwUUViaCeWucgUuBiiimBFLAkg+YCo0tI4zkAUUU02JosquBTJY1kjKsMg0UUdQ6EUFukIwgwParGKKKGIxtY0Cx1XDXEQLgYDgkN+Yqjp3hu0sbjzI9zN0Bdi2Pzooq4bClqdEqAJj8K4q88Myi4dotUuo1JyFD8D2oopwSbCUmkWtD0ie21BZptQmnCg4Q9M+tcr/wANA+E/+gfrX/fmL/45RRRU0ZMNUf/Z" + } + ] + }, + { + "items": [ + { + "mime": "image/jpeg", + "value": 
"data:image/jpeg;base64,/9j/4AAQSkZJRgABAQAAAQABAAD/2wBDAAgGBgcGBQgHBwcJCQgKDBQNDAsLDBkSEw8UHRofHh0aHBwgJC4nICIsIxwcKDcpLDAxNDQ0Hyc5PTgyPC4zNDL/2wBDAQkJCQwLDBgNDRgyIRwhMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjL/wAARCAAXADEDASIAAhEBAxEB/8QAHwAAAQUBAQEBAQEAAAAAAAAAAAECAwQFBgcICQoL/8QAtRAAAgEDAwIEAwUFBAQAAAF9AQIDAAQRBRIhMUEGE1FhByJxFDKBkaEII0KxwRVS0fAkM2JyggkKFhcYGRolJicoKSo0NTY3ODk6Q0RFRkdISUpTVFVWV1hZWmNkZWZnaGlqc3R1dnd4eXqDhIWGh4iJipKTlJWWl5iZmqKjpKWmp6ipqrKztLW2t7i5usLDxMXGx8jJytLT1NXW19jZ2uHi4+Tl5ufo6erx8vP09fb3+Pn6/8QAHwEAAwEBAQEBAQEBAQAAAAAAAAECAwQFBgcICQoL/8QAtREAAgECBAQDBAcFBAQAAQJ3AAECAxEEBSExBhJBUQdhcRMiMoEIFEKRobHBCSMzUvAVYnLRChYkNOEl8RcYGRomJygpKjU2Nzg5OkNERUZHSElKU1RVVldYWVpjZGVmZ2hpanN0dXZ3eHl6goOEhYaHiImKkpOUlZaXmJmaoqOkpaanqKmqsrO0tba3uLm6wsPExcbHyMnK0tPU1dbX2Nna4uPk5ebn6Onq8vP09fb3+Pn6/9oADAMBAAIRAxEAPwDu7rVNQfUriK1wRB1QEA47nnr9K0RqrHSzOFBkB2+2fWotauY7VQkSKLm4+XcBzjv/AIVYsLFE0/y7hFcP8zKw4q3sJblWz1a4+0rHcxkK2CrEDkHuMU++1eeCeRIYSwj6gDJPqetVlH2/UkWJQsEXAx0AFSzDytcx2fB/MY/pSY0XV1QHTjcbBuB27R61FZ60Z7ryJY9pOMHBHXp16is62BOn3cR6x4b/AL5PP8qV22xWso6ruX8QQR/WkM6jmiq32pPX9KKQ7mPrOmXd1ewXNq0eUXBDtjvn0NT2kWpESJdMm0oQMPu5/IUUVV9CbalKGPU7TKxQqATzhwf51b1G2uZI4LpFDTIv7xQQM+47daKKGxIZpls8xneVNiyKQQcd+vQ1QYTxOLGZBvD7lIIw3GM//rooo6jOc/4Wd4J/6D3/AJKz/wDxFFFFK4H/2Q==" + } + ] + }, + { + "items": [ + { + "mime": "image/jpeg", + "value": "data:image/jpeg;base64,/9j/4AAQSkZJRgABAQAAAQABAAD/2wBDAAgGBgcGBQgHBwcJCQgKDBQNDAsLDBkSEw8UHRofHh0aHBwgJC4nICIsIxwcKDcpLDAxNDQ0Hyc5PTgyPC4zNDL/2wBDAQkJCQwLDBgNDRgyIRwhMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjL/wAARCAAgABoDASIAAhEBAxEB/8QAHwAAAQUBAQEBAQEAAAAAAAAAAAECAwQFBgcICQoL/8QAtRAAAgEDAwIEAwUFBAQAAAF9AQIDAAQRBRIhMUEGE1FhByJxFDKBkaEII0KxwRVS0fAkM2JyggkKFhcYGRolJicoKSo0NTY3ODk6Q0RFRkdISUpTVFVWV1hZWmNkZWZnaGlqc3R1dnd4eXqDhIWGh4iJipKTlJWWl5iZmqKjpKWmp6ipqrKztLW2t7i5usLDxMXGx8jJytLT1NXW19jZ2uHi4+Tl5ufo6erx8vP09fb3+Pn6/8QAHwEAAwEBAQEBAQEBAQAAAAAAAAECAwQFBgcICQoL/8QAtREAAgECBAQDBAcFBAQAAQJ3AAECAxEEBSExBhJBUQdhcRMiMoEIFEKRobHBCSMzUvAVYnLRChYkNOEl8RcYGRomJygpKjU2Nzg5OkNERUZHSElKU1RVVldYWVpjZGVmZ2hpanN0dXZ3eHl6goOEhYaHiImKkpOUlZaXmJmaoqOkpaanqKmqsrO0tba3uLm6wsPExcbHyMnK0tPU1dbX2Nna4uPk5ebn6Onq8vP09fb3+Pn6/9oADAMBAAIRAxEAPwD03Ur25h1BhHMyqAMLnj8qtXOpsNN8+JgJDgeuDVDV0Jvmx121gWGsJqGg29yn+rn8tgP7pJAI/A8fhS6D6nZaVfyXUDNMwLhscDHGK0s1zujHBce9boPApgjH1FAb/P8AsiuK021OlXk+nFf9Hc+fBxwMn5h+B5rsNe0u9u7uOW0vpLdQNrKoBB561FqWiPcWitbttuo+Y3I/PNKzHdD9II3t+FbobgVzugWWoQeY19KjsSNuxcYFTT+LPDVtcSQT+I9JimiYpJHJexqyMDgggtwQe1N3ErH/2Q==" + } + ] + }, + { + "items": [ + { + "mime": "image/jpeg", + "value": 
"data:image/jpeg;base64,/9j/4AAQSkZJRgABAQAAAQABAAD/2wBDAAgGBgcGBQgHBwcJCQgKDBQNDAsLDBkSEw8UHRofHh0aHBwgJC4nICIsIxwcKDcpLDAxNDQ0Hyc5PTgyPC4zNDL/2wBDAQkJCQwLDBgNDRgyIRwhMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjL/wAARCAAXAB8DASIAAhEBAxEB/8QAHwAAAQUBAQEBAQEAAAAAAAAAAAECAwQFBgcICQoL/8QAtRAAAgEDAwIEAwUFBAQAAAF9AQIDAAQRBRIhMUEGE1FhByJxFDKBkaEII0KxwRVS0fAkM2JyggkKFhcYGRolJicoKSo0NTY3ODk6Q0RFRkdISUpTVFVWV1hZWmNkZWZnaGlqc3R1dnd4eXqDhIWGh4iJipKTlJWWl5iZmqKjpKWmp6ipqrKztLW2t7i5usLDxMXGx8jJytLT1NXW19jZ2uHi4+Tl5ufo6erx8vP09fb3+Pn6/8QAHwEAAwEBAQEBAQEBAQAAAAAAAAECAwQFBgcICQoL/8QAtREAAgECBAQDBAcFBAQAAQJ3AAECAxEEBSExBhJBUQdhcRMiMoEIFEKRobHBCSMzUvAVYnLRChYkNOEl8RcYGRomJygpKjU2Nzg5OkNERUZHSElKU1RVVldYWVpjZGVmZ2hpanN0dXZ3eHl6goOEhYaHiImKkpOUlZaXmJmaoqOkpaanqKmqsrO0tba3uLm6wsPExcbHyMnK0tPU1dbX2Nna4uPk5ebn6Onq8vP09fb3+Pn6/9oADAMBAAIRAxEAPwDg538i7nWG3jVi5VGPOecH+lK0lzuNorrG4kKnYvHIBH8jVnUIo1S+yB5qvuQn3wf6VTlEseqQyy4LM0ZbHTJ4/wAa9WWjOFDLeCSaSFZJ5DviZhz3BxirGgttZ4yxIKK3J78g/wAqZ5P7y2TcyGO4dMg846gU/SEMOpyISTuDKue2D0/WlHcGaWq6fNNfAxbfLcZYnsQCP61ANNvJlzO0YYIFXBz908f1/OiitnFXIuSTaS8wk2y7CZfMVgOnGKjktYNKg+23NyypEdzPjP3gF7c9aKKmaUYuS6Djq0j/2Q==" + } + ] + }, + { + "items": [ + { + "mime": "image/jpeg", + "value": "data:image/jpeg;base64,/9j/4AAQSkZJRgABAQAAAQABAAD/2wBDAAgGBgcGBQgHBwcJCQgKDBQNDAsLDBkSEw8UHRofHh0aHBwgJC4nICIsIxwcKDcpLDAxNDQ0Hyc5PTgyPC4zNDL/2wBDAQkJCQwLDBgNDRgyIRwhMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjL/wAARCAAqAKMDASIAAhEBAxEB/8QAHwAAAQUBAQEBAQEAAAAAAAAAAAECAwQFBgcICQoL/8QAtRAAAgEDAwIEAwUFBAQAAAF9AQIDAAQRBRIhMUEGE1FhByJxFDKBkaEII0KxwRVS0fAkM2JyggkKFhcYGRolJicoKSo0NTY3ODk6Q0RFRkdISUpTVFVWV1hZWmNkZWZnaGlqc3R1dnd4eXqDhIWGh4iJipKTlJWWl5iZmqKjpKWmp6ipqrKztLW2t7i5usLDxMXGx8jJytLT1NXW19jZ2uHi4+Tl5ufo6erx8vP09fb3+Pn6/8QAHwEAAwEBAQEBAQEBAQAAAAAAAAECAwQFBgcICQoL/8QAtREAAgECBAQDBAcFBAQAAQJ3AAECAxEEBSExBhJBUQdhcRMiMoEIFEKRobHBCSMzUvAVYnLRChYkNOEl8RcYGRomJygpKjU2Nzg5OkNERUZHSElKU1RVVldYWVpjZGVmZ2hpanN0dXZ3eHl6goOEhYaHiImKkpOUlZaXmJmaoqOkpaanqKmqsrO0tba3uLm6wsPExcbHyMnK0tPU1dbX2Nna4uPk5ebn6Onq8vP09fb3+Pn6/9oADAMBAAIRAxEAPwD2d+QAfWuQ8R+OYfDesLYS6Zc3CtEsnmwsOMkjGD9PXvXXyZ4rnNUtdEudVP269gju9ijypJVB284ODz61kzVGtpWpw6rpkF/bM/kyruAcYYc4IPuCDWgr571gXrNo2kLDp9o8q/dBQjbHuPLHnOOewP4VrWbl4lJ5NCYNFpn2j1NZk3iHTbbUBYTajax3ZxiFnAbnp+daDHLV51d+EodU1W+upRl5Lh23d+pA5+gFDYJHo6Sk9RzUgbNZ8TNFYQl2JYIgYnueM1ajfcuaaYrDzMASMGlE6H1H1FZOtRzy6DqKW0kkc728gjeM4ZWwcEe+awfA8OtwW93Fq9zPcIGUwPOSW77hk8ntT5mFjtww9aCwHUgfjVWCZZVypBAJGR7U6U/Mv0Jo5hWLG4HoR+dLXnmr+K9dsNfurK30m2uLWNlEb7iGOVBOefUmum8P6leahZmW9sltJN5ARX3Ajjmq1FZG7mjNQSscIoJ5P9K5bXfHOneHdUWwvIb9naMSeZCgZMEkdSw54pcwWOwzxRms+w1CO/sobu3k8yCZA6MRg4NWZrgRQPK2AEUsSenAp3CxPRWLaeIbK9lEVteWs0hGdiSAnHrjNWH1WKKTy3eFZMZ2mQA4+lF0FjRopgfIB45opiIpO2K8/wDE/hSDXPEUtzJkOsaICCQcAZ/rXY65a3dzZAWV29tMrbg6DORg8GqWjWN2kbNfzmedjkuV259KzcWWmh+kWrWPh6G0di/kxsoJ9ATj9K0bTBQY9KwNYOv216Rpz2xtnA+WVCSD36V0FlG6wpuxuwM4o5WPmRPkeYw9hWZbptklDdfMb+ZqXVWu7ZVurSMTOgw8JON6+x7GsFvGdjG3+k6dfxS918oHP45GalxY0zoLt9tkozgkqKs2/KCuYs9WuPEF8hjspbayhywMow0jdOnbAzXVRLtQCqs0TdGfq93cWOh311aoj3EMMjxK4ypYAkA+2a5/wp4jvPEsd5Z6tYLbyogJ8osFdTx65H51oX3iPRopLiwvLvy3BKOpRv54xVeHX/DdgrzQXiMxGMKCWPsOKQzcsbdLJBbxcRxgKo9B2q3J99foazdIuZb23FzJGYzKdwU/wjsPyrRkIDr9DQI5HVfF2g6ZrVxZXlldG4jK7pVhBVsqCMHPoQK3tG1G11S0FzZBxESQA6bTx7VWutDiutQluXVTvI69eAB/StS1gS3QIowBV9BEs33o/Xd/Q1z+r6dpd7qWbu5hSfywBG7LuIycHBOfWugl6x/739DXE+LvC0Gu6us7llkWBUDKcHGWP9ago31/4lVnbWVrbSPEDt8wEbUHXJ5z9OK1UAmgKtghhgg1jaHbvaaDBayMXaFCmT6AnH6Vs25G0VSJZhW3h200/WkvIYVRmDKdq46j/wCtVDW/DNrquvrdzR7mWNF6kZAJ/wAa6y4wHiP+1/Q1Xxm6P0FNPUHsTxhhGo9qKnUfKKKokkK5
FNEYHQU8UUhkbRBuopyoBT/SloAjZAwwapS6bDI2SgrQopptCtcpw2ccI+VQKtBcU6g9KGC0Mq+0e3vJC8kasT3IqlH4as45Q6wIGHfaM10I6GimpNC5UyvBAI1CgcCqOu6PFrFksEpYbW3qVbBBxj+ta/ekI4pDOAPglo2/dXt4n+7Liug0DS5dNikSW5uJyzZzM+4j6VtkD0pVptp9BK/cpatbT3OnvHbTvbzZBWReoxVDSLG7iiP265a4lz99lA49K3u1MUVNkVdnL6hDrcGpEWM8ItXI+V487fX610dvHtQZNPkAz0FPSnZCuzP1mae2svPt7c3EiMD5YYLn15qnpFzcaghnntDbNnaEZ9xx61tTgGM5FQW4AAwAKFFWuDlrYsjgUV8y/FjxT4h034mavaWOvapa20fk7IYLyREXMKE4UHA5JP40UBc//9k=" + } + ] + } + ] + }, + { + "language": "markdown", + "source": [ + "### Print Annotations in Plain Text Format" + ], + "outputs": [] + }, + { + "language": "typescript", + "source": [ + "printSortedAnnotations(boxesWithAnnotations);\n" + ], + "outputs": [ + { + "items": [ + { + "mime": "application/vnd.code.notebook.stdout", + "value": [ + "Sorted Annotations: [ 'robert', 'n', 'noyce', 'building', '2200', 'center' ]", + "" + ] + } + ] + } + ] + } + ] +} \ No newline at end of file diff --git a/src/bindings/c/tests/ov_core_test.cpp b/src/bindings/c/tests/ov_core_test.cpp index 3e8ceebcaa0e49..40aab57b2c7e6b 100644 --- a/src/bindings/c/tests/ov_core_test.cpp +++ b/src/bindings/c/tests/ov_core_test.cpp @@ -126,12 +126,7 @@ TEST_P(ov_core_test, ov_core_compile_model) { ov_core_free(core); } -#ifdef OPENVINO_ARCH_ARM64 -// Ticket: 126283 -TEST_P(ov_core_test, DISABLED_ov_core_compile_model_with_property) { -#else TEST_P(ov_core_test, ov_core_compile_model_with_property) { -#endif auto device_name = GetParam(); ov_core_t* core = nullptr; OV_EXPECT_OK(ov_core_create(&core)); @@ -149,12 +144,7 @@ TEST_P(ov_core_test, ov_core_compile_model_with_property) { char* property_value = nullptr; OV_EXPECT_OK(ov_compiled_model_get_property(compiled_model, key, &property_value)); -#if defined(OPENVINO_ARCH_ARM) || defined(OPENVINO_ARCH_ARM64) - // TODO: fix once ARM plugin supports multi-stream - EXPECT_STREQ(property_value, "1"); -#else EXPECT_STREQ(property_value, "2"); -#endif ov_free(property_value); ov_compiled_model_free(compiled_model); diff --git a/src/bindings/python/constraints.txt b/src/bindings/python/constraints.txt index 6127d46c62a103..49ebd8d4f87716 100644 --- a/src/bindings/python/constraints.txt +++ b/src/bindings/python/constraints.txt @@ -2,7 +2,7 @@ numpy>=1.16.6,<2.1.0 # Python bindings, frontends # pytest -pytest>=5.0,<8.3 +pytest>=5.0,<8.4 pytest-dependency==0.6.0 pytest-html==4.1.1 pytest-timeout==2.2.0 @@ -10,7 +10,7 @@ pytest-timeout==2.2.0 # Python bindings py>=1.9.0 pygments>=2.8.1 -setuptools>=65.6.1 +setuptools>=65.6.1,<72 sympy>=1.10 wheel>=0.38.1 patchelf<=0.17.2.1 @@ -19,7 +19,7 @@ patchelf<=0.17.2.1 h5py>=3.1.0,<3.12.0 docopt~=0.6.2 paddlepaddle==2.6.0 -tensorflow>=1.15.5,<2.17.0 +tensorflow>=1.15.5,<2.18.0 six~=1.16.0 protobuf>=3.18.1,<4.0.0 onnx==1.15.0 diff --git a/src/bindings/python/src/openvino/frontend/pytorch/fx_decoder.py b/src/bindings/python/src/openvino/frontend/pytorch/fx_decoder.py index a1293f89a1ffc5..d9dae251aa64e7 100644 --- a/src/bindings/python/src/openvino/frontend/pytorch/fx_decoder.py +++ b/src/bindings/python/src/openvino/frontend/pytorch/fx_decoder.py @@ -16,6 +16,11 @@ logger.setLevel(logging.WARNING) +class InlinedInput: + def __init__(self, data) -> None: + self.data = data + + class TorchFXPythonDecoder (Decoder): def __init__(self, pt_module, fx_gm=None, nodes=None, mark_node_callback=None, input_shapes=[], input_types=[]): @@ -59,7 +64,7 @@ def __init__(self, pt_module, fx_gm=None, nodes=None, mark_node_callback=None, i for arg in uargs if arg[1] is not None] for idx, shape in enumerate(found_shapes): 
if shape is not None: - new_shape=[] + new_shape = [] for dim in range(0, len(shape)): if (type(shape[dim]).__name__ == "SymInt"): new_shape.append(-1) @@ -81,7 +86,7 @@ def __init__(self, pt_module, fx_gm=None, nodes=None, mark_node_callback=None, i # None in inputs mean the input is inlined or None (also considered inlined) self._inputs = [self._nodes.index( - arg) if arg in self._nodes else (arg,) for arg in pt_module.args] + arg) if arg in self._nodes else InlinedInput(arg) for arg in pt_module.args] # FIXME: Find a better way to pass nested tuples to OV frontend. This is a temporary solution to flatten arguments. new_inputs = [] @@ -92,22 +97,22 @@ def __init__(self, pt_module, fx_gm=None, nodes=None, mark_node_callback=None, i if arg in self._nodes: new_inputs.append(self._nodes.index(arg)) else: - new_inputs.append((arg,)) + new_inputs.append(InlinedInput(arg)) self.input_types.append(OVAny(DecoderType.List( TorchFXPythonDecoder.get_type_for_value(arg)))) else: v = self._inputs[i] new_inputs.append(v) self.input_types.append( - TorchFXPythonDecoder.get_type_for_value(v[0] if isinstance(v, tuple) else self._nodes[v])) + TorchFXPythonDecoder.get_type_for_value(v.data if isinstance(v, InlinedInput) else self._nodes[v])) self._inputs = new_inputs def inputs(self): # Consider 0 a special case which may mean the input is inlined, but not guaranteed - return [x if not isinstance(x, tuple) else 0 for x in self._inputs] + return [x if not isinstance(x, InlinedInput) else 0 for x in self._inputs] def is_input_inlined(self, index): - return isinstance(self._inputs[index], tuple) + return isinstance(self._inputs[index], InlinedInput) @staticmethod def unpack_containers(arg): @@ -142,19 +147,24 @@ def arg_to_constant(arg): return make_constant(OVType.i64, Shape([]), [arg]) elif isinstance(arg, float): return make_constant(OVType.f32, Shape([]), [arg]) + elif isinstance(arg, str): + u8_tensor = torch.frombuffer(str.encode(arg), dtype=torch.uint8) + return torch_tensor_to_ov_const(u8_tensor, shared_memory=True) return None def inlined_input(self, index): assert index < len(self._inputs), "Requested input doesn't exist" assert isinstance( - self._inputs[index], tuple), "Requested input which is not inlined" - assert self._inputs[index][0] is not None, "Requested None inlined input" + self._inputs[index], InlinedInput), "Requested input which is not inlined" + arg = self._inputs[index].data + assert arg is not None, f"Requested None inlined input for op {self.get_op_type()}" constant = None - arg = self._inputs[index][0] constant = self.arg_to_constant(arg) - assert constant is not None, f"Constant wasn't created for inlined input {index}" - return constant.outputs() + if constant is not None: + return constant.outputs() + else: + return [] def input(self, index): # TODO: remove return self.inputs()[index] # TODO: find specialized method @@ -257,9 +267,7 @@ def get_named_input(self, name): raise RuntimeError("This input is not a Node") def get_subgraph_size(self): - if issubclass(type(self.pt_module), torch.fx.Node): - return 0 - return len(self.get_subgraphs()) if hasattr(self.pt_module, 'blocks') else 1 + return len(self.get_subgraphs()) def decoder_type_name(self) -> str: return "fx" @@ -277,9 +285,7 @@ def visit_subgraph(self, node_visitor): node_visitor(decoder) def get_subgraphs(self): - if issubclass(type(self.pt_module), torch.fx.Node): - return [] - return list(self.pt_module.blocks()) + return [] def get_subgraph_decoder(self, index): decoder = 
TorchFXPythonDecoder(self.get_subgraphs()[index], @@ -309,7 +315,7 @@ def _raw_output(self, index): return self._raw_outputs()[index] def _raw_inputs(self): - return [self._nodes[x] if not isinstance(x, tuple) and x < len(self._nodes) else x[0] for x in self._inputs] + return [self._nodes[x] if not isinstance(x, InlinedInput) and x < len(self._nodes) else x.data for x in self._inputs] def _raw_input(self, index): return self._raw_inputs()[index] @@ -347,7 +353,7 @@ def as_string(self): return None def input_is_none(self, index): - if index >= len(self._inputs) or (isinstance(self._inputs[index], tuple) and self._inputs[index][0] is None): + if index >= len(self._inputs) or (isinstance(self._inputs[index], InlinedInput) and self._inputs[index].data is None): return True else: r_input = self._raw_input(index) diff --git a/src/bindings/python/wheel/setup.py b/src/bindings/python/wheel/setup.py index 610c4e744e32e3..095b9579f4b354 100644 --- a/src/bindings/python/wheel/setup.py +++ b/src/bindings/python/wheel/setup.py @@ -193,6 +193,13 @@ "install_dir": "runtime", "binary_dir": OPENVINO_BINARY_DIR, "source_dir": OPENVINO_SOURCE_DIR + }, + "tbb_dev": { + "name": "tbb_dev", + "prefix": f"{BUILD_BASE}/libs.tbb.dev", + "install_dir": "runtime/3rdparty/tbb", + "binary_dir": OPENVINO_BINARY_DIR, + "source_dir": OPENVINO_SOURCE_DIR } } @@ -266,7 +273,7 @@ def finalize_options(self): self.jobs = multiprocessing.cpu_count() if self.jobs is None else int(self.jobs) if self.cmake_args is None: - self.cmake_args = "" + self.cmake_args = os.getenv("CMAKE_ARGS", "") def cmake_build_and_install(self, install_cfg): """Runs cmake (configure, build and install) if artfiacts are not already built / installed.""" @@ -297,6 +304,7 @@ def cmake_build_and_install(self, install_cfg): f"-DPython3_EXECUTABLE={sys.executable}", f"-DCMAKE_BUILD_TYPE={CONFIG}", f"-DCPACK_GENERATOR={CPACK_GENERATOR}", + "-DENABLE_PYTHON=ON", "-DENABLE_WHEEL=OFF", self.cmake_args, "-S", source_dir, @@ -469,6 +477,8 @@ def copy_package_data(self, src_dirs): os.makedirs(package_dir, exist_ok=True) package_clibs_dir = os.path.join(PACKAGE_DIR, WHEEL_LIBS_INSTALL_DIR) os.makedirs(package_clibs_dir, exist_ok=True) + package_cmake_dir = os.path.join(package_dir, "cmake") + os.makedirs(package_cmake_dir, exist_ok=True) replacements = { # change the path where the libraries are installed (runtime/lib/intel64/Release -> openvino/libs) @@ -490,15 +500,20 @@ def copy_package_data(self, src_dirs): move(file_path, dst_file) self.announce(f"Move {file_path} to {dst_file}", level=3) + # collect all cmake files in one directory + for file_path in Path(src).rglob("*.cmake"): + file_name = os.path.basename(file_path) + if file_path.is_file(): + dst_file = os.path.join(package_cmake_dir, file_name) + self.announce(f"Move {file_path} to {dst_file}", level=3) + move(file_path, dst_file) + self.announce("Patch cmake configurations", level=3) + replace_strings_in_file(dst_file, replacements) + if os.path.isdir(src) and os.listdir(src): # copy the rest of the files to the package directly shutil.copytree(src, dst, dirs_exist_ok=True) - # patch cmake configurations - for file_path in Path(dst).rglob("*.cmake"): - if file_path.is_file(): - replace_strings_in_file(file_path, replacements) - def copy_file(src, dst, verbose=False, dry_run=False): """Custom file copy.""" diff --git a/src/common/low_precision_transformations/include/low_precision/broadcast.hpp b/src/common/low_precision_transformations/include/low_precision/broadcast.hpp new file mode 100644 index 
00000000000000..39ba4052535c29 --- /dev/null +++ b/src/common/low_precision_transformations/include/low_precision/broadcast.hpp @@ -0,0 +1,30 @@ +// Copyright (C) 2024 Intel Corporation +// SPDX-License-Identifier: Apache-2.0 +// + +#pragma once + +#include "transparent_base_transformation.hpp" + +namespace ov { +namespace pass { +namespace low_precision { + +/** + * @ingroup ov_transformation_common_api + * @brief BroadcastTransformation propagates dequantization operations through Broadcast operation. + * + * For more details about the transformation, refer to + * [BroadcastTransformation](@ref openvino_docs_OV_UG_lpt_BroadcastTransformation) page + * in the OpenVINO Developer Guide. + */ +class LP_TRANSFORMATIONS_API BroadcastTransformation : public TransparentBaseTransformation { +public: + OPENVINO_RTTI("BroadcastTransformation", "0"); + BroadcastTransformation(const Params& params = Params()); + bool canBeTransformed(const TransformationContext& context, std::shared_ptr layer) const override; +}; + +} // namespace low_precision +} // namespace pass +} // namespace ov diff --git a/src/common/low_precision_transformations/include/low_precision/recurrent_cell.hpp b/src/common/low_precision_transformations/include/low_precision/recurrent_cell.hpp index 8a305db307c612..22aaf3281c2b94 100644 --- a/src/common/low_precision_transformations/include/low_precision/recurrent_cell.hpp +++ b/src/common/low_precision_transformations/include/low_precision/recurrent_cell.hpp @@ -1,4 +1,4 @@ -// Copyright (C) 2022 Intel Corporation +// Copyright (C) 2022-2024 Intel Corporation // SPDX-License-Identifier: Apache-2.0 // @@ -23,6 +23,9 @@ class LP_TRANSFORMATIONS_API RecurrentCellTransformation : public LayerTransform static std::shared_ptr wrap_fake_quantize(const std::shared_ptr parameter); static std::shared_ptr wrap_quantization(const std::shared_ptr parameter); static std::shared_ptr wrap_dequantization(const std::shared_ptr parameter, const bool with_subtract); + +private: + void propagate(TransformationContext& context, const std::shared_ptr node); }; } // namespace low_precision diff --git a/src/common/low_precision_transformations/src/broadcast.cpp b/src/common/low_precision_transformations/src/broadcast.cpp new file mode 100644 index 00000000000000..5e78ca0ef50996 --- /dev/null +++ b/src/common/low_precision_transformations/src/broadcast.cpp @@ -0,0 +1,77 @@ +// Copyright (C) 2024 Intel Corporation +// SPDX-License-Identifier: Apache-2.0 +// + +#include "low_precision/broadcast.hpp" + +#include + +#include "openvino/opsets/opset1.hpp" +#include "openvino/opsets/opset3.hpp" +#include "openvino/pass/pattern/op/or.hpp" +#include "openvino/pass/pattern/op/wrap_type.hpp" +#include "low_precision/network_helper.hpp" + +#include "itt.hpp" + +using namespace ov::pass::low_precision; + +BroadcastTransformation::BroadcastTransformation(const Params& params) : TransparentBaseTransformation(params) { + MATCHER_SCOPE(BroadcastTransformation); + auto broadcast1 = pattern::wrap_type({ + pattern::wrap_type(), + ov::pass::pattern::any_input(), + ov::pass::pattern::any_input() }); + + auto broadcast3 = pattern::wrap_type({ + pattern::wrap_type(), + ov::pass::pattern::any_input(), + ov::pass::pattern::any_input() }); + + const auto matcher = std::make_shared(ov::OutputVector{ broadcast1, broadcast3 }); + + ov::graph_rewrite_callback callback = [this](pattern::Matcher& m) { + auto op = m.get_match_root(); + if (transformation_callback(op)) { + return false; + } + return transform(*context, m); + }; + + auto m = 
std::make_shared(matcher, matcher_name); + this->register_matcher(m, callback); +} + +bool BroadcastTransformation::canBeTransformed(const TransformationContext& context, std::shared_ptr layer) const { + if (!LayerTransformation::canBeTransformed(context, layer)) { + return false; + } + + const auto& dequantization = NetworkHelper::getDequantization(layer, defaultPrecisions); + if (dequantization.empty()) { + return false; + } + + if (dequantization.isPerTensor()) { + return true; + } + + const auto& inputShape = layer->get_input_partial_shape(0); + if (inputShape.rank().is_dynamic() || inputShape[dequantization.channelDimIndex].is_dynamic()) { + return false; + } + + const auto targetShapeConstant = ov::as_type_ptr(layer->get_input_node_shared_ptr(1)); + const auto& targetShape = targetShapeConstant->cast_vector(); + if (targetShape[dequantization.channelDimIndex] != inputShape[dequantization.channelDimIndex].get_length()) { + return false; + } + + const auto axesMappingConstant = ov::as_type_ptr(layer->get_input_node_shared_ptr(2)); + const auto& axesMapping = axesMappingConstant->cast_vector(); + if (static_cast(axesMapping[dequantization.channelDimIndex]) != dequantization.channelDimIndex) { + return false; + } + + return true; +} diff --git a/src/common/low_precision_transformations/src/layer_transformation.cpp b/src/common/low_precision_transformations/src/layer_transformation.cpp index a4c0133c5813c3..4ec573c0f2a6ea 100644 --- a/src/common/low_precision_transformations/src/layer_transformation.cpp +++ b/src/common/low_precision_transformations/src/layer_transformation.cpp @@ -401,6 +401,7 @@ std::shared_ptr LayerTransformation::moveDequantizationAfter( const FakeQuantizeDequantization& dequantization, const bool updateOutputPrecision, const bool moveSubtract) const { + OPENVINO_ASSERT(!dequantization.empty()); const auto result = ov::pass::low_precision::NetworkHelper::moveDequantizationAfter(operation, dequantization, updateOutputPrecision, diff --git a/src/common/low_precision_transformations/src/low_precision.cpp b/src/common/low_precision_transformations/src/low_precision.cpp index bba12f7e389be8..6435f47d12ffec 100644 --- a/src/common/low_precision_transformations/src/low_precision.cpp +++ b/src/common/low_precision_transformations/src/low_precision.cpp @@ -44,6 +44,7 @@ #include "low_precision/assign_and_read_value.hpp" #include "low_precision/avg_pool.hpp" #include "low_precision/batch_to_space.hpp" +#include "low_precision/broadcast.hpp" #include "low_precision/clamp.hpp" #include "low_precision/convolution.hpp" #include "low_precision/convolution_backprop_data.hpp" @@ -240,6 +241,7 @@ bool ov::pass::low_precision::LowPrecision::run_on_model(const std::shared_ptr() }, // TODO: there are conditions { name() }, + { name() }, + { name() }, { name() }, { name() }, { name() }, @@ -192,6 +195,8 @@ bool ov::pass::low_precision::MarkupPrecisions::isSupported(const std::shared_pt { name() }, { name() }, { name() }, + { name() }, + { name() }, { name() }, { name() }, // ? 
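The broadcast.cpp hunk above only moves dequantization past a Broadcast when the scales either apply per tensor or are left untouched by the broadcast. As a reading aid, here is a minimal standalone C++ sketch of that eligibility rule using plain containers; it is not the OpenVINO NetworkHelper/LPT API, and every name in it (DequantizationInfo, can_move_dequantization_through_broadcast) is hypothetical.

#include <cstddef>
#include <vector>

// Plain-data stand-in for the dequantization description used by the pass.
struct DequantizationInfo {
    bool per_tensor;          // true when scale/shift are scalars
    std::size_t channel_dim;  // axis of the per-channel scales (1 for NCHW layouts)
};

// Mirrors the checks in BroadcastTransformation::canBeTransformed:
// per-tensor dequantization always passes; per-channel dequantization passes only when
// the broadcast neither replicates the channel axis nor remaps it.
bool can_move_dequantization_through_broadcast(const DequantizationInfo& dq,
                                               const std::vector<std::size_t>& input_shape,
                                               const std::vector<std::size_t>& target_shape,
                                               const std::vector<std::size_t>& axes_mapping) {
    if (dq.per_tensor)
        return true;  // a scalar scale/shift is unaffected by broadcasting
    if (dq.channel_dim >= input_shape.size() || dq.channel_dim >= target_shape.size() ||
        dq.channel_dim >= axes_mapping.size())
        return false;
    // The channel dimension must keep its size...
    if (target_shape[dq.channel_dim] != input_shape[dq.channel_dim])
        return false;
    // ...and the axes mapping must keep the channel axis in place.
    return axes_mapping[dq.channel_dim] == dq.channel_dim;
}

The reason for the per-channel restriction is that a per-channel Multiply applied after a Broadcast that replicates or moves the channel axis would pair scales with the wrong elements; the new unit tests below exercise exactly the allowed case, e.g. broadcasting 1x3x1x1 to 1x3x9x9 with an identity axes mapping.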
diff --git a/src/common/low_precision_transformations/src/recurrent_cell.cpp b/src/common/low_precision_transformations/src/recurrent_cell.cpp index 7fd40cf2071a0f..cec96044502596 100644 --- a/src/common/low_precision_transformations/src/recurrent_cell.cpp +++ b/src/common/low_precision_transformations/src/recurrent_cell.cpp @@ -1,17 +1,19 @@ -// Copyright (C) 2022 Intel Corporation +// Copyright (C) 2022-2024 Intel Corporation // SPDX-License-Identifier: Apache-2.0 // #include "low_precision/recurrent_cell.hpp" -#include "openvino/pass/pattern/op/wrap_type.hpp" -#include "openvino/opsets/opset1.hpp" - #include + #include "openvino/core/node.hpp" #include "openvino/opsets/opset1.hpp" +#include "openvino/opsets/opset2.hpp" +#include "openvino/opsets/opset3.hpp" #include "openvino/opsets/opset5.hpp" +#include "openvino/opsets/opset12.hpp" #include "openvino/pass/pattern/op/or.hpp" +#include "openvino/pass/pattern/op/wrap_type.hpp" #include "low_precision/network_helper.hpp" #include "low_precision/rt_info/disable_cleanup_attribute.hpp" @@ -21,50 +23,14 @@ namespace pass { namespace low_precision { RecurrentCellTransformation::RecurrentCellTransformation(const Params& params) : LayerTransformation(params) { - const auto X = ov::pass::pattern::any_input(); - const auto H = ov::pass::pattern::any_input(); const auto C = ov::pass::pattern::any_input(); const auto S = ov::pass::pattern::any_input(); - const auto W = ov::pass::pattern::wrap_type(); - const auto R = ov::pass::pattern::wrap_type(); const auto B = ov::pass::pattern::wrap_type(); - const auto H_as_const = ov::pass::pattern::wrap_type(); - - const auto fq_X = wrap_fake_quantize(X); - const auto fq_H = wrap_fake_quantize(H); - const auto fq_W = wrap_fake_quantize(W); - const auto fq_R = wrap_fake_quantize(R); - - const auto dequantization_X = wrap_dequantization(ov::pass::pattern::any_input(), true); - const auto dequantization_H = wrap_dequantization(ov::pass::pattern::any_input(), true); - const auto dequantization_W = wrap_dequantization(ov::pass::pattern::any_input(), true); - const auto dequantization_R = wrap_dequantization(ov::pass::pattern::any_input(), true); - - const auto dequantization_without_subtract_X = wrap_dequantization(ov::pass::pattern::any_input(), false); - const auto dequantization_without_subtract_H = wrap_dequantization(ov::pass::pattern::any_input(), false); - const auto dequantization_without_subtract_W = wrap_dequantization(ov::pass::pattern::any_input(), false); - const auto dequantization_without_subtract_R = wrap_dequantization(ov::pass::pattern::any_input(), false); - - auto X_in = std::make_shared( - OutputVector{ - fq_X, dequantization_X, dequantization_without_subtract_X - }); - - auto H_in = std::make_shared( - OutputVector{ - H_as_const, fq_H, dequantization_H, dequantization_without_subtract_H - }); - - auto W_in = std::make_shared( - OutputVector{ - fq_W, dequantization_W, dequantization_without_subtract_W - }); - - auto R_in = std::make_shared( - OutputVector{ - fq_R, dequantization_R, dequantization_without_subtract_R - }); + auto X_in = ov::pass::pattern::any_input(); + auto H_in = ov::pass::pattern::any_input(); + auto W_in = ov::pass::pattern::any_input(); + auto R_in = ov::pass::pattern::any_input(); const auto lstm_seq = ov::pass::pattern::wrap_type( {X_in, H_in, C, S, W_in, R_in, B}); @@ -91,8 +57,134 @@ RecurrentCellTransformation::RecurrentCellTransformation(const Params& params) : this->register_matcher(m, callback); } +namespace { + +std::shared_ptr find_fake_quantize_upper(const 
std::shared_ptr& parent) { + if (auto fq = as_type_ptr(parent)) { + return fq; + } + + if (!NetworkHelper::isPrecisionPreserved(parent)) { + return nullptr; + } + + return find_fake_quantize_upper(parent->get_input_node_shared_ptr(0)); +} + +template +std::string name() { + return Operation::get_type_info_static().name; +} + +bool isSupportedForPerChannelQuantization(const std::shared_ptr& node) { + static const std::unordered_set supportedForPerChannelQuantization = { + { name() }, + { name() }, + { name() }, + { name() }, + { name() }, + { name() }, + { name() }, + { name() }, + { name() }, + { name() }, + { name() }, + { name() }, + { name() }, + { name() }, + { name() }, + { name() }, + { name() }, + { name() } + }; + + return supportedForPerChannelQuantization.find(node->get_type_name()) != supportedForPerChannelQuantization.end(); +} + +std::vector> get_supported_precisions(std::shared_ptr lstm) { + // pair fields: + // 0 - input number, + // 1 - input type, `element::undefined` - any precision + if (is_type(lstm)) { + return std::vector>{ {0, element::u8}, { 1, element::u8 }, { 4, element::undefined }, { 5, element::undefined } }; + } else if (is_type(lstm)) { + return std::vector>{ {0, element::u8}, { 1, element::u8 }, { 3, element::undefined }, { 4, element::undefined } }; + } + + OPENVINO_THROW("unsupported operation type: ", lstm->get_type_name()); +} + +} // namespace + +void RecurrentCellTransformation::propagate(TransformationContext& context, const std::shared_ptr node) { + if (!isSupportedForPerChannelQuantization(node)) { + return; + } + + const auto& normalized_node = NetworkHelper::separateInStandaloneBranch(node, defaultPrecisions); + auto dequantization = NetworkHelper::getDequantization(node, defaultPrecisions); + if (dequantization.empty()) { + return; + } + const auto& new_node = moveDequantizationAfter(context, normalized_node, dequantization); + + const auto& new_dequantization = NetworkHelper::getDequantizationBelow(new_node); + if (new_dequantization.empty()) { + return; + } + + for (auto output : new_dequantization.multiply->outputs()) { + for (auto input : output.get_target_inputs()) { + auto child = input.get_node()->shared_from_this(); + propagate(context, child); + } + } +} + bool RecurrentCellTransformation::transform(TransformationContext& context, ov::pass::pattern::Matcher& m) { const auto lstm = m.get_match_root(); + const auto inputs = get_supported_precisions(lstm); + for (const auto& input : inputs) { + const auto& parent = lstm->get_input_node_shared_ptr(input.first); + if (!isSupportedForPerChannelQuantization(parent)) { + continue; + } + + const auto& fq = find_fake_quantize_upper(parent); + if (fq != nullptr) { + const auto& quantizationDetails = QuantizationDetails::getDetails(fq); + if ((quantizationDetails.inputLowValues.size() != 1) || (quantizationDetails.inputHighValues.size() != 1) || + (quantizationDetails.outputLowValues.size() != 1) || (quantizationDetails.outputHighValues.size() != 1)) { + continue; + } + + const auto& precisionsAttribute = getAttributeFromOutput(fq); + const auto& precisions = precisionsAttribute.empty() ? 
+ defaultPrecisions : + precisionsAttribute.as().value(); + const auto& dataPrecision = getDataPrecision(fq, quantizationDetails, precisions); + if (dataPrecision.empty() || ((input.second != element::undefined) && (dataPrecision.precision != input.second))) { + return false; + } + + auto result = NetworkHelper::decomposeFakeQuantize( + fq, + dataPrecision.precision, + dataPrecision.min, + dataPrecision.max, + dataPrecision.hasZeroPoint, + updatePrecisions); + auto multiply = std::get<1>(result); + + for (const auto& output : multiply->outputs()) { + for (const auto& input : output.get_target_inputs()) { + const auto input_node = input.get_node(); + propagate(context, input_node->shared_from_this()); + } + } + } + } + if (!canBeTransformed(context, lstm)) { return false; } @@ -154,18 +246,21 @@ bool RecurrentCellTransformation::transform(TransformationContext& context, ov:: } bool RecurrentCellTransformation::canBeTransformed(const TransformationContext& context, std::shared_ptr lstm) const { - std::shared_ptr W, R; - - if (is_type(lstm)) { - W = lstm->get_input_node_shared_ptr(4); - R = lstm->get_input_node_shared_ptr(5); - } else if (is_type(lstm)) { - W = lstm->get_input_node_shared_ptr(3); - R = lstm->get_input_node_shared_ptr(4); - } else { - return false; - } + const auto inputs = get_supported_precisions(lstm); + for (const auto& index : inputs) { + const auto& input = lstm->get_input_node_ptr(index.first); + if (as_type(input) || as_type(input)) { + continue; + } + const auto dequantization = NetworkHelper::getDequantization(lstm, defaultPrecisions, index.first); + if (dequantization.empty()) { + continue; + } + if ((index.second != element::undefined) && (dequantization.data.get_element_type() != index.second)) { + return false; + } + } return true; } diff --git a/src/common/low_precision_transformations/tests/broadcast_transformation.cpp b/src/common/low_precision_transformations/tests/broadcast_transformation.cpp new file mode 100644 index 00000000000000..7745f38143d440 --- /dev/null +++ b/src/common/low_precision_transformations/tests/broadcast_transformation.cpp @@ -0,0 +1,197 @@ +// Copyright (C) 2024 Intel Corporation +// SPDX-License-Identifier: Apache-2.0 +// + +#include "layer_transformation.hpp" + +#include +#include + +#include + +#include "common_test_utils/ov_test_utils.hpp" +#include "low_precision/broadcast.hpp" +#include "ov_lpt_models/broadcast.hpp" +#include "simple_low_precision_transformer.hpp" + +namespace { +using namespace ov::pass; +using namespace ov::builder::subgraph; +using namespace ov::opset1; +using namespace ov; + +class BroadcastTransformationTestValues { +public: + class Pattern { + public: + ov::element::Type precisionBeforeDequantization; + ov::builder::subgraph::DequantizationOperations dequantizationBefore; + ov::builder::subgraph::DequantizationOperations dequantizationAfter; + }; + + TestTransformationParams params; + Shape tagetShape; + Shape axesMapping; + Pattern actual; + Pattern expected; +}; + +typedef std::tuple< + ov::PartialShape, + bool, + BroadcastTransformationTestValues> BroadcastTransformationParams; + +class BroadcastTransformation : public LayerTransformation, public testing::WithParamInterface { +public: + void SetUp() override { + const ov::PartialShape inputShape = std::get<0>(GetParam()); + const bool v1 = std::get<1>(GetParam()); + const BroadcastTransformationTestValues testValues = std::get<2>(GetParam()); + + // batch update support + auto tagetShape = testValues.tagetShape; + tagetShape[0] = 
inputShape[0].get_length(); + + actualFunction = BroadcastFunction::get( + v1, + inputShape, + testValues.actual.precisionBeforeDequantization, + testValues.actual.dequantizationBefore, + tagetShape, + testValues.axesMapping, + testValues.actual.dequantizationAfter); + + SimpleLowPrecisionTransformer transform; + transform.add(testValues.params); + transform.transform(actualFunction); + + referenceFunction = BroadcastFunction::get( + v1, + inputShape, + testValues.expected.precisionBeforeDequantization, + testValues.expected.dequantizationBefore, + tagetShape, + testValues.axesMapping, + testValues.expected.dequantizationAfter); + } + + static std::string getTestCaseName(testing::TestParamInfo obj) { + const ov::PartialShape inputShape = std::get<0>(obj.param); + const bool v1 = std::get<1>(obj.param); + const BroadcastTransformationTestValues testValues = std::get<2>(obj.param); + + std::ostringstream result; + result << + v1 << "_" << + inputShape << "_" << + testValues.tagetShape << "_" << + testValues.axesMapping << "_" << + testValues.actual.precisionBeforeDequantization << "_" << + testValues.actual.dequantizationBefore << "_" << + testValues.actual.dequantizationAfter << "_" << + testValues.expected.precisionBeforeDequantization << "_" << + testValues.expected.dequantizationBefore << "_" << + testValues.expected.dequantizationAfter; + return result.str(); + } +}; + +TEST_P(BroadcastTransformation, CompareFunctions) { + actualFunction->validate_nodes_and_infer_types(); + + auto res = compare_functions(actualFunction, referenceFunction, true); + ASSERT_TRUE(res.first) << res.second; + + ASSERT_TRUE(LayerTransformation::allNamesAreUnique(actualFunction)) << "Not all names are unique"; +} + +namespace hw_broadcast { +const std::vector inputShapes = { + { 1, 3, 1, 1 }, + { 4, 3, 1, 1 }, +}; + +const std::vector testValues = { + { + LayerTransformation::createParamsU8I8(), + { 1, 3, 9, 9}, + { 0, 1, 2, 3 }, + { + ov::element::u8, + {{ov::element::f32}, {0.1f}, {0.2f}}, + {{}, {}, {}}, + }, + { + ov::element::u8, + {{}, {}, {}}, + {{ov::element::f32}, {0.1f}, {0.2f}} + } + }, + { + LayerTransformation::createParamsU8I8(), + { 1, 3, 9, 9 }, + { 0, 1, 2, 3 }, + { + ov::element::u8, + { + {ov::element::f32}, + {{0.1f, 0.2f, 0.3f}}, + {{0.4f, 0.5f, 0.6f}} + } + }, + { + ov::element::u8, + { {}, {}, {}}, + { + {ov::element::f32}, + {{0.1f, 0.2f, 0.3f}}, + {{0.4f, 0.5f, 0.6f}} + } + } + } +}; + +INSTANTIATE_TEST_SUITE_P( + smoke_LPT, + BroadcastTransformation, + ::testing::Combine( + ::testing::ValuesIn(inputShapes), + ::testing::ValuesIn({ true, false }), + ::testing::ValuesIn(testValues)), + BroadcastTransformation::getTestCaseName); +} // hw_broadcast + +namespace chw_broadcast { +const std::vector inputShapes = { + { 1, 1, 1, 1 } +}; + +const std::vector testValues = { + { + LayerTransformation::createParamsU8I8(), + { 1, 9, 9, 9}, + { 0, 1, 2, 3 }, + { + ov::element::u8, + {{ov::element::f32}, {0.1f}, {0.2f}}, + {{}, {}, {}}, + }, + { + ov::element::u8, + {{}, {}, {}}, + {{ov::element::f32}, {0.1f}, {0.2f}} + } + } +}; + +INSTANTIATE_TEST_SUITE_P( + smoke_LPT, + BroadcastTransformation, + ::testing::Combine( + ::testing::ValuesIn(inputShapes), + ::testing::ValuesIn({ true, false }), + ::testing::ValuesIn(testValues)), + BroadcastTransformation::getTestCaseName); +} // chw_broadcast + +} // namespace diff --git a/src/common/snippets/include/snippets/kernel_executor_table.hpp b/src/common/snippets/include/snippets/kernel_executor_table.hpp index 46f9cd04b923ba..af797e4c80422a 100644 --- 
a/src/common/snippets/include/snippets/kernel_executor_table.hpp +++ b/src/common/snippets/include/snippets/kernel_executor_table.hpp @@ -43,7 +43,7 @@ class KernelExecutorBase { * @brief Update current kernel config in accordance with the passed expression. Corresponding kernel is recompiled if necessary. * This method should be called to update KernelExecutor based on runtime info (e.g. shapes) available through expression ptr */ - virtual void update_by_expression(const lowered::ExpressionPtr& expr) = 0; + virtual void update_by_expression(const lowered::ExpressionPtr& expr, const lowered::LinearIRPtr& linear_ir) = 0; /** * @brief Replace current kernel config with the provided value. Corresponding kernel is recompiled if necessary. * This method should be called to restore a saved state of the executor, that was configured using update_by_expression(). @@ -70,8 +70,8 @@ class KernelExecutor : public KernelExecutorBase { explicit KernelExecutor(Conf c) : KernelExecutorBase(), m_config{std::move(c)} {} // Note: override when final is redundant, but needed to avoid warnings on some compilers - void update_by_expression(const lowered::ExpressionPtr& expr) override final { // NOLINT - update_config(expr, m_config); + void update_by_expression(const lowered::ExpressionPtr& expr, const lowered::LinearIRPtr& linear_ir) override final { // NOLINT + update_config(expr, linear_ir, m_config); OPENVINO_ASSERT(m_config.is_completed(), "Failed to update kernel config in update_by_expression"); update_kernel(m_config, m_kernel); OPENVINO_ASSERT(m_kernel, "Failed to compile kernel executor"); @@ -103,7 +103,7 @@ class KernelExecutor : public KernelExecutorBase { protected: /*** Updates stored kernel config based on runtime info from expression (e.g. new input shapes). */ - virtual void update_config(const lowered::ExpressionPtr& expr, Conf& config) const = 0; + virtual void update_config(const lowered::ExpressionPtr& expr, const lowered::LinearIRPtr& linear_ir, Conf& config) const = 0; /*** Updates stored kernel in accordance with the passed config. Recompilation of the kernel is * performed if necessary. 
*/ virtual void update_kernel(const Conf& c, std::shared_ptr& kernel) const = 0; @@ -130,9 +130,9 @@ class KernelExecutorTable { return m_table.at(expr); } /*** Updates every registered KernelExecutor in accordance with the corresponding expression */ - void update_state() const { + void update_state(const lowered::LinearIRPtr& linear_ir) const { for (const auto& record : m_table) - record.second->update_by_expression(record.first); + record.second->update_by_expression(record.first, linear_ir); } /*** Returns lambda function that contains current state of the table, and restores this state when called */ diff --git a/src/common/snippets/include/snippets/lowered/loop_info.hpp b/src/common/snippets/include/snippets/lowered/loop_info.hpp index e763f2244d76c6..6be47f49d17ae1 100644 --- a/src/common/snippets/include/snippets/lowered/loop_info.hpp +++ b/src/common/snippets/include/snippets/lowered/loop_info.hpp @@ -430,7 +430,8 @@ class ExpandedLoopInfo : public LoopInfo { ExpandedLoopInfo(size_t work_amount, size_t increment, const std::vector& entries, const std::vector& exits, std::vector ptr_increments, std::vector final_offsets, std::vector data_sizes, - SpecificLoopIterType type, std::shared_ptr unified_loop_info, bool is_wa_const = false); + SpecificLoopIterType type, std::shared_ptr unified_loop_info, bool is_wa_const = false, + bool evaluate_once = false); /** * @brief Clone LoopInfo with new expressions * @param expr_map map of new and old expressions @@ -474,7 +475,18 @@ class ExpandedLoopInfo : public LoopInfo { * @return const ref of `m_data_sizes` */ const std::vector& get_data_sizes() const; + /** + * @brief Returns True if the current Loop should be executed once + * Otherwise, returns False + * @return `m_evaluate_once` + */ + bool is_evaluate_once() const; + /** + * @brief Set value to `m_evaluate_once` + * @param value - new value of `m_evaluate_once` + */ + void set_evaluate_once(bool value); /** * @brief Update `m_ptr_increments` using copy values from `new_values`. * The count of new values must be equal to the count of current increments. 
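A side note on the `evaluate_once` flag added here: a decomposed loop whose static work amount equals its increment executes its body exactly once, so later passes (OptimizeLoopSingleEvaluation and RuntimeConfigurator::update_loop_info further down in this patch) can fold the per-iteration pointer increments into the finalization offsets and then zero them. A minimal standalone sketch of that folding, using plain vectors instead of the real LoopEnd/ExpandedLoopInfo API and ignoring the dynamic-value-safe arithmetic:

#include <cstdint>
#include <vector>

// Illustrative only (not part of the patch): fold the single iteration's pointer
// shifts into the finalization offsets of a loop known to run exactly once.
void fold_single_evaluation(std::vector<int64_t>& ptr_increments,
                            std::vector<int64_t>& finalization_offsets,
                            int64_t increment) {
    for (size_t i = 0; i < ptr_increments.size(); ++i) {
        // The single iteration advances each data pointer by ptr_increments[i] * increment,
        // so that shift is applied once at loop exit instead...
        finalization_offsets[i] += ptr_increments[i] * increment;
        // ...and the per-iteration increment is no longer needed.
        ptr_increments[i] = 0;
    }
}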
@@ -517,6 +529,8 @@ class ExpandedLoopInfo : public LoopInfo { const SpecificLoopIterType m_type = {}; std::shared_ptr m_unified_loop_info = {}; + + bool m_evaluate_once = false; }; using ExpandedLoopInfoPtr = std::shared_ptr; diff --git a/src/common/snippets/include/snippets/lowered/port_descriptor.hpp b/src/common/snippets/include/snippets/lowered/port_descriptor.hpp index 3fc429bec4df1e..2d5c72c06ef983 100644 --- a/src/common/snippets/include/snippets/lowered/port_descriptor.hpp +++ b/src/common/snippets/include/snippets/lowered/port_descriptor.hpp @@ -20,12 +20,6 @@ using PortDescriptorPtr = std::shared_ptr; class PortDescriptor { friend class LinearIRBuilder; public: - // The structure with service values for scheduling parameters - struct ServiceDimensions { - // The value for the subtensor that means that scheduling should be by full dimension - static size_t FULL_DIM; - }; - explicit PortDescriptor(const ov::Input& node, VectorDims subtensor_shape = {}, std::vector layout = {}); @@ -54,6 +48,9 @@ class PortDescriptor { void set_reg_type(RegType type) { m_reg.type = type; } void set_reg_idx(size_t idx) { m_reg.idx = idx; } + // Indexing starts from the end (rbegin() + idx) + void set_subtensor_dim(size_t idx, VectorDims::value_type value); + std::string serialize() const; bool empty() const { return m_layout.empty() && m_subtensor_shape.empty();} PortDescriptorPtr clone() const; @@ -87,6 +84,8 @@ class PortDescriptorUtils { public: static void set_port_descriptor_ptr(const ov::Input& n, const PortDescriptorPtr& desc); static void set_port_descriptor_ptr(const ov::Output& n, const PortDescriptorPtr& desc); + static void set_port_descriptor(const ov::Input& n, std::vector subtensor, std::vector layout = {}); + static void set_port_descriptor(const ov::Output& n, std::vector subtensor, std::vector layout = {}); static PortDescriptorPtr get_port_descriptor_ptr(const ov::Input& in); static PortDescriptorPtr get_port_descriptor_ptr(const ov::Input& out); @@ -116,17 +115,6 @@ class PortDescriptorVectorAttribute : public ov::RuntimeAttribute { std::vector outputs{}; }; -template -void set_port_desc(const T& port, std::vector subtensor) { - const auto& shape = port.get_shape(); - for (size_t i = 1; i <= std::min(subtensor.size(), shape.size()); i++) { - auto& dim = subtensor[subtensor.size() - i]; - if (dim != PortDescriptor::ServiceDimensions::FULL_DIM) - dim = std::min(dim, shape[shape.size() - i]); - } - PortDescriptorUtils::set_port_descriptor_ptr(port, std::make_shared(shape, subtensor)); -} - } // namespace lowered } // namespace snippets } // namespace ov diff --git a/src/common/snippets/include/snippets/runtime_configurator.hpp b/src/common/snippets/include/snippets/runtime_configurator.hpp index 059771d961df82..058eca59716d1b 100644 --- a/src/common/snippets/include/snippets/runtime_configurator.hpp +++ b/src/common/snippets/include/snippets/runtime_configurator.hpp @@ -61,7 +61,7 @@ class RuntimeConfigurator { * @param linear_ir LinearIR * @return updated config */ - const std::shared_ptr& get_updated_config(const std::shared_ptr& linear_ir); + const std::shared_ptr& get_updated_config(const lowered::LinearIRPtr& linear_ir); /*** Returns pointer to KernelExecutorTable owned by the config */ const std::shared_ptr& get_kernel_executor_table() const { return m_config->kernel_executor_table; } @@ -70,19 +70,19 @@ class RuntimeConfigurator { * @brief Update RuntimeConfig based on LinearIR * @param linear_ir LinearIR */ - virtual void update(const std::shared_ptr& linear_ir); + virtual 
void update(const lowered::LinearIRPtr& linear_ir); /** * @brief Allocate and intialize fields in RuntimeConfig and RuntimeConfigurator * @param linear_ir LinearIR */ - virtual void initialization(const std::shared_ptr& linear_ir); + virtual void initialization(const lowered::LinearIRPtr& linear_ir); /** * @brief Initializes input and data information of LinearIR: * descriptors (that contains shapes and layouts) and data_sizes * @param linear_ir LinearIR */ - void init_data_info(const std::shared_ptr& linear_ir); + void init_data_info(const lowered::LinearIRPtr& linear_ir); /** * @brief Initializes information of buffers: * - static buffer_scratchpad_size @@ -90,23 +90,23 @@ class RuntimeConfigurator { * - clusters with dynamic buffers (`m_dynamic_buffer_clusters`) for the quick access in `update()` * @param linear_ir LinearIR */ - void init_buffer_info(const std::shared_ptr& linear_ir); + void init_buffer_info(const lowered::LinearIRPtr& linear_ir); /** * @brief Initializes tensor rank of config * @param linear_ir LinearIR */ - virtual void init_tensor_rank(const std::shared_ptr& linear_ir) const; + virtual void init_tensor_rank(const lowered::LinearIRPtr& linear_ir) const; /** * @brief Update Loop informations in LinearIR: Unified and ExpandedLoopInfo * @param linear_ir LinearIR */ - void update_loop_info(const std::shared_ptr& linear_ir) const; + void update_loop_info(const lowered::LinearIRPtr& linear_ir) const; /** * @brief Update Buffer scratchpad size and offsets if needed * Note: `update_loop_info` must be called before * @param linear_ir LinearIR */ - void update_buffer_scratchpad_size(const std::shared_ptr& linear_ir) const; + void update_buffer_scratchpad_size(const lowered::LinearIRPtr& linear_ir) const; /** * @brief Calculate data offsets of LinearIR and update these values in RuntimeConfig */ diff --git a/src/common/snippets/include/snippets/utils/utils.hpp b/src/common/snippets/include/snippets/utils/utils.hpp index 33eebcffedf68b..869956b5274c60 100644 --- a/src/common/snippets/include/snippets/utils/utils.hpp +++ b/src/common/snippets/include/snippets/utils/utils.hpp @@ -21,6 +21,26 @@ namespace ov { namespace snippets { namespace utils { +/* --- Special values --- */ +template::value || std::is_same::value), bool>::type> +constexpr inline T get_dynamic_value() { + return std::numeric_limits::max(); +} +template::value || std::is_same::value), bool>::type> +constexpr inline bool is_dynamic_value(T value) { + return value == get_dynamic_value(); +} + +// This value means full dimension +// For example, for the subtensor it means that scheduling should be by full dimension +constexpr inline size_t get_full_dim_value() { + return get_dynamic_value() - 1; +} +constexpr inline bool is_full_dim_value(size_t value) { + return value == get_full_dim_value(); +} +/* ---------------------- */ + // Get non-scalar Constant count that will be created after FakeQuantize decomposition. // This count is needed to know exact count of non-scalar Constants during tokenization. 
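For context on the special markers introduced above: the helpers reserve the top of the size_t range, with SIZE_MAX standing for a dynamic (unknown) value and SIZE_MAX - 1 standing for a "full dimension" subtensor entry (schedule over the whole axis), matching the legend spelled out later in propagate_subtensors.cpp. A small self-contained sketch of the same encoding, re-declared locally for illustration rather than taken from utils.hpp:

#include <cstddef>
#include <limits>
#include <vector>

// Local mirror of the encoding used by the new helpers (illustrative names).
constexpr size_t dyn_value()      { return std::numeric_limits<size_t>::max(); }  // dynamic dimension
constexpr size_t full_dim_value() { return dyn_value() - 1; }                      // "process whole axis"
constexpr bool   is_full_dim(size_t v) { return v == full_dim_value(); }

int main() {
    // E.g. a Brgemm-like subtensor: 32 rows per iteration, full last dimension.
    std::vector<size_t> subtensor{32, full_dim_value()};
    size_t full_dims = 0;
    for (size_t v : subtensor)
        full_dims += is_full_dim(v) ? 1 : 0;  // full dims are excluded from loop iteration
    return static_cast<int>(full_dims);       // returns 1
}

In the patch itself these markers come from utils::get_dynamic_value / utils::get_full_dim_value and are tested with utils::is_dynamic_value / utils::is_full_dim_value.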
auto get_non_scalar_constant_count_for_fq(const std::shared_ptr& fq) -> size_t; @@ -59,16 +79,6 @@ inline T div_up(const T a, const U b) { return static_cast((a + b - 1) / b); } -template::value || std::is_same::value), bool>::type> -constexpr inline T get_dynamic_value() { - return std::numeric_limits::max(); -} - -template::value || std::is_same::value), bool>::type> -constexpr inline bool is_dynamic_value(T value) { - return value == get_dynamic_value(); -} - inline bool is_dynamic_vdims(const VectorDims& shape) { return std::any_of(shape.cbegin(), shape.cend(), [](size_t v){ return is_dynamic_value(v); }); } diff --git a/src/common/snippets/src/lowered/loop_info.cpp b/src/common/snippets/src/lowered/loop_info.cpp index 6f14a52e750feb..d99788fad12946 100644 --- a/src/common/snippets/src/lowered/loop_info.cpp +++ b/src/common/snippets/src/lowered/loop_info.cpp @@ -373,10 +373,10 @@ void UnifiedLoopInfo::add_loop_ports(const std::vector& ports) { ExpandedLoopInfo::ExpandedLoopInfo(size_t work_amount, size_t increment, const std::vector& entries, const std::vector& exits, std::vector ptr_increments, std::vector final_offsets, std::vector data_sizes, - SpecificLoopIterType type, std::shared_ptr unified_loop_info, bool is_wa_const) + SpecificLoopIterType type, std::shared_ptr unified_loop_info, bool is_wa_const, bool evaluate_once) : LoopInfo(work_amount, increment, entries, exits, is_wa_const), m_ptr_increments(std::move(ptr_increments)), m_finalization_offsets(std::move(final_offsets)), - m_data_sizes(std::move(data_sizes)), m_type(type), m_unified_loop_info(std::move(unified_loop_info)) { + m_data_sizes(std::move(data_sizes)), m_type(type), m_unified_loop_info(std::move(unified_loop_info)), m_evaluate_once(evaluate_once) { validate(); } @@ -392,7 +392,8 @@ std::shared_ptr ExpandedLoopInfo::clone_with_new_expr(const Expression const auto& new_output_ports = clone_loop_ports(expr_map, m_output_ports); return std::make_shared(m_work_amount, m_increment, new_input_ports, new_output_ports, - m_ptr_increments, m_finalization_offsets, m_data_sizes, m_type, m_unified_loop_info, m_is_work_amount_const); + m_ptr_increments, m_finalization_offsets, m_data_sizes, m_type, + m_unified_loop_info, m_is_work_amount_const, m_evaluate_once); } bool ExpandedLoopInfo::is_dynamic() const { @@ -435,6 +436,14 @@ const std::vector& ExpandedLoopInfo::get_data_sizes() const { return m_data_sizes; } +bool ExpandedLoopInfo::is_evaluate_once() const { + return m_evaluate_once; +} + +void ExpandedLoopInfo::set_evaluate_once(bool value) { + m_evaluate_once = value; +} + void ExpandedLoopInfo::update_ptr_increments(const std::vector& new_values) { OPENVINO_ASSERT(new_values.size() == m_ptr_increments.size(), "Failed to update ptr_increments: incompatible counts"); m_ptr_increments.assign(new_values.cbegin(), new_values.end()); diff --git a/src/common/snippets/src/lowered/loop_manager.cpp b/src/common/snippets/src/lowered/loop_manager.cpp index 3e07ec850927ab..09f8ccb94b9660 100644 --- a/src/common/snippets/src/lowered/loop_manager.cpp +++ b/src/common/snippets/src/lowered/loop_manager.cpp @@ -160,7 +160,6 @@ void LoopManager::get_io_loop_ports(LinearIR::constExprIt loop_begin_pos, void LoopManager::mark_loop(LinearIR::constExprIt loop_begin_pos, LinearIR::constExprIt loop_end_pos, size_t loop_depth, size_t vector_size) { - const auto FULL_DIM = PortDescriptor::ServiceDimensions::FULL_DIM; std::vector loop_input_ports, loop_output_ports; LoopManager::get_io_loop_ports(loop_begin_pos, loop_end_pos, loop_input_ports, 
loop_output_ports); @@ -178,8 +177,8 @@ void LoopManager::mark_loop(LinearIR::constExprIt loop_begin_pos, "Failed to broadcast work amount in marking loop"); }; - auto is_outside_loop = [&FULL_DIM](const std::vector& subtensor) { - return std::all_of(subtensor.begin(), subtensor.end(), [&FULL_DIM](size_t lhs) { return lhs == FULL_DIM; }); + auto is_outside_loop = [](const std::vector& subtensor) { + return std::all_of(subtensor.begin(), subtensor.end(), utils::is_full_dim_value); }; std::vector loop_subtensor; @@ -192,7 +191,7 @@ void LoopManager::mark_loop(LinearIR::constExprIt loop_begin_pos, subtensor[subtensor.size() - 1] = vector_size; } - const size_t resizing_value = is_outside_loop(subtensor) ? FULL_DIM : 1; + const size_t resizing_value = is_outside_loop(subtensor) ? utils::get_full_dim_value() : 1; while (subtensor.size() < loop_depth) subtensor.insert(subtensor.begin(), resizing_value); if (loop_subtensor.empty()) @@ -202,7 +201,7 @@ void LoopManager::mark_loop(LinearIR::constExprIt loop_begin_pos, "Incorrect scheduling parameters for loop"); for (size_t dim_idx = 0; dim_idx < loop_depth; ++dim_idx) { - if (*(subtensor.rbegin() + dim_idx) != FULL_DIM) { + if (!utils::is_full_dim_value(*(subtensor.rbegin() + dim_idx))) { broadcast(loop_tensor, shape, dim_idx); } } @@ -211,7 +210,7 @@ void LoopManager::mark_loop(LinearIR::constExprIt loop_begin_pos, for (size_t dim_idx = 0; dim_idx < loop_depth; ++dim_idx) { OPENVINO_ASSERT(dim_idx < loop_subtensor.size(), "Incorrect indexes of Loop for markup"); const auto& subtensor_value = *(loop_subtensor.rbegin() + dim_idx); - if (subtensor_value == FULL_DIM) { + if (utils::is_full_dim_value(subtensor_value)) { continue; } diff --git a/src/common/snippets/src/lowered/pass/compute_buffer_allocation_size.cpp b/src/common/snippets/src/lowered/pass/compute_buffer_allocation_size.cpp index e4664800995db1..028cdde1088e60 100644 --- a/src/common/snippets/src/lowered/pass/compute_buffer_allocation_size.cpp +++ b/src/common/snippets/src/lowered/pass/compute_buffer_allocation_size.cpp @@ -60,7 +60,7 @@ size_t ComputeBufferAllocationSize::get_allocation_size(const LoopManagerPtr& lo const auto processing_rank = !processed_dim_idxs.empty() ? 
std::max(*processed_dim_idxs.rbegin(), subtensor.size()) : subtensor.size(); for (size_t i = 0; i < std::min(processing_rank, rank); ++i) { if (processed_dim_idxs.count(i) == 0) { - if (i < subtensor.size()) + if (i < subtensor.size() && !utils::is_full_dim_value(*(subtensor.rbegin() + i))) allocation_size = utils::dynamic_safe_mul(allocation_size, std::min(*(planar_shape.rbegin() + i), *(subtensor.rbegin() + i))); else allocation_size = utils::dynamic_safe_mul(allocation_size, *(planar_shape.rbegin() + i)); diff --git a/src/common/snippets/src/lowered/pass/insert_specific_iterations.cpp b/src/common/snippets/src/lowered/pass/insert_specific_iterations.cpp index 2ef872ba4ad262..dcff90015d28f2 100644 --- a/src/common/snippets/src/lowered/pass/insert_specific_iterations.cpp +++ b/src/common/snippets/src/lowered/pass/insert_specific_iterations.cpp @@ -167,6 +167,7 @@ bool InsertSpecificIterations::decompose(LinearIR& linear_ir, LinearIR::constExp if (is_decomposed_loop_needed(unified_loop_info, iter_type, remaining_work_amount)) { const auto work_amount = get_decomposed_loop_work_amount(unified_loop_info, iter_type, remaining_work_amount); const auto increment = get_decomposed_loop_increment(unified_loop_info, iter_type, remaining_work_amount); + const auto evaluate_once = !utils::is_dynamic_value(work_amount) && work_amount == increment; // Update remaining Loop work amount // Note: if work_amount is unknown and increment = 1, it means that a loop will iterate by whole work_amount if (!is_wa_dynamic || increment == 1) { @@ -199,7 +200,7 @@ bool InsertSpecificIterations::decompose(LinearIR& linear_ir, LinearIR::constExp const auto decomposed_loop_info = std::make_shared(work_amount, increment, decomposed_loop_entry_ports, decomposed_loop_exit_ports, decomposed_ptr_increments, decomposed_finalization_offsets, - decomposed_data_sizes, iter_type, unified_loop_info); + decomposed_data_sizes, iter_type, unified_loop_info, false, evaluate_once); init_decomposed_loop(linear_ir, decomposed_loop_begin_it, decomposed_loop_end_it, decomposed_loop_info, loop_id, decomposed_loop_end); decomposed = true; diff --git a/src/common/snippets/src/lowered/pass/optimize_loop_single_evaluation.cpp b/src/common/snippets/src/lowered/pass/optimize_loop_single_evaluation.cpp index c19bf7d65a2fef..c6255d90106e77 100644 --- a/src/common/snippets/src/lowered/pass/optimize_loop_single_evaluation.cpp +++ b/src/common/snippets/src/lowered/pass/optimize_loop_single_evaluation.cpp @@ -4,6 +4,7 @@ #include "snippets/lowered/pass/optimize_loop_single_evaluation.hpp" +#include "snippets/lowered/loop_manager.hpp" #include "snippets/lowered/linear_ir.hpp" #include "snippets/op/loop.hpp" #include "snippets/utils/utils.hpp" @@ -16,30 +17,31 @@ namespace pass { bool OptimizeLoopSingleEvaluation::run(lowered::LinearIR& linear_ir, lowered::LinearIR::constExprIt begin, lowered::LinearIR::constExprIt end) { OV_ITT_SCOPED_TASK(ov::pass::itt::domains::SnippetsTransform, "Snippets::OptimizeLoopSingleEvaluation") + const auto& loop_manager = linear_ir.get_loop_manager(); + bool is_modified = false; for (auto expr_it = begin; expr_it != end; ++expr_it) { const auto& expr = *expr_it; if (auto loop_end = ov::as_type_ptr(expr->get_node())) { - // *1* solo vector/tail loop + empty outer loop - // => skip increments (both counter & ptr) : set evaluate_once flag - // *2* solo vector/tail loop + non-empty outer loop - // => skip counter increments but perform ptr increments : set evaluate_once, - // and perform pointer increments through finalization 
offsets - // *3* vector loop(s) + one tail loop - // => vector as usual, tail depends on outer loop, see *1* and *2* - if (loop_end->has_dynamic_params() || loop_end->get_work_amount() >= 2 * loop_end->get_increment()) - continue; - - auto new_finalization_offsets = loop_end->get_finalization_offsets(); - const auto& ptr_increments = loop_end->get_ptr_increments(); - const auto work_amount_incr = static_cast(loop_end->get_increment()); - for (size_t i = 0; i < new_finalization_offsets.size(); i++) { - new_finalization_offsets[i] += ptr_increments[i] * work_amount_incr; + const auto& loop_info = loop_manager->get_loop_info(loop_end->get_id()); + if (loop_info->is_evaluate_once()) { + auto new_finalization_offsets = loop_end->get_finalization_offsets(); + const auto& ptr_increments = loop_end->get_ptr_increments(); + const auto work_amount_incr = static_cast(loop_end->get_increment()); + for (size_t i = 0; i < new_finalization_offsets.size(); i++) { + const auto ptr_shift = utils::dynamic_safe_mul(ptr_increments[i], work_amount_incr); + new_finalization_offsets[i] = utils::dynamic_safe_add(new_finalization_offsets[i], ptr_shift); + } + loop_end->set_finalization_offsets(new_finalization_offsets); + loop_end->set_ptr_increments(std::vector(new_finalization_offsets.size(), 0)); + loop_end->set_evaluate_once(true); + + // Update the corresponding ExpandedLoopInfo + loop_info->update_ptr_increments(loop_end->get_ptr_increments()); + loop_info->update_finalization_offsets(loop_end->get_finalization_offsets()); + + is_modified = true; } - loop_end->set_finalization_offsets(new_finalization_offsets); - loop_end->set_ptr_increments(std::vector(new_finalization_offsets.size(), 0)); - loop_end->set_evaluate_once(true); - is_modified = true; } } return is_modified; diff --git a/src/common/snippets/src/lowered/pass/propagate_subtensors.cpp b/src/common/snippets/src/lowered/pass/propagate_subtensors.cpp index b58de6790c23a4..c89274a728c4c9 100644 --- a/src/common/snippets/src/lowered/pass/propagate_subtensors.cpp +++ b/src/common/snippets/src/lowered/pass/propagate_subtensors.cpp @@ -15,14 +15,43 @@ namespace snippets { namespace lowered { namespace pass { namespace { + +// The algorithm uses the following special values in subtensors/shapes: +// 1. Dynamic value in subtensor/shape : SIZE_MAX +// 2. Full dimension in subtensor : SIZE_MAX - 1 +// 3. Default value of `new_dim_value` : SIZE_MAX - 2 +// 4. `Forced` special dynamic value : SIZE_MAX - 3 +// +// We have to introduce `FORCED_DYNAMIC_VALUE` to distinguish `new_dim_value = DYNAMIC` +// from the real dynamic values in subtensors and shapes and force this value in subtensors. +// For example, there is Brgemm with the following info in the tail Loop: +// Input 0: shape [?, ?], existing subtensor [32, FULL_DIM] +// Input 1: shape [?, ?], existing subtensor [FULL_DIM, FULL_DIM] +// Output : shape [?, ?], existing subtensor [32, FULL_DIM] +// If the user wants to force `?` in the place of `32` in subtensors, the steps will be: +// 1. Set `?` to subtensor and shape of Input 0 : +// shape [?, ?] (shape has not been changed!), new subtensor [?, FULL_DIM] +// 2. Make shape inference of Brgemm and get Output: +// shape [?, ?] (shape has not been changed!), existing subtensor [FULL_DIM, FULL_DIM] +// 3. Update subtensor on output using shape: +// new_subtensor[i] = std::min(planar_shape[i], subtensor[i]); // i = 0: std::min(SIZE_MAX(?), 32) +// new subtensor [32, FULL_DIM] - has not been changed! 
But should be [?, FULL_DIM] +// Conclusion: we have to distinguish forced dynamic value with existing dynamic values in shape and subtensor + +constexpr size_t NEW_DEFAULT_VALUE = SIZE_MAX - 2; +constexpr size_t FORCED_DYNAMIC_VALUE = SIZE_MAX - 3; + void propagate_updated_subtensor_through_loop(const LinearIR& linear_ir, const LoopInfoPtr& loop_info, LinearIR::container::const_iterator begin, LinearIR::container::const_iterator end, bool most_outer_loop, - const size_t new_dim_value = SIZE_MAX) { - OPENVINO_ASSERT(snippets::utils::implication(most_outer_loop, new_dim_value != SIZE_MAX), + size_t new_dim_value = NEW_DEFAULT_VALUE) { + // Marks the forced dynamic value + new_dim_value = utils::is_dynamic_value(new_dim_value) ? FORCED_DYNAMIC_VALUE : new_dim_value; + OPENVINO_ASSERT(snippets::utils::implication(most_outer_loop, new_dim_value != NEW_DEFAULT_VALUE), "if the updated subtensor propagation was called for the outer loop, new_dim_value must not be equal to default value"); + std::map original_shapes; // First step: set new dim value to the corresponding input_ports' dimensions if (most_outer_loop) { @@ -32,9 +61,8 @@ void propagate_updated_subtensor_through_loop(const LinearIR& linear_ir, const auto& expr = port.expr_port->get_expr(); const auto& desc = port.expr_port->get_descriptor_ptr(); auto subtensor = desc->get_subtensor(); - if (port.dim_idx < subtensor.size()) { - *(subtensor.rbegin() + port.dim_idx) = new_dim_value; - desc->set_subtensor(subtensor); + if (port.dim_idx < desc->get_subtensor().size()) { + desc->set_subtensor_dim(port.dim_idx, new_dim_value); } const auto parent_desc = expr->get_input_port_connector(port.expr_port->get_index())->get_source().get_descriptor_ptr(); @@ -78,7 +106,9 @@ void propagate_updated_subtensor_through_loop(const LinearIR& linear_ir, const size_t subtensor_start = planar_dims.size() - subtensor.size(); VectorDims new_subtensor(planar_dims.begin() + subtensor_start, planar_dims.end()); for (size_t i = 0; i < new_subtensor.size(); ++i) { - new_subtensor[i] = std::min(new_subtensor[i], subtensor[i]); + // If user forces dynamic value to set in subtensor, set real dynamic dimension using `get_dynamic_value()` + new_subtensor[i] = new_subtensor[i] == FORCED_DYNAMIC_VALUE ? utils::get_dynamic_value() : + utils::is_full_dim_value(subtensor[i]) ? 
subtensor[i] : std::min(new_subtensor[i], subtensor[i]); } desc->set_subtensor(new_subtensor); } diff --git a/src/common/snippets/src/lowered/port_descriptor.cpp b/src/common/snippets/src/lowered/port_descriptor.cpp index 3280be29973b69..e5fd3638e831c8 100644 --- a/src/common/snippets/src/lowered/port_descriptor.cpp +++ b/src/common/snippets/src/lowered/port_descriptor.cpp @@ -9,8 +9,6 @@ namespace ov { namespace snippets { namespace lowered { -size_t PortDescriptor::ServiceDimensions::FULL_DIM = SIZE_MAX; - PortDescriptor::PortDescriptor(const ov::Input& in, VectorDims subtensor_shape, std::vector layout) : PortDescriptor(ov::Input(in.get_node(), in.get_index()), std::move(subtensor_shape), std::move(layout)) {} @@ -53,6 +51,11 @@ void PortDescriptor::set_shape(const VectorDims& tensor) { *m_tensor_shape = tensor; } +void PortDescriptor::set_subtensor_dim(size_t idx, VectorDims::value_type value) { + OPENVINO_ASSERT(idx < m_subtensor_shape.size(), "Failed to set subtensor value: idx should be less than size"); + *(m_subtensor_shape.rbegin() + idx) = value; +} + PortDescriptorPtr PortDescriptor::clone() const { auto desc = std::make_shared(*m_tensor_shape, m_subtensor_shape, m_layout); desc->set_reg(m_reg); @@ -130,6 +133,26 @@ void PortDescriptorUtils::set_port_descriptor_ptr(const ov::Output& ou } } +namespace { +template +void set_port_desc(const T& port, std::vector subtensor, std::vector layout) { + const auto& shape = port.get_shape(); + for (size_t i = 1; i <= std::min(subtensor.size(), shape.size()); i++) { + auto& dim = subtensor[subtensor.size() - i]; + if (!utils::is_full_dim_value(dim)) + dim = std::min(dim, shape[shape.size() - i]); + } + PortDescriptorUtils::set_port_descriptor_ptr(port, std::make_shared(shape, subtensor, layout)); +} +} // namespace + +void PortDescriptorUtils::set_port_descriptor(const ov::Input& in, std::vector subtensor, std::vector layout) { + set_port_desc(in, subtensor, layout); +} +void PortDescriptorUtils::set_port_descriptor(const ov::Output& in, std::vector subtensor, std::vector layout) { + set_port_desc(in, subtensor, layout); +} + PortDescriptorPtr PortDescriptorUtils::get_port_descriptor_ptr(const ov::Input& in) { return get_port_descriptor_ptr(ov::Input(in.get_node(), in.get_index())); } diff --git a/src/common/snippets/src/op/reduce.cpp b/src/common/snippets/src/op/reduce.cpp index 5717bfe1255300..b0b69e0bd7e84c 100644 --- a/src/common/snippets/src/op/reduce.cpp +++ b/src/common/snippets/src/op/reduce.cpp @@ -5,6 +5,7 @@ #include "snippets/op/reduce.hpp" #include "snippets/itt.hpp" +#include "snippets/utils/utils.hpp" #include "snippets/lowered/port_descriptor.hpp" namespace ov { @@ -33,7 +34,7 @@ void ReduceBase::compute_and_set_reduce_subtensors(const std::shared_ptr subtensor(reduce_rank, 1); for (size_t i = axis; i < reduce_rank; ++i) - subtensor[i] = lowered::PortDescriptor::ServiceDimensions::FULL_DIM; + subtensor[i] = utils::get_full_dim_value(); lowered::PortDescriptorUtils::set_port_descriptor_ptr(reduce->input(0), std::make_shared(reduce->input(0), subtensor)); lowered::PortDescriptorUtils::set_port_descriptor_ptr(reduce->output(0), std::make_shared(reduce->output(0), subtensor)); } diff --git a/src/common/snippets/src/op/serialization_node.cpp b/src/common/snippets/src/op/serialization_node.cpp index cb17e8a57ddf24..9864a1a12f94a5 100644 --- a/src/common/snippets/src/op/serialization_node.cpp +++ b/src/common/snippets/src/op/serialization_node.cpp @@ -49,7 +49,8 @@ bool SerializationNode::visit_attributes(AttributeVisitor &visitor) 
{ std::stringstream ss; for (size_t i = 0; i < subtensor.size(); ++i) { const auto& v = subtensor[i]; - const auto v_str = (v == lowered::PortDescriptor::ServiceDimensions::FULL_DIM) ? "FULL_DIM" : std::to_string(v); + const auto v_str = utils::is_full_dim_value(v) ? "FULL_DIM" : + utils::is_dynamic_value(v) ? "?" : std::to_string(v); const auto del = i < subtensor.size() - 1 ? ", " : ""; ss << v_str << del; } diff --git a/src/common/snippets/src/op/subgraph.cpp b/src/common/snippets/src/op/subgraph.cpp index a33d478ee3929d..4ede0b58a66cf0 100644 --- a/src/common/snippets/src/op/subgraph.cpp +++ b/src/common/snippets/src/op/subgraph.cpp @@ -552,7 +552,7 @@ snippets::Schedule Subgraph::generate(const void* compile_params) const { exec_table->replace_key_expression(expression_map.at(expr.get()), expr); // Some kernel executors might've been registered during code emission. // We need to update them, so appropriate kernels will be compiled. - exec_table->update_state(); + exec_table->update_state(m_linear_ir); return {std::move(lowering_result)}; } diff --git a/src/common/snippets/src/pass/matmul_to_brgemm.cpp b/src/common/snippets/src/pass/matmul_to_brgemm.cpp index 6eaf8424ff5a78..7268d4a7cc6a67 100644 --- a/src/common/snippets/src/pass/matmul_to_brgemm.cpp +++ b/src/common/snippets/src/pass/matmul_to_brgemm.cpp @@ -18,16 +18,12 @@ namespace snippets { namespace pass { void MatMulToBrgemm::init_ports(const std::shared_ptr& brgemm) const { - auto get_subtensor = []() { - return std::vector{ lowered::PortDescriptor::ServiceDimensions::FULL_DIM, lowered::PortDescriptor::ServiceDimensions::FULL_DIM }; - }; + const auto subtensor = std::vector(2, utils::get_full_dim_value()); for (const auto& input : brgemm->inputs()) { const auto& tensor = utils::pshape_to_vdims(input.get_partial_shape()); - const auto& subtensor = get_subtensor(); lowered::PortDescriptorUtils::set_port_descriptor_ptr(input, std::make_shared(tensor, subtensor)); } const auto& tensor = utils::pshape_to_vdims(brgemm->get_output_partial_shape(0)); - const auto& subtensor = get_subtensor(); lowered::PortDescriptorUtils::set_port_descriptor_ptr(brgemm->output(0), std::make_shared(tensor, subtensor)); } diff --git a/src/common/snippets/src/pass/softmax_decomposition.cpp b/src/common/snippets/src/pass/softmax_decomposition.cpp index 269d06c958dd39..34dc1c19c5d9d0 100644 --- a/src/common/snippets/src/pass/softmax_decomposition.cpp +++ b/src/common/snippets/src/pass/softmax_decomposition.cpp @@ -55,7 +55,7 @@ SoftmaxDecomposition::SoftmaxDecomposition() { OPENVINO_ASSERT(axis < rank, "Softmax has incorrect axis"); std::vector subtensor(rank, 1); for (size_t i = axis; i < rank; ++i) - subtensor[i] = PortDescriptor::ServiceDimensions::FULL_DIM; + subtensor[i] = utils::get_full_dim_value(); PortDescriptorUtils::set_port_descriptor_ptr(power->input(0), std::make_shared(power->input(0), subtensor)); PortDescriptorUtils::set_port_descriptor_ptr(power->output(0), std::make_shared(power->output(0), subtensor)); diff --git a/src/common/snippets/src/runtime_configurator.cpp b/src/common/snippets/src/runtime_configurator.cpp index c3db1864bf1135..6f8945649c2b94 100644 --- a/src/common/snippets/src/runtime_configurator.cpp +++ b/src/common/snippets/src/runtime_configurator.cpp @@ -35,7 +35,7 @@ RuntimeConfigurator::RuntimeConfigurator(std::shared_ptr c) : OPENVINO_ASSERT(m_config, "Runtime config is nullptr!"); } -const std::shared_ptr& RuntimeConfigurator::get_updated_config(const std::shared_ptr& linear_ir) { +const std::shared_ptr& 
RuntimeConfigurator::get_updated_config(const lowered::LinearIRPtr& linear_ir) { // First initialization if (m_io_num == 0) initialization(linear_ir); @@ -44,7 +44,7 @@ const std::shared_ptr& RuntimeConfigurator::get_updated_config(co return m_config; } -void RuntimeConfigurator::initialization(const std::shared_ptr& linear_ir) { +void RuntimeConfigurator::initialization(const lowered::LinearIRPtr& linear_ir) { init_data_info(linear_ir); init_tensor_rank(linear_ir); init_buffer_info(linear_ir); @@ -55,7 +55,7 @@ void RuntimeConfigurator::initialization(const std::shared_ptrtile_rank = linear_ir->get_config().m_loop_depth; } -void RuntimeConfigurator::update(const std::shared_ptr& linear_ir) { +void RuntimeConfigurator::update(const lowered::LinearIRPtr& linear_ir) { if (linear_ir->is_dynamic()) { update_loop_info(linear_ir); update_buffer_scratchpad_size(linear_ir); @@ -67,11 +67,11 @@ void RuntimeConfigurator::update(const std::shared_ptr& linea update_latest_shapes(); } -void RuntimeConfigurator::init_tensor_rank(const std::shared_ptr& linear_ir) const { +void RuntimeConfigurator::init_tensor_rank(const lowered::LinearIRPtr& linear_ir) const { m_config->tensor_rank = linear_ir->get_master_shape().size(); } -void RuntimeConfigurator::init_data_info(const std::shared_ptr& linear_ir) { +void RuntimeConfigurator::init_data_info(const lowered::LinearIRPtr& linear_ir) { const auto& parameters = linear_ir->get_parameters(); const auto& results = linear_ir->get_results(); m_in_num = parameters.size(); @@ -113,7 +113,7 @@ void RuntimeConfigurator::init_data_info(const std::shared_ptr& linear_ir) { +void RuntimeConfigurator::init_buffer_info(const lowered::LinearIRPtr& linear_ir) { std::map> dynamic_buffer_clusters, static_buffer_clusters; // All needed checks are in Validate pass @@ -143,7 +143,7 @@ void RuntimeConfigurator::init_buffer_info(const std::shared_ptr& linear_ir) const { +void RuntimeConfigurator::update_loop_info(const lowered::LinearIRPtr& linear_ir) const { // Initialized UnifiedLoopInfo struct CurrentUnifiedLoopInfo { size_t current_work_amount = 0; @@ -180,21 +180,27 @@ void RuntimeConfigurator::update_loop_info(const std::shared_ptrset_work_amount( - lowered::pass::InsertSpecificIterations::get_decomposed_loop_work_amount(current_unified_loop_info, decomposed_loop_type, current_work_amount)); + const auto work_amount = + lowered::pass::InsertSpecificIterations::get_decomposed_loop_work_amount(current_unified_loop_info, decomposed_loop_type, current_work_amount); + expanded_loop_info->set_work_amount(work_amount); // Update remaining Loop work amount - current_work_amount -= expanded_loop_info->get_work_amount(); - - expanded_loop_info->update_ptr_increments(ptr_increments); - if (current_work_amount > 0) { - expanded_loop_info->update_finalization_offsets(std::vector(finalization_offsets.size(), 0)); + current_work_amount -= work_amount; + + // Update only `finalization offsets`. `Ptr increments` are always zeroed in this case + auto updated_finalization_offsets = current_work_amount > 0 ? 
std::vector(finalization_offsets.size(), 0) : finalization_offsets; + if (expanded_loop_info->is_evaluate_once()) { + expanded_loop_info->set_increment(work_amount); + // work_amount is equal to increment in cases with `evaluate_once` + for (size_t i = 0; i < updated_finalization_offsets.size(); ++i) + updated_finalization_offsets[i] += ptr_increments[i] * work_amount; } else { - expanded_loop_info->update_finalization_offsets(finalization_offsets); + expanded_loop_info->update_ptr_increments(ptr_increments); } + expanded_loop_info->update_finalization_offsets(updated_finalization_offsets); } } -void RuntimeConfigurator::update_buffer_scratchpad_size(const std::shared_ptr& linear_ir) const { +void RuntimeConfigurator::update_buffer_scratchpad_size(const lowered::LinearIRPtr& linear_ir) const { const auto& loop_manager = linear_ir->get_loop_manager(); m_config->buffer_scratchpad_size = linear_ir->get_static_buffer_scratchpad_size(); diff --git a/src/common/snippets/tests/include/lir_test_utils.hpp b/src/common/snippets/tests/include/lir_test_utils.hpp index 2f687f6e1412d1..b653c86af8ab0b 100644 --- a/src/common/snippets/tests/include/lir_test_utils.hpp +++ b/src/common/snippets/tests/include/lir_test_utils.hpp @@ -44,38 +44,6 @@ void init_expr_descriptors(const ov::snippets::lowered::ExpressionPtr& expr, const std::vector& subtensors = {}, const std::vector& layouts = {}); -/** - * @brief Creates unified loop info based on provided entry and exit points, and adds it to the linear_ir's loops map - * @attention This helper wraps LoopManager::mark_loop method, but only for LoopInfo creation (whereas original - * mark_loop method also marks expressions with the corresponding loop info). - * @param linear_ir linear_ir in which loop info should be added - * @param entries entry points of loop - * @param exits exit points of loop - */ -void create_and_add_unified_loop_info(const std::shared_ptr& linear_ir, - size_t work_amount, - size_t increment, - const std::vector& entries, - const std::vector& exits, - bool add_default_handlers = true); -/** - * @brief Creates unified loop info based on provided entry and exit points, and adds it to the linear_ir's loops map. - * Meanwhile set loop id to expr range [loop_begin_pos, loop_end_pos). 
- * @attention This helper wraps LoopManager::mark_loop method, which also marks expressions with the corresponding loop info - * @param linear_ir linear_ir in which loop info should be added - * @param loop_begin_pos begin expr postion in this loop - * @param loop_end_pos end expr postion in this loop - * @param entries entry points of loop - * @param exits exit points of loop - */ -void create_and_add_unified_loop_info(const std::shared_ptr& linear_ir, - ov::snippets::lowered::LinearIR::constExprIt loop_begin_pos, - ov::snippets::lowered::LinearIR::constExprIt loop_end_pos, - size_t work_amount, - size_t increment, - const std::vector& entries, - const std::vector& exits, - bool add_default_handlers = true); } // namespace snippets } // namespace test } // namespace ov diff --git a/src/common/snippets/tests/src/lir_test_utils.cpp b/src/common/snippets/tests/src/lir_test_utils.cpp index 274480fcd84c85..c4f5047011cd08 100644 --- a/src/common/snippets/tests/src/lir_test_utils.cpp +++ b/src/common/snippets/tests/src/lir_test_utils.cpp @@ -39,9 +39,7 @@ void LoweredPassTestsF::TearDown() { } ov::snippets::VectorDims get_default_subtensor() { - static const VectorDims default_subtensor{PortDescriptor::ServiceDimensions::FULL_DIM, - PortDescriptor::ServiceDimensions::FULL_DIM}; - return default_subtensor; + return VectorDims(2, ov::snippets::utils::get_full_dim_value()); } void init_expr_descriptors(const ov::snippets::lowered::ExpressionPtr& expr, @@ -85,28 +83,6 @@ void init_expr_descriptors(const ov::snippets::lowered::ExpressionPtr& expr, } } -void create_and_add_unified_loop_info(const LinearIRPtr& linear_ir, - size_t work_amount, - size_t increment, - const std::vector& entries, - const std::vector& exits, - bool set_default_handlers) { - // Equal begin and end iterators are set to avoid expressions marking with new loop id - create_and_add_unified_loop_info(linear_ir, linear_ir->begin(), linear_ir->begin(), work_amount, increment, entries, exits, set_default_handlers); -} - -void create_and_add_unified_loop_info(const LinearIRPtr& linear_ir, - ov::snippets::lowered::LinearIR::constExprIt loop_begin_pos, - ov::snippets::lowered::LinearIR::constExprIt loop_end_pos, - size_t work_amount, - size_t increment, - const std::vector& entries, - const std::vector& exits, - bool set_default_handlers) { - const auto& loop_manager = linear_ir->get_loop_manager(); - loop_manager->mark_loop(loop_begin_pos, loop_end_pos, work_amount, increment, entries, exits, set_default_handlers); -} - } // namespace snippets } // namespace test } // namespace ov diff --git a/src/common/snippets/tests/src/lowered/pass/buffer_allocation.cpp b/src/common/snippets/tests/src/lowered/pass/buffer_allocation.cpp index e56a31a8e92a4c..4dc3f2dae7e867 100644 --- a/src/common/snippets/tests/src/lowered/pass/buffer_allocation.cpp +++ b/src/common/snippets/tests/src/lowered/pass/buffer_allocation.cpp @@ -95,8 +95,7 @@ void BufferAllocationTest::Validate() { std::shared_ptr EltwiseBufferAllocationTest::GetModel() const { const auto subtensor_eltwise = std::vector{1, m_vector_size}; - const auto subtensor_buffer = std::vector{ov::snippets::lowered::PortDescriptor::ServiceDimensions::FULL_DIM, - ov::snippets::lowered::PortDescriptor::ServiceDimensions::FULL_DIM}; + const auto subtensor_buffer = std::vector(2, ov::snippets::utils::get_full_dim_value()); const auto parameter0 = std::make_shared(ov::element::f32, ov::PartialShape({1, 3, 100, 100})); const auto parameter1 = std::make_shared(ov::element::f32, ov::PartialShape({1, 3, 100, 
100})); diff --git a/src/common/snippets/tests/src/lowered/pass/extracted_loop_invariants.cpp b/src/common/snippets/tests/src/lowered/pass/extracted_loop_invariants.cpp index c3f4f5ea7f6877..ee762f4bfca746 100644 --- a/src/common/snippets/tests/src/lowered/pass/extracted_loop_invariants.cpp +++ b/src/common/snippets/tests/src/lowered/pass/extracted_loop_invariants.cpp @@ -66,11 +66,11 @@ TEST_F(ExtractLoopInvariantsTest, ExtractedLoopInvariantsWithParams) { auto result = linear_ir->push_node(sub.second); auto begin = multiply.first; auto end = result.first; - create_and_add_unified_loop_info(linear_ir, begin, end, 512, vector_size, - {LoopPort((*multiply.first)->get_input_port(0)), - LoopPort((*multiply.first)->get_input_port(1)), - LoopPort((*sub.first)->get_input_port(0))}, - {LoopPort((*sub.first)->get_output_port(0))}); + linear_ir->get_loop_manager()->mark_loop(begin, end, 512, vector_size, + std::vector{LoopPort((*multiply.first)->get_input_port(0)), + LoopPort((*multiply.first)->get_input_port(1)), + LoopPort((*sub.first)->get_input_port(0))}, + std::vector{LoopPort((*sub.first)->get_output_port(0))}); linear_ir->set_loop_depth(1); } { @@ -85,10 +85,10 @@ TEST_F(ExtractLoopInvariantsTest, ExtractedLoopInvariantsWithParams) { auto result = linear_ir_ref->push_node(sub.second); auto begin = sub.first; auto end = result.first; - create_and_add_unified_loop_info(linear_ir_ref, begin, end, 512, vector_size, - {LoopPort((*sub.first)->get_input_port(0)), - LoopPort((*sub.first)->get_input_port(1))}, - {LoopPort((*sub.first)->get_output_port(0))}); + linear_ir_ref->get_loop_manager()->mark_loop(begin, end, 512, vector_size, + std::vector{LoopPort((*sub.first)->get_input_port(0)), + LoopPort((*sub.first)->get_input_port(1))}, + std::vector{LoopPort((*sub.first)->get_output_port(0))}); } } @@ -124,10 +124,10 @@ TEST_F(ExtractLoopInvariantsTest, ExtractedLoopInvariantsWithScalar) { auto result = linear_ir->push_node(sub.second); auto begin = scalar.first; auto end = result.first; - create_and_add_unified_loop_info(linear_ir, begin, end, 512, vector_size, - {LoopPort((*multiply.first)->get_input_port(0)), - LoopPort((*sub.first)->get_input_port(0))}, - {LoopPort((*sub.first)->get_output_port(0))}); + linear_ir->get_loop_manager()->mark_loop(begin, end, 512, vector_size, + std::vector{LoopPort((*multiply.first)->get_input_port(0)), + LoopPort((*sub.first)->get_input_port(0))}, + std::vector{LoopPort((*sub.first)->get_output_port(0))}); linear_ir->set_loop_depth(1); } { @@ -142,10 +142,10 @@ TEST_F(ExtractLoopInvariantsTest, ExtractedLoopInvariantsWithScalar) { auto result = linear_ir_ref->push_node(sub.second); auto begin = sub.first; auto end = result.first; - create_and_add_unified_loop_info(linear_ir_ref, begin, end, 512, vector_size, - {LoopPort((*sub.first)->get_input_port(0)), - LoopPort((*sub.first)->get_input_port(1))}, - {LoopPort((*sub.first)->get_output_port(0))}); + linear_ir_ref->get_loop_manager()->mark_loop(begin, end, 512, vector_size, + std::vector{LoopPort((*sub.first)->get_input_port(0)), + LoopPort((*sub.first)->get_input_port(1))}, + std::vector{LoopPort((*sub.first)->get_output_port(0))}); } } @@ -187,20 +187,20 @@ TEST_F(ExtractLoopInvariantsTest, ExtractedLoopInvariantsOutputLoopUpdateNotNeed auto result1 = linear_ir->push_node(sub.second); auto begin = multiply.first; auto end = result1.first; - create_and_add_unified_loop_info(linear_ir, begin, end, 16, vector_size, - {LoopPort((*multiply.first)->get_input_port(0), true, 0), - 
LoopPort((*multiply.first)->get_input_port(1), true, 0), - LoopPort((*add.first)->get_input_port(0), true, 0), - LoopPort((*sub.first)->get_input_port(0), true, 0)}, - {LoopPort((*add.first)->get_output_port(0), true, 0), - LoopPort((*sub.first)->get_output_port(0), true, 0)}); - create_and_add_unified_loop_info(linear_ir, begin, end, 3, 1, - {LoopPort((*multiply.first)->get_input_port(0), true, 1), - LoopPort((*multiply.first)->get_input_port(1), true, 1), - LoopPort((*add.first)->get_input_port(0), true, 1), - LoopPort((*sub.first)->get_input_port(0), true, 1)}, - {LoopPort((*add.first)->get_output_port(0), true, 1), - LoopPort((*sub.first)->get_output_port(0), true, 1)}); + linear_ir->get_loop_manager()->mark_loop(begin, end, 16, vector_size, + std::vector{LoopPort((*multiply.first)->get_input_port(0), true, 0), + LoopPort((*multiply.first)->get_input_port(1), true, 0), + LoopPort((*add.first)->get_input_port(0), true, 0), + LoopPort((*sub.first)->get_input_port(0), true, 0)}, + std::vector{LoopPort((*add.first)->get_output_port(0), true, 0), + LoopPort((*sub.first)->get_output_port(0), true, 0)}); + linear_ir->get_loop_manager()->mark_loop(begin, end, 3, 1, + std::vector{LoopPort((*multiply.first)->get_input_port(0), true, 1), + LoopPort((*multiply.first)->get_input_port(1), true, 1), + LoopPort((*add.first)->get_input_port(0), true, 1), + LoopPort((*sub.first)->get_input_port(0), true, 1)}, + std::vector{LoopPort((*add.first)->get_output_port(0), true, 1), + LoopPort((*sub.first)->get_output_port(0), true, 1)}); linear_ir->set_loop_depth(2); } { @@ -218,21 +218,21 @@ TEST_F(ExtractLoopInvariantsTest, ExtractedLoopInvariantsOutputLoopUpdateNotNeed auto result1 = linear_ir_ref->push_node(sub.second); auto begin_inner = add.first; auto end_inner = result1.first; - create_and_add_unified_loop_info(linear_ir_ref, begin_inner, end_inner, 16, vector_size, - {LoopPort((*add.first)->get_input_port(0), true, 0), - LoopPort((*add.first)->get_input_port(1), true, 0), - LoopPort((*sub.first)->get_input_port(0), true, 0)}, - {LoopPort((*add.first)->get_output_port(0), true, 0), - LoopPort((*sub.first)->get_output_port(0), true, 0)}); + linear_ir_ref->get_loop_manager()->mark_loop(begin_inner, end_inner, 16, vector_size, + std::vector{LoopPort((*add.first)->get_input_port(0), true, 0), + LoopPort((*add.first)->get_input_port(1), true, 0), + LoopPort((*sub.first)->get_input_port(0), true, 0)}, + std::vector{LoopPort((*add.first)->get_output_port(0), true, 0), + LoopPort((*sub.first)->get_output_port(0), true, 0)}); auto begin_outer = multiply.first; auto end_outer = result1.first; - create_and_add_unified_loop_info(linear_ir_ref, begin_outer, end_outer, 3, 1, - {LoopPort((*multiply.first)->get_input_port(0), true, 1), - LoopPort((*multiply.first)->get_input_port(1), true, 1), - LoopPort((*add.first)->get_input_port(0), true, 1), - LoopPort((*sub.first)->get_input_port(0), true, 1)}, - {LoopPort((*add.first)->get_output_port(0), true, 1), - LoopPort((*sub.first)->get_output_port(0), true, 1)}); + linear_ir_ref->get_loop_manager()->mark_loop(begin_outer, end_outer, 3, 1, + std::vector{LoopPort((*multiply.first)->get_input_port(0), true, 1), + LoopPort((*multiply.first)->get_input_port(1), true, 1), + LoopPort((*add.first)->get_input_port(0), true, 1), + LoopPort((*sub.first)->get_input_port(0), true, 1)}, + std::vector{LoopPort((*add.first)->get_output_port(0), true, 1), + LoopPort((*sub.first)->get_output_port(0), true, 1)}); } } @@ -263,14 +263,14 @@ TEST_F(ExtractLoopInvariantsTest, 
ExtractedLoopInvariantsFromInnermostToLoopOuts auto add = linear_ir->push_node(param_0.second, broadcastmove.second); init_expr_descriptors(*add.first, {subtensor, subtensor, subtensor}, {layout, layout, layout}); auto result = linear_ir->push_node(add.second); - create_and_add_unified_loop_info(linear_ir, broadcastmove.first, result.first, 3, 1, - {LoopPort((*broadcastmove.first)->get_input_port(0), true, 1), - LoopPort((*add.first)->get_input_port(0), true, 1)}, - {LoopPort((*add.first)->get_output_port(0), true, 1)}); - create_and_add_unified_loop_info(linear_ir, broadcastmove.first, result.first, 512, vector_size, - {LoopPort((*broadcastmove.first)->get_input_port(0), true, 0), - LoopPort((*add.first)->get_input_port(0), true, 0)}, - {LoopPort((*add.first)->get_output_port(0), true, 0)}); + linear_ir->get_loop_manager()->mark_loop(broadcastmove.first, result.first, 3, 1, + std::vector{LoopPort((*broadcastmove.first)->get_input_port(0), true, 1), + LoopPort((*add.first)->get_input_port(0), true, 1)}, + std::vector{LoopPort((*add.first)->get_output_port(0), true, 1)}); + linear_ir->get_loop_manager()->mark_loop(broadcastmove.first, result.first, 512, vector_size, + std::vector{LoopPort((*broadcastmove.first)->get_input_port(0), true, 0), + LoopPort((*add.first)->get_input_port(0), true, 0)}, + std::vector{LoopPort((*add.first)->get_output_port(0), true, 0)}); linear_ir->set_loop_depth(2); } { @@ -281,14 +281,14 @@ TEST_F(ExtractLoopInvariantsTest, ExtractedLoopInvariantsFromInnermostToLoopOuts auto add = linear_ir_ref->push_node(param_0.second, broadcastmove.second); init_expr_descriptors(*add.first, {subtensor, subtensor, subtensor}, {layout, layout, layout}); auto result = linear_ir_ref->push_node(add.second); - create_and_add_unified_loop_info(linear_ir_ref, add.first, result.first, 3, 1, - {LoopPort((*add.first)->get_input_port(0), true, 1), - LoopPort((*add.first)->get_input_port(1), true, 1)}, - {LoopPort((*add.first)->get_output_port(0), true, 1)}); - create_and_add_unified_loop_info(linear_ir_ref, add.first, result.first, 512, vector_size, - {LoopPort((*add.first)->get_input_port(0), true, 0), - LoopPort((*add.first)->get_input_port(1), true, 0)}, - {LoopPort((*add.first)->get_output_port(0), true, 0)}); + linear_ir_ref->get_loop_manager()->mark_loop(add.first, result.first, 3, 1, + std::vector{LoopPort((*add.first)->get_input_port(0), true, 1), + LoopPort((*add.first)->get_input_port(1), true, 1)}, + std::vector{LoopPort((*add.first)->get_output_port(0), true, 1)}); + linear_ir_ref->get_loop_manager()->mark_loop(add.first, result.first, 512, vector_size, + std::vector{LoopPort((*add.first)->get_input_port(0), true, 0), + LoopPort((*add.first)->get_input_port(1), true, 0)}, + std::vector{LoopPort((*add.first)->get_output_port(0), true, 0)}); } } @@ -356,31 +356,31 @@ TEST_F(ExtractLoopInvariantsRemoveLoopsTest, ExtractedLoopInvariantsAllExprsInLo init_expr_descriptors(*multiply.first, {subtensor, subtensor, subtensor}, {layout, layout, layout}); auto result = linear_ir->push_node(multiply.second); // 3 inner loop - create_and_add_unified_loop_info(linear_ir, max.first, hmax.first, 1, vector_size, - {LoopPort((*max.first)->get_input_port(0), true, 0), - LoopPort((*max.first)->get_input_port(1), true, 0)}, - {LoopPort((*max.first)->get_output_port(0), true, 0)}); - create_and_add_unified_loop_info(linear_ir, sub.first, hsum.first, 1, vector_size, - {LoopPort((*sub.first)->get_input_port(0), true, 0), - LoopPort((*sub.first)->get_input_port(1), true, 0), - 
LoopPort((*add.first)->get_input_port(1), true, 0)}, - {LoopPort((*exp.first)->get_output_port(0), true, 0), - LoopPort((*add.first)->get_output_port(0), true, 0)}); - create_and_add_unified_loop_info(linear_ir, multiply.first, result.first, 1, vector_size, - {LoopPort((*multiply.first)->get_input_port(0), true, 0), - LoopPort((*multiply.first)->get_input_port(1), true, 0)}, - {LoopPort((*multiply.first)->get_output_port(0), true, 0)}); + linear_ir->get_loop_manager()->mark_loop(max.first, hmax.first, 1, vector_size, + std::vector{LoopPort((*max.first)->get_input_port(0), true, 0), + LoopPort((*max.first)->get_input_port(1), true, 0)}, + std::vector{LoopPort((*max.first)->get_output_port(0), true, 0)}); + linear_ir->get_loop_manager()->mark_loop(sub.first, hsum.first, 1, vector_size, + std::vector{LoopPort((*sub.first)->get_input_port(0), true, 0), + LoopPort((*sub.first)->get_input_port(1), true, 0), + LoopPort((*add.first)->get_input_port(1), true, 0)}, + std::vector{LoopPort((*exp.first)->get_output_port(0), true, 0), + LoopPort((*add.first)->get_output_port(0), true, 0)}); + linear_ir->get_loop_manager()->mark_loop(multiply.first, result.first, 1, vector_size, + std::vector{LoopPort((*multiply.first)->get_input_port(0), true, 0), + LoopPort((*multiply.first)->get_input_port(1), true, 0)}, + std::vector{LoopPort((*multiply.first)->get_output_port(0), true, 0)}); // outer loop info const auto loop_begin = std::make_shared(); auto loop_begin_expr = linear_ir->insert_node(loop_begin, std::vector{}, {}, false, max.first); const auto loop_end = std::make_shared(); std::vector loop_end_inputs{(*loop_begin_expr)->get_output_port_connector(0)}; auto loop_end_expr = linear_ir->insert_node(loop_end, loop_end_inputs, {}, false, result.first); - create_and_add_unified_loop_info(linear_ir, loop_begin_expr, result.first, 10, 1, - {LoopPort((*max.first)->get_input_port(0), true, 1), - LoopPort((*max.first)->get_input_port(1), true, 0), - LoopPort((*add.first)->get_input_port(1), true, 0)}, - {LoopPort((*multiply.first)->get_output_port(0), true, 1)}); + linear_ir->get_loop_manager()->mark_loop(loop_begin_expr, result.first, 10, 1, + std::vector{LoopPort((*max.first)->get_input_port(0), true, 1), + LoopPort((*max.first)->get_input_port(1), true, 0), + LoopPort((*add.first)->get_input_port(1), true, 0)}, + std::vector{LoopPort((*multiply.first)->get_output_port(0), true, 1)}); loop_end->set_id((*loop_end_expr)->get_loop_ids().back()); linear_ir->set_loop_depth(2); } @@ -409,11 +409,11 @@ TEST_F(ExtractLoopInvariantsRemoveLoopsTest, ExtractedLoopInvariantsAllExprsInLo const auto loop_end = std::make_shared(); std::vector loop_end_inputs{(*loop_begin_expr)->get_output_port_connector(0)}; auto loop_end_expr = linear_ir_ref->insert_node(loop_end, loop_end_inputs, {}, false, result.first); - create_and_add_unified_loop_info(linear_ir_ref, loop_begin_expr, result.first, 10, 1, - {LoopPort((*max.first)->get_input_port(0), true, 1), - LoopPort((*max.first)->get_input_port(1), true, 0), - LoopPort((*add.first)->get_input_port(1), true, 0)}, - {LoopPort((*multiply.first)->get_output_port(0), true, 1)}); + linear_ir_ref->get_loop_manager()->mark_loop(loop_begin_expr, result.first, 10, 1, + std::vector{LoopPort((*max.first)->get_input_port(0), true, 1), + LoopPort((*max.first)->get_input_port(1), true, 0), + LoopPort((*add.first)->get_input_port(1), true, 0)}, + std::vector{LoopPort((*multiply.first)->get_output_port(0), true, 1)}); loop_end->set_id((*loop_end_expr)->get_loop_ids().back()); } } diff --git 
a/src/common/transformations/src/transformations/transpose_sinking/ts_concat.cpp b/src/common/transformations/src/transformations/transpose_sinking/ts_concat.cpp index 8dbcf7ba285f5b..502d89cb205aeb 100644 --- a/src/common/transformations/src/transformations/transpose_sinking/ts_concat.cpp +++ b/src/common/transformations/src/transformations/transpose_sinking/ts_concat.cpp @@ -36,18 +36,21 @@ TSConcatForward::TSConcatForward() { return false; } - if (concat_node->get_output_partial_shape(0).is_dynamic()) { - return false; + auto concat_axis = concat_node->get_axis(); + if (concat_axis < 0) { + if (concat_node->get_output_partial_shape(0).rank().is_dynamic()) { + return false; + } + const auto rank = concat_node->get_output_partial_shape(0).rank().get_length(); + concat_axis = ov::util::normalize(concat_axis, rank); } + // todo: support dyn rank case bool updated = sink_forward::UpdateInputTransposes(main_node, transpose_info); if (!updated) { return false; } - const auto rank = concat_node->get_output_partial_shape(0).rank().get_length(); - const auto concat_axis = ov::util::normalize(concat_node->get_axis(), rank); - const auto transpose_axis_order = transpose_info.transpose_const->get_axis_vector_val(); const int64_t transposed_concat_axis = transpose_axis_order[concat_axis]; concat_node->set_axis(transposed_concat_axis); @@ -83,12 +86,19 @@ TSConcatBackward::TSConcatBackward() { } auto concat_node = as_type_ptr(main_node); - if (concat_node->get_output_partial_shape(0).is_dynamic()) { + if (!concat_node) { return false; } - const auto rank = concat_node->get_output_partial_shape(0).rank().get_length(); - auto concat_axis = ov::util::normalize(concat_node->get_axis(), rank); + auto concat_axis = concat_node->get_axis(); + if (concat_axis < 0) { + if (concat_node->get_output_partial_shape(0).rank().is_dynamic()) { + return false; + } + + const auto rank = concat_node->get_output_partial_shape(0).rank().get_length(); + concat_axis = ov::util::normalize(concat_axis, rank); + } const auto transpose_axis_order = transpose_const->get_axis_vector_val(); const auto reversed_transpose_axis_order = ReverseTransposeOrder(transpose_axis_order); diff --git a/src/core/src/runtime/itensor.cpp b/src/core/src/runtime/itensor.cpp index 203297c671d401..b1b517426b9f67 100644 --- a/src/core/src/runtime/itensor.cpp +++ b/src/core/src/runtime/itensor.cpp @@ -16,6 +16,21 @@ namespace ov { +namespace { +Strides default_byte_strides(const Shape& shape, const element::Type& et) { + auto strides = Strides(shape.size()); + if (!strides.empty()) { + strides.back() = et.size(); + std::transform(shape.crbegin(), + shape.crend() - 1, + strides.rbegin(), + strides.rbegin() + 1, + std::multiplies()); + } + return strides; +} +} // namespace + ITensor::~ITensor() = default; size_t ITensor::get_size() const { @@ -31,31 +46,13 @@ bool ITensor::is_continuous() const { // OpenVINO doesn't support strides for lp types return true; } - const auto& shape = get_shape(); - const auto& type = get_element_type(); - std::vector strides(shape.size()); - if (!shape.empty()) { - strides[shape.size() - 1] = 1; - } - auto size = shape.size(); - for (size_t i = 1; i < size; i++) { - strides[size - i - 1] = strides[size - i] * shape[size - i]; - } - - ov::Strides byte_strides(strides.size()); - for (size_t i = 0; i < strides.size(); ++i) - byte_strides[i] = strides[i] * type.size(); - return byte_strides == get_strides(); + return default_byte_strides(get_shape(), get_element_type()) == get_strides(); } void ITensor::copy_to(const 
std::shared_ptr& dst) const { const auto& is_scalar = [](const ov::Shape& shape) { return shape.empty() || (shape.size() == 1 && shape[0] == 1); }; - const auto shapes_equal = [is_scalar](const ov::Shape& src, const ov::Shape& dst) { - // WA for scalar tensors to copy {1} to {} or otherwise - return src == dst || (is_scalar(src) && is_scalar(dst)); - }; OPENVINO_ASSERT(dst, "Destination tensor was not initialized."); OPENVINO_ASSERT(!dynamic_cast(this), "Default copy to doesn't support copy from remote tensor."); @@ -68,16 +65,11 @@ void ITensor::copy_to(const std::shared_ptr& dst) const { dst->get_element_type(), ")"); - if (dst->get_shape() == ov::Shape{0}) - dst->set_shape(get_shape()); - - OPENVINO_ASSERT(shapes_equal(get_shape(), dst->get_shape()), - "Tensor shapes are not equal. (src: ", - get_shape(), - " != dst: ", - dst->get_shape(), - ")"); const auto& shape = get_shape(); + if (shape != dst->get_shape()) { + dst->set_shape(shape); + } + auto* src_data = static_cast(data()); auto* dst_data = static_cast(dst->data()); ov::Strides src_strides{get_byte_size()}; @@ -86,25 +78,15 @@ void ITensor::copy_to(const std::shared_ptr& dst) const { ov::Shape max_pos{1}; if (get_element_type().bitwidth() < 8 || (get_strides() == dst->get_strides() && is_continuous()) || - (is_scalar(get_shape()) && is_scalar(dst->get_shape()))) { + (is_scalar(shape) && is_scalar(dst->get_shape()))) { // OpenVINO doesn't support strides for LP types // or both tensors have default strides // Strides and positions already initialized } else { // Tensors have default strides const auto& type = get_element_type(); - std::vector strides(shape.size()); - if (!shape.empty()) { - strides[shape.size() - 1] = 1; - } - auto size = shape.size(); - for (size_t i = 1; i < size; i++) { - strides[size - i - 1] = strides[size - i] * shape[size - i]; - } - - ov::Strides default_strides(strides.size()); - for (size_t i = 0; i < strides.size(); ++i) - default_strides[i] = strides[i] * type.size(); + const auto shape_rank = shape.size(); + const auto default_strides = default_byte_strides(shape, type); src_strides = get_strides(); dst_strides = dst->get_strides(); @@ -113,8 +95,7 @@ void ITensor::copy_to(const std::shared_ptr& dst) const { // Calculate src and dst shapes bool found_step = false; - for (size_t i = 0; i < shape.size(); i++) { - size_t inverted_idx = shape.size() - i - 1; + for (size_t inverted_idx = shape_rank - 1; inverted_idx < shape_rank; --inverted_idx) { if (!found_step) { if (default_strides[inverted_idx] == src_strides[inverted_idx] && src_strides[inverted_idx] == dst_strides[inverted_idx]) { @@ -134,7 +115,7 @@ void ITensor::copy_to(const std::shared_ptr& dst) const { if (strides_size < default_strides.size()) { strides = default_strides[strides_size]; - dim = get_shape()[strides_size]; + dim = shape[strides_size]; } src_str[strides_size] = strides; dst_str[strides_size] = strides; @@ -151,13 +132,8 @@ void ITensor::copy_to(const std::shared_ptr& dst) const { dst_strides = std::move(dst_str); } - const auto update_index = [](const ov::Shape& pos, const ov::Shape& shape, const ov::Strides& strides) { - size_t offset = 0; - - for (size_t i = 0; i < pos.size(); i++) { - offset += pos[i] * strides[i]; - } - return offset; + const auto update_index = [](const ov::Shape& pos, const ov::Strides& strides) { + return std::inner_product(pos.begin(), pos.end(), strides.begin(), static_cast(0)); }; using copy_function_def = std::function; @@ -190,8 +166,8 @@ void ITensor::copy_to(const std::shared_ptr& dst) const { else 
finish = true; } - src_idx = update_index(cur_pos, max_pos, src_strides); - dst_idx = update_index(cur_pos, max_pos, dst_strides); + src_idx = update_index(cur_pos, src_strides); + dst_idx = update_index(cur_pos, dst_strides); } } diff --git a/src/core/tests/ov_tensor_test.cpp b/src/core/tests/ov_tensor_test.cpp index a6832f2bb5aff9..8e610196b6e4a1 100644 --- a/src/core/tests/ov_tensor_test.cpp +++ b/src/core/tests/ov_tensor_test.cpp @@ -936,6 +936,18 @@ INSTANTIATE_TEST_SUITE_P(copy_tests, TestParams { ov::Shape{}, {}, {1}, {} + }, + TestParams{ + ov::Shape{3,2,2}, {}, + ov::Shape{5}, {} + }, + TestParams{ + ov::Shape{3,2,2}, ov::Strides{64,16,8}, + ov::Shape{5,2}, {} + }, + TestParams{ + ov::Shape{3,2,2}, ov::Strides{64,16,8}, + ov::Shape{3,4,3}, ov::Strides{128,24,8} } ))); diff --git a/src/frontends/pytorch/src/translate_session.cpp b/src/frontends/pytorch/src/translate_session.cpp index 9295b388048baa..a39c6b067528fe 100644 --- a/src/frontends/pytorch/src/translate_session.cpp +++ b/src/frontends/pytorch/src/translate_session.cpp @@ -225,7 +225,8 @@ std::shared_ptr TranslateSession::convert_pytorch_model( } }; - FRONT_END_GENERAL_CHECK(pytorch_model->get_subgraph_size() == 1, "Model should have exactly 1 subgraph."); + FRONT_END_GENERAL_CHECK(pytorch_model->decoder_type_name() != "ts" || pytorch_model->get_subgraph_size() == 1, + "Model should have exactly 1 subgraph for TorchScript."); pytorch_model->visit_subgraph(node_visitor); ResultVector results; @@ -368,10 +369,7 @@ void TranslateSession::encode_tensor_name(Output output, namespace { bool is_number(const std::string& s) { - std::string::const_iterator it = s.begin(); - while (it != s.end() && std::isdigit(*it)) - ++it; - return !s.empty() && it == s.end(); + return !s.empty() && std::all_of(s.begin(), s.end(), ::isdigit); } } // namespace diff --git a/src/inference/include/openvino/runtime/intel_npu/level_zero/level_zero.hpp b/src/inference/include/openvino/runtime/intel_npu/level_zero/level_zero.hpp index 7bb1d4bf1d3905..3709891a5e3000 100644 --- a/src/inference/include/openvino/runtime/intel_npu/level_zero/level_zero.hpp +++ b/src/inference/include/openvino/runtime/intel_npu/level_zero/level_zero.hpp @@ -34,7 +34,7 @@ namespace level_zero { * @brief This class represents an abstraction for NPU plugin remote tensor * which can be shared with user-supplied LevelZero buffer. * The plugin object derived from this class can be obtained with ZeroContext::create_tensor() call. - * @note User can obtain LevelZero buffer handle from this class. + * @note User can obtain Level Zero buffer handle from this class. 
* @ingroup ov_runtime_level_zero_npu_cpp_api */ class ZeroBufferTensor : public RemoteTensor { diff --git a/src/inference/include/openvino/runtime/intel_npu/remote_properties.hpp b/src/inference/include/openvino/runtime/intel_npu/remote_properties.hpp index f058b5ece45971..0d29d44b6ff170 100644 --- a/src/inference/include/openvino/runtime/intel_npu/remote_properties.hpp +++ b/src/inference/include/openvino/runtime/intel_npu/remote_properties.hpp @@ -19,24 +19,16 @@ using npu_handle_param = void*; /** * @brief Enum to define the type of the shared memory buffer + * @ingroup ov_runtime_level_zero_npu_cpp_api */ enum class MemType { - L0_INTERNAL_BUF = 0, //!< Internal L0 buffer type allocated by plugin + L0_INTERNAL_BUF = 0, //!< Internal Level Zero buffer type allocated by plugin SHARED_BUF = 1, //!< Shared buffer }; -/** - * @brief Enum to define the type of the tensor - */ -enum class TensorType { - INPUT = 0, //!< Tensor is only used as input - OUTPUT = 1, //!< Tensor is only used as output - BINDED = 2 //!< Tensor could be used as input and output -}; - /** @cond INTERNAL */ -inline std::ostream& operator<<(std::ostream& os, const MemType& share_mem_type) { - switch (share_mem_type) { +inline std::ostream& operator<<(std::ostream& os, const MemType& mem_type) { + switch (mem_type) { case MemType::L0_INTERNAL_BUF: return os << "L0_INTERNAL_BUF"; case MemType::SHARED_BUF: @@ -46,13 +38,13 @@ inline std::ostream& operator<<(std::ostream& os, const MemType& share_mem_type) } } -inline std::istream& operator>>(std::istream& is, MemType& share_mem_type) { +inline std::istream& operator>>(std::istream& is, MemType& mem_type) { std::string str; is >> str; if (str == "L0_INTERNAL_BUF") { - share_mem_type = MemType::L0_INTERNAL_BUF; + mem_type = MemType::L0_INTERNAL_BUF; } else if (str == "SHARED_BUF") { - share_mem_type = MemType::SHARED_BUF; + mem_type = MemType::SHARED_BUF; } else { OPENVINO_THROW("Unsupported memory type: ", str); } @@ -63,24 +55,68 @@ inline std::istream& operator>>(std::istream& is, MemType& share_mem_type) { /** * @brief This key identifies type of internal shared memory * in a shared memory tensor parameter map. + * @ingroup ov_runtime_level_zero_npu_cpp_api */ static constexpr Property mem_type{"MEM_TYPE"}; /** * @brief This key identifies memory handle * in a shared memory tensor parameter map + * @ingroup ov_runtime_level_zero_npu_cpp_api */ static constexpr Property mem_handle{"MEM_HANDLE"}; /** * @brief This key identifies LevelZero context handle - * in a shared context or shared memory tensor parameter map + * in a shared context parameter map + * @ingroup ov_runtime_level_zero_npu_cpp_api */ static constexpr Property l0_context{"L0_CONTEXT"}; /** - * @brief This key identifies type of the tensor - * in a shared memory tensor parameter map. 
+ * @brief Enum to define the type of the tensor + * @ingroup ov_runtime_level_zero_npu_cpp_api + */ +enum class TensorType { + INPUT = 0, //!< Tensor is only used as input + OUTPUT = 1, //!< Tensor is only used as output + BINDED = 2 //!< Tensor could be used as input and output +}; + +/** @cond INTERNAL */ +inline std::ostream& operator<<(std::ostream& os, const TensorType& tensor_type) { + switch (tensor_type) { + case TensorType::INPUT: + return os << "INPUT"; + case TensorType::OUTPUT: + return os << "OUTPUT"; + case TensorType::BINDED: + return os << "BINDED"; + default: + OPENVINO_THROW("Unsupported tensor type"); + } +} + +inline std::istream& operator>>(std::istream& is, TensorType& tensor_type) { + std::string str; + is >> str; + if (str == "INPUT") { + tensor_type = TensorType::INPUT; + } else if (str == "OUTPUT") { + tensor_type = TensorType::OUTPUT; + } else if (str == "BINDED") { + tensor_type = TensorType::BINDED; + } else { + OPENVINO_THROW("Unsupported tensor type: ", str); + } + return is; +} +/** @endcond */ + +/** + * @brief This key sets the type of the internal Level Zero buffer + * allocated by the plugin in a shared memory tensor parameter map. + * @ingroup ov_runtime_level_zero_npu_cpp_api */ static constexpr Property tensor_type{"TENSOR_TYPE"}; diff --git a/src/plugins/auto_batch/src/plugin.cpp b/src/plugins/auto_batch/src/plugin.cpp index de125cb12551af..d97987bea6f39a 100644 --- a/src/plugins/auto_batch/src/plugin.cpp +++ b/src/plugins/auto_batch/src/plugin.cpp @@ -19,9 +19,10 @@ namespace ov { namespace autobatch_plugin { -std::vector supported_configKeys = {ov::device::priorities.name(), - ov::auto_batch_timeout.name(), - ov::enable_profiling.name()}; +std::vector supported_configKeys = { + ov::PropertyName{ov::device::priorities.name(), ov::PropertyMutability::RW}, + ov::PropertyName{ov::auto_batch_timeout.name(), ov::PropertyMutability::RW}, + ov::PropertyName{ov::enable_profiling.name(), ov::PropertyMutability::RW}}; inline ov::AnyMap merge_properties(ov::AnyMap config, const ov::AnyMap& user_config) { for (auto&& kvp : user_config) { @@ -82,9 +83,13 @@ ov::Any Plugin::get_property(const std::string& name, const ov::AnyMap& argument return {it->second}; } } else if (name == ov::supported_properties.name()) { - return std::vector{ - ov::PropertyName{ov::supported_properties.name(), ov::PropertyMutability::RO}, - ov::PropertyName{ov::device::full_name.name(), ov::PropertyMutability::RO}}; + std::vector property_name; + property_name.push_back(ov::PropertyName{ov::supported_properties.name(), ov::PropertyMutability::RO}); + property_name.push_back(ov::PropertyName{ov::device::full_name.name(), ov::PropertyMutability::RO}); + for (auto& it : supported_configKeys) { + property_name.push_back(it); + } + return decltype(ov::supported_properties)::value_type(std::move(property_name)); } else if (name == ov::internal::supported_properties.name()) { return decltype(ov::internal::supported_properties)::value_type{}; } else if (name == ov::device::full_name.name()) { @@ -113,6 +118,7 @@ OV_DEFINE_PLUGIN_CREATE_FUNCTION(Plugin, version) Plugin::Plugin() { set_device_name("BATCH"); m_plugin_config.insert(ov::auto_batch_timeout(1000)); // default value (ms) + m_plugin_config.insert(ov::enable_profiling(false)); } std::shared_ptr Plugin::compile_model(const std::shared_ptr& model, @@ -132,7 +138,7 @@ std::shared_ptr Plugin::compile_model(const std::shared_ptr< auto full_properties = merge_properties(m_plugin_config, properties); auto device_batch = 
full_properties.find(ov::device::priorities.name()); if (device_batch == full_properties.end()) { - OPENVINO_THROW("ov::device::priorities key for AUTO NATCH is not set for BATCH device"); + OPENVINO_THROW("ov::device::priorities key for AUTO BATCH is not set for BATCH device"); } auto meta_device = parse_meta_device(device_batch->second.as(), properties); diff --git a/src/plugins/intel_cpu/src/dnnl_extension_utils.cpp b/src/plugins/intel_cpu/src/dnnl_extension_utils.cpp index 82e4d3fde3ac14..60351d14b3e89e 100644 --- a/src/plugins/intel_cpu/src/dnnl_extension_utils.cpp +++ b/src/plugins/intel_cpu/src/dnnl_extension_utils.cpp @@ -3,15 +3,17 @@ // #include "dnnl_extension_utils.h" -#include "memory_desc/dnnl_blocked_memory_desc.h" -#include "onednn/iml_type_mapper.h" -#include "utils/general_utils.h" + #include #include #include - #include +#include "cpu_memory.h" +#include "memory_desc/dnnl_blocked_memory_desc.h" +#include "onednn/iml_type_mapper.h" +#include "utils/general_utils.h" + using namespace dnnl; namespace ov { @@ -254,5 +256,11 @@ bool DnnlExtensionUtils::isUnarySupportedAsPostOp(Algorithm alg) { #endif } +std::string DnnlExtensionUtils::computeWeightsStringHash(const std::shared_ptr memory, + const std::shared_ptr dstDesc) { + const auto desc_hash = dnnl::impl::primitive_hashing::get_md_hash(*dstDesc->getDnnlDesc().get()); + return std::to_string(desc_hash) + "_" + std::to_string(reinterpret_cast(memory->getData())); +} + } // namespace intel_cpu } // namespace ov diff --git a/src/plugins/intel_cpu/src/dnnl_extension_utils.h b/src/plugins/intel_cpu/src/dnnl_extension_utils.h index 5def48284ab062..cdc6342e8963fd 100644 --- a/src/plugins/intel_cpu/src/dnnl_extension_utils.h +++ b/src/plugins/intel_cpu/src/dnnl_extension_utils.h @@ -22,6 +22,7 @@ namespace intel_cpu { class DnnlMemoryDesc; class DnnlBlockedMemoryDesc; class Shape; +class IMemory; class DnnlExtensionUtils { public: @@ -101,6 +102,13 @@ class DnnlExtensionUtils { static dnnl_memory_desc_t clone_desc(const_dnnl_memory_desc_t cdesc); static const char* query_pd_info(const_dnnl_primitive_desc_t pd); static bool isUnarySupportedAsPostOp(Algorithm alg); + /** + * @brief Computes weights string hash based on weights memory and requested descriptor + * @param memory Weights memory pointer + * @param dstDesc descriptor defining weights representation after repacking + * @return string hash + */ + static std::string computeWeightsStringHash(const std::shared_ptr memory, const std::shared_ptr dstDesc); }; } // namespace intel_cpu diff --git a/src/plugins/intel_cpu/src/emitters/plugin/aarch64/jit_eltwise_emitters.cpp b/src/plugins/intel_cpu/src/emitters/plugin/aarch64/jit_eltwise_emitters.cpp index 1dec30581dd71a..3d29173788d658 100644 --- a/src/plugins/intel_cpu/src/emitters/plugin/aarch64/jit_eltwise_emitters.cpp +++ b/src/plugins/intel_cpu/src/emitters/plugin/aarch64/jit_eltwise_emitters.cpp @@ -705,23 +705,18 @@ void jit_gelu_tanh_emitter::emit_isa(const std::vector &in_vec_idxs, con h->ld1r(vmm_aux1.s, table_val2("gelu_tanh_sqrt_two_over_pi")); h->fmul(vmm_aux0.s, vmm_aux1.s, vmm_aux2.s); - const bool store_src = vmm_src.getIdx() == vmm_dst.getIdx(); - if (store_src) { - h->mov(vmm_aux2.b16, vmm_src.b16); - } - tanh_emitter->emit_code( { vmm_aux0.getIdx() }, - { vmm_aux0.getIdx() }, + { vmm_aux2.getIdx() }, aux_vec_idxs, aux_gpr_idxs); // compute 0.5 * x * (1 + tanh(G(x))) h->ld1r(vmm_aux1.s, table_val2("one")); - h->fadd(vmm_aux0.s, vmm_aux1.s, vmm_aux0.s); + h->fadd(vmm_aux0.s, vmm_aux1.s, vmm_aux2.s); 
h->ld1r(vmm_aux1.s, table_val2("half")); h->fmul(vmm_aux0.s, vmm_aux1.s, vmm_aux0.s); - h->fmul(vmm_dst.s, store_src ? vmm_aux2.s : vmm_src.s, vmm_aux0.s); + h->fmul(vmm_dst.s, vmm_src.s, vmm_aux0.s); } void jit_gelu_tanh_emitter::register_table_entries() { @@ -1219,6 +1214,8 @@ jit_mod_emitter::jit_mod_emitter(dnnl::impl::cpu::aarch64::jit_generator *host, size_t jit_mod_emitter::get_inputs_count() const { return 2; } +size_t jit_mod_emitter::get_aux_vecs_count() const { return 1; } + void jit_mod_emitter::emit_impl(const std::vector &in_vec_idxs, const std::vector &out_vec_idxs) const { if (host_isa_ == dnnl::impl::cpu::aarch64::asimd) { emit_isa(in_vec_idxs, out_vec_idxs); @@ -1233,14 +1230,15 @@ void jit_mod_emitter::emit_isa(const std::vector &in_vec_idxs, const std using TReg = typename dnnl::impl::cpu::aarch64::cpu_isa_traits::TReg; - TReg divend = TReg(in_vec_idxs[0]); + TReg dividend = TReg(in_vec_idxs[0]); TReg divisor = TReg(in_vec_idxs[1]); TReg r = TReg(out_vec_idxs[0]); + TReg aux = TReg(aux_vec_idxs[0]); - h->uni_fdiv(r.s, divend.s, divisor.s); - h->frintz(r.s, r.s); - h->uni_fmul(r.s, r.s, divisor.s); - h->uni_fsub(r.s, divend.s, r.s); + h->fdiv(aux.s, dividend.s, divisor.s); + h->frintz(aux.s, aux.s); + h->fmul(aux.s, aux.s, divisor.s); + h->fsub(r.s, dividend.s, aux.s); } std::set> jit_mod_emitter::get_supported_precisions(const std::shared_ptr& node) { @@ -1874,7 +1872,7 @@ void jit_tanh_emitter::emit_isa(const std::vector &in_vec_idxs, const st TReg src = TReg(in_vec_idxs[0]); TReg dst = TReg(out_vec_idxs[0]); - TReg aux = TReg(aux_vec_idxs.back()); + TReg aux = TReg(aux_vec_idxs[0]); h->ld1r(aux.s, table_val2("two")); h->uni_fmul(aux.s, src.s, aux.s); diff --git a/src/plugins/intel_cpu/src/emitters/plugin/aarch64/jit_eltwise_emitters.hpp b/src/plugins/intel_cpu/src/emitters/plugin/aarch64/jit_eltwise_emitters.hpp index b71fb0a67f2a19..0152a5bd3d99e1 100644 --- a/src/plugins/intel_cpu/src/emitters/plugin/aarch64/jit_eltwise_emitters.hpp +++ b/src/plugins/intel_cpu/src/emitters/plugin/aarch64/jit_eltwise_emitters.hpp @@ -477,6 +477,8 @@ class jit_mod_emitter : public jit_emitter { size_t get_inputs_count() const override; + size_t get_aux_vecs_count() const override; + static std::set> get_supported_precisions(const std::shared_ptr& node = nullptr); private: diff --git a/src/plugins/intel_cpu/src/emitters/snippets/cpu_runtime_configurator.cpp b/src/plugins/intel_cpu/src/emitters/snippets/cpu_runtime_configurator.cpp index 1f6bd487032730..925a6d28697d41 100644 --- a/src/plugins/intel_cpu/src/emitters/snippets/cpu_runtime_configurator.cpp +++ b/src/plugins/intel_cpu/src/emitters/snippets/cpu_runtime_configurator.cpp @@ -14,20 +14,27 @@ namespace intel_cpu { CPURuntimeConfigurator::CPURuntimeConfigurator() : ov::snippets::RuntimeConfigurator(std::make_shared()) { } -void CPURuntimeConfigurator::update(const std::shared_ptr& linear_ir) { - RuntimeConfigurator::update(linear_ir); - +void CPURuntimeConfigurator::update(const ov::snippets::lowered::LinearIRPtr& linear_ir) { if (linear_ir->is_dynamic()) { - get_kernel_executor_table()->update_state(); + update_loop_info(linear_ir); update_loop_args(linear_ir); + // Update KernelExecutor Table should be before `update_buffer_scratchpad_size` + // because `ComputeAllocationSize` depends on subtensors which are updated in the table + get_kernel_executor_table()->update_state(linear_ir); + update_buffer_scratchpad_size(linear_ir); } + + m_config->master_shape = linear_ir->get_master_shape(); + + update_data_offsets(); + 
update_latest_shapes(); } -void CPURuntimeConfigurator::init_tensor_rank(const std::shared_ptr& linear_ir) const { +void CPURuntimeConfigurator::init_tensor_rank(const ov::snippets::lowered::LinearIRPtr& linear_ir) const { m_config->tensor_rank = std::max(linear_ir->get_master_shape().size(), rank6D); } -void CPURuntimeConfigurator::update_loop_args(const std::shared_ptr& linear_ir) const { +void CPURuntimeConfigurator::update_loop_args(const ov::snippets::lowered::LinearIRPtr& linear_ir) const { const auto& cpu_config = ov::as_type_ptr(m_config); OPENVINO_ASSERT(cpu_config, "CPURuntimeConfigurator expects CPURuntimeConfig"); diff --git a/src/plugins/intel_cpu/src/emitters/snippets/cpu_runtime_configurator.hpp b/src/plugins/intel_cpu/src/emitters/snippets/cpu_runtime_configurator.hpp index 6b3a54652097ae..f1a21e5982aa1c 100644 --- a/src/plugins/intel_cpu/src/emitters/snippets/cpu_runtime_configurator.hpp +++ b/src/plugins/intel_cpu/src/emitters/snippets/cpu_runtime_configurator.hpp @@ -29,17 +29,17 @@ class CPURuntimeConfigurator : public ov::snippets::RuntimeConfigurator { * @brief Update RuntimeConfig based on LinearIR * @param linear_ir LinearIR */ - void update(const std::shared_ptr& linear_ir) override; + void update(const ov::snippets::lowered::LinearIRPtr& linear_ir) override; /** * @brief Initializes tensor rank of config * @param linear_ir LinearIR */ - void init_tensor_rank(const std::shared_ptr& linear_ir) const override; + void init_tensor_rank(const ov::snippets::lowered::LinearIRPtr& linear_ir) const override; /** * @brief Calculate Loop parameters of Loop emitters and update these values in CPURuntimeConfig * @param linear_ir LinearIR */ - void update_loop_args(const std::shared_ptr& linear_ir) const; + void update_loop_args(const ov::snippets::lowered::LinearIRPtr& linear_ir) const; const size_t rank6D = 6; }; diff --git a/src/plugins/intel_cpu/src/emitters/snippets/x64/jit_loop_emitters.cpp b/src/plugins/intel_cpu/src/emitters/snippets/x64/jit_loop_emitters.cpp index 6b99097872db37..cb6dfeb741109a 100644 --- a/src/plugins/intel_cpu/src/emitters/snippets/x64/jit_loop_emitters.cpp +++ b/src/plugins/intel_cpu/src/emitters/snippets/x64/jit_loop_emitters.cpp @@ -41,7 +41,8 @@ void jit_loop_begin_emitter::validate_arguments(const std::vector &in, c // Note: the only expected output is work amount register (communicated to jit_loop_end_emitter) OV_CPU_JIT_EMITTER_ASSERT(out.size() == 1, "Invalid outputs size: expected 1 got " + std::to_string(out.size())); OV_CPU_JIT_EMITTER_ASSERT(loop_begin_label != nullptr && loop_end_label != nullptr, "has not inited labels!"); - OV_CPU_JIT_EMITTER_ASSERT(implication(is_work_amount_dynamic, !evaluate_once), "with dynamic work_amount cannot evaluate once!"); + OV_CPU_JIT_EMITTER_ASSERT(!snippets::utils::is_dynamic_value(wa_increment) || evaluate_once, + "loop increment might be dynamic only if loop evaluates once!"); } void jit_loop_begin_emitter::emit_code(const std::vector &in, const std::vector &out, @@ -52,7 +53,8 @@ void jit_loop_begin_emitter::emit_code(const std::vector &in, const std: void jit_loop_begin_emitter::emit_impl(const std::vector& in, const std::vector& out) const { // If the loop evaulate once, we can skip loop begin code emission - if (evaluate_once) + // If work_amount is dynamic, we should get runtime `work_amount` - it might be `zero` and we should skip loop evaluation + if (evaluate_once && !is_work_amount_dynamic) return; Reg64 reg_work_amount = Reg64(static_cast(out.back())); @@ -124,7 +126,8 @@ void 
jit_loop_end_emitter::validate_arguments(const std::vector &in, con "Invalid finalization_offsets size: expected: ", io_size, " got ", finalization_offsets.size()); OV_CPU_JIT_EMITTER_ASSERT(data_sizes.size() == io_size, "Invalid data_sizes size: expected: ", io_size, " got ", data_sizes.size()); OV_CPU_JIT_EMITTER_ASSERT(loop_end_label != nullptr && loop_begin_label != nullptr, "has not inited labels!"); - OV_CPU_JIT_EMITTER_ASSERT(implication(are_ptr_shifts_dynamic, !evaluate_once), "with dynamic data pointer shifts cannot evaluate once!"); + OV_CPU_JIT_EMITTER_ASSERT(!snippets::utils::is_dynamic_value(wa_increment) || evaluate_once, + "loop increment might be dynamic only if loop evaluates once!"); } void jit_loop_end_emitter::emit_code(const std::vector &in, const std::vector &out, diff --git a/src/plugins/intel_cpu/src/emitters/snippets/x64/kernel_executors/brgemm.cpp b/src/plugins/intel_cpu/src/emitters/snippets/x64/kernel_executors/brgemm.cpp index 6898fd18b587cd..e538c3baef28bb 100644 --- a/src/plugins/intel_cpu/src/emitters/snippets/x64/kernel_executors/brgemm.cpp +++ b/src/plugins/intel_cpu/src/emitters/snippets/x64/kernel_executors/brgemm.cpp @@ -4,6 +4,8 @@ #include "brgemm.hpp" +#include "snippets/lowered/loop_manager.hpp" + #include #include "common/utils.hpp" #include "dnnl_extension_utils.h" @@ -18,7 +20,7 @@ using namespace dnnl::impl::cpu::x64; namespace { size_t init_hash(dnnl_data_type_t dt_in0, dnnl_data_type_t dt_in1, float beta, bool is_with_amx, - bool is_with_comp, dnnl::impl::cpu::x64::cpu_isa_t isa) { + bool is_with_comp, dnnl::impl::cpu::x64::cpu_isa_t isa) { size_t seed = 0; #define HASH(X) seed = hash_combine(seed, X) HASH(dt_in0); HASH(dt_in1); @@ -41,7 +43,7 @@ BrgemmKernelConfig::BrgemmKernelConfig(const element::Type& in0_dtype, const ele } bool BrgemmKernelConfig::is_completed() const { - return !utils::one_of(0, m_M, m_N, m_K, m_LDA, m_LDB, m_LDC); + return !utils::one_of(0, m_M, m_N, m_K, m_LDA, m_LDB, m_LDC) || is_empty(); } bool BrgemmKernelConfig::operator==(const BrgemmKernelConfig& rhs) const { @@ -54,11 +56,22 @@ bool BrgemmKernelConfig::operator==(const BrgemmKernelConfig& rhs) const { } void BrgemmKernelConfig::update(dnnl_dim_t M, dnnl_dim_t N, dnnl_dim_t K, dnnl_dim_t LDA, dnnl_dim_t LDB, dnnl_dim_t LDC) { - m_M = M; m_N = N; m_K = K; - m_LDA = LDA; m_LDB = LDB; m_LDC = LDC; + // If M is zero, it means that Brgemm won't be executed (in Loop with work_amount = 0, for example) + // To process this case, we have to make this Config as empty (nullify runtime parameters) + if (utils::one_of(0, M, N, K)) { + m_M = 0; m_N = 0; m_K = 0; + m_LDA = 0; m_LDB = 0; m_LDC = 0; + } else { + m_M = M; m_N = N; m_K = K; + m_LDA = LDA; m_LDB = LDB; m_LDC = LDC; + } m_hash = compute_hash(); } +bool BrgemmKernelConfig::is_empty() const { + return everyone_is(0, m_M, m_N, m_K, m_LDA, m_LDB, m_LDC); +} + BrgemmKernelConfig::operator amx_tile_config_t() const { amx_tile_config_t res; res.M = m_M; res.N = m_N; res.K = m_K; @@ -115,6 +128,12 @@ BrgemmKernelExecutor::BrgemmKernelExecutor(ov::intel_cpu::MultiCacheWeakPtr kern std::shared_ptr BrgemmKernelExecutor::compile_kernel(const BrgemmKernelConfig& config) const { + std::shared_ptr compiled_kernel = std::make_shared(); + + // Brgemm is not executable - nothing to compile + if (config.is_empty()) + return compiled_kernel; + cpu::x64::brgemm_t desc; auto status = brgemm_desc_init(&desc, config.get_isa(), cpu::x64::brgemm_strd, config.get_dt_in0(), config.get_dt_in1(), @@ -122,10 +141,8 @@ std::shared_ptr 
BrgemmKernelExecutor::compile_kernel(const config.get_beta(), config.get_LDA(), config.get_LDB(), config.get_LDC(), config.get_M(), config.get_N(), config.get_K(), nullptr); - - auto compiled_kernel = std::make_shared(); - OV_CPU_JIT_EMITTER_ASSERT(status == dnnl_success, "Cannot initialize brgemm descriptor due to invalid params"); + if (config.is_with_amx()) { status = brgemm_init_tiles(desc, compiled_kernel->palette); OV_CPU_JIT_EMITTER_ASSERT(status == dnnl_success, "Cannot initialize brgemm tiles due to invalid params"); @@ -138,31 +155,49 @@ std::shared_ptr BrgemmKernelExecutor::compile_kernel(const return compiled_kernel; } -void BrgemmKernelExecutor::update_config(const ov::snippets::lowered::ExpressionPtr& expr, BrgemmKernelConfig& config) const { - auto get_projected_input_subtensor = [](const snippets::lowered::PortDescriptorPtr& desc) { - // Note: for output shape you will need get_preordered_vdims() - auto shape = snippets::utils::get_planar_vdims(desc->get_shape(), desc->get_layout()); - auto subtensor = desc->get_subtensor(); - OV_CPU_JIT_EMITTER_ASSERT(subtensor.size() <= shape.size() && subtensor.size() == 2, - "Invalid subtensor + shape combination"); - auto shape_it = shape.rbegin(); - for (auto sub_it = subtensor.rbegin(); sub_it != subtensor.rend(); sub_it++, shape_it++) { - *sub_it = std::min(*sub_it, *shape_it); - } - return subtensor; - }; +void BrgemmKernelExecutor::update_config(const ov::snippets::lowered::ExpressionPtr& expr, + const ov::snippets::lowered::LinearIRPtr& linear_ir, + BrgemmKernelConfig& config) const { const auto& input_pds = expr->get_input_port_descriptors(); const auto& output_pds = expr->get_output_port_descriptors(); OV_CPU_JIT_EMITTER_ASSERT((input_pds.size() == 2 || input_pds.size() == 3) && output_pds.size() == 1, "Invalid number of in/out port descriptors"); - // Update runtime-defined config fields: - // Matrix A (first input) + + const auto in0_shape = snippets::utils::get_planar_vdims(input_pds[0]->get_shape(), input_pds[0]->get_layout()); + const auto in1_shape = snippets::utils::get_planar_vdims(input_pds[1]->get_shape(), input_pds[1]->get_layout()); + auto in0_subtensor = input_pds[0]->get_subtensor(); + auto in1_subtensor = input_pds[1]->get_subtensor(); + + auto M = *++in0_subtensor.rbegin(); + auto K = *in0_subtensor.rbegin(); + auto N = *in1_subtensor.rbegin(); + + if (ov::snippets::utils::is_full_dim_value(M)) { + M = *++in0_shape.rbegin(); + } else { + const auto& loop_ids = expr->get_loop_ids(); + OPENVINO_ASSERT(!loop_ids.empty(), "Loop by dimension M is missed"); + // TODO [146125]: Loop by M is first one in `loop_ids` + const auto& expanded_loop_info = linear_ir->get_loop_manager()->get_loop_info(loop_ids.front()); + M = expanded_loop_info->get_increment(); + input_pds[0]->set_subtensor_dim(1, M); + output_pds[0]->set_subtensor_dim(1, M); + } + + if (ov::snippets::utils::is_full_dim_value(K)) { + K = *in0_shape.rbegin(); + } else if (ov::snippets::utils::is_dynamic_value(K)) { + OPENVINO_THROW("Dynamic K is not supported"); + } + + if (ov::snippets::utils::is_full_dim_value(N)) { + N = *in1_shape.rbegin(); + } else if (ov::snippets::utils::is_dynamic_value(N)) { + OPENVINO_THROW("Dynamic N is not supported"); + } + const auto LDA = DIM_CAST(snippets::utils::get_dim_stride(expr->get_input_port(0))); - const auto& in0_subtensor = get_projected_input_subtensor(input_pds[0]); - const auto K = DIM_CAST(*in0_subtensor.rbegin()); - const auto M = DIM_CAST(*++in0_subtensor.rbegin()); - // Matrix B (second input) - // Non float 
input 1 => with data repacking + const auto LDC = DIM_CAST(snippets::utils::get_dim_stride(expr->get_output_port(0))); auto LDB = DIM_CAST(snippets::utils::get_dim_stride(expr->get_input_port(1))); const auto& brgemm_node = as_type_ptr(expr->get_node()); @@ -172,10 +207,8 @@ void BrgemmKernelExecutor::update_config(const ov::snippets::lowered::Expression OV_CPU_JIT_EMITTER_ASSERT(!repacking_buffer_shape.empty(), "Repacking buffer shape mustn't be empty"); LDB = DIM_CAST(repacking_buffer_shape.back()); } - const auto N = DIM_CAST(*get_projected_input_subtensor(input_pds[1]).rbegin()); - // Matrix C (output) - const auto LDC = DIM_CAST(snippets::utils::get_dim_stride(expr->get_output_port(0))); - config.update(M, N, K, LDA, LDB, LDC); + + config.update(DIM_CAST(M), DIM_CAST(N), DIM_CAST(K), LDA, LDB, LDC); } void BrgemmKernelExecutor::execute(const BrgemmKernelExecutor* executor, call_args* args) { diff --git a/src/plugins/intel_cpu/src/emitters/snippets/x64/kernel_executors/brgemm.hpp b/src/plugins/intel_cpu/src/emitters/snippets/x64/kernel_executors/brgemm.hpp index b0dd9c465b66de..4dd52e21ca2dfd 100644 --- a/src/plugins/intel_cpu/src/emitters/snippets/x64/kernel_executors/brgemm.hpp +++ b/src/plugins/intel_cpu/src/emitters/snippets/x64/kernel_executors/brgemm.hpp @@ -24,6 +24,7 @@ struct BrgemmKernelConfig : public snippets::KernelExecutorBase::GenericConfig { return std::unique_ptr( new BrgemmKernelConfig(*this)); } void update(dnnl_dim_t M, dnnl_dim_t N, dnnl_dim_t K, dnnl_dim_t LDA, dnnl_dim_t LDB, dnnl_dim_t LDC); + bool is_empty() const; dnnl_data_type_t get_dt_in0() const { return m_static_params->dt_in0; } dnnl_data_type_t get_dt_in1() const { return m_static_params->dt_in1; } @@ -95,7 +96,9 @@ class BrgemmKernelExecutor : public CPUKernelExecutor compile_kernel(const BrgemmKernelConfig& c) const override; - void update_config(const ov::snippets::lowered::ExpressionPtr& expr, BrgemmKernelConfig& config) const override; + void update_config(const ov::snippets::lowered::ExpressionPtr& expr, + const ov::snippets::lowered::LinearIRPtr& linear_ir, + BrgemmKernelConfig& config) const override; }; #define GET_OFF_BRGEMM_ARGS(field) offsetof(BrgemmKernelExecutor::call_args, field) diff --git a/src/plugins/intel_cpu/src/emitters/tpp/x64/jit_tpp_emitter.cpp b/src/plugins/intel_cpu/src/emitters/tpp/x64/jit_tpp_emitter.cpp index 91c95f0a478d3c..70ddbb3d79ee21 100644 --- a/src/plugins/intel_cpu/src/emitters/tpp/x64/jit_tpp_emitter.cpp +++ b/src/plugins/intel_cpu/src/emitters/tpp/x64/jit_tpp_emitter.cpp @@ -48,7 +48,7 @@ TppEmitter::TppEmitter(dnnl::impl::cpu::x64::jit_generator* h, io_port_descriptors.resize(num_kernel_args); // Note: this is needed mostly for Reduce operations, since they allow the last subternsor dim to be FULL_DIM; auto replace_full_dim = [](size_t dim, size_t replace_dim) { - if (dim == snippets::lowered::PortDescriptor::ServiceDimensions::FULL_DIM) + if (ov::snippets::utils::is_full_dim_value(dim)) return replace_dim; return dim; }; diff --git a/src/plugins/intel_cpu/src/node.cpp b/src/plugins/intel_cpu/src/node.cpp index dc24c611861a16..41c3011f8707ec 100644 --- a/src/plugins/intel_cpu/src/node.cpp +++ b/src/plugins/intel_cpu/src/node.cpp @@ -831,16 +831,8 @@ void Node::prepareMemory(const DnnlMemoryDescPtr& intDesc, size_t indx) { MemoryPtr ptr; auto weightCache = context->getWeightsCache(); if (weightCache != nullptr && memory::format_kind::blocked == intDesc->getDnnlDesc().get_format_kind()) { - const auto& format = intDesc->serializeFormat(); - const uint64_t 
data_hash = - weightCache->GetHashFunc().hash(static_cast(internalBlob->getData()), - internalBlob->getSize()); - - const std::string string_hash = name + "_" + std::to_string(indx) - + "_" + format - + "_" + std::to_string(internalBlob->getSize()) - + "_" + std::to_string(data_hash); - + const auto string_hash = + name + "_" + std::to_string(indx) + "_" + DnnlExtensionUtils::computeWeightsStringHash(internalBlob, intDesc); ptr = *weightCache->findOrCreate(string_hash, create); } else { ptr = create(); @@ -905,10 +897,7 @@ MemoryPtr Node::prepareWeightMemory(DnnlMemoryDescPtr dstWeightDesc, DnnlMemoryD auto weightCache = context->getWeightsCache(); if (weightCache != nullptr) { - const std::string string_hash = getName() + "_" + format - + "_" + std::to_string(edgeMem->getSize()) - + "_" + std::to_string(*edgeMem->getDataAs()); - + const auto string_hash = DnnlExtensionUtils::computeWeightsStringHash(edgeMem, dstWeightDesc); ptr = *weightCache->findOrCreate(string_hash, create); } else { ptr = create(); diff --git a/src/plugins/intel_cpu/src/nodes/executors/dnnl/dnnl_utils.cpp b/src/plugins/intel_cpu/src/nodes/executors/dnnl/dnnl_utils.cpp index d9cbd05a847231..c801eca5bbe13a 100644 --- a/src/plugins/intel_cpu/src/nodes/executors/dnnl/dnnl_utils.cpp +++ b/src/plugins/intel_cpu/src/nodes/executors/dnnl/dnnl_utils.cpp @@ -4,12 +4,15 @@ #include "nodes/executors/dnnl/dnnl_utils.hpp" +#include #include #include "cpu_memory.h" #include "memory_desc/dnnl_memory_desc.h" +#include "memory_desc/cpu_memory_desc_utils.h" #include "nodes/executors/executor.hpp" #include "nodes/reorder.h" +#include "utils/cpu_utils.hpp" namespace ov { namespace intel_cpu { @@ -86,8 +89,7 @@ MemoryPtr prepareWeightsMemory(const DnnlMemoryDescPtr srcWeightDesc, MemoryPtr ptr; if (globalWeightCache && dnnl::memory::format_kind::blocked == dstWeightDesc->getDnnlDesc().get_format_kind()) { - const std::string string_hash = format + "_" + std::to_string(weightsMem->getSize()) + "_" + - std::to_string(reinterpret_cast(weightsMem->getData())); + const auto string_hash = DnnlExtensionUtils::computeWeightsStringHash(weightsMem, dstWeightDesc); ptr = *globalWeightCache->findOrCreate(string_hash, create); } else { ptr = create(); diff --git a/src/plugins/intel_cpu/src/nodes/fullyconnected.cpp b/src/plugins/intel_cpu/src/nodes/fullyconnected.cpp index 76e41db1cd06c0..da3dcafa4750ef 100644 --- a/src/plugins/intel_cpu/src/nodes/fullyconnected.cpp +++ b/src/plugins/intel_cpu/src/nodes/fullyconnected.cpp @@ -25,6 +25,8 @@ #include "utils/debug_capabilities.h" #include "utils/general_utils.h" +#include "fake_quantize.h" + using namespace dnnl; using namespace ov::element; @@ -94,6 +96,25 @@ bool FullyConnected::canFuse(const NodePtr& node) const { #if defined(OV_CPU_WITH_SHL) return false; #endif + if (node->getType() == Type::FakeQuantize) { + auto* fq = dynamic_cast(node.get()); + if (fq->getBroadcastingPolicy() != FakeQuantize::BroadcastingPolicy::PerTensor) { + const auto& dstShape = getOutputShapeAtPort(0); + auto dataRanks = dstShape.getRank(); + // only per-OC or per-Tensor fakequantize can be postOps + if (fq->getAxis() != dataRanks - 1) { + DEBUG_LOG("reject FakeQuantize ", + fq->getName(), + "(axis=", + fq->getAxis(), + ") from fusing into ", + getName(), + " with dst shape ", + dstShape); + return false; + } + } + } return canFuseSimpleOperation(node); } diff --git a/src/plugins/intel_cpu/src/nodes/kernels/scaled_attn/common.hpp b/src/plugins/intel_cpu/src/nodes/kernels/scaled_attn/common.hpp index 
bd05801c139dc8..34c00a527d2ce7 100644 --- a/src/plugins/intel_cpu/src/nodes/kernels/scaled_attn/common.hpp +++ b/src/plugins/intel_cpu/src/nodes/kernels/scaled_attn/common.hpp @@ -20,9 +20,11 @@ namespace XARCH { // avx512/avx2 register length in byte static constexpr size_t vec_len_avx512 = 64lu; static constexpr size_t vec_len_avx2 = 32lu; +static constexpr size_t vec_len_neon = 16lu; // avx512/avx2 register length in float static constexpr size_t vec_len_f32_avx512 = vec_len_avx512 / sizeof(float); static constexpr size_t vec_len_f32_avx2 = vec_len_avx2 / sizeof(float); +static constexpr size_t vec_len_f32_neon = vec_len_neon / sizeof(float); #ifdef HAVE_AVX512F inline __m512 cvt_bf16_to_fp32(const __m256i src) { diff --git a/src/plugins/intel_cpu/src/nodes/kernels/scaled_attn/mha_single_token.cpp b/src/plugins/intel_cpu/src/nodes/kernels/scaled_attn/mha_single_token.cpp index e4648ece365e9a..5177f4013319e6 100644 --- a/src/plugins/intel_cpu/src/nodes/kernels/scaled_attn/mha_single_token.cpp +++ b/src/plugins/intel_cpu/src/nodes/kernels/scaled_attn/mha_single_token.cpp @@ -13,12 +13,17 @@ # include #endif + #include "openvino/core/type/bfloat16.hpp" #include "openvino/core/parallel.hpp" #include "mha_single_token.hpp" #include "common.hpp" #include "softmax_kernel.hpp" +#if defined(OPENVINO_ARCH_ARM64) +# include +#endif + namespace ov { namespace Extensions { namespace Cpu { @@ -53,6 +58,13 @@ void cvt_copy(TA* dst, TB* src, size_t n) { auto vb = mm256_uni_loadu_ps(src + i); mm256_uni_storeu_ps(dst + i, vb); } +#elif defined(OPENVINO_ARCH_ARM64) + int vec_len_f32_neon = 4; + auto _dst = reinterpret_cast(dst); + for (; i + vec_len_f32_neon <= n; i += vec_len_f32_neon) { + float32x4_t vb1 = vld1q_f32(src + i); + vst1q_f32(_dst + i, vb1); + } #endif for (; i < n; i++) { dst[i] = src[i]; @@ -78,6 +90,15 @@ static void attn_acc_value(float* out, float weight, T* v, size_t S, float* scal v_out = _mm256_fmadd_ps(attn_w_vec_fp32, v_value, v_out); mm256_uni_storeu_ps(out + i, v_out); } +#elif defined(OPENVINO_ARCH_ARM64) + float32x4_t attn_w_vec_fp32 = vdupq_n_f32(weight); + auto _v = reinterpret_cast(v); + for (; i + vec_len_f32_neon <= S; i += vec_len_f32_neon) { + float32x4_t v_value = vld1q_f32(_v + i); + float32x4_t v_out = vld1q_f32(out + i); + v_out = vmlaq_f32(v_out, attn_w_vec_fp32, v_value); + vst1q_f32(out + i, v_out); + } #endif for (; i < S; i++) { out[i] += weight * v[i]; @@ -308,6 +329,47 @@ static float sum_q_head(T* a, size_t n) { vsum0 = _mm256_add_ps(vsum0, vsum2); hsum(vsum0); sum = _mm256_cvtss_f32(vsum0); +#elif defined(OPENVINO_ARCH_ARM64) + size_t vec_len_f32_neon = 4; + float32x4_t vsum0 = vdupq_n_f32(0.0f); + float32x4_t vsum1 = vdupq_n_f32(0.0f); + float32x4_t vsum2 = vdupq_n_f32(0.0f); + float32x4_t vsum3 = vdupq_n_f32(0.0f); + + for (; i + 4 * vec_len_f32_neon <= n; i += vec_len_f32_neon * 4) { + float32x4_t va0 = vld1q_f32(a + i); + float32x4_t va1 = vld1q_f32(a + i + vec_len_f32_neon); + float32x4_t va2 = vld1q_f32(a + i + vec_len_f32_neon * 2); + float32x4_t va3 = vld1q_f32(a + i + vec_len_f32_neon * 3); + + vsum0 = vaddq_f32(va0, vsum0); + vsum1 = vaddq_f32(va1, vsum1); + vsum2 = vaddq_f32(va2, vsum2); + vsum3 = vaddq_f32(va3, vsum3); + } + if (i + 2 * vec_len_f32_neon <= n) { + float32x4_t va0 = vld1q_f32(a + i); + float32x4_t va1 = vld1q_f32(a + i + vec_len_f32_neon); + + vsum0 = vaddq_f32(va0, vsum0); + vsum1 = vaddq_f32(va1, vsum1); + i += 2 * vec_len_f32_neon; + } + if (i + vec_len_f32_neon <= n) { + float32x4_t va0 = vld1q_f32(a + i); + vsum0 = 
vaddq_f32(va0, vsum0); + i += vec_len_f32_neon; + } + + vsum0 = vaddq_f32(vsum0, vsum1); + vsum2 = vaddq_f32(vsum2, vsum3); + vsum0 = vaddq_f32(vsum0, vsum2); + + float32x2_t sum_low = vget_low_f32(vsum0); + float32x2_t sum_high = vget_high_f32(vsum0); + sum_low = vadd_f32(sum_low, sum_high); + sum_low = vpadd_f32(sum_low, sum_low); + sum = vget_lane_f32(sum_low, 0); #endif for (; i < n; i++) { @@ -406,7 +468,59 @@ static float dot_product(TA* a, TB* b, size_t n, float* scale, float* zp, float* vsum0 = _mm256_add_ps(vsum0, vsum2); hsum(vsum0); sum = _mm256_cvtss_f32(vsum0); + +#elif defined(OPENVINO_ARCH_ARM64) + float32x4_t vsum0 = vdupq_n_f32(0.0f); + float32x4_t vsum1 = vdupq_n_f32(0.0f); + float32x4_t vsum2 = vdupq_n_f32(0.0f); + float32x4_t vsum3 = vdupq_n_f32(0.0f); + + auto _a = reinterpret_cast(a); + auto _b = reinterpret_cast(b); + + for (; i + 4 * vec_len_f32_neon <= n; i += vec_len_f32_neon * 4) { + float32x4_t va0 = vld1q_f32(_a + i); + float32x4_t va1 = vld1q_f32(_a + i + vec_len_f32_neon); + float32x4_t va2 = vld1q_f32(_a + i + vec_len_f32_neon * 2); + float32x4_t va3 = vld1q_f32(_a + i + vec_len_f32_neon * 3); + + float32x4_t vb0 = vld1q_f32(_b + i); + float32x4_t vb1 = vld1q_f32(_b + i + vec_len_f32_neon); + float32x4_t vb2 = vld1q_f32(_b + i + vec_len_f32_neon * 2); + float32x4_t vb3 = vld1q_f32(_b + i + vec_len_f32_neon * 3); + + vsum0 = vmlaq_f32(vsum0, va0, vb0); + vsum1 = vmlaq_f32(vsum1, va1, vb1); + vsum2 = vmlaq_f32(vsum2, va2, vb2); + vsum3 = vmlaq_f32(vsum3, va3, vb3); + } + if (i + 2 * vec_len_f32_neon <= n) { + float32x4_t va0 = vld1q_f32(_a + i); + float32x4_t va1 = vld1q_f32(_a + i + vec_len_f32_neon); + + float32x4_t vb0 = vld1q_f32(_b + i); + float32x4_t vb1 = vld1q_f32(_b + i + vec_len_f32_neon); + + vsum0 = vmlaq_f32(vsum0, va0, vb0); + vsum1 = vmlaq_f32(vsum1, va1, vb1); + i += 2 * vec_len_f32_neon; + } + if (i + vec_len_f32_neon <= n) { + float32x4_t va0 = vld1q_f32(_a + i); + float32x4_t vb0 = vld1q_f32(_b + i); + vsum0 = vmlaq_f32(vsum0, va0, vb0); + i += vec_len_f32_neon; + } + + vsum0 = vaddq_f32(vsum0, vsum1); + vsum2 = vaddq_f32(vsum2, vsum3); + vsum0 = vaddq_f32(vsum0, vsum2); + + float32x2_t temp_sum = vadd_f32(vget_low_f32(vsum0), vget_high_f32(vsum0)); + temp_sum = vpadd_f32(temp_sum, temp_sum); + sum = vget_lane_f32(temp_sum, 0); #endif + for (; i < n; i++) { sum += a[i] * b[i]; } @@ -593,6 +707,18 @@ static void attn_reduce(T* dst, float* temp, size_t M, size_t S, size_t temp_str } mm256_uni_storeu_ps(dst + i, result_vec_fp32); } +#elif defined(OPENVINO_ARCH_ARM64) + auto _dst = reinterpret_cast(dst); + for (; i + vec_len_f32_neon <= S; i += vec_len_f32_neon) { + auto* src = temp + i; + auto result_vec_fp32 = vdupq_n_f32(0.0f); + for (size_t m = 0; m < M; m++) { + auto o_vec_fp32 = vld1q_f32(src); + result_vec_fp32 = vaddq_f32(result_vec_fp32, o_vec_fp32); + src += temp_stride; + } + vst1q_f32(_dst + i, result_vec_fp32); + } #endif for (; i < S; i++) { auto* src = temp + i; diff --git a/src/plugins/intel_cpu/src/transformations/snippets/x64/pass/brgemm_to_brgemm_cpu.cpp b/src/plugins/intel_cpu/src/transformations/snippets/x64/pass/brgemm_to_brgemm_cpu.cpp index 3c9bfcc5ea064b..d71faef96923d0 100644 --- a/src/plugins/intel_cpu/src/transformations/snippets/x64/pass/brgemm_to_brgemm_cpu.cpp +++ b/src/plugins/intel_cpu/src/transformations/snippets/x64/pass/brgemm_to_brgemm_cpu.cpp @@ -31,7 +31,7 @@ using namespace snippets::lowered; namespace { std::vector make_subtensor(const ov::Shape& tensor) { - return std::vector(std::min(tensor.size(), 
size_t(2)), PortDescriptor::ServiceDimensions::FULL_DIM); + return std::vector(std::min(tensor.size(), size_t(2)), ov::snippets::utils::get_full_dim_value()); } template void set_full_port_desc(const T& port) { diff --git a/src/plugins/intel_cpu/src/transformations/snippets/x64/pass/lowered/brgemm_blocking.cpp b/src/plugins/intel_cpu/src/transformations/snippets/x64/pass/lowered/brgemm_blocking.cpp index 044a1f724e78c3..3c8e4caf00c9b0 100644 --- a/src/plugins/intel_cpu/src/transformations/snippets/x64/pass/lowered/brgemm_blocking.cpp +++ b/src/plugins/intel_cpu/src/transformations/snippets/x64/pass/lowered/brgemm_blocking.cpp @@ -9,6 +9,7 @@ #include "snippets/lowered/linear_ir.hpp" #include "snippets/lowered/loop_manager.hpp" #include "snippets/lowered/pass/pass.hpp" +#include "snippets/lowered/pass/propagate_subtensors.hpp" #include "snippets/snippets_isa.hpp" #include "snippets/utils/utils.hpp" #include "transformations/snippets/x64/op/brgemm_cpu.hpp" @@ -56,6 +57,15 @@ LinearIR::constExprIt BrgemmBlocking::get_loop_begin_pos(LinearIR& linear_ir, co return loop_begin_it; } +snippets::lowered::SpecificIterationHandlers BrgemmBlocking::get_default_blocking_loop_handlers(size_t work_amount, size_t block_size) { + SpecificIterationHandlers handlers; + const auto tail_size = snippets::utils::is_dynamic_value(work_amount) ? snippets::utils::get_dynamic_value() : work_amount % block_size; + if (tail_size != 0) + handlers.register_pass(tail_size); + handlers.register_pass(); + return handlers; +} + bool BrgemmBlocking::run(LinearIR& linear_ir, LinearIR::constExprIt begin, LinearIR::constExprIt end) { OV_ITT_SCOPED_TASK(ov::pass::itt::domains::SnippetsTransform, "Snippets::BrgemmBlocking") const auto& loop_manager = linear_ir.get_loop_manager(); @@ -107,16 +117,24 @@ bool BrgemmBlocking::run(LinearIR& linear_ir, LinearIR::constExprIt begin, Linea const auto block_size_n = snippets::utils::is_dynamic_value(n) ? brgemm->get_n_block_size() : std::min(brgemm->get_n_block_size(), n); const auto block_size_k = snippets::utils::is_dynamic_value(k) ? 
brgemm->get_k_block_size() : std::min(brgemm->get_k_block_size(), k); - *++in_0_subtensor.rbegin() = block_size_m; - *++out_subtensor.rbegin() = block_size_m; - *in_1_subtensor.rbegin() = block_size_n; - *out_subtensor.rbegin() = block_size_n; - *in_0_subtensor.rbegin() = block_size_k; - *++in_1_subtensor.rbegin() = block_size_k; + const bool m_blocking = block_size_m != m; + const bool n_blocking = block_size_n != n; + const bool k_blocking = block_size_k != k; - brgemm_expr->get_input_port_descriptor(0)->set_subtensor(in_0_subtensor); - brgemm_expr->get_input_port_descriptor(1)->set_subtensor(in_1_subtensor); - brgemm_expr->get_output_port_descriptor(0)->set_subtensor(out_subtensor); + // If block_size is dynamic, it means that Brgemm will process full tensor: + // subtensor[i] = FULL_DIM as by default + if (!snippets::utils::is_dynamic_value(block_size_m) && m_blocking) { + brgemm_expr->get_input_port_descriptor(0)->set_subtensor_dim(1, block_size_m); + brgemm_expr->get_output_port_descriptor(0)->set_subtensor_dim(1, block_size_m); + } + if (!snippets::utils::is_dynamic_value(block_size_n) && n_blocking) { + brgemm_expr->get_input_port_descriptor(1)->set_subtensor_dim(0, block_size_n); + brgemm_expr->get_output_port_descriptor(0)->set_subtensor_dim(0, block_size_n); + } + if (!snippets::utils::is_dynamic_value(block_size_k) && k_blocking) { + brgemm_expr->get_input_port_descriptor(0)->set_subtensor_dim(0, block_size_k); + brgemm_expr->get_input_port_descriptor(1)->set_subtensor_dim(1, block_size_k); + } const bool need_brgemm_copy_b = brgemm_cpu && with_repacking(brgemm_cpu->get_type()); ov::snippets::lowered::ExpressionPtr copy_b_expr = nullptr; @@ -154,7 +172,9 @@ bool BrgemmBlocking::run(LinearIR& linear_ir, LinearIR::constExprIt begin, Linea if (!include_repacking && brgemm_cpu && with_compensations(brgemm_cpu->get_type())) entries.emplace_back(brgemm_expr->get_input_port(2), false); const std::vector exits{LoopPort(brgemm_expr->get_output_port(0), true)}; - loop_manager->mark_loop(loop_begin_it, loop_end_it, m, block_size_m, 1, entries, exits); + + const auto id = loop_manager->mark_loop(loop_begin_it, loop_end_it, m, block_size_m, 1, entries, exits, false); + loop_manager->get_loop_info(id)->set_handlers(get_default_blocking_loop_handlers(m, block_size_m)); }; auto mark_n_blocking = [&]() { @@ -165,7 +185,9 @@ bool BrgemmBlocking::run(LinearIR& linear_ir, LinearIR::constExprIt begin, Linea LoopPort(brgemm_expr->get_input_port(0), false), LoopPort(need_brgemm_copy_b ? copy_b_expr->get_input_port(0) : brgemm_expr->get_input_port(1), true)}; const std::vector exits{LoopPort(brgemm_expr->get_output_port(0), true)}; - loop_manager->mark_loop(loop_begin_it, loop_end_it, n, block_size_n, 0, entries, exits); + + const auto id = loop_manager->mark_loop(loop_begin_it, loop_end_it, n, block_size_n, 0, entries, exits, false); + loop_manager->get_loop_info(id)->set_handlers(get_default_blocking_loop_handlers(n, block_size_n)); }; auto mark_k_blocking = [&]() { @@ -176,14 +198,14 @@ bool BrgemmBlocking::run(LinearIR& linear_ir, LinearIR::constExprIt begin, Linea LoopPort(brgemm_expr->get_input_port(0), true, 0), LoopPort(need_brgemm_copy_b ? 
copy_b_expr->get_input_port(0) : brgemm_expr->get_input_port(1), true, 1)}; const std::vector exits{LoopPort(brgemm_expr->get_output_port(0), false)}; - const auto id = loop_manager->mark_loop(loop_begin_it, loop_end_it, k, block_size_k, entries, exits); - const auto& loop_info = loop_manager->get_loop_info(id); - loop_info->register_pass_to_handler(0.f); + + auto handlers = get_default_blocking_loop_handlers(k, block_size_k); + handlers.register_pass(0.f); + + const auto id = loop_manager->mark_loop(loop_begin_it, loop_end_it, k, block_size_k, entries, exits, false); + loop_manager->get_loop_info(id)->set_handlers(handlers); }; - const bool k_blocking = block_size_k != k; - const bool n_blocking = block_size_n != n; - const bool m_blocking = block_size_m != m; // It is not necessary to include copyB in loop by M if there are no blocking by KN const bool include_repacking_in_loop = k_blocking || n_blocking; diff --git a/src/plugins/intel_cpu/src/transformations/snippets/x64/pass/lowered/brgemm_blocking.hpp b/src/plugins/intel_cpu/src/transformations/snippets/x64/pass/lowered/brgemm_blocking.hpp index cdc2d05cffd1e5..4d29267f034fc9 100644 --- a/src/plugins/intel_cpu/src/transformations/snippets/x64/pass/lowered/brgemm_blocking.hpp +++ b/src/plugins/intel_cpu/src/transformations/snippets/x64/pass/lowered/brgemm_blocking.hpp @@ -5,6 +5,7 @@ #pragma once #include "snippets/lowered/pass/pass.hpp" +#include "snippets/lowered/specific_loop_iter_handlers.hpp" namespace ov { namespace intel_cpu { @@ -24,6 +25,8 @@ class BrgemmBlocking : public snippets::lowered::pass::RangedPass { snippets::lowered::LinearIR::constExprIt begin, snippets::lowered::LinearIR::constExprIt end) override; + static snippets::lowered::SpecificIterationHandlers get_default_blocking_loop_handlers(size_t work_amount, size_t block_size); + private: static snippets::lowered::LinearIR::constExprIt move_new_memory_buffer(snippets::lowered::LinearIR& linear_ir, const snippets::lowered::LinearIR::constExprIt& brgemm_it); diff --git a/src/plugins/intel_cpu/src/transformations/snippets/x64/pass/lowered/cpu_iter_handlers.cpp b/src/plugins/intel_cpu/src/transformations/snippets/x64/pass/lowered/cpu_iter_handlers.cpp index d5e96b2a7339ba..a8281ad1d02da6 100644 --- a/src/plugins/intel_cpu/src/transformations/snippets/x64/pass/lowered/cpu_iter_handlers.cpp +++ b/src/plugins/intel_cpu/src/transformations/snippets/x64/pass/lowered/cpu_iter_handlers.cpp @@ -4,6 +4,7 @@ #include "cpu_iter_handlers.hpp" +#include "snippets/op/loop.hpp" #include "snippets/lowered/loop_manager.hpp" #include "transformations/snippets/x64/op/brgemm_cpu.hpp" @@ -34,6 +35,19 @@ std::shared_ptr SetBrgemmBeta::merge(const st return nullptr; return merged_pass; } + +bool SetEvaluateOnce::run(LinearIR& linear_ir, LinearIR::constExprIt begin, LinearIR::constExprIt end) { + const auto& loop_end = ov::as_type_ptr(end->get()->get_node()); + OPENVINO_ASSERT(loop_end, "SetEvaluateOnce expected LoopEnd node in iterator `end`."); + const auto& loop_info = linear_ir.get_loop_manager()->get_loop_info(loop_end->get_id()); + loop_info->set_evaluate_once(true); + return true; +} + +std::shared_ptr SetEvaluateOnce::merge(const std::shared_ptr& other) { + return !other || ov::is_type(other) ? 
std::make_shared() : nullptr; +} + } // namespace pass } // namespace intel_cpu } // namespace ov \ No newline at end of file diff --git a/src/plugins/intel_cpu/src/transformations/snippets/x64/pass/lowered/cpu_iter_handlers.hpp b/src/plugins/intel_cpu/src/transformations/snippets/x64/pass/lowered/cpu_iter_handlers.hpp index 5da97e29796f70..24697c2f50f6a6 100644 --- a/src/plugins/intel_cpu/src/transformations/snippets/x64/pass/lowered/cpu_iter_handlers.hpp +++ b/src/plugins/intel_cpu/src/transformations/snippets/x64/pass/lowered/cpu_iter_handlers.hpp @@ -25,7 +25,23 @@ class SetBrgemmBeta : public snippets::lowered::pass::RangedPass { std::shared_ptr merge(const std::shared_ptr& other) override; private: - float m_beta; + float m_beta = 0; +}; + +/** + * @interface SetEvaluateOnce + * @brief The pass set `evaluate once = true` only to ExpandedLoopInfo which is mapped on LoopEnd in the passed iterator `end`. + * The pointer arithmetic should be updated in the separate optimization `OptimizeLoopSingleEvaluation` + * @ingroup snippets + */ +class SetEvaluateOnce : public snippets::lowered::pass::RangedPass { +public: + SetEvaluateOnce() = default; + OPENVINO_RTTI("SetEvaluateOnce", "RangedPass") + bool run(snippets::lowered::LinearIR& linear_ir, + snippets::lowered::LinearIR::constExprIt begin, + snippets::lowered::LinearIR::constExprIt end) override; + std::shared_ptr merge(const std::shared_ptr& other) override; }; } // namespace pass } // namespace intel_cpu diff --git a/src/plugins/intel_cpu/src/transformations/tpp/x64/pass/eltwise_to_eltwise_tpp.cpp b/src/plugins/intel_cpu/src/transformations/tpp/x64/pass/eltwise_to_eltwise_tpp.cpp index b3c04fb7833db9..da83038f5455f8 100644 --- a/src/plugins/intel_cpu/src/transformations/tpp/x64/pass/eltwise_to_eltwise_tpp.cpp +++ b/src/plugins/intel_cpu/src/transformations/tpp/x64/pass/eltwise_to_eltwise_tpp.cpp @@ -3,6 +3,7 @@ // #include "snippets/itt.hpp" +#include "snippets/utils/utils.hpp" #include "eltwise_to_eltwise_tpp.hpp" #include "openvino/pass/pattern/op/wrap_type.hpp" @@ -40,14 +41,12 @@ EltwiseToEltwiseTPP::EltwiseToEltwiseTPP() { OPENVINO_ASSERT(tpp_eltwise, "Failed to create TPP node"); const size_t M_block = 32; - const size_t N_block = ov::is_type(node) ? - snippets::lowered::PortDescriptor::ServiceDimensions::FULL_DIM : - 64; + const size_t N_block = ov::is_type(node) ? 
ov::snippets::utils::get_full_dim_value() : 64; ov::replace_node_update_name(node, tpp_eltwise); for (size_t i = 0; i < node->get_input_size(); i++) - snippets::lowered::set_port_desc(tpp_eltwise->input(i), {M_block, N_block}); + ov::snippets::lowered::PortDescriptorUtils::set_port_descriptor(tpp_eltwise->input(i), {M_block, N_block}); - snippets::lowered::set_port_desc(tpp_eltwise->output(0), {M_block, N_block}); + ov::snippets::lowered::PortDescriptorUtils::set_port_descriptor(tpp_eltwise->output(0), {M_block, N_block}); return true; }; diff --git a/src/plugins/intel_cpu/src/transformations/tpp/x64/pass/lowered/set_tpp_leading_dim.cpp b/src/plugins/intel_cpu/src/transformations/tpp/x64/pass/lowered/set_tpp_leading_dim.cpp index a420ed2cbfea22..4f38eddc2bde0f 100644 --- a/src/plugins/intel_cpu/src/transformations/tpp/x64/pass/lowered/set_tpp_leading_dim.cpp +++ b/src/plugins/intel_cpu/src/transformations/tpp/x64/pass/lowered/set_tpp_leading_dim.cpp @@ -74,7 +74,7 @@ size_t get_leading_dim(ExpressionPort port, const snippets::lowered::LoopManager bool full_dim_substituted = false; for (size_t i = 1; i <= subtensor.size(); i++) { const auto idx = subtensor.size() - i; - if (subtensor[idx] == snippets::lowered::PortDescriptor::ServiceDimensions::FULL_DIM) { + if (ov::snippets::utils::is_full_dim_value(subtensor[idx])) { // the reason that we don't support FULL_DIM substitution for an arbitrary layout is that // the layout and subtersor can (and usually do) have different ranks full_dim_substituted = true; diff --git a/src/plugins/intel_cpu/src/transformations/tpp/x64/pass/scalar_to_scalar_tpp.cpp b/src/plugins/intel_cpu/src/transformations/tpp/x64/pass/scalar_to_scalar_tpp.cpp index 5ea5b135ba595a..0b9f41d47aa0da 100644 --- a/src/plugins/intel_cpu/src/transformations/tpp/x64/pass/scalar_to_scalar_tpp.cpp +++ b/src/plugins/intel_cpu/src/transformations/tpp/x64/pass/scalar_to_scalar_tpp.cpp @@ -42,9 +42,9 @@ ScalarToScalarTPP::ScalarToScalarTPP() { tpp_scalar->set_friendly_name(node->get_friendly_name()); ov::replace_node_update_name(node, tpp_scalar); const auto& out = tpp_scalar->output(0); - snippets::lowered::set_port_desc(out, {1}); + ov::snippets::lowered::PortDescriptorUtils::set_port_descriptor(out, {1}); for (const auto& in : out.get_target_inputs()) - snippets::lowered::set_port_desc(in, {1}); + ov::snippets::lowered::PortDescriptorUtils::set_port_descriptor(in, {1}); return true; }; diff --git a/src/plugins/intel_cpu/src/weights_cache.cpp b/src/plugins/intel_cpu/src/weights_cache.cpp index eed92f5977cffe..65fd3644ad4215 100644 --- a/src/plugins/intel_cpu/src/weights_cache.cpp +++ b/src/plugins/intel_cpu/src/weights_cache.cpp @@ -10,8 +10,6 @@ namespace ov { namespace intel_cpu { -const SimpleDataHash WeightsSharing::simpleCRC; - WeightsSharing::SharedMemory::SharedMemory( std::unique_lock && lock, const MemoryInfo::Ptr & memory, diff --git a/src/plugins/intel_cpu/src/weights_cache.hpp b/src/plugins/intel_cpu/src/weights_cache.hpp index 70c62569cdeb47..f0401700e49719 100644 --- a/src/plugins/intel_cpu/src/weights_cache.hpp +++ b/src/plugins/intel_cpu/src/weights_cache.hpp @@ -22,31 +22,6 @@ namespace ov { namespace intel_cpu { - -class SimpleDataHash { -public: - SimpleDataHash() { - for (int i = 0; i < kTableSize; i++) { - uint64_t c = i; - for (int j = 0; j < 8; j++) - c = ((c & 1) ? 
0xc96c5795d7870f42 : 0) ^ (c >> 1); - table[i] = c; - } - } - // Computes 64-bit "cyclic redundancy check" sum, as specified in ECMA-182 - uint64_t hash(const unsigned char* data, size_t size) const { - uint64_t crc = 0; - for (size_t idx = 0; idx < size; idx++) - crc = table[(unsigned char)crc ^ data[idx]] ^ (crc >> 8); - - return ~crc; - } - -protected: - static constexpr int kTableSize = 256; - uint64_t table[kTableSize]; -}; - /** * Caching store of Memory objects * Will return a cached object or create new one @@ -94,12 +69,9 @@ class WeightsSharing { SharedMemory::Ptr get(const std::string& key) const; - static const SimpleDataHash& GetHashFunc () { return simpleCRC; } - protected: mutable std::mutex guard; std::unordered_map sharedWeights; - static const SimpleDataHash simpleCRC; }; /** diff --git a/src/plugins/intel_cpu/tests/functional/custom/single_layer_tests/instances/x64/matmul.cpp b/src/plugins/intel_cpu/tests/functional/custom/single_layer_tests/instances/x64/matmul.cpp index 3daa819cd4854d..83faa2c06ec6f6 100644 --- a/src/plugins/intel_cpu/tests/functional/custom/single_layer_tests/instances/x64/matmul.cpp +++ b/src/plugins/intel_cpu/tests/functional/custom/single_layer_tests/instances/x64/matmul.cpp @@ -1108,6 +1108,45 @@ INSTANTIATE_TEST_SUITE_P( testParamsDynamicFusingFullUndefShapes, MatMulLayerCPUTest::getTestCaseName); +class FCNotFuseFQCPUTest : public MatMulLayerCPUTest { + void SetUp() override { + MatMulLayerCPUTest::SetUp(); + expectPostOpsToBeFused = false; + } +}; + +TEST_P(FCNotFuseFQCPUTest, CompareWithRefs) { + run(); + CheckPluginRelatedResults(compiledModel, cpuNodeType); +} + +const std::vector& notFuseSmoke() { + static const std::vector params = { + {static_shapes_to_test_representation({{59, 1}, {1, 120}}), {false, true}}, + {static_shapes_to_test_representation({{59, 1}, {1, 120}}), {true, true}}, + + {static_shapes_to_test_representation({{59, 120}, {120, 1}}), {false, false}}, + {static_shapes_to_test_representation({{59, 120}, {120, 1}}), {true, true}}, + + {static_shapes_to_test_representation({{71, 128}, {128, 20}}), {true, false}}, + {static_shapes_to_test_representation({{71, 128}, {128, 20}}), {false, true}}, + }; + return params; +} + +const auto notFuseTestParamsSmoke = ::testing::Combine(::testing::Combine(::testing::ValuesIn(notFuseSmoke()), + ::testing::Values(ElementType::f32), + ::testing::Values(ElementType::undefined), + ::testing::Values(ElementType::undefined), + ::testing::Values(utils::InputLayerType::CONSTANT), + ::testing::Values(ov::test::utils::DEVICE_CPU), + ::testing::Values(emptyAdditionalConfig())), + ::testing::Values(MatMulNodeType::FullyConnected), + ::testing::ValuesIn({fusingFakeQuantizePerBatch, fusingFakeQuantizeFullTensor}), + ::testing::ValuesIn({CPUSpecificParams{{}, {}, {""}, "any_type"}})); + +INSTANTIATE_TEST_SUITE_P(smoke_FC, FCNotFuseFQCPUTest, notFuseTestParamsSmoke, FCNotFuseFQCPUTest::getTestCaseName); + } // namespace } // namespace MatMul } // namespace test diff --git a/src/plugins/intel_cpu/tests/functional/custom/subgraph_tests/src/quantized_matmuls_with_shared_weights.cpp b/src/plugins/intel_cpu/tests/functional/custom/subgraph_tests/src/quantized_matmuls_with_shared_weights.cpp new file mode 100644 index 00000000000000..107d669f442f80 --- /dev/null +++ b/src/plugins/intel_cpu/tests/functional/custom/subgraph_tests/src/quantized_matmuls_with_shared_weights.cpp @@ -0,0 +1,103 @@ +// Copyright (C) 2018-2024 Intel Corporation +// SPDX-License-Identifier: Apache-2.0 +// + +#include + +#include 
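For reference, the helper deleted here implemented a table-driven CRC-64 with the reflected ECMA-182 polynomial 0xc96c5795d7870f42; a self-contained equivalent of the removed hash, reconstructed from the removed lines purely for readability (it is not new plugin code), is:

    #include <cstddef>
    #include <cstdint>

    // Table-driven CRC-64 (ECMA-182, reflected), equivalent to the removed SimpleDataHash
    struct SimpleDataHash {
        uint64_t table[256];
        SimpleDataHash() {
            for (int i = 0; i < 256; i++) {
                uint64_t c = static_cast<uint64_t>(i);
                for (int j = 0; j < 8; j++)
                    c = ((c & 1) ? 0xc96c5795d7870f42 : 0) ^ (c >> 1);
                table[i] = c;
            }
        }
        uint64_t hash(const unsigned char* data, size_t size) const {
            uint64_t crc = 0;
            for (size_t idx = 0; idx < size; idx++)
                crc = table[(unsigned char)crc ^ data[idx]] ^ (crc >> 8);
            return ~crc;
        }
    };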
"common_test_utils/node_builders/constant.hpp" +#include "common_test_utils/node_builders/fake_quantize.hpp" +#include "common_test_utils/node_builders/reshape.hpp" +#include "openvino/openvino.hpp" +#include "shared_test_classes/base/ov_subgraph.hpp" + +namespace ov { +namespace test { + +enum class FQInterval { U8, I8 }; +inline std::ostream& operator<<(std::ostream& os, FQInterval interval) { + switch (interval) { + case FQInterval::U8: + os << "U8"; + break; + case FQInterval::I8: + os << "I8"; + break; + default: + OPENVINO_THROW("Unknown FQInterval"); + } + return os; +} + +typedef std::tuple QuantizedMatMulsWithSharedWeightsParans; + +/* This test verifies the correctness of the hash function computation for the shared weights. + Specifically, it checks that when one op requires compensations computation and second one does not, + the resulting hashes are not identical, and the weights are repacked for each op separately +*/ +class QuantizedMatMulsWithSharedWeightsTest + : public testing::WithParamInterface, + virtual public SubgraphBaseTest { +public: + static std::string getTestCaseName(const testing::TestParamInfo& obj) { + InputShape shape1; + InputShape shape2; + FQInterval interval1; + FQInterval interval2; + std::tie(shape1, shape2, interval1, interval2) = obj.param; + std::ostringstream result; + result << "IS1=" << shape1 << "IS2=" << shape2 << "FQInterval1=" << interval1 << "FQInterval2=" << interval2; + return result.str(); + } + + void SetUp() override { + targetDevice = ov::test::utils::DEVICE_CPU; + abs_threshold = 1e-4; + + InputShape shape1; + InputShape shape2; + FQInterval interval1; + FQInterval interval2; + std::tie(shape1, shape2, interval1, interval2) = this->GetParam(); + init_input_shapes({shape1, shape2}); + + const auto weights = ov::test::utils::make_constant(ov::element::i8, {16, 16}); + const auto convert = std::make_shared(weights, ov::element::f32); + const auto scale = ov::test::utils::make_constant(ov::element::f32, {16, 1}, ov::test::utils::InputGenerateData(0, 1, 5)); + const auto mul = std::make_shared(convert, scale); + + auto build_fq = [](const ov::Output& parent, FQInterval interval_type) { + const auto low = interval_type == FQInterval::I8 ? std::vector{-12.8f} : std::vector{0.f}; + const auto high = interval_type == FQInterval::I8 ? 
std::vector{12.7f} : std::vector{25.5f}; + return ov::test::utils::make_fake_quantize(parent, ov::element::f32, 256, {1, 1, 1, 1}, low, high, low, high); + }; + + const auto param1 = std::make_shared(ov::element::f32, inputDynamicShapes[0]); + const auto fq1 = build_fq(param1, interval1); + const auto mm1 = std::make_shared(fq1, mul, false, true); + + const auto param2 = std::make_shared(ov::element::f32, inputDynamicShapes[1]); + const auto fq2 = build_fq(param2, interval2); + const auto mm2 = std::make_shared(fq2, mul, false, true); + + function = std::make_shared(ov::OutputVector{mm1, mm2}, ov::ParameterVector{param1, param2}); + } +}; + +TEST_P(QuantizedMatMulsWithSharedWeightsTest, CompareWithRefs) { + run(); +} + +namespace { + +std::vector shapes1{{{-1, -1, -1, 16}, {{1, 1, 15, 16}, {1, 1, 12, 16}, {1, 1, 15, 16}}}}; +std::vector shapes2{{{-1, -1, -1, 16}, {{1, 1, 12, 16}, {1, 1, 15, 16}, {1, 1, 12, 16}}}}; +INSTANTIATE_TEST_SUITE_P(smoke_CustomTest, QuantizedMatMulsWithSharedWeightsTest, + ::testing::Combine( + ::testing::ValuesIn(shapes1), + ::testing::ValuesIn(shapes2), + ::testing::Values(FQInterval::U8, FQInterval::I8), + ::testing::Values(FQInterval::U8, FQInterval::I8)), + QuantizedMatMulsWithSharedWeightsTest::getTestCaseName); +} // namespace +} // namespace test +} // namespace ov diff --git a/src/plugins/intel_cpu/tests/functional/shared_tests_instances/low_precision_transformations/recurrent_cell_transformation.cpp b/src/plugins/intel_cpu/tests/functional/shared_tests_instances/low_precision_transformations/recurrent_cell_transformation.cpp index ae5c19559e5a7b..066d81d1f37f36 100644 --- a/src/plugins/intel_cpu/tests/functional/shared_tests_instances/low_precision_transformations/recurrent_cell_transformation.cpp +++ b/src/plugins/intel_cpu/tests/functional/shared_tests_instances/low_precision_transformations/recurrent_cell_transformation.cpp @@ -1,4 +1,4 @@ -// Copyright (C) 2022 Intel Corporation +// Copyright (C) 2022-2024 Intel Corporation // SPDX-License-Identifier: Apache-2.0 // @@ -92,6 +92,7 @@ INSTANTIATE_TEST_SUITE_P(smoke_LPT, RecurrentCellTransformation, ::testing::ValuesIn(weights_shapes), ::testing::Values(ov::test::utils::DEVICE_CPU), ::testing::ValuesIn(trasformationParamValues), + ::testing::ValuesIn({ true, false }), ::testing::ValuesIn(params)), RecurrentCellTransformation::getTestCaseName); } // namespace testValues1 @@ -171,6 +172,7 @@ INSTANTIATE_TEST_SUITE_P(smoke_LPT, RecurrentCellTransformation, ::testing::ValuesIn(weights_shapes), ::testing::Values(ov::test::utils::DEVICE_CPU), ::testing::ValuesIn(trasformationParamValues), + ::testing::ValuesIn({ true, false }), ::testing::ValuesIn(params)), RecurrentCellTransformation::getTestCaseName); } // namespace testValues2 diff --git a/src/plugins/intel_cpu/tests/functional/shared_tests_instances/snippets/matmul.cpp b/src/plugins/intel_cpu/tests/functional/shared_tests_instances/snippets/matmul.cpp index 778bcba7a235a0..1089bdc3faffaa 100644 --- a/src/plugins/intel_cpu/tests/functional/shared_tests_instances/snippets/matmul.cpp +++ b/src/plugins/intel_cpu/tests/functional/shared_tests_instances/snippets/matmul.cpp @@ -66,11 +66,39 @@ INSTANTIATE_TEST_SUITE_P(smoke_Snippets_MatMult, MatMul, std::vector> input_shapes_dynamic{ + // All dimensions are dynamic { {PartialShape{-1, -1, -1, -1}, {{2, 1, 32, 64}, {2, 2, 10, 20}, {2, 2, 100, 80}, - {2, 2, 10, 20}, {2, 1, 32, 64}}}, + {2, 2, 10, 20}, {2, 1, 32, 64}, {2, 3, 64, 55}}}, {PartialShape{-1, -1, -1, -1}, {{1, 3, 64, 128}, {2, 2, 20, 30}, {2, 2, 80, 120}, 
- {2, 2, 20, 30}, {1, 3, 64, 128}}} + {2, 2, 20, 30}, {1, 3, 64, 128}, {2, 3, 55, 128}}} + }, + // Only M dimension is dynamic + one one loop by M + { + {PartialShape{-1, 2, -1, 64}, {{2, 2, 64, 64}, {2, 2, 64, 64}, {2, 2, 35, 64}, + {2, 2, 120, 64}, {2, 2, 15, 64}, {2, 2, 35, 64}}}, + {PartialShape{-1, 2, 64, 32}, {{2, 2, 64, 32}, {2, 2, 64, 32}, {1, 2, 64, 32}, + {1, 2, 64, 32}, {2, 2, 64, 32}, {1, 2, 64, 32}}} + }, + // Only M dimension is dynamic + all Loops (by M, N, K) + { + {PartialShape{2, 2, -1, 550}, {{2, 2, 64, 550}, {2, 2, 16, 550}, {2, 2, 35, 550}, + {2, 2, 16, 550}, {2, 2, 70, 550}, {2, 2, 64, 550}}}, + {PartialShape{2, 1, 550, 70}, {{2, 1, 550, 70}, {2, 1, 550, 70}, {2, 1, 550, 70}, + {2, 1, 550, 70}, {2, 1, 550, 70}, {2, 1, 550, 70}}} + }, + // Only K dimension is dynamic + { + {PartialShape{2, 2, 70, -1}, {{2, 2, 70, 128}, {2, 2, 70, 10}, {2, 2, 70, 33}, + {2, 2, 70, 35}, {2, 2, 70, 100}}}, + {PartialShape{2, 2, -1, 70}, {{2, 2, 128, 70}, {2, 2, 10, 70}, {2, 2, 33, 70}, + {2, 2, 35, 70}, {2, 2, 100, 70}}} + }, + // Only N dimension is dynamic + { + {PartialShape{}, {{2, 2, 65, 550}}}, + {PartialShape{2, 2, 550, -1}, {{2, 2, 550, 70}, {2, 2, 550, 12}, {2, 2, 550, 70}, + {2, 2, 550, 12}, {2, 2, 550, 10}}} }, }; diff --git a/src/plugins/intel_cpu/tests/functional/utils/fusing_test_utils.cpp b/src/plugins/intel_cpu/tests/functional/utils/fusing_test_utils.cpp index 39e60bdfe8a235..6f5e559201b30e 100644 --- a/src/plugins/intel_cpu/tests/functional/utils/fusing_test_utils.cpp +++ b/src/plugins/intel_cpu/tests/functional/utils/fusing_test_utils.cpp @@ -58,7 +58,11 @@ void CpuTestWithFusing::CheckFusingResults(const std::shared_ptr postOpMgrPtr; std::vector fusedOps; bool checkFusingPosition = true; + bool expectPostOpsToBeFused = true; }; static int getChannelAxis(const ov::AxisSet &axes, bool keep_dims) { @@ -304,6 +305,26 @@ const auto fusingFakeQuantizePerChannel = fusingSpecificParams{std::make_shared< return ov::test::utils::make_fake_quantize(cfg.input, localPrc, 256, newShape); }, "FakeQuantize(PerChannel)"}}), {"FakeQuantize"}}; +const auto fusingFakeQuantizePerBatch = fusingSpecificParams{std::make_shared(std::vector{ + {[](postNodeConfig& cfg){ + auto localPrc = cfg.input->get_element_type(); + const auto shape = cfg.input->get_output_partial_shape(0); + ov::Shape perBatchSize(shape.size(), 1); + perBatchSize[0] = shape[0].get_length(); + return ov::test::utils::make_fake_quantize(cfg.input, localPrc, 256, perBatchSize); + }, "FakeQuantize(PerBatch)"}}), {"FakeQuantize"}}; + +const auto fusingFakeQuantizeFullTensor = fusingSpecificParams{std::make_shared(std::vector{ + {[](postNodeConfig& cfg){ + auto localPrc = cfg.input->get_element_type(); + const auto shape = cfg.input->get_output_partial_shape(0); + ov::Shape fullTensorShape(shape.size(), 1); + for (size_t axis = 0; axis < shape.size(); axis++) { + fullTensorShape[axis] = shape[axis].get_length(); + } + return ov::test::utils::make_fake_quantize(cfg.input, localPrc, 256, fullTensorShape); + }, "FakeQuantize(FullTensor)"}}), {"FakeQuantize"}}; + const auto fusingFakeQuantizePerChannelRelu = fusingSpecificParams{std::make_shared(std::vector{ {[](postNodeConfig& cfg){ auto localPrc = cfg.input->get_element_type(); diff --git a/src/plugins/intel_cpu/tests/unit/snippets_transformations/x64/lowered/brgemm_blocking.cpp b/src/plugins/intel_cpu/tests/unit/snippets_transformations/x64/lowered/brgemm_blocking.cpp index ef0ffcd70e6c39..82cbcdfa2c21f3 100644 --- 
a/src/plugins/intel_cpu/tests/unit/snippets_transformations/x64/lowered/brgemm_blocking.cpp +++ b/src/plugins/intel_cpu/tests/unit/snippets_transformations/x64/lowered/brgemm_blocking.cpp @@ -7,6 +7,7 @@ #include "lir_test_utils.hpp" #include "openvino/opsets/opset10.hpp" #include "snippets/lowered/linear_ir.hpp" +#include "snippets/lowered/loop_info.hpp" #include "snippets/snippets_isa.hpp" #include "transformations/snippets/x64/op/brgemm_copy_b.hpp" #include "transformations/snippets/x64/op/brgemm_cpu.hpp" @@ -22,6 +23,7 @@ using namespace ov::snippets; using BRGEMM_TYPE = intel_cpu::brgemm_utils::BRGEMM_TYPE; namespace { + void create_brgemm_loop_infos(const LinearIRPtr& linear_ir, const ExpressionPtr& brgemm_expr, size_t m = 0, size_t m_blk = 0, @@ -31,21 +33,30 @@ void create_brgemm_loop_infos(const LinearIRPtr& linear_ir, const bool n_block = k != 0 && k_blk != 0; const bool m_block = m != 0 && m_blk != 0; if (k_block) { - create_and_add_unified_loop_info(linear_ir, k, k_blk, - {LoopPort(brgemm_expr->get_input_port(0)), LoopPort(brgemm_expr->get_input_port(1), true, 1)}, - {LoopPort(brgemm_expr->get_output_port(0), false)}); - const auto& loop_info = linear_ir->get_loop_manager()->get_loop_info(0); + const auto loop_info = + std::make_shared(k, k_blk, + std::vector{LoopPort(brgemm_expr->get_input_port(0)), + LoopPort(brgemm_expr->get_input_port(1), true, 1)}, + std::vector{LoopPort(brgemm_expr->get_output_port(0), false)}, + ov::intel_cpu::pass::BrgemmBlocking::get_default_blocking_loop_handlers(k, k_block)); loop_info->register_pass_to_handler(0.f); + linear_ir->get_loop_manager()->add_loop_info(loop_info); } if (n_block) { - create_and_add_unified_loop_info(linear_ir, n, n_blk, - {LoopPort(brgemm_expr->get_input_port(0), false), LoopPort(brgemm_expr->get_input_port(1))}, - {LoopPort(brgemm_expr->get_output_port(0))}); + linear_ir->get_loop_manager()->add_loop_info( + std::make_shared(n, n_blk, + std::vector{LoopPort(brgemm_expr->get_input_port(0), false), + LoopPort(brgemm_expr->get_input_port(1))}, + std::vector{LoopPort(brgemm_expr->get_output_port(0))}, + ov::intel_cpu::pass::BrgemmBlocking::get_default_blocking_loop_handlers(n, n_block))); } if (m_block) { - create_and_add_unified_loop_info(linear_ir, m, m_blk, - {LoopPort(brgemm_expr->get_input_port(0), true, 1), LoopPort(brgemm_expr->get_input_port(1), false, 1)}, - {LoopPort(brgemm_expr->get_output_port(0), true, 1)}); + linear_ir->get_loop_manager()->add_loop_info( + std::make_shared(m, m_blk, + std::vector{LoopPort(brgemm_expr->get_input_port(0), true, 1), + LoopPort(brgemm_expr->get_input_port(1), false, 1)}, + std::vector{LoopPort(brgemm_expr->get_output_port(0), true, 1)}, + ov::intel_cpu::pass::BrgemmBlocking::get_default_blocking_loop_handlers(m, m_block))); } } @@ -59,22 +70,31 @@ void create_brgemm_with_copy_b_loop_infos(const LinearIRPtr& linear_ir, const bool n_block = k != 0 && k_blk != 0; const bool m_block = m != 0 && m_blk != 0; if (k_block) { - create_and_add_unified_loop_info(linear_ir, k, k_blk, - {LoopPort(brgemm_expr->get_input_port(0)), LoopPort(copy_b_expr->get_input_port(0), true, 1)}, - {LoopPort(brgemm_expr->get_output_port(0), false)}); - const auto& loop_info = linear_ir->get_loop_manager()->get_loop_info(0); + const auto loop_info = + std::make_shared(k, k_blk, + std::vector{LoopPort(brgemm_expr->get_input_port(0)), + LoopPort(copy_b_expr->get_input_port(0), true, 1)}, + std::vector{LoopPort(brgemm_expr->get_output_port(0), false)}, + 
ov::intel_cpu::pass::BrgemmBlocking::get_default_blocking_loop_handlers(k, k_block)); loop_info->register_pass_to_handler(0.f); + linear_ir->get_loop_manager()->add_loop_info(loop_info); } if (n_block) { - create_and_add_unified_loop_info(linear_ir, n, n_blk, - {LoopPort(brgemm_expr->get_input_port(0), false), LoopPort(copy_b_expr->get_input_port(0))}, - {LoopPort(brgemm_expr->get_output_port(0))}); + linear_ir->get_loop_manager()->add_loop_info( + std::make_shared(n, n_blk, + std::vector{LoopPort(brgemm_expr->get_input_port(0), false), + LoopPort(copy_b_expr->get_input_port(0))}, + std::vector{LoopPort(brgemm_expr->get_output_port(0))}, + ov::intel_cpu::pass::BrgemmBlocking::get_default_blocking_loop_handlers(n, n_block))); } if (m_block) { const auto& second_input_port = k_block || n_block ? copy_b_expr->get_input_port(0) : brgemm_expr->get_input_port(1); - create_and_add_unified_loop_info(linear_ir, m, m_blk, - {LoopPort(brgemm_expr->get_input_port(0), true, 1), LoopPort(second_input_port, false, 1)}, - {LoopPort(brgemm_expr->get_output_port(0), true, 1)}); + linear_ir->get_loop_manager()->add_loop_info( + std::make_shared(m, m_blk, + std::vector{LoopPort(brgemm_expr->get_input_port(0), true, 1), + LoopPort(second_input_port, false, 1)}, + std::vector{LoopPort(brgemm_expr->get_output_port(0), true, 1)}, + ov::intel_cpu::pass::BrgemmBlocking::get_default_blocking_loop_handlers(m, m_block))); } } } // namespace @@ -148,7 +168,8 @@ TEST_F(BrgemmBlockingTest, BlockingIsNotNeeded) { auto brgemm = linear_ir_ref->push_node(data_a.second, data_b.second, BRGEMM_TYPE::STAND_ALONE, 0, 0, 0, layout, layout, layout, m, k, n); brgemm.second->set_beta(0.f); - init_expr_descriptors(*brgemm.first, {{m, k}, {k, n}, {m, n}}); + const auto full_subtensor = VectorDims(2, ov::snippets::utils::get_full_dim_value()); + init_expr_descriptors(*brgemm.first, std::vector(3, full_subtensor)); auto result = linear_ir_ref->push_node(brgemm.second); } } @@ -201,6 +222,7 @@ TEST_F(BrgemmBlockingTest, WithDataRepackingOnlyByM) { const ov::PartialShape input_shape_b{1, 16, 64, 384}; const auto precision_a = ov::element::u8; const auto precision_b = ov::element::i8; + const auto full = ov::snippets::utils::get_full_dim_value(); { auto data_a = linear_ir->push_node(precision_a, input_shape_a); @@ -226,7 +248,7 @@ TEST_F(BrgemmBlockingTest, WithDataRepackingOnlyByM) { auto brgemm = linear_ir_ref->push_node(data_a.second, copy_b.second, BRGEMM_TYPE::REPACKING_ONLY, 0, 0, 0, VectorDims{}, VectorDims{}, VectorDims{}, m_blk, k, n, 0.f); const auto& brgemm_expr = *brgemm.first; - init_expr_descriptors(brgemm_expr, {{m_blk, k}, {k, n}, {m_blk, n}}); + init_expr_descriptors(brgemm_expr, {{m_blk, full}, {full, full}, {m_blk, full}}); create_brgemm_with_copy_b_loop_infos(linear_ir_ref, brgemm_expr, copy_b_expr, 384, m_blk); brgemm_expr->set_loop_ids({0}); auto result = linear_ir_ref->push_node(brgemm.second); diff --git a/src/plugins/intel_cpu/tests/unit/snippets_transformations/x64/lowered/buffer_allocation.cpp b/src/plugins/intel_cpu/tests/unit/snippets_transformations/x64/lowered/buffer_allocation.cpp index c618c9e0d86fb5..2abfde0b3bb431 100644 --- a/src/plugins/intel_cpu/tests/unit/snippets_transformations/x64/lowered/buffer_allocation.cpp +++ b/src/plugins/intel_cpu/tests/unit/snippets_transformations/x64/lowered/buffer_allocation.cpp @@ -138,8 +138,8 @@ class MHAFP32BufferAllocationTest : public BufferAllocationCPUTest { const size_t k_blk = 16; const size_t n_blk = 64; const auto subtensor_scalar = std::vector{1}; - const 
auto subtensor_power = std::vector{1, ov::snippets::lowered::PortDescriptor::ServiceDimensions::FULL_DIM}; - const auto subtensor_full = std::vector(2, ov::snippets::lowered::PortDescriptor::ServiceDimensions::FULL_DIM); + const auto subtensor_power = std::vector{1, ov::snippets::utils::get_full_dim_value()}; + const auto subtensor_full = std::vector(2, ov::snippets::utils::get_full_dim_value()); const auto parameter0 = std::make_shared(ov::element::f32, ov::PartialShape({1, 12, 128, 64})); const auto parameter1 = std::make_shared(ov::element::f32, ov::PartialShape({1, 128, 12, 64})); @@ -196,8 +196,8 @@ class MHABF16AMXBufferAllocationTest : public BufferAllocationCPUTest { const size_t k_blk = 16; const size_t n_blk = 64; const auto subtensor_scalar = std::vector{1}; - const auto subtensor_power = std::vector{1, ov::snippets::lowered::PortDescriptor::ServiceDimensions::FULL_DIM}; - const auto subtensor_full = std::vector(2, ov::snippets::lowered::PortDescriptor::ServiceDimensions::FULL_DIM); + const auto subtensor_power = std::vector{1, ov::snippets::utils::get_full_dim_value()}; + const auto subtensor_full = std::vector(2, ov::snippets::utils::get_full_dim_value()); const auto parameter0 = std::make_shared(ov::element::bf16, ov::PartialShape({1, 12, 128, 64})); const auto parameter1 = std::make_shared(ov::element::bf16, ov::PartialShape({1, 128, 12, 64})); diff --git a/src/plugins/intel_cpu/thirdparty/onednn b/src/plugins/intel_cpu/thirdparty/onednn index f0f8defe2dff50..f1cf31a2fa0979 160000 --- a/src/plugins/intel_cpu/thirdparty/onednn +++ b/src/plugins/intel_cpu/thirdparty/onednn @@ -1 +1 @@ -Subproject commit f0f8defe2dff5058391f2a66e775e20b5de33b08 +Subproject commit f1cf31a2fa097932b8d74e88bf4bd941382504e4 diff --git a/src/plugins/intel_gpu/include/intel_gpu/graph/kernel_impl_params.hpp b/src/plugins/intel_gpu/include/intel_gpu/graph/kernel_impl_params.hpp index 0fc6cbdac13132..fa8a8807bbd92c 100644 --- a/src/plugins/intel_gpu/include/intel_gpu/graph/kernel_impl_params.hpp +++ b/src/plugins/intel_gpu/include/intel_gpu/graph/kernel_impl_params.hpp @@ -114,6 +114,15 @@ struct kernel_impl_params final { return output_layouts[idx]; } + layout& get_output_layout(size_t idx = 0) { + OPENVINO_ASSERT(output_layouts.size() > idx, + "The size of output layouts must be greater than the requested index: ", + "Requested index is ", idx, ",", + "but the size of output layouts is ", output_layouts.size()); + return output_layouts[idx]; + } + + bool has_fused_primitives() const { return !fused_desc.empty(); } ov::element::Type_t get_output_element_type() const { diff --git a/src/plugins/intel_gpu/include/intel_gpu/runtime/layout.hpp b/src/plugins/intel_gpu/include/intel_gpu/runtime/layout.hpp index a454fc7afdee15..52e9f643c299d7 100644 --- a/src/plugins/intel_gpu/include/intel_gpu/runtime/layout.hpp +++ b/src/plugins/intel_gpu/include/intel_gpu/runtime/layout.hpp @@ -288,6 +288,15 @@ struct layout { return *this; } + layout clone_with_other_shape(const ov::PartialShape& new_shape) { + return layout(new_shape, this->data_type, this->format, this->data_padding); + } + + layout clone_with_other_shape(const ov::Shape& new_shape) { + return clone_with_other_shape(ov::PartialShape(new_shape)); + } + + friend bool operator==(const layout& lhs, const layout& rhs) { return lhs.data_type == rhs.data_type && lhs.format == rhs.format && lhs.size == rhs.size && lhs.data_padding == rhs.data_padding; } @@ -306,7 +315,7 @@ struct layout { return (lhs.data_padding < rhs.data_padding); } - /// Number of elements 
to be stored in this memory layout + /// Number of elements to be stored in this layout size_t count() const; /// Layout size with padding included diff --git a/src/plugins/intel_gpu/src/graph/crop.cpp b/src/plugins/intel_gpu/src/graph/crop.cpp index 146a1fa89b400b..09c5f01f216e57 100644 --- a/src/plugins/intel_gpu/src/graph/crop.cpp +++ b/src/plugins/intel_gpu/src/graph/crop.cpp @@ -250,7 +250,7 @@ crop_inst::typed_primitive_inst(network& network, crop_node const& node) : paren "Invalid Batch offset: exceeds data for output!"); } - if (node.can_be_optimized()) { + if (!node.is_dynamic() && node.can_be_optimized()) { update_output_memory(); } } diff --git a/src/plugins/intel_gpu/src/graph/graph_optimizer/prepare_buffer_fusing.cpp b/src/plugins/intel_gpu/src/graph/graph_optimizer/prepare_buffer_fusing.cpp index e3471b37c05bd9..17cc9e9f42d38a 100644 --- a/src/plugins/intel_gpu/src/graph/graph_optimizer/prepare_buffer_fusing.cpp +++ b/src/plugins/intel_gpu/src/graph/graph_optimizer/prepare_buffer_fusing.cpp @@ -470,7 +470,7 @@ bool crop_in_place_optimization::match(const program_node& node, for (auto user : node.get_users()) { // If the user node's output shape is already static, the padding // w/ dyn pad mask will not be propagated properly at runtime - if (node.is_dynamic() && !user->get_output_layout().is_dynamic()) + if (node.is_dynamic() && !user->get_output_pshape().is_dynamic()) return false; // do not optimize when next node is concatenation which is not output if (user->is_type() && !user->is_output()) @@ -484,10 +484,10 @@ bool crop_in_place_optimization::match(const program_node& node, if (node.is_dynamic() && (user->is_type() || user->is_type())) return false; if (user->is_type()) { - // runtime buffer fusing is only handled when there is only one reshape user - if (node.is_dynamic() && node.get_users().size() != 1) - return false; auto& reshape_node = user->as(); + // runtime buffer fusing is only handled when there is only one reshape user and reshape mode is base + if (node.is_dynamic() && (node.get_users().size() != 1 || reshape_node.get_primitive()->mode != reshape::reshape_mode::base)) + return false; if (can_reshape_be_optimized(reshape_node) && (!node.is_dynamic() || !reshape_node.is_runtime_propagatable_padding())) return false; @@ -500,6 +500,14 @@ bool crop_in_place_optimization::match(const program_node& node, if (node.is_constant()) return false; + // do not optimize variadic_split crop when either input1 or input2 is not constant. + // VariadicSplit ngraph shape infer requires the values of axis(input1) and split_lengths(input2). + // Non-constant input1/input2 would make runtime buffer fusing risky.
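To make the constant-input requirement above concrete: VariadicSplit's output shapes, and therefore the in-place crop offsets, depend on the values of inputs 1 and 2, not just their shapes. A small standalone illustration with assumed sizes (not part of the patch) precedes the actual guard below:

    #include <iostream>
    #include <memory>
    #include "openvino/op/constant.hpp"
    #include "openvino/op/parameter.hpp"
    #include "openvino/op/variadic_split.hpp"

    int main() {
        // data of shape {1, 2, 16}, split along axis 2 into chunks of 4 and 12 (assumed values)
        auto data = std::make_shared<ov::op::v0::Parameter>(ov::element::f32, ov::Shape{1, 2, 16});
        auto axis = ov::op::v0::Constant::create(ov::element::i64, ov::Shape{}, {2});
        auto lengths = ov::op::v0::Constant::create(ov::element::i64, ov::Shape{2}, {4, 12});
        auto split = std::make_shared<ov::op::v1::VariadicSplit>(data, axis, lengths);
        // With constant axis/split_lengths the offsets are known statically:
        // output 0 is {1, 2, 4} and output 1 is {1, 2, 12}, i.e. the second crop starts at offset 4.
        // If axis or split_lengths came from another node, those offsets would only be known at runtime,
        // which is why the in-place crop optimization is skipped in that case.
        std::cout << split->get_output_shape(0) << " " << split->get_output_shape(1) << std::endl;
        return 0;
    }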
+ auto& crop_node = node.as(); + if ((crop_node.get_primitive()->op_mode == cldnn::crop_ngraph_op_mode::variadic_split) && + (!crop_node.get_dependency(1).is_constant() || !crop_node.get_dependency(2).is_constant())) + return false; + if (node.get_users().size() > 0) { if (node.get_program().is_body_program() && node.get_dependency(0).is_type()) { return false; diff --git a/src/plugins/intel_gpu/src/graph/graph_optimizer/reorder_inputs.cpp b/src/plugins/intel_gpu/src/graph/graph_optimizer/reorder_inputs.cpp index c6de09403c1cef..88dcb8865d937a 100644 --- a/src/plugins/intel_gpu/src/graph/graph_optimizer/reorder_inputs.cpp +++ b/src/plugins/intel_gpu/src/graph/graph_optimizer/reorder_inputs.cpp @@ -59,7 +59,20 @@ std::map get_preferred_formats(program& p, layout_o onednn_impls_counter++; } - if (onednn_impls_counter < 1 && lo.get_optimization_attributes().use_onednn_impls) { + // Fall back to ocl when an asymmetric-weights convolution is present. + size_t total_convs = 0; + size_t num_asym_wei_convs = 0; + for (auto n : p.get_processing_order()) { + if (n->is_type()) { + total_convs++; + if (n->as().weights_zero_points_term()) + num_asym_wei_convs++; + } + } + + GPU_DEBUG_LOG << "Number of convolutions with weights zero points: " << num_asym_wei_convs << "/" << total_convs << std::endl; + + if (lo.get_optimization_attributes().use_onednn_impls && (onednn_impls_counter < 1 || num_asym_wei_convs > 0)) { should_update_fmt_map = true; lo.set_optimization_attribute(layout_optimizer::optimization_attributes_type::use_onednn_impls, 0); GPU_DEBUG_LOG << "Disable oneDNN implementations globally" << std::endl; diff --git a/src/plugins/intel_gpu/src/graph/graph_optimizer/select_preferred_formats.cpp b/src/plugins/intel_gpu/src/graph/graph_optimizer/select_preferred_formats.cpp index e44ee477c0812f..70d0b70c7fa9fa 100644 --- a/src/plugins/intel_gpu/src/graph/graph_optimizer/select_preferred_formats.cpp +++ b/src/plugins/intel_gpu/src/graph/graph_optimizer/select_preferred_formats.cpp @@ -31,6 +31,15 @@ void select_preferred_formats::run(program& p) { return; #ifdef ENABLE_ONEDNN_FOR_GPU + + // Fall back to ocl when an asymmetric-weights convolution is present. + if (_lo.get_optimization_attributes().use_onednn_impls) { + for (auto n : p.get_processing_order()) { + if (n->is_type() && n->as().weights_zero_points_term()) + return; + } + } + auto forcing_map = _lo.get_implementation_forcing(); engine.create_onednn_engine(p.get_config()); diff --git a/src/plugins/intel_gpu/src/graph/layout_optimizer.cpp b/src/plugins/intel_gpu/src/graph/layout_optimizer.cpp index b2acc2abf1c173..bcada1fa769fea 100644 --- a/src/plugins/intel_gpu/src/graph/layout_optimizer.cpp +++ b/src/plugins/intel_gpu/src/graph/layout_optimizer.cpp @@ -1937,13 +1937,17 @@ void layout_optimizer::select_preferred_formats_for_onednn(program_node& node, d prim_input = node.get_dependency_index(node.as().input()); if (node.is_type()) prim_input = node.get_dependency_index(node.as().input()); + size_t prim_weights = node.get_primitive()->input_size(); // Note: did not handle attribute properly. especially for zero-point cldnn::format src_fmt = format::any; - if (idx == prim_input) + if (idx == prim_input) { src_fmt = onednn::find_data_format(prim_desc.src_desc()); - else // Dep for fused post ops + } else if (idx == prim_weights) { + src_fmt = format::custom; + } else { // Dep for fused post ops src_fmt = onednn::find_data_format(prim_desc.dst_desc()); + } // WA: shallow convolution needs to set input format by bfyx.
// onednn recommended byxf for input format. It will insert reorder before shallow conv. diff --git a/src/plugins/intel_gpu/src/graph/primitive_inst.cpp b/src/plugins/intel_gpu/src/graph/primitive_inst.cpp index 522fb03f15c5bd..f8267673722e64 100644 --- a/src/plugins/intel_gpu/src/graph/primitive_inst.cpp +++ b/src/plugins/intel_gpu/src/graph/primitive_inst.cpp @@ -465,7 +465,7 @@ void primitive_inst::update_shape() { auto desc = get_node().as().get_primitive(); auto var_mem_size = get_network().get_variable(desc->variable_info.variable_id).get_actual_mem_size(); // Need to trigger realloc_if_needed - if (var_mem_size < _impl_params->get_output_layout(0).get_buffer_size().count()) + if (var_mem_size < _impl_params->get_output_layout(0).get_linear_size()) set_shape_change(); } } @@ -684,13 +684,13 @@ event::ptr primitive_inst::realloc_if_needed() { prealloc_shape[seq_axis] += tmp_prealloc_count; required_buffer_size = std::accumulate(prealloc_shape.begin(), prealloc_shape.end(), size_t(1), std::multiplies()); } else { - required_buffer_size = (updated_layouts[i].get_buffer_size().count()); + required_buffer_size = (updated_layouts[i].get_linear_size()); } if (required_buffer_size * 10 < _max_output_layout_count[i]) { reclaim = true; } if (reclaim) { - GPU_DEBUG_TRACE_DETAIL << id() << ": Updated output[" << i << "] size " << updated_layouts[i].get_buffer_size().count() + GPU_DEBUG_TRACE_DETAIL << id() << ": Updated output[" << i << "] size " << updated_layouts[i].get_linear_size() << " is much smaller than current memory size! " << _max_output_layout_count[i] << "Reset memory of output " << i << std::endl; _max_output_layout_count[i] = 0; @@ -705,7 +705,7 @@ event::ptr primitive_inst::realloc_if_needed() { } for (size_t i = 0; i < actual_layouts.size(); ++i) { - bool can_reuse_buffer = (_outputs[i] && updated_layouts[i].get_buffer_size().count() <= _max_output_layout_count[i]); + bool can_reuse_buffer = (_outputs[i] && updated_layouts[i].get_linear_size() <= _max_output_layout_count[i]); std::pair prealloc_info; if (_node->is_type() && i == 0) { const auto& desc = _node->as().get_primitive(); @@ -717,17 +717,15 @@ event::ptr primitive_inst::realloc_if_needed() { prealloc_info = sp.predict_preallocation_shape(id(), updated_layouts[i], can_reuse_buffer, i, tmp_prealloc_count); } if (prealloc_info.first && sp.can_preallocate(ov::shape_size(prealloc_info.second) * (dt_sizes_in_B[i]))) { - auto new_layout = updated_layouts[i]; - new_layout.set_partial_shape(prealloc_info.second); - updated_params.output_layouts[i] = new_layout; + updated_params.output_layouts[i] = updated_layouts[i].clone_with_other_shape(prealloc_info.second); } - if (updated_params.output_layouts[i].get_buffer_size().count() < updated_layouts[i].get_buffer_size().count()) { + if (updated_params.output_layouts[i].get_linear_size() < updated_layouts[i].get_linear_size()) { updated_params.output_layouts[i] = updated_layouts[i]; } if (can_reuse_buffer) { GPU_DEBUG_TRACE_DETAIL << id() << ": reuse previously allocated output buffer[" << i << "] - " - << actual_layouts[i].get_buffer_size().count() << "/" << _max_output_layout_count[i] + << actual_layouts[i].get_linear_size() << "/" << _max_output_layout_count[i] << std::endl; if (_node->is_type() && (i == 0)) { // kv_cache has already assigned memory. @@ -759,7 +757,7 @@ event::ptr primitive_inst::realloc_if_needed() { GPU_DEBUG_TRACE_DETAIL << id() << ": realloc output memory. 
" << std::endl; GPU_DEBUG_TRACE_DETAIL << " outputs[" << i << "] " << " Current buffer_size=" << _max_output_layout_count[i] - << " Requested buffer_size=" << updated_layouts[i].get_buffer_size().count() + << " Requested buffer_size=" << updated_layouts[i].get_linear_size() << std::endl; _outputs[i] = allocate_output(_network.get_engine(), _network.get_memory_pool(), @@ -773,7 +771,7 @@ event::ptr primitive_inst::realloc_if_needed() { is_output_buffer(this, true), output_memory_ptr(i).get(), true); - _max_output_layout_count[i] = updated_params.output_layouts[i].get_buffer_size().count(); + _max_output_layout_count[i] = updated_params.output_layouts[i].get_linear_size(); GPU_DEBUG_CODE(std::string memalloc_info = ""); GPU_DEBUG_CODE(memalloc_info += (((_outputs.size() > 1) ? ("o" + to_string(i) + ":") : "") + (_outputs[i]->from_memory_pool ? "from_pool" : "new_alloc"));) @@ -1189,6 +1187,7 @@ void primitive_inst::do_runtime_in_place_kv_cache() { } const auto& desc = _node->as().get_primitive(); auto& past_layout = _impl_params->input_layouts[0]; + auto& new_layout = _impl_params->input_layouts[1]; auto& present_layout = _impl_params->output_layouts[0]; const auto& sequence_axis = desc->concat_axis; const auto& gather_axis = desc->gather_axis; @@ -1207,9 +1206,10 @@ void primitive_inst::do_runtime_in_place_kv_cache() { GPU_DEBUG_TRACE_DETAIL << "[do runtime kv_cache opt] " << id() << " initial present_layout : " << present_layout.to_string() << std::endl; GPU_DEBUG_TRACE_DETAIL << "[do runtime kv_cache opt] " << id() << " initial past_layout : " << past_layout.to_string() << std::endl; auto max_pad = kv_cache_inst::get_max_pad(past_layout, _deps[0].first->_max_output_layout_count[0], sequence_axis_legacy, "past_layout"); - - if (max_pad > 0) { - kv_cache_inst::update_pad(present_layout, max_pad - 1, sequence_axis_legacy); + const auto new_seq_len = static_cast(new_layout.get_shape()[sequence_axis]); + // In chatbot scenario, when chat history must be stored in kvcache, new_seq_len may not be 1 even if max_pad is greater than 0 + if (max_pad - new_seq_len >= 0) { + kv_cache_inst::update_pad(present_layout, max_pad - new_seq_len, sequence_axis_legacy); GPU_DEBUG_TRACE_DETAIL << "[do runtime_in_place_kv_cache] " << id() << " Updated present_layout's pad : " << present_layout.to_string() << std::endl; auto& variable = get_network().get_variable(desc->variable_info.variable_id); variable.set_layout(present_layout); @@ -1850,7 +1850,7 @@ primitive_inst::primitive_inst(network & network, program_node const& node, bool _impl_params->strm = _network.get_stream_ptr(); for (size_t i = 0; i < get_node().get_output_layouts().size(); ++i) { if (_outputs.size() > i) { - _max_output_layout_count.push_back(_outputs[i] ? _outputs[i]->get_layout().get_buffer_size().count() : 0); + _max_output_layout_count.push_back(_outputs[i] ? 
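A tiny worked example of the new KV-cache padding update (the numbers are assumed, purely for illustration): with max_pad free slots along the sequence axis and a newly appended chunk of new_seq_len tokens, the present layout keeps max_pad - new_seq_len slots of padding, and in-place concatenation is only possible while that difference stays non-negative:

    #include <cstdint>
    #include <iostream>

    int main() {
        const int64_t max_pad = 8;      // assumed: free (padded) slots left along the sequence axis
        const int64_t new_seq_len = 3;  // assumed: e.g. several tokens of restored chat history, not just one generated token
        if (max_pad - new_seq_len >= 0)
            std::cout << "in-place KV-cache concat, remaining pad = " << (max_pad - new_seq_len) << std::endl;
        else
            std::cout << "not enough padding left, the cache buffer must be reallocated" << std::endl;
        return 0;
    }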
_outputs[i]->get_layout().get_linear_size() : 0); } else { _outputs.push_back(nullptr); _max_output_layout_count.push_back(0); @@ -1983,9 +1983,9 @@ event::ptr primitive_inst::update_weights() { GPU_DEBUG_TRACE_DETAIL << id() << ": add original weights memory " << original_layout.to_short_string() << " to weights cache; " << "cache_size=" << _reordered_weights_cache.size() << "/" << _reordered_weights_cache.capacity() << std::endl; } else { - auto expected_layout = reorder_kernel_params->get_output_layout(); // Set original partial shape, because it may be lost during kernel_selector::weights_tensor -> layout conversion - expected_layout.set_partial_shape(original_layout.get_partial_shape()); + auto expected_layout = + reorder_kernel_params->get_output_layout().clone_with_other_shape(original_layout.get_partial_shape()); _impl_params->weights_layout = optional_layout(expected_layout); if (_reordered_weights_cache.has(expected_layout)) { diff --git a/src/plugins/intel_gpu/src/graph/reshape.cpp b/src/plugins/intel_gpu/src/graph/reshape.cpp index eed87ed759211d..5cbef11dd3b045 100644 --- a/src/plugins/intel_gpu/src/graph/reshape.cpp +++ b/src/plugins/intel_gpu/src/graph/reshape.cpp @@ -109,7 +109,8 @@ layout reshape_inst::calc_output_layout(reshape_node const& node, kernel_impl_pa auto desc = impl_param.typed_desc(); if (desc->output_shape.count() == 0) { if (desc->output_partial_shape.size() != 0) { - return layout{desc->output_partial_shape, input_layout.data_type, input_layout.format}; + format out_fmt = format::adjust_to_rank(input_layout.format, desc->output_partial_shape.rank().get_length()); + return layout{desc->output_partial_shape, input_layout.data_type, out_fmt}; } else { OPENVINO_ASSERT("[GPU] Output shape is not provided"); } diff --git a/src/plugins/intel_gpu/src/kernel_selector/kernel_base.cpp b/src/plugins/intel_gpu/src/kernel_selector/kernel_base.cpp index 890e086f28a6bd..271e8d6a620890 100644 --- a/src/plugins/intel_gpu/src/kernel_selector/kernel_base.cpp +++ b/src/plugins/intel_gpu/src/kernel_selector/kernel_base.cpp @@ -140,8 +140,10 @@ JitConstants KernelBase::MakeFusedOpsJitConstants(const kernel_selector::base_pa if (conf.empty()) return jit; - if (params.fused_ops.size() == 1 && params.fused_ops[0].GetType() == KernelType::REORDER) + if (std::all_of(params.fused_ops.cbegin(), params.fused_ops.cend(), + [](fused_operation_desc desc) { return desc.GetType() == KernelType::REORDER; })) { return jit; + } try { for (auto& c : conf) { diff --git a/src/plugins/intel_gpu/src/kernel_selector/kernels/fully_connected/fully_connected_kernel_bf_tiled.cpp b/src/plugins/intel_gpu/src/kernel_selector/kernels/fully_connected/fully_connected_kernel_bf_tiled.cpp index 07d81dce5e3f23..1613afec063eb1 100644 --- a/src/plugins/intel_gpu/src/kernel_selector/kernels/fully_connected/fully_connected_kernel_bf_tiled.cpp +++ b/src/plugins/intel_gpu/src/kernel_selector/kernels/fully_connected/fully_connected_kernel_bf_tiled.cpp @@ -310,7 +310,7 @@ FullyConnected_bf_tiled::GetAutoTuneParams(const fully_connected_params& params, if (!params.is_shape_agnostic && batch == 1) { // Tuning for Meteor Lake size_t min_num_threads = params.engineInfo.computeUnitsCount * simd; - if (output_f / 2 < min_num_threads && params.weights.GetLayout() == WeightsLayout::os_is_yx_osv32_isv2) { + if (output_f / 2 <= min_num_threads && params.weights.GetLayout() == WeightsLayout::os_is_yx_osv32_isv2) { GPU_DEBUG_TRACE_DETAIL << "FC bf tiled: Set ofm_tile 1. 
(output_f : " << output_f << ", computeUnitsCount : " << params.engineInfo.computeUnitsCount << " min_num_threads : " << min_num_threads << ")" << std::endl; diff --git a/src/plugins/intel_gpu/tests/functional/shared_tests_instances/low_precision_transformations/recurrent_cell_transformation.cpp b/src/plugins/intel_gpu/tests/functional/shared_tests_instances/low_precision_transformations/recurrent_cell_transformation.cpp index afda5292e69c60..85f8d79e7ace31 100644 --- a/src/plugins/intel_gpu/tests/functional/shared_tests_instances/low_precision_transformations/recurrent_cell_transformation.cpp +++ b/src/plugins/intel_gpu/tests/functional/shared_tests_instances/low_precision_transformations/recurrent_cell_transformation.cpp @@ -1,4 +1,4 @@ -// Copyright (C) 2022 Intel Corporation +// Copyright (C) 2022-2024 Intel Corporation // SPDX-License-Identifier: Apache-2.0 // @@ -94,6 +94,7 @@ INSTANTIATE_TEST_SUITE_P(DISABLED_smoke_LPT, RecurrentCellTransformation, ::testing::ValuesIn(weights_shapes), ::testing::Values(ov::test::utils::DEVICE_GPU), ::testing::ValuesIn(trasformationParamValues), + ::testing::ValuesIn({ true, false }), ::testing::ValuesIn(params)), RecurrentCellTransformation::getTestCaseName); } // namespace testValues1 @@ -174,6 +175,7 @@ INSTANTIATE_TEST_SUITE_P(DISABLED_smoke_LPT, RecurrentCellTransformation, ::testing::ValuesIn(weights_shapes), ::testing::Values(ov::test::utils::DEVICE_GPU), ::testing::ValuesIn(trasformationParamValues), + ::testing::ValuesIn({ true, false }), ::testing::ValuesIn(params)), RecurrentCellTransformation::getTestCaseName); } // namespace testValues2 diff --git a/src/plugins/intel_gpu/tests/unit/fusions/eltwise_fusion_test.cpp b/src/plugins/intel_gpu/tests/unit/fusions/eltwise_fusion_test.cpp index 883279ed369dd9..d4c50ec84ac78a 100644 --- a/src/plugins/intel_gpu/tests/unit/fusions/eltwise_fusion_test.cpp +++ b/src/plugins/intel_gpu/tests/unit/fusions/eltwise_fusion_test.cpp @@ -672,3 +672,30 @@ INSTANTIATE_TEST_SUITE_P(fusings_gpu, eltwise_quantize_fs_b_yx_fsv32_exception, eltwise_test_params{ CASE_ELTWISE_FP16_BATCH_FS_B, 6, 6 }, eltwise_test_params{ CASE_ELTWISE_FP16_BATCH_B_FS, 6, 6 }, })); + +class eltwise_fusing_reorders : public EltwiseFusingTest { +public: + layout get_input_layout3(eltwise_test_params& p) { + return layout{ {1, 1, 1, p.input_size[3]}, p.input_type, p.input_format }; + } +}; +TEST_P(eltwise_fusing_reorders, reorders_for_data_type) { + auto p = GetParam(); + create_topologies( + input_layout("input", get_input_layout(p)), + data("data", get_mem(get_input_layout3(p))), + eltwise("eltwise", { input_info("input"), input_info("data") }, p.mode, p.default_type), + reorder("reorder1", input_info("eltwise"), format::bfyx, data_types::i32, {}, reorder_mean_mode::subtract, padding(), true), + reorder("reorder2", input_info("reorder1"), format::bfyx, data_types::f16, {}, reorder_mean_mode::subtract, padding(), true), + data("data2", get_mem(get_input_layout3(p))), + eltwise("eltwise_min", { input_info("reorder2"), input_info("data2") }, eltwise_mode::min, p.default_type), + reorder("out", input_info("eltwise_min"), p.default_format, data_types::f32) + ); + + tolerance = default_tolerance(p.input_type); + execute(p, true); +} + +INSTANTIATE_TEST_SUITE_P(fusings_gpu, eltwise_fusing_reorders, ::testing::ValuesIn(std::vector{ + eltwise_test_params{ { 1, 16, 16, 2 }, data_types::f16, data_types::f16, format::bfyx, data_types::f16, format::bfyx, eltwise_mode::max, 4, 6 }, +})); diff --git 
a/src/plugins/intel_gpu/tests/unit/passes/prepare_buffer_fusing_test.cpp b/src/plugins/intel_gpu/tests/unit/passes/prepare_buffer_fusing_test.cpp index e4a077594c7a7e..e5506388eba273 100644 --- a/src/plugins/intel_gpu/tests/unit/passes/prepare_buffer_fusing_test.cpp +++ b/src/plugins/intel_gpu/tests/unit/passes/prepare_buffer_fusing_test.cpp @@ -777,6 +777,91 @@ TEST(prepare_buffer_fusing, in_place_crop_dynamic) { ASSERT_EQ(output_ptr_3[i], out3[i]); } +TEST(prepare_buffer_fusing, in_place_crop_dynamic_split_lengths) { + auto& engine = get_test_engine(); + + auto in_layout = layout{ ov::PartialShape{-1, -1, -1}, data_types::f32, format::bfyx}; + auto in2_layout = layout{ ov::PartialShape{-1, -1}, data_types::f32, format::bfyx}; + auto input_mem = engine.allocate_memory({ {1, 2, 4}, data_types::f32, format::bfyx }); + auto weights_mem = engine.allocate_memory({ {8, 4}, data_types::u8, format::bfyx }); + auto bias_mem = engine.allocate_memory({ {1, 1, 8}, data_types::f32, format::bfyx }); + auto scale_mem = engine.allocate_memory({ {8, 1}, data_types::f32, format::bfyx }); + auto zp_mem = engine.allocate_memory({ {8, 1}, data_types::f32, format::bfyx }); + auto axis_mem = engine.allocate_memory({ {}, data_types::i64, format::bfyx }); + auto shapeof_mem = engine.allocate_memory({ {2, 6}, data_types::f32, format::bfyx }); + + int64_t axis = 2; + set_values(input_mem, { -0.5f, 2.0f, 0.5f, 1.0f, + 0.5f, -2.0f, -0.5f, -1.0f }); + set_values(axis_mem, {axis}); + set_values(shapeof_mem, { 1.0f, 2.0f, 3.0f, 4.0f, + 5.0f, 6.0f, 7.0f, 8.0f, + 9.0f, 10.0f, 11.0f, 12.0f}); + set_values(weights_mem, { 1, 2, 3, 4, + 5, 6, 7, 8, + 9, 10, 11, 12, + 13, 14, 15, 0, + 15, 14, 13, 12, + 11, 10, 9, 8, + 7, 6, 5, 4, + 3, 2, 1, 0}); + set_values(bias_mem, { 1.0f, -2.0f, 3.0f, -4.0f, 5.0f, -6.0f, 7.0f, 2.0f }); + set_values(scale_mem, { 2.0f, 4.0f, -2.0f, -4.0f, 0.5f, -0.5f, 2.0f, 2.0f }); + set_values(zp_mem, { 1.0f, 2.0f, 2.0f, 1.0f, 4.0f, 1.0f, 6.0f, 2.0f }); + + std::vector out1 = { 13.f, 58.f, -11.f, -62.f }; + std::vector out2 = { -51.f, -108.f, 18.5f, -18.f, 1.f, -4.f, 57.f, 100.f, -8.5f, 6.f, 13.f, 8.f }; + std::vector out3 = { 13.f, 58.f, -51.f, -108.f, 18.5f, -18.f, 1.f, -4.f, -11.f, -62.f, 57.f, 100.f, -8.5f, 6.f, 13.f, 8.f }; + + cldnn::crop_ngraph_op_mode op_mode = cldnn::crop_ngraph_op_mode::variadic_split; + topology topology( + input_layout("input", in_layout), + input_layout("input_shapeof", in2_layout), + data("axis", axis_mem), + data("weights", weights_mem), + data("bias", bias_mem), + data("scale", scale_mem), + data("zp", zp_mem), + fully_connected("fc", input_info("input"), "weights", "bias", "scale", "zp", data_types::f32, 3, 2), + shape_of("shapeof", input_info("input_shapeof"), cldnn::data_types::i64), + crop("crop1", { input_info("fc"), input_info("axis"), input_info("shapeof") }, cldnn::tensor(1), cldnn::tensor(0), op_mode, 0, axis), + reorder("output1", input_info("crop1"), format::bfyx, data_types::f32), + crop("crop2", { input_info("fc"), input_info("axis"), input_info("shapeof") }, cldnn::tensor(1), cldnn::tensor(0), op_mode, 1, axis), + reshape("reshape", input_info("crop2"), true, std::vector{0, 0, 3, 2}, ov::PartialShape{-1, -1, 3, 2}, cldnn::reshape::reshape_mode::base), + reorder("output2", input_info("reshape"), format::bfyx, data_types::f32, std::vector(), reorder_mean_mode::subtract, padding(), true), + reorder("output3", input_info("fc"), format::bfyx, data_types::f32) + ); + + auto config = get_test_default_config(engine); + 
config.set_property(ov::intel_gpu::allow_new_shape_infer(true)); + config.set_property(ov::intel_gpu::optimize_data(true)); + network network(engine, topology, config); + + network.set_input_data("input", input_mem); + network.set_input_data("input_shapeof", shapeof_mem); + + std::map outputs; + EXPECT_NO_THROW(outputs = network.execute()); + + auto output = outputs.at("output1").get_memory(); + cldnn::mem_lock output_ptr(output, get_test_stream()); + + for (size_t i = 0; i < out1.size(); i++) + ASSERT_EQ(output_ptr[i], out1[i]); + + auto output_2 = outputs.at("output2").get_memory(); + cldnn::mem_lock output_ptr_2(output_2, get_test_stream()); + + for (size_t i = 0; i < out2.size(); i++) + ASSERT_EQ(output_ptr_2[i], out2[i]); + + auto output_3 = outputs.at("output3").get_memory(); + cldnn::mem_lock output_ptr_3(output_3, get_test_stream()); + + for (size_t i = 0; i < out3.size(); i++) + ASSERT_EQ(output_ptr_3[i], out3[i]); +} + // Testing for implicit crop along batch axis and outer padding optimzing. // Outer padding opt includes opt out of reshape and reorder which has padded input only in batch axis // This optimzing also includes offset(outer axis padded input) handling of oneDNN primitive. diff --git a/src/plugins/intel_gpu/tests/unit/test_cases/convolution_gpu_test.cpp b/src/plugins/intel_gpu/tests/unit/test_cases/convolution_gpu_test.cpp index 0bf595e124db89..132b2378420a03 100644 --- a/src/plugins/intel_gpu/tests/unit/test_cases/convolution_gpu_test.cpp +++ b/src/plugins/intel_gpu/tests/unit/test_cases/convolution_gpu_test.cpp @@ -11,6 +11,7 @@ #include #include #include +#include #include #include @@ -9933,6 +9934,59 @@ TEST(convolution_gpu_onednn, has_proper_synchronization) { } } +// A test that detects crashes in OneDNN convolution selection checks +TEST(convolution_gpu_onednn, grouped_runtime_weights) { + auto& engine = get_test_engine(); + + if (!engine.get_device_info().supports_immad) + return; + + tests::random_generator rg(GET_SUITE_NAME); + + int64_t input_b = 1, input_f = 256, input_y = 29, input_x = 29; + auto input_size = ov::PartialShape{ input_b, input_f, input_y, input_x }; + auto input_data = rg.generate_random_4d(input_b, input_f, input_y, input_x, -1, 1); + auto input_data_byxf = flatten_4d(format::byxf, input_data); + auto input_mem = engine.allocate_memory({ input_size, data_types::f16, format::byxf }); + set_values(input_mem, input_data_byxf); + + int64_t weights_b = 1, weights_f = 256, weights_y = 5, weights_x = 5; + auto weights_size = ov::PartialShape{ weights_b, weights_f, weights_y, weights_x }; + auto weights_data = rg.generate_random_4d(weights_b, weights_f, weights_y, weights_x, -1, 1); + auto weights_data_bfyx = flatten_4d(format::bfyx, weights_data); + auto weights_mem = engine.allocate_memory({ weights_size, data_types::f16, format::bfyx }); + set_values(weights_mem, weights_data_bfyx); + + auto input = input_layout("input", input_mem->get_layout()); + auto weights = input_layout("weights", weights_mem->get_layout()); + auto weights_reshape = reshape("reshaped_weights", input_info("weights"), true, { 256, 1, 1, 5, 5 }, { 256, 1, 1, 5, 5 }); + auto conv = convolution("conv", input_info("input"), "reshaped_weights", no_bias, 256, { 1, 1 }, { 1, 1 }, { 0, 0 }, { 0, 0 }, true); + auto output_reorder = reorder("reorder", input_info("conv"), { data_types::f32, format::bfyx, { 1, 256, 25, 25 } }); + + topology topology(input, weights, weights_reshape, conv, output_reorder); + + ExecutionConfig config = get_test_default_config(engine); + 
ov::intel_gpu::ImplementationDesc conv_impl = { format::byxf, "", impl_types::onednn }; + config.set_property(ov::intel_gpu::force_implementations(ov::intel_gpu::ImplForcingMap{ { "conv", conv_impl }})); + config.set_property(ov::intel_gpu::optimize_data(true)); + + network network(engine, topology, config); + + network.set_input_data("input", input_mem); + network.set_input_data("weights", weights_mem); + + auto output = network.execute(); + + ASSERT_EQ(output.size(), size_t(1)); + ASSERT_EQ(output.begin()->first, "reorder"); + + auto output_memory = output.at("reorder").get_memory(); + auto output_layout = output_memory->get_layout(); + cldnn::mem_lock output_ptr(output_memory, get_test_stream()); + + ASSERT_EQ(output_layout.get_shape(), ov::Shape({1, 256, 25, 25})); +} + #endif // ENABLE_ONEDNN_FOR_GPU template diff --git a/src/plugins/intel_npu/CMakeLists.txt b/src/plugins/intel_npu/CMakeLists.txt index ce06b2542ac31c..6267fcfb288d38 100644 --- a/src/plugins/intel_npu/CMakeLists.txt +++ b/src/plugins/intel_npu/CMakeLists.txt @@ -30,8 +30,8 @@ add_subdirectory(src) if(ENABLE_TESTS) add_subdirectory(tests) + add_subdirectory(tools) endif() -add_subdirectory(tools) ov_cpack_add_component(${NPU_INTERNAL_COMPONENT} HIDDEN) diff --git a/src/plugins/intel_npu/src/backend/src/zero_infer_request.cpp b/src/plugins/intel_npu/src/backend/src/zero_infer_request.cpp index 12b94767b14154..f7df47760aa56d 100644 --- a/src/plugins/intel_npu/src/backend/src/zero_infer_request.cpp +++ b/src/plugins/intel_npu/src/backend/src/zero_infer_request.cpp @@ -274,12 +274,14 @@ void ZeroInferRequest::create_pipeline() { void ZeroInferRequest::set_tensor_data(const std::shared_ptr tensor, const size_t index, const bool isInput) { + OV_ITT_TASK_CHAIN(ZERO_SET_TENSOR, itt::domains::LevelZeroBackend, "set_tensor", "set_tensor_data"); auto& levelZeroTensors = isInput ? _levelZeroInputTensors : _levelZeroOutputTensors; auto& tensorsData = isInput ? _inputTensorsData : _outputTensorsData; bool setTensorData = false; bool levelZeroTensorCreatedLocally = true; + OV_ITT_TASK_NEXT(ZERO_SET_TENSOR, "check_data_allocation"); ze_memory_allocation_properties_t desc = {}; desc.stype = ZE_STRUCTURE_TYPE_MEMORY_ALLOCATION_PROPERTIES; auto res = zeMemGetAllocProperties(_initStructs->getContext(), tensor->data(), &desc, nullptr); @@ -306,6 +308,7 @@ void ZeroInferRequest::set_tensor_data(const std::shared_ptr tensor // random tensor if (tensorsData.at(index).has_value() && !tensorsData.at(index)->levelZeroTensorCreatedLocally) { _logger.debug("ZeroInferRequest::set_tensor_data - create locally L0 tensor"); + OV_ITT_TASK_NEXT(ZERO_SET_TENSOR, "allocate tensor"); levelZeroTensors.at(index) = allocate_tensor(isInput ? _metadata.inputs.at(index) : _metadata.outputs.at(index), @@ -327,6 +330,7 @@ void ZeroInferRequest::set_tensor_data(const std::shared_ptr tensor if (_pipelineIsCreated) { _logger.debug("ZeroInferRequest::infer_async - update command list"); + OV_ITT_TASK_NEXT(ZERO_SET_TENSOR, "updateCommandList"); _pipeline->updateCommandList(*tensorsData.at(index), isInput ? 
_executor->get_input_descriptors().at(index).idx : _executor->get_output_descriptors().at(index).idx); @@ -337,6 +341,8 @@ void ZeroInferRequest::set_tensor_data(const std::shared_ptr tensor void ZeroInferRequest::set_remote_tensor_data(const std::shared_ptr tensor, const size_t index, const bool isInput) { + OV_ITT_TASK_CHAIN(ZERO_SET_REMOTE_TENSOR, itt::domains::LevelZeroBackend, "set_tensor", "set_remote_tensor_data"); + auto l0_context = reinterpret_cast( extract_object(tensor->get_context()->get_property(), ov::intel_npu::l0_context)); if (_initStructs->getContext() != l0_context) { @@ -357,6 +363,7 @@ void ZeroInferRequest::set_remote_tensor_data(const std::shared_ptrupdateCommandList(*tensorsData.at(index), isInput ? _executor->get_input_descriptors().at(index).idx : _executor->get_output_descriptors().at(index).idx); @@ -364,6 +371,8 @@ void ZeroInferRequest::set_remote_tensor_data(const std::shared_ptr& port, const ov::SoPtr& tensor) { + OV_ITT_SCOPED_TASK(itt::domains::LevelZeroBackend, "set_tensor"); + auto foundPort = find_port(port); OPENVINO_ASSERT(foundPort.found(), "Cannot find tensor for port ", port); try { @@ -392,6 +401,8 @@ void ZeroInferRequest::set_tensor(const ov::Output& port, const } ov::SoPtr ZeroInferRequest::get_tensor(const ov::Output& port) const { + OV_ITT_SCOPED_TASK(itt::domains::LevelZeroBackend, "get_tensor"); + auto foundPort = find_port(port); OPENVINO_ASSERT(foundPort.found(), "Cannot find tensor for port ", port); @@ -428,10 +439,11 @@ void ZeroInferRequest::infer() { void ZeroInferRequest::infer_async() { _logger.debug("InferRequest::infer_async started"); - OV_ITT_SCOPED_TASK(itt::domains::LevelZeroBackend, "infer_async"); + OV_ITT_TASK_CHAIN(ZERO_INFER, itt::domains::LevelZeroBackend, "infer_async", "start"); _executor->mutexLock(); if (!_pipelineIsCreated) { + OV_ITT_TASK_NEXT(ZERO_INFER, "create_pipeline"); create_pipeline(); _pipelineIsCreated = true; @@ -469,6 +481,7 @@ void ZeroInferRequest::infer_async() { } _logger.info("Tensor is not allocated in the current Level Zero context"); + OV_ITT_TASK_NEXT(ZERO_INFER, "memcpy"); std::memcpy(levelZeroBuffer, userBuffer, userTensor->get_byte_size()); } } @@ -476,11 +489,12 @@ void ZeroInferRequest::infer_async() { ++inputIndex; } + OV_ITT_TASK_NEXT(ZERO_INFER, "push"); _pipeline->push(); } void ZeroInferRequest::get_result() { - OV_ITT_SCOPED_TASK(itt::domains::LevelZeroBackend, "get_result"); + OV_ITT_TASK_CHAIN(ZERO_RESULT, itt::domains::LevelZeroBackend, "get_result", "pull"); _pipeline->pull(); size_t outputIndex = 0; @@ -518,6 +532,7 @@ void ZeroInferRequest::get_result() { } _logger.info("Tensor is not allocated in the current Level Zero context"); + OV_ITT_TASK_NEXT(ZERO_RESULT, "memcpy"); std::memcpy(userBuffer, levelZeroBuffer, userTensor->get_byte_size()); } } @@ -525,6 +540,7 @@ void ZeroInferRequest::get_result() { ++outputIndex; } + OV_ITT_TASK_NEXT(ZERO_RESULT, "reset"); _pipeline->reset(); _logger.debug("InferRequest::get_result finished"); } diff --git a/src/plugins/intel_npu/src/backend/src/zero_pipeline.cpp b/src/plugins/intel_npu/src/backend/src/zero_pipeline.cpp index a1d356a915b619..2e5712babbdc29 100644 --- a/src/plugins/intel_npu/src/backend/src/zero_pipeline.cpp +++ b/src/plugins/intel_npu/src/backend/src/zero_pipeline.cpp @@ -297,6 +297,10 @@ struct IntegratedPipeline final : public Pipeline { }; void updateCommandList(const TensorData& tensorsData, const uint32_t index) override { + OV_ITT_TASK_CHAIN(ZERO_EXECUTOR_IP_PULL, + itt::domains::LevelZeroBackend, + 
"IntegratedPipeline", + "updateCommandList"); const size_t numberOfCommandLists = _command_lists.size(); for (size_t i = 0; i < numberOfCommandLists; i++) { diff --git a/src/plugins/intel_npu/src/plugin/src/plugin.cpp b/src/plugins/intel_npu/src/plugin/src/plugin.cpp index 40e4b257832b77..56387d620a8bca 100644 --- a/src/plugins/intel_npu/src/plugin/src/plugin.cpp +++ b/src/plugins/intel_npu/src/plugin/src/plugin.cpp @@ -211,15 +211,6 @@ Plugin::Plugin() // parse again env_variables after backend is initialized to get backend proprieties _globalConfig.parseEnvVars(); - // initialize properties which have device-tied default values in global config - // *only if there is a driver available - if (_metrics->GetAvailableDevicesNames().size() > 0) { - _globalConfig.update({{ov::intel_npu::stepping.name(), - std::to_string(_metrics->GetSteppingNumber(get_specified_device_name(_globalConfig)))}}); - _globalConfig.update({{ov::intel_npu::max_tiles.name(), - std::to_string(_metrics->GetMaxTiles(get_specified_device_name(_globalConfig)))}}); - } - // Map from name to function {Config -> ov::Any} // Note that some properties are RW before network is loaded, and become RO after network is loaded _properties = { @@ -462,14 +453,24 @@ Plugin::Plugin() {ov::intel_npu::stepping.name(), {false, ov::PropertyMutability::RW, - [](const Config& config) { - return config.get(); + [&](const Config& config) { + if (!config.has()) { + const auto specifiedDeviceName = get_specified_device_name(config); + return static_cast(_metrics->GetSteppingNumber(specifiedDeviceName)); + } else { + return config.get(); + } }}}, {ov::intel_npu::max_tiles.name(), {false, ov::PropertyMutability::RW, - [](const Config& config) { - return config.get(); + [&](const Config& config) { + if (!config.has()) { + const auto specifiedDeviceName = get_specified_device_name(config); + return static_cast(_metrics->GetMaxTiles(specifiedDeviceName)); + } else { + return config.get(); + } }}}, {ov::intel_npu::compilation_mode.name(), {false, @@ -622,7 +623,8 @@ std::shared_ptr Plugin::compile_model(const std::shared_ptr< // Update stepping w/ information from driver, unless provided by user or we are off-device // Ignore, if compilation was requested for platform, different from current if (!localConfig.has() && device != nullptr && - device->getName() == ov::intel_npu::Platform::standardize(platform)) { + device->getName() == ov::intel_npu::Platform::standardize(platform) && + _metrics->GetBackendName() == "level_zero") { try { localConfig.update({{ov::intel_npu::stepping.name(), std::to_string(device->getSubDevId())}}); } catch (...) { @@ -633,7 +635,8 @@ std::shared_ptr Plugin::compile_model(const std::shared_ptr< // Update max_tiles w/ information from driver, unless provided by user or we are off-device // Ignore, if compilation was requested for platform, different from current if (!localConfig.has() && device != nullptr && - device->getName() == ov::intel_npu::Platform::standardize(platform)) { + device->getName() == ov::intel_npu::Platform::standardize(platform) && + _metrics->GetBackendName() == "level_zero") { try { localConfig.update({{ov::intel_npu::max_tiles.name(), std::to_string(device->getMaxNumSlices())}}); } catch (...) 
{ diff --git a/src/plugins/intel_npu/tools/common/include/tensor_utils.hpp b/src/plugins/intel_npu/tools/common/include/tensor_utils.hpp index c6ca8f50fd3f94..87b2301a7ae4fb 100644 --- a/src/plugins/intel_npu/tools/common/include/tensor_utils.hpp +++ b/src/plugins/intel_npu/tools/common/include/tensor_utils.hpp @@ -5,8 +5,11 @@ #pragma once +#include #include +#include + namespace npu { namespace utils { @@ -58,5 +61,12 @@ inline ov::Tensor toFP32(const ov::Tensor& in, void* ptr = nullptr) { */ std::vector> parseTensorsAsFP32(const std::map& tensors); +/** + * @brief Join several non-batched tensors having the same shapes and precisions into a batched one. + * + * @param tensors The source non-batched tensors + * @return The merged batched tensor + */ +ov::Tensor joinTensors(const std::list& tensors, const ov::Layout& layout); } // namespace utils } // namespace npu diff --git a/src/plugins/intel_npu/tools/common/src/tensor_utils.cpp b/src/plugins/intel_npu/tools/common/src/tensor_utils.cpp index 470d737a2b9d31..32616b86135243 100644 --- a/src/plugins/intel_npu/tools/common/src/tensor_utils.cpp +++ b/src/plugins/intel_npu/tools/common/src/tensor_utils.cpp @@ -468,5 +468,29 @@ std::vector> parseTensorsAsFP32(const std::map& tensors, const ov::Layout& layout) { + if (tensors.empty()) { + OPENVINO_THROW("Cannot join tensors: nothing to join"); + } + if (!ov::layout::has_batch(layout)) { + OPENVINO_THROW("Cannot join tensors: has no batch_idx in layout", layout.to_string()); + } + auto pivotShape = tensors.front().get_shape(); + auto pivotPrecision = tensors.front().get_element_type(); + if (!std::all_of(tensors.begin(), tensors.end(), [&pivotShape, &pivotPrecision](const auto& t) { + return t.get_shape() == pivotShape && t.get_element_type() == pivotPrecision; + })) { + OPENVINO_THROW("Cannot join tensors with different shapes, expected: ", pivotPrecision, ", ", pivotShape); + } + pivotShape[ov::layout::batch_idx(layout)] *= tensors.size(); + ov::Tensor out(pivotPrecision, pivotShape); + const auto outputBuffer = out.data(); + size_t bytesOffset = 0; + for (const auto& t : tensors) { + memcpy(reinterpret_cast(outputBuffer) + bytesOffset, t.data(), t.get_byte_size()); + bytesOffset += t.get_byte_size(); + } + return out; +} } // namespace utils } // namespace npu diff --git a/src/plugins/intel_npu/tools/single-image-test/main.cpp b/src/plugins/intel_npu/tools/single-image-test/main.cpp index 2d14dbd23e0d7e..e29b5025158373 100644 --- a/src/plugins/intel_npu/tools/single-image-test/main.cpp +++ b/src/plugins/intel_npu/tools/single-image-test/main.cpp @@ -20,6 +20,8 @@ #include #include #include +#include +#include #include #include #include @@ -66,10 +68,18 @@ DEFINE_string(device, "", "Device to use"); DEFINE_string(config, "", "Path to the configuration file (optional)"); DEFINE_string(ip, "", "Input precision (default: U8, available: FP32, FP16, I32, I64, U8)"); DEFINE_string(op, "", "Output precision (default: FP32, available: FP32, FP16, I32, I64, U8)"); -DEFINE_string(il, "", "Input layout"); -DEFINE_string(ol, "", "Output layout"); -DEFINE_string(iml, "", "Model input layout"); -DEFINE_string(oml, "", "Model output layout"); +DEFINE_string( + il, "", + "Input layout for all inputs, or ';' separated list of pairs :. Regex in is supported"); +DEFINE_string(ol, "", + "Output layout for all outputs, or ';' separated list of pairs :. Regex in is " + "supported"); +DEFINE_string(iml, "", + "Model input layout for all model inputs, or ';' separated list of pairs :. 
Regex in " + " is supported"); +DEFINE_string(oml, "", + "Model output layout for all outputs, or ';' separated list of pairs :. Regex in " + " is supported"); DEFINE_bool(img_as_bin, false, "Force binary input even if network expects an image"); DEFINE_bool(pc, false, "Report performance counters"); @@ -156,6 +166,25 @@ std::vector splitStringList(const std::string& str, char delim) { return out; } +std::map parseArgMap(std::string argMap) { + argMap.erase(std::remove_if(argMap.begin(), argMap.end(), ::isspace), argMap.end()); + + const auto pairs = splitStringList(argMap, ';'); + + std::map parsedMap; + for (auto&& pair : pairs) { + const auto lastDelimPos = pair.find_last_of(':'); + auto key = pair.substr(0, lastDelimPos); + std::string value; + if (lastDelimPos != std::string::npos) { + value = pair.substr(lastDelimPos + 1); + } + parsedMap[std::move(key)] = std::move(value); + } + + return parsedMap; +} + void parseCommandLine(int argc, char* argv[]) { std::ostringstream usage; usage << "Usage: " << argv[0] << "[]"; @@ -531,6 +560,38 @@ std::vector> parseMeanOrScale(const std::string& mean_scale, return result; } +using RegexPtr = std::unique_ptr; +std::map parseLayoutRegex(std::string layouts) { + std::map input_output_layouts = parseArgMap(std::move(layouts)); + + std::map out; + for (const auto& input_output_layout : input_output_layouts) { + auto [name, value] = input_output_layout; + if (value.empty()) { + if (name.empty()) { + throw std::runtime_error("Can't parse layouts string \"" + layouts + + "\" into valid \"input:layout;input:layout\" pairs"); + } + // there is no value only name, thus we consider input/output name as "any" and + // apply layout value as the parsed name + out.emplace(std::make_unique(".*"), name); + continue; + } + std::string valid_regex_str = name.empty() ? 
".*" : "^" + name + "$"; + out.emplace(std::make_unique(std::move(valid_regex_str)), std::move(value)); + } + return out; +} + +template +std::optional getRegexSubstitutionIfExist(const std::string& haystack, const std::map& substitutions) { + for (const auto& s : substitutions) { + if (std::regex_search(haystack, *s.first)) { + return {s.second}; + } + } + return {}; +} // // File utils // @@ -569,27 +630,70 @@ ov::Tensor loadImage(const ov::element::Type& precision, const ov::Shape& shape, return tensor; } -ov::Tensor loadBinary(const ov::element::Type& modelPrecision, const ov::Shape& shape, const std::string& filePath, - const ov::element::Type& dataPrecision) { +ov::Tensor loadBinary(const ov::element::Type& modelPrecision, const ov::Shape& shape, const ov::Layout& layout, + const std::string& filePath, const ov::element::Type& dataPrecision) { std::ifstream binaryFile(filePath, std::ios_base::binary | std::ios_base::ate); OPENVINO_ASSERT(binaryFile, "Failed to open input binary file: ", filePath); - const auto fileBytes = binaryFile.tellg(); + const auto fileSize = binaryFile.tellg(); binaryFile.seekg(0, std::ios_base::beg); OPENVINO_ASSERT(binaryFile.good(), "While reading a file an error is encountered"); - - const ov::Tensor requestedTensor(modelPrecision, shape); - const int reqTensorBytes = static_cast(requestedTensor.get_byte_size()); + const size_t fileBytes = static_cast(fileSize); + ov::Tensor requestedTensor(modelPrecision, shape); + const size_t reqTensorBytes = static_cast(requestedTensor.get_byte_size()); if (dataPrecision != modelPrecision && dataPrecision != ov::element::Type_t::undefined) { std::cout << "Converting " << filePath << " input from " << dataPrecision << " to " << modelPrecision << std::endl; const ov::Tensor inputTensor(dataPrecision, shape); - binaryFile.read(reinterpret_cast(inputTensor.data()), static_cast(fileBytes)); - npu::utils::convertTensorPrecision(inputTensor, requestedTensor); + if (fileBytes == inputTensor.get_byte_size()) { + binaryFile.read(reinterpret_cast(inputTensor.data()), static_cast(fileBytes)); + npu::utils::convertTensorPrecision(inputTensor, requestedTensor); + } else { + std::cout << "File contains " << fileBytes + << " bytes, but it expected to be: " << inputTensor.get_byte_size() + << " while converting precision from " << dataPrecision << " to " << modelPrecision + << ". 
Check whether batched loading is possible. " << std::endl; + OPENVINO_ASSERT(ov::layout::has_batch(layout), + "Input layout has no batch dimension: ", layout.to_string()); + size_t N = shape[ov::layout::batch_idx(layout)]; + OPENVINO_ASSERT(fileBytes * N == inputTensor.get_byte_size(), "File contains ", fileBytes, " bytes, but ", + inputTensor.get_byte_size(), " total in batch size ", N, + " expected while converting precision from ", dataPrecision, " to ", modelPrecision); + ov::Shape debatchedInputTensorShape(shape); + debatchedInputTensorShape[ov::layout::batch_idx(layout)] = 1; + const ov::Tensor inputDebatchedTensor(dataPrecision, debatchedInputTensorShape); + binaryFile.read(reinterpret_cast(inputDebatchedTensor.data()), + static_cast(fileBytes)); + const ov::Tensor convertedPrecisionTensor(modelPrecision, debatchedInputTensorShape); + npu::utils::convertTensorPrecision(inputDebatchedTensor, convertedPrecisionTensor); + std::list tensorsToJoin; + std::generate_n(std::back_inserter(tensorsToJoin), N, [&convertedPrecisionTensor]() { + return convertedPrecisionTensor; + }); + requestedTensor = npu::utils::joinTensors(tensorsToJoin, layout); + } + } else { - OPENVINO_ASSERT(fileBytes == reqTensorBytes, "File contains ", fileBytes, " bytes, but ", reqTensorBytes, - " expected"); - binaryFile.read(reinterpret_cast(requestedTensor.data()), static_cast(reqTensorBytes)); + if (fileBytes == reqTensorBytes) { + binaryFile.read(reinterpret_cast(requestedTensor.data()), + static_cast(reqTensorBytes)); + } else { + std::cout << "File contains " << fileBytes << " bytes, but it is expected to be: " << reqTensorBytes + << " when datatypes match. " + << "Check whether batched loading is possible. " << std::endl; + OPENVINO_ASSERT(ov::layout::has_batch(layout), + "Input layout has no batch dimension: ", layout.to_string()); + size_t N = shape[ov::layout::batch_idx(layout)]; + OPENVINO_ASSERT(fileBytes * N == reqTensorBytes, "File contains ", fileBytes, " bytes, but ", + reqTensorBytes, " in batch size ", N, " expected"); + + // duplicate the binary into tensor memory if the tensor is batched + for (size_t n = 0; n < N; ++n) { + binaryFile.seekg(0, std::ios_base::beg); + binaryFile.read(reinterpret_cast(requestedTensor.data()) + fileBytes * n, + static_cast(fileBytes)); + } + } } return requestedTensor; @@ -617,7 +721,7 @@ ov::Tensor loadInput(const ov::element::Type& modelPrecision, const ov::Shape& s if (isImage(shape, layout) && !FLAGS_img_as_bin) { return loadImage(modelPrecision, shape, layout, filePath, colorFormat); } else { - return loadBinary(modelPrecision, shape, filePath, dataPrecision); + return loadBinary(modelPrecision, shape, layout, filePath, dataPrecision); } } @@ -1620,10 +1724,10 @@ static int runSingleImageTest() { throw std::logic_error("Parameter -op " + FLAGS_op + " is not supported"); } - ov::Layout inUserLayout(FLAGS_il); - ov::Layout outUserLayout(FLAGS_ol); - ov::Layout inModelLayout(FLAGS_iml); - ov::Layout outModelLayout(FLAGS_oml); + std::map inUserLayouts = parseLayoutRegex(FLAGS_il); + std::map outUserLayouts = parseLayoutRegex(FLAGS_ol); + std::map inModelLayouts = parseLayoutRegex(FLAGS_iml); + std::map outModelLayouts = parseLayoutRegex(FLAGS_oml); std::vector inputFilesPerCase; std::vector> inputFilesForOneInfer; @@ -1712,10 +1816,16 @@ static int runSingleImageTest() { } // Input layout - if (!inUserLayout.empty()) { - for (size_t i = 0; i < inputInfo.size(); ++i) { + for (size_t i = 0; i < inputInfo.size(); ++i) { + if (std::optional inUserLayout = +
getRegexSubstitutionIfExist(inputInfo[i].get_any_name(), inUserLayouts); + inUserLayout.has_value()) { ov::Layout inLayerModelLayout; - if (inModelLayout.empty()) { + if (std::optional inModelLayout = + getRegexSubstitutionIfExist(inputInfo[i].get_any_name(), inModelLayouts); + inModelLayout.has_value()) { + inLayerModelLayout = inModelLayout.value(); + } else { const auto shape = inputInfo[i].get_shape(); inLayerModelLayout = getLayoutByRank(shape.size()); std::cout << "WARNING: Configuring preprocessing. Since --iml option isn't set, input model " @@ -1723,11 +1833,12 @@ static int runSingleImageTest() { << inputInfo[i].get_any_name() << "\" is infered from shape: " << toString(shape) << " rank (" << shape.size() << ") as " << inLayerModelLayout.to_string() << std::endl; - } else { - inLayerModelLayout = inModelLayout; } + std::cout << "Set layouts for the input: \"" << inputInfo[i].get_any_name() << "\", model " + << inLayerModelLayout.to_string() << ", user " << inUserLayout.value().to_string() + << std::endl; ppp.input(i).model().set_layout(inLayerModelLayout); - ppp.input(i).tensor().set_layout(inUserLayout); + ppp.input(i).tensor().set_layout(inUserLayout.value()); } } @@ -1766,10 +1877,16 @@ static int runSingleImageTest() { } // Output layout - if (!outUserLayout.empty()) { - for (size_t i = 0; i < outputInfo.size(); ++i) { + for (size_t i = 0; i < outputInfo.size(); ++i) { + if (std::optional outUserLayout = + getRegexSubstitutionIfExist(outputInfo[i].get_any_name(), outUserLayouts); + outUserLayout.has_value()) { ov::Layout outLayerModelLayout; - if (outModelLayout.empty()) { + if (std::optional outModelLayout = + getRegexSubstitutionIfExist(outputInfo[i].get_any_name(), outModelLayouts); + outModelLayout.has_value()) { + outLayerModelLayout = outModelLayout.value(); + } else { const auto shape = outputInfo[i].get_shape(); outLayerModelLayout = getLayoutByRank(shape.size()); std::cout << "WARNING: Configuring preprocessing. Since --oml option isn't set, output model " @@ -1777,11 +1894,12 @@ static int runSingleImageTest() { << outputInfo[i].get_any_name() << "\" is infered from shape: " << toString(shape) << " rank (" << shape.size() << ") as " << outLayerModelLayout.to_string() << std::endl; - } else { - outLayerModelLayout = outModelLayout; } + std::cout << "Set layouts for the output: \"" << outputInfo[i].get_any_name() << "\", model " + << outLayerModelLayout.to_string() << ", user " << outUserLayout.value().to_string() + << std::endl; ppp.output(i).model().set_layout(outLayerModelLayout); - ppp.output(i).tensor().set_layout(outUserLayout); + ppp.output(i).tensor().set_layout(outUserLayout.value()); } } @@ -1852,10 +1970,14 @@ static int runSingleImageTest() { // Determine the input layout ov::Layout inputLayout; - if (!inUserLayout.empty()) { - inputLayout = inUserLayout; - } else if (!inModelLayout.empty()) { - inputLayout = inModelLayout; + if (std::optional inUserLayout = + getRegexSubstitutionIfExist(inputInfo.get_any_name(), inUserLayouts); + inUserLayout.has_value()) { + inputLayout = inUserLayout.value(); + } else if (std::optional inModelLayout = + getRegexSubstitutionIfExist(inputInfo.get_any_name(), inModelLayouts); + inModelLayout.has_value()) { + inputLayout = inModelLayout.value(); } else { inputLayout = getLayoutByRank(shape.size()); std::cout << "WARNING: Loading input data. 
Since --iml option isn't set, input model layout for " @@ -1905,7 +2027,9 @@ static int runSingleImageTest() { LayoutMap outputLayouts; // Several metrics may require this // Load the reference data - for (const auto& [tensorName, tensor] : outputTensors) { + for (const auto& out : compiledModel.outputs()) { + const auto& tensorName = out.get_any_name(); + const auto& tensor = outputTensors.at(tensorName); const ov::element::Type& precision = tensor.get_element_type(); const ov::Shape& shape = tensor.get_shape(); @@ -1922,10 +2046,14 @@ static int runSingleImageTest() { // Determine the output layout ov::Layout outputLayout; - if (!outUserLayout.empty()) { - outputLayout = outUserLayout; - } else if (!outModelLayout.empty()) { - outputLayout = outModelLayout; + if (std::optional outUserLayout = + getRegexSubstitutionIfExist(tensorName, outUserLayouts); + outUserLayout.has_value()) { + outputLayout = outUserLayout.value(); + } else if (std::optional outModelLayout = + getRegexSubstitutionIfExist(tensorName, outModelLayouts); + outModelLayout.has_value()) { + outputLayout = outModelLayout.value(); } else { outputLayout = getLayoutByRank(shape.size()); std::cout << "WARNING: Since --oml option isn't set, output model layout for layer \"" @@ -1941,7 +2069,8 @@ static int runSingleImageTest() { outputInd = 0; // Dump the outputs obtained upon prediction - for (const auto& tensorEntry : outputTensors) { + for (const auto& out : compiledModel.outputs()) { + const auto& tensor = outputTensors.at(out.get_any_name()); std::ostringstream ostr; ostr << netFileName << "_kmb_out_" << outputInd << "_case_" << numberOfTestCase << ".blob"; const auto blobFileName = ostr.str(); @@ -1949,7 +2078,7 @@ static int runSingleImageTest() { std::cout << "Dump device output #" << outputInd << "_case_" << numberOfTestCase << " to " << blobFileName << std::endl; - dumpTensor(tensorEntry.second, blobFileName); + dumpTensor(tensor, blobFileName); ++outputInd; } @@ -2048,13 +2177,14 @@ static int runSingleImageTest() { } } else { size_t outputInd = 0; - for (const auto& tensorEntry : outputTensors) { + for (const auto& out : compiledModel.outputs()) { + const auto& tensor = outputTensors.at(out.get_any_name()); std::ostringstream ostr; ostr << netFileName << "_ref_out_" << outputInd << "_case_" << numberOfTestCase << ".blob"; const auto blobFileName = ostr.str(); std::cout << "Dump reference output #" << outputInd << " to " << blobFileName << std::endl; - dumpTensor(tensorEntry.second, blobFileName); + dumpTensor(tensor, blobFileName); ++outputInd; } diff --git a/src/tests/functional/plugin/shared/include/low_precision_transformations/recurrent_cell_transformation.hpp b/src/tests/functional/plugin/shared/include/low_precision_transformations/recurrent_cell_transformation.hpp index d0452c9da1b638..82a8795698bb36 100644 --- a/src/tests/functional/plugin/shared/include/low_precision_transformations/recurrent_cell_transformation.hpp +++ b/src/tests/functional/plugin/shared/include/low_precision_transformations/recurrent_cell_transformation.hpp @@ -42,6 +42,7 @@ typedef std::tuple< std::vector, std::string, ov::pass::low_precision::LayerTransformation::Params, + bool, // use precision transparent operations RecurrentCellTransformationParam >RecurrentCellTransformationParams; diff --git a/src/tests/functional/plugin/shared/src/low_precision_transformations/recurrent_cell_transformation.cpp b/src/tests/functional/plugin/shared/src/low_precision_transformations/recurrent_cell_transformation.cpp index 
e94663bf2b8596..692a00877c3368 100644 --- a/src/tests/functional/plugin/shared/src/low_precision_transformations/recurrent_cell_transformation.cpp +++ b/src/tests/functional/plugin/shared/src/low_precision_transformations/recurrent_cell_transformation.cpp @@ -21,14 +21,16 @@ std::string RecurrentCellTransformation::getTestCaseName(testing::TestParamInfo< std::string targetDevice; RecurrentCellTransformationParam param; ov::pass::low_precision::LayerTransformation::Params params; - std::tie(netPrecision, activationsShape, weightsShape, targetDevice, params, param) = obj.param; + bool addPrecisionTransparentOperations; + std::tie(netPrecision, activationsShape, weightsShape, targetDevice, params, addPrecisionTransparentOperations, param) = obj.param; std::ostringstream result; result << get_test_case_name_by_params(netPrecision, activationsShape[0], targetDevice, params) << "FQ_X_" << param.fakeQuantize_X << "_" << "DQ_X_" << param.dequantization_X << "_" << "FQ_W_" << param.fakeQuantize_W << "_" << - "DQ_W_" << param.dequantization_W; + "DQ_W_" << param.dequantization_W << "_" << + "PTO" << addPrecisionTransparentOperations; return result.str(); } @@ -37,9 +39,10 @@ void RecurrentCellTransformation::SetUp() { std::vector activations_shapes; std::vector weights_shapes; RecurrentCellTransformationParam param; + bool addPrecisionTransparentOperations; ov::pass::low_precision::LayerTransformation::Params params; - std::tie(precision, activations_shapes, weights_shapes, targetDevice, params, param) = this->GetParam(); + std::tie(precision, activations_shapes, weights_shapes, targetDevice, params, addPrecisionTransparentOperations, param) = this->GetParam(); init_input_shapes(activations_shapes); @@ -64,13 +67,14 @@ void RecurrentCellTransformation::SetUp() { param.dequantization_H, param.dequantization_W, param.dequantization_R - }); + }, + addPrecisionTransparentOperations); } void RecurrentCellTransformation::run() { LayerTransformation::run(); - const auto params = std::get<5>(GetParam()); + const auto params = std::get<6>(GetParam()); const auto actualPrecision = get_runtime_precision_by_type(params.layerName); auto expectedPrecision = params.expectedKernelType; if (expectedPrecision == "FP32" && std::get<0>(GetParam()) == ov::element::f16) { diff --git a/src/tests/ov_helpers/ov_lpt_models/include/ov_lpt_models/broadcast.hpp b/src/tests/ov_helpers/ov_lpt_models/include/ov_lpt_models/broadcast.hpp new file mode 100644 index 00000000000000..4384fecd089ea6 --- /dev/null +++ b/src/tests/ov_helpers/ov_lpt_models/include/ov_lpt_models/broadcast.hpp @@ -0,0 +1,29 @@ +// Copyright (C) 2024 Intel Corporation +// SPDX-License-Identifier: Apache-2.0 +// + +#pragma once + +#include +#include "low_precision/layer_transformation.hpp" +#include "ov_lpt_models/common/dequantization_operations.hpp" + +namespace ov { +namespace builder { +namespace subgraph { + +class BroadcastFunction { +public: + static std::shared_ptr get( + const bool v1, + const ov::PartialShape& inputShape, + const ov::element::Type precisionBeforeDequantization, + const ov::builder::subgraph::DequantizationOperations& dequantizationBefore, + const Shape& tagetShape, + const Shape& axesMapping, + const ov::builder::subgraph::DequantizationOperations& dequantizationAfter); +}; + +} // namespace subgraph +} // namespace builder +} // namespace ov diff --git a/src/tests/ov_helpers/ov_lpt_models/include/ov_lpt_models/recurrent_cell.hpp b/src/tests/ov_helpers/ov_lpt_models/include/ov_lpt_models/recurrent_cell.hpp index 
da98410c55d13c..57ffdedc4c0eb6 100644 --- a/src/tests/ov_helpers/ov_lpt_models/include/ov_lpt_models/recurrent_cell.hpp +++ b/src/tests/ov_helpers/ov_lpt_models/include/ov_lpt_models/recurrent_cell.hpp @@ -1,4 +1,4 @@ -// Copyright (C) 2022 Intel Corporation +// Copyright (C) 2022-2024 Intel Corporation // SPDX-License-Identifier: Apache-2.0 // @@ -25,7 +25,8 @@ class RecurrentCellFunction { const RNNType type, const std::vector& fqOnDatas, const std::vector& converts, - const std::vector& dequantizations); + const std::vector& dequantizations, + const bool addPrecisionTransparentOperations = false); }; std::shared_ptr makeQuantizationAndDequantization(const std::shared_ptr input, @@ -33,7 +34,8 @@ std::shared_ptr makeQuantizationAndDequantization(const std::shared_ptr +std::shared_ptr make_broadcast(const std::shared_ptr& parent, const Shape& tagetShape, const Shape& axesMapping) { + return std::make_shared( + parent, + std::make_shared(ov::element::i32, Shape{ tagetShape.size() }, tagetShape), + std::make_shared(ov::element::i32, Shape{ axesMapping.size() }, axesMapping)); +} +} // namespace + +std::shared_ptr BroadcastFunction::get( + const bool v1, + const ov::PartialShape& inputShape, + const ov::element::Type precisionBeforeDequantization, + const ov::builder::subgraph::DequantizationOperations& dequantizationBefore, + const Shape& tagetShape, + const Shape& axesMapping, + const ov::builder::subgraph::DequantizationOperations& dequantizationAfter) { + const auto input = std::make_shared(precisionBeforeDequantization, inputShape); + std::shared_ptr parent = input; + + if (!dequantizationBefore.empty()) { + parent = makeDequantization(parent, dequantizationBefore); + } + + parent = v1 ? + make_broadcast(parent, tagetShape, axesMapping) : + make_broadcast(parent, tagetShape, axesMapping); + parent->set_friendly_name("broadcast"); + + if (!dequantizationAfter.empty()) { + parent = makeDequantization(parent, dequantizationAfter); + } + + const std::shared_ptr result = std::make_shared(parent); + + const std::shared_ptr function = std::make_shared( + ov::ResultVector{ result }, + std::vector> { input }, + "BroadcastTransformation"); + return function; +} + +} // namespace subgraph +} // namespace builder +} // namespace ov diff --git a/src/tests/ov_helpers/ov_lpt_models/src/recurrent_cell.cpp b/src/tests/ov_helpers/ov_lpt_models/src/recurrent_cell.cpp index 7be3fca1217403..7a3537c91f3824 100644 --- a/src/tests/ov_helpers/ov_lpt_models/src/recurrent_cell.cpp +++ b/src/tests/ov_helpers/ov_lpt_models/src/recurrent_cell.cpp @@ -1,4 +1,4 @@ -// Copyright (C) 2022 Intel Corporation +// Copyright (C) 2022-2024 Intel Corporation // SPDX-License-Identifier: Apache-2.0 // @@ -30,7 +30,8 @@ std::shared_ptr RecurrentCellFunction::get( const RNNType type, const std::vector& fqOnDatas, const std::vector& converts, - const std::vector& dequantizations) { + const std::vector& dequantizations, + const bool addPrecisionTransparentOperations) { auto X = std::make_shared(inputPrecision, inputActivationsShapes[0]); X->set_friendly_name("X"); std::shared_ptr parent_X = makeQuantizationAndDequantization(X, @@ -46,7 +47,8 @@ std::shared_ptr RecurrentCellFunction::get( H->get_friendly_name(), fqOnDatas[1], converts[1], - dequantizations[1]); + dequantizations[1], + addPrecisionTransparentOperations); auto C = std::make_shared(inputPrecision, inputActivationsShapes[2]); C->set_friendly_name("C"); @@ -58,7 +60,8 @@ std::shared_ptr RecurrentCellFunction::get( W->get_friendly_name(), fqOnDatas[2], converts[2], - 
dequantizations[2]); + dequantizations[2], + addPrecisionTransparentOperations); auto R = ov::opset1::Constant::create(fqOnDatas[2].empty() ? ov::element::i8 : inputPrecision, inputWeightsShapes[1], {1}); @@ -127,12 +130,20 @@ std::shared_ptr makeQuantizationAndDequantization(const std::shared_ptr parent; - if (fqOnData.empty()) { - parent = input; - } else { - std::shared_ptr fakeQuantize1 = makeFakeQuantizeTypeRelaxed(input, inputPrecision, fqOnData); + const DequantizationOperations& dequantization, + const bool addPrecisionTransparentOperations) { + std::shared_ptr parent = input; + if (addPrecisionTransparentOperations) { + auto shape = input->get_output_shape(0); + std::swap(shape[shape.size() - 2], shape[shape.size() - 1]); + parent = std::make_shared( + parent, + std::make_shared(element::u32, Shape({ shape.size() }), shape), + true); + } + + if (!fqOnData.empty()) { + std::shared_ptr fakeQuantize1 = makeFakeQuantizeTypeRelaxed(parent, inputPrecision, fqOnData); fakeQuantize1->set_friendly_name("fakeQuantize_" + friendly_name); parent = fakeQuantize1; } @@ -142,6 +153,15 @@ std::shared_ptr makeQuantizationAndDequantization(const std::shared_ptrget_output_shape(0); + parent = std::make_shared( + parent, + std::make_shared(element::u32, Shape({ shape.size() }), shape), + true); + } + return parent; } diff --git a/tests/constraints.txt b/tests/constraints.txt index c0ab1a660164f4..16bffdf16967db 100644 --- a/tests/constraints.txt +++ b/tests/constraints.txt @@ -18,7 +18,7 @@ opencv-python>=4.5 paddlepaddle==2.6.1 protobuf>=3.18.1,<4.0.0 py>=1.9.0 -pytest>=5.0,<7.5 +pytest>=5.0,<8.4 pytest-dependency==0.5.1 pytest-html==4.1.1 pytest-timeout==2.2.0 @@ -27,6 +27,7 @@ jaxlib<=0.4.14 kornia==0.7.0 networkx<=3.3 keras>=2.0.0,<3.0.0 +timm==1.0.7 --extra-index-url https://download.pytorch.org/whl/cpu -torch>=1.13,<2.3 \ No newline at end of file +torch>=1.13,<2.4 \ No newline at end of file diff --git a/tests/e2e_tests/requirements.txt b/tests/e2e_tests/requirements.txt index 2c37134327f7cc..2d380c682819aa 100644 --- a/tests/e2e_tests/requirements.txt +++ b/tests/e2e_tests/requirements.txt @@ -20,7 +20,7 @@ scikit-image>=0.17.2 tabulate==0.9.0 pytest>=5.0,<=7.0.1; python_version < '3.10' -pytest==7.2.0; python_version >= '3.10' +pytest==8.3.1; python_version >= '3.10' pytest-cov==2.11.1 # pytest-html==1.19.0 pytest-html diff --git a/tests/layer_tests/pytorch_tests/test_batch_norm.py b/tests/layer_tests/pytorch_tests/test_batch_norm.py index 577a036af70240..8e72ae33eaa15e 100644 --- a/tests/layer_tests/pytorch_tests/test_batch_norm.py +++ b/tests/layer_tests/pytorch_tests/test_batch_norm.py @@ -60,5 +60,7 @@ def forward(self, x): @pytest.mark.precommit_fx_backend @pytest.mark.precommit_torch_export def test_batch_norm(self, weights, bias, eps, train, running_stats, ie_device, precision, ir_version, kwargs_to_prepare_input): + if running_stats and self.use_torch_export(): + pytest.skip("running_mean not supported by torch.export") self._test(*self.create_model(weights, bias, eps, train, running_stats), ie_device, precision, ir_version, kwargs_to_prepare_input=kwargs_to_prepare_input, dynamic_shapes=False, use_mo_convert=False) diff --git a/tests/layer_tests/pytorch_tests/test_full.py b/tests/layer_tests/pytorch_tests/test_full.py index 20a70367e047f5..6ef8ca25a692a0 100644 --- a/tests/layer_tests/pytorch_tests/test_full.py +++ b/tests/layer_tests/pytorch_tests/test_full.py @@ -93,7 +93,7 @@ def test_full(self, shape, value, ie_device, precision, ir_version): @pytest.mark.parametrize("shape", 
[[1], [1, 2], [1, 2, 3], [1, 2, 3, 4], [2, 3, 4, 5, 6]]) @pytest.mark.parametrize("value", [0, 1, -1, 0.5]) @pytest.mark.parametrize("dtype", ["int8", "int32", "int64", "float32", "float64"]) - @pytest.mark.parametrize("with_names", [True, False]) + @pytest.mark.parametrize("with_names", [skip_if_export(True), False]) @pytest.mark.nightly @pytest.mark.precommit_fx_backend @pytest.mark.precommit_torch_export @@ -104,7 +104,7 @@ def test_full_dtype(self, shape, value, dtype, with_names, ie_device, precision, @pytest.mark.parametrize("shape", [[1], [1, 2], [1, 2, 3], [1, 2, 3, 4], [2, 3, 4, 5, 6]]) @pytest.mark.parametrize("value", [0, 1, -1, 0.5]) @pytest.mark.parametrize("dtype", ["int8", "int32", "int64", "float32", "float64"]) - @pytest.mark.parametrize("with_names", [True, False]) + @pytest.mark.parametrize("with_names", [skip_if_export(True), False]) @pytest.mark.nightly def test_full_out(self, shape, value, dtype, with_names, ie_device, precision, ir_version): self._test(*self.create_model(shape, dtype=dtype, use_out=True, with_names=with_names), ie_device, precision, @@ -496,7 +496,7 @@ def test_zeros_ones(self, op_type, shape, ie_device, precision, ir_version): @pytest.mark.parametrize("shape", [(1, 1), (1, 2), (1, 2, 3), (1, 2, 3, 4), (2, 3, 4, 5, 6)]) @pytest.mark.parametrize("op_type", ["aten::zeros", "aten::ones"]) @pytest.mark.parametrize("dtype", ["int8", "int32", "int64", "float32", "float64"]) - @pytest.mark.parametrize("with_names", [True, False]) + @pytest.mark.parametrize("with_names", [skip_if_export(True), False]) @pytest.mark.nightly @pytest.mark.precommit_fx_backend @pytest.mark.precommit_torch_export @@ -508,7 +508,7 @@ def test_zeros_ones_with_dtype(self, op_type, shape, dtype, with_names, ie_devic @pytest.mark.parametrize("shape", [(1, 1), (1, 2), (1, 2, 3), (1, 2, 3, 4), (2, 3, 4, 5, 6)]) @pytest.mark.parametrize("op_type", ["aten::zeros", "aten::ones"]) @pytest.mark.parametrize("dtype", ["int8", "int32", "int64", "float32", "float64"]) - @pytest.mark.parametrize("with_names", [True, False]) + @pytest.mark.parametrize("with_names", [skip_if_export(True), False]) @pytest.mark.nightly def test_zeros_ones_with_out(self, op_type, shape, dtype, with_names, ie_device, precision, ir_version): self._test(*self.create_model(op_type, dtype=dtype, with_out=True, with_names=with_names), ie_device, precision, diff --git a/tests/model_hub_tests/models_hub_common/utils.py b/tests/model_hub_tests/models_hub_common/utils.py index 6dac33640162de..068826669fab5b 100644 --- a/tests/model_hub_tests/models_hub_common/utils.py +++ b/tests/model_hub_tests/models_hub_common/utils.py @@ -27,7 +27,6 @@ def get_models_list(file_name: str): model_name, model_link = model_info.split(',') elif len(model_info.split(',')) == 4: model_name, model_link, mark, reason = model_info.split(',') - assert mark in ["skip", "xfail"], "Incorrect failure mark for model info {}".format(model_info) models.append((model_name, model_link, mark, reason)) return models diff --git a/tests/model_hub_tests/pytorch/hf_transformers_models b/tests/model_hub_tests/pytorch/hf_transformers_models index 5da9db39095810..f79f32b6d93ee8 100644 --- a/tests/model_hub_tests/pytorch/hf_transformers_models +++ b/tests/model_hub_tests/pytorch/hf_transformers_models @@ -4,7 +4,7 @@ abeja/gpt-neox-japanese-2.7b,gpt_neox_japanese acl-submission-anonym/EAM-spectral,examuse,skip,Load problem adalbertojunior/modular-test,modular,skip,Load problem adept/persimmon-8b-base,persimmon -aerner/lm-v2,open-llama,xfail,Example input problem 
+aerner/lm-v2,open-llama afonsosamarques/ardt-vanilla-combo_train_hopper_v2-2508_1336-33,decision_transformer,xfail,Tracing problem aihijo/gec-zh-gector-bert-large,gector,skip,Load problem albert-base-v2,albert @@ -170,6 +170,7 @@ huggingface/time-series-transformer-tourism-monthly,time_series_transformer,skip HuggingFaceM4/tiny-random-idefics,idefics,xfail,Unsupported op aten::any aten::einsum prim::TupleConstruct prim::TupleUnpack HuggingFaceM4/tiny-random-vllama-clip,vllama,skip,Load problem HuggingFaceM4/tiny-random-vopt-clip,vopt,skip,Load problem +HuggingFaceH4/zephyr-7b-beta,mistral HuiHuang/gpt3-damo-base-zh,gpt3,skip,Load problem hustvl/yolos-tiny,yolos iakarshu/tilt_base,tilt_base_configuration,skip,Load problem @@ -184,7 +185,7 @@ jaketae/fastspeech2-ljspeech,fastspeech2,skip,Load problem jambran/depression-classification,DepressionDetection,skip,Load problem Jellywibble/dalio-reward-charlie-v1,reward-model,skip,Load problem JonasGeiping/crammed-bert-legacy,crammedBERT,skip,Load problem -jonatasgrosman/wav2vec2-large-xlsr-53-english,wav2vec2,xfail,Unsupported op aten::index_put_ prim::TupleConstruct +jonatasgrosman/wav2vec2-large-xlsr-53-english,wav2vec2 Joqsan/test-my-fnet,my_fnet,skip,Load problem jozhang97/deta-swin-large,deta,skip,Load problem jploski/retnet-mini-shakespeare,retnet,skip,Load problem @@ -257,7 +258,6 @@ microsoft/xclip-base-patch32,xclip microsoft/xprophetnet-large-wiki100-cased,xlm-prophetnet miguelvictor/python-fromzero-lstmlm,lstmlm,skip,Load problem mingzi151/test-hf-wav2vec2bert,wav2vec2bert,skip,Load problem -mistralai/Mistral-7B-v0.1,mistral MIT/ast-finetuned-audioset-10-10-0.4593,audio-spectrogram-transformer Mizuiro-sakura/luke-japanese-large-sentiment-analysis-wrime,luke mlml-chip/thyme2_colon_e2e,cnlpt,skip,Load problem diff --git a/tests/model_hub_tests/pytorch/test_timm.py b/tests/model_hub_tests/pytorch/test_timm.py index 1e168de83a50d5..78bd632179be6f 100644 --- a/tests/model_hub_tests/pytorch/test_timm.py +++ b/tests/model_hub_tests/pytorch/test_timm.py @@ -8,7 +8,7 @@ import torch from models_hub_common.utils import get_models_list -from torch_utils import TestTorchConvertModel, process_pytest_marks +from torch_utils import TestTorchConvertModel def filter_timm(timm_list: list) -> list: @@ -42,10 +42,6 @@ def filter_timm(timm_list: list) -> list: return sorted([v[1] for v in unique_models.values()]) -def get_all_models() -> list: - return process_pytest_marks(os.path.join(os.path.dirname(__file__), "timm_models")) - - # To make tests reproducible we seed the random generator torch.manual_seed(0) @@ -82,10 +78,16 @@ def test_convert_model_precommit(self, name, ie_device): self.run(name, None, ie_device) @pytest.mark.nightly - @pytest.mark.parametrize("name", get_all_models()) + @pytest.mark.parametrize("name,link,mark,reason", get_models_list(os.path.join(os.path.dirname(__file__), "timm_models"))) @pytest.mark.parametrize("mode", ["trace", "export"]) - def test_convert_model_all_models(self, mode, name, ie_device): + def test_convert_model_all_models(self, mode, name, link, mark, reason, ie_device): self.mode = mode + assert mark is None or mark in [ + 'skip', 'xfail', 'xfail_trace', 'xfail_export'], f"Incorrect test case for {name}" + if mark == 'skip': + pytest.skip(reason) + elif mark in ['xfail', f'xfail_{mode}']: + pytest.xfail(reason) self.run(name, None, ie_device) @pytest.mark.nightly diff --git a/tests/model_hub_tests/pytorch/test_torchvision_models.py b/tests/model_hub_tests/pytorch/test_torchvision_models.py index 
9aeabbbe09b032..31aeaedb2366d4 100644 --- a/tests/model_hub_tests/pytorch/test_torchvision_models.py +++ b/tests/model_hub_tests/pytorch/test_torchvision_models.py @@ -7,8 +7,9 @@ import pytest import torch import torchvision.transforms.functional as F +from models_hub_common.utils import get_models_list -from torch_utils import process_pytest_marks, TestTorchConvertModel +from torch_utils import TestTorchConvertModel def get_all_models() -> list: @@ -103,10 +104,15 @@ def test_convert_model_precommit_export(self, model_name, ie_device): self.mode = "export" self.run(model_name, None, ie_device) - @pytest.mark.parametrize("name", - process_pytest_marks(os.path.join(os.path.dirname(__file__), "torchvision_models"))) + @pytest.mark.parametrize("name,link,mark,reason", get_models_list(os.path.join(os.path.dirname(__file__), "torchvision_models"))) @pytest.mark.parametrize("mode", ["trace", "export"]) @pytest.mark.nightly - def test_convert_model_all_models(self, mode, name, ie_device): + def test_convert_model_all_models(self, mode, name, link, mark, reason, ie_device): self.mode = mode + assert mark is None or mark in [ + 'skip', 'xfail', 'xfail_trace', 'xfail_export'], f"Incorrect test case for {name}" + if mark == 'skip': + pytest.skip(reason) + elif mark in ['xfail', f'xfail_{mode}']: + pytest.xfail(reason) self.run(name, None, ie_device) diff --git a/tests/model_hub_tests/pytorch/timm_models b/tests/model_hub_tests/pytorch/timm_models index 9087edc24ffe2c..6aa64a90c19071 100644 --- a/tests/model_hub_tests/pytorch/timm_models +++ b/tests/model_hub_tests/pytorch/timm_models @@ -13,7 +13,7 @@ cait_s36_384.fb_dist_in1k,None cait_xs24_384.fb_dist_in1k,None cait_xxs24_224.fb_dist_in1k,None cait_xxs36_224.fb_dist_in1k,None -coat_tiny.in1k,None +coat_tiny.in1k,None,xfail_export,Requested None inlined input coatnet_bn_0_rw_224.sw_in1k,None coatnet_nano_rw_224.sw_in1k,None coatnet_rmlp_1_rw2_224.sw_in12k,None @@ -23,7 +23,7 @@ convformer_b36.sail_in1k,None convformer_m36.sail_in1k,None convformer_s18.sail_in1k,None convformer_s36.sail_in1k,None -convit_base.fb_in1k,None,xfail,Trace failed +convit_base.fb_in1k,None,xfail_trace,Trace failed convmixer_1024_20_ks9_p14.in1k,None convmixer_1536_20.in1k,None convnext_atto_ols.a2_in1k,None @@ -102,10 +102,10 @@ edgenext_xx_small.in1k,None efficientformer_l1.snap_dist_in1k,None efficientformer_l3.snap_dist_in1k,None efficientformer_l7.snap_dist_in1k,None -efficientformerv2_l.snap_dist_in1k,None -efficientformerv2_s0.snap_dist_in1k,None -efficientformerv2_s1.snap_dist_in1k,None -efficientformerv2_s2.snap_dist_in1k,None +efficientformerv2_l.snap_dist_in1k,None,xfail_export,Requested None inlined input +efficientformerv2_s0.snap_dist_in1k,None,xfail_export,Requested None inlined input +efficientformerv2_s1.snap_dist_in1k,None,xfail_export,Requested None inlined input +efficientformerv2_s2.snap_dist_in1k,None,xfail_export,Requested None inlined input efficientnet_b0.ra_in1k,None efficientnet_b1.ft_in1k,None efficientnet_b1_pruned.in1k,None @@ -144,6 +144,9 @@ eva02_base_patch14_224.mim_in22k,None eva02_base_patch16_clip_224.merged2b,None eva02_large_patch14_clip_224.merged2b,None fastvit_ma36.apple_dist_in1k,None +fastvit_mci0.apple_mclip,None +fastvit_mci1.apple_mclip,None +fastvit_mci2.apple_mclip,None,xfail_trace,Accuracy validation failed fastvit_s12.apple_dist_in1k,None fastvit_sa12.apple_dist_in1k,None fastvit_sa24.apple_dist_in1k,None @@ -167,7 +170,7 @@ gcresnext50ts.ch_in1k,None gcvit_base.in1k,None gernet_s.idstcv_in1k,None ghostnet_100.in1k,None 
-ghostnetv2_100.in1k,None +ghostnetv2_100.in1k,None,xfail_export,Requested None inlined input gmixer_24_224.ra3_in1k,None gmlp_s16_224.ra3_in1k,None halo2botnet50ts_256.a1h_in1k,None @@ -188,16 +191,18 @@ hgnetv2_b3.ssld_stage1_in22k_in1k,None hgnetv2_b4.ssld_stage1_in22k_in1k,None hgnetv2_b5.ssld_stage1_in22k_in1k,None hgnetv2_b6.ssld_stage1_in22k_in1k,None -hrnet_w18_small.gluon_in1k,None -hrnet_w18_small_v2.gluon_in1k,None -hrnet_w18_ssld.paddle_in1k,None -hrnet_w30.ms_in1k,None -hrnet_w32.ms_in1k,None -hrnet_w40.ms_in1k,None -hrnet_w44.ms_in1k,None -hrnet_w48.ms_in1k,None -hrnet_w48_ssld.paddle_in1k,None -hrnet_w64.ms_in1k,None +hiera_base_224.mae,None +hiera_base_plus_224.mae,None +hrnet_w18_small.gluon_in1k,None,xfail_export,Requested None inlined input +hrnet_w18_small_v2.gluon_in1k,None,xfail_export,Requested None inlined input +hrnet_w18_ssld.paddle_in1k,None,xfail_export,Requested None inlined input +hrnet_w30.ms_in1k,None,xfail_export,Requested None inlined input +hrnet_w32.ms_in1k,None,xfail_export,Requested None inlined input +hrnet_w40.ms_in1k,None,xfail_export,Requested None inlined input +hrnet_w44.ms_in1k,None,xfail_export,Requested None inlined input +hrnet_w48.ms_in1k,None,xfail_export,Requested None inlined input +hrnet_w48_ssld.paddle_in1k,None,xfail_export,Requested None inlined input +hrnet_w64.ms_in1k,None,xfail_export,Requested None inlined input inception_next_base.sail_in1k,None inception_resnet_v2.tf_ens_adv_in1k,None inception_v3.gluon_in1k,None @@ -236,6 +241,9 @@ mobilenetv2_110d.ra_in1k,None mobilenetv2_120d.ra_in1k,None mobilenetv3_rw.rmsp_in1k,None mobilenetv3_small_050.lamb_in1k,None +mobilenetv4_conv_blur_medium.e500_r224_in1k,None +mobilenetv4_conv_small.e1200_r224_in1k,None +mobilenetv4_hybrid_medium.e500_r224_in1k,None mobileone_s0.apple_in1k,None mobileone_s1.apple_in1k,None mobileone_s2.apple_in1k,None @@ -324,6 +332,8 @@ resnest50d.in1k,None resnest50d_1s4x24d.in1k,None resnest50d_4s2x40d.in1k,None resnet101.a1_in1k,None +resnet101_clip.openai,None +resnet101_clip_gap.openai,None resnet101c.gluon_in1k,None resnet101d.gluon_in1k,None resnet101s.gluon_in1k,None @@ -344,10 +354,18 @@ resnet33ts.ra2_in1k,None resnet34.a1_in1k,None resnet34d.ra2_in1k,None resnet50.a1_in1k,None +resnet50_clip.openai,None +resnet50_clip_gap.openai,None resnet50_gn.a1h_in1k,None resnet50c.gluon_in1k,None resnet50d.a1_in1k,None resnet50s.gluon_in1k,None +resnet50x16_clip.openai,None +resnet50x16_clip_gap.openai,None +resnet50x4_clip.openai,None +resnet50x4_clip_gap.openai,None +resnet50x64_clip.openai,None +resnet50x64_clip_gap.openai,None resnet51q.ra2_in1k,None resnet61q.ra2_in1k,None resnetaa101d.sw_in12k,None @@ -388,7 +406,7 @@ selecsls60.in1k,None selecsls60b.in1k,None semnasnet_075.rmsp_in1k,None senet154.gluon_in1k,None -sequencer2d_s.in1k,None +sequencer2d_s.in1k,None,xfail_export,No conversion rule found for operations aten.mkldnn_rnn_layer.default seresnet152d.ra2_in1k,None seresnet33ts.ra2_in1k,None seresnet50.a1_in1k,None @@ -453,7 +471,7 @@ tinynet_b.in1k,None tinynet_c.in1k,None tinynet_d.in1k,None tinynet_e.in1k,None -tnt_s_patch16_224,None +tnt_s_patch16_224,None,xfail_export,Requested None inlined input tresnet_m.miil_in1k,None tresnet_v2_l.miil_in21k,None twins_pcpvt_base.in1k,None @@ -467,25 +485,39 @@ vgg16_bn.tv_in1k,None vgg19.tv_in1k,None vgg19_bn.tv_in1k,None visformer_tiny.in1k,None +vit_base_mci_224.apple_mclip,None vit_base_patch14_dinov2.lvd142m,None vit_base_patch14_reg4_dinov2.lvd142m,None vit_base_patch16_224.augreg2_in21k_ft_in1k,None 
vit_base_patch16_224_miil.in21k,None vit_base_patch16_clip_224.datacompxl,None vit_base_patch16_clip_quickgelu_224.metaclip_2pt5b,None +vit_base_patch16_rope_reg1_gap_256.sbb_in1k,None,xfail,Argument shapes are inconsistent vit_base_patch16_rpn_224.sw_in1k,None vit_base_patch16_siglip_224.webli,None +vit_base_patch16_siglip_gap_224.webli,None vit_base_patch32_224.augreg_in1k,None vit_base_patch32_clip_224.datacompxl,None vit_base_patch32_clip_quickgelu_224.metaclip_2pt5b,None vit_base_patch8_224.augreg2_in21k_ft_in1k,None vit_base_r50_s16_224.orig_in21k,None +vit_betwixt_patch16_reg1_gap_256.sbb_in1k,None +vit_betwixt_patch16_reg4_gap_256.sbb_in12k,None +vit_betwixt_patch16_rope_reg4_gap_256.sbb_in1k,None,xfail,Argument shapes are inconsistent +vit_betwixt_patch32_clip_224.tinyclip_laion400m,None vit_huge_patch14_224.mae,None vit_huge_patch14_gap_224.in1k_ijepa,None vit_large_patch14_clip_224.datacompxl,None vit_large_patch14_clip_quickgelu_224.dfn2b,None vit_large_r50_s32_224.augreg_in21k,None +vit_little_patch16_reg1_gap_256.sbb_in12k,None +vit_little_patch16_reg4_gap_256.sbb_in1k,None vit_medium_patch16_gap_240.sw_in12k,None +vit_medium_patch16_reg1_gap_256.sbb_in1k,None +vit_medium_patch16_reg4_gap_256.sbb_in12k,None +vit_mediumd_patch16_reg4_gap_256.sbb_in12k,None +vit_mediumd_patch16_rope_reg1_gap_256.sbb_in1k,None,xfail,Argument shapes are inconsistent +vit_pwee_patch16_reg1_gap_256.sbb_in1k,None vit_relpos_base_patch16_224.sw_in1k,None vit_relpos_base_patch16_clsgap_224.sw_in1k,None vit_relpos_base_patch32_plus_rpn_256.sw_in1k,None @@ -493,13 +525,18 @@ vit_relpos_medium_patch16_cls_224.sw_in1k,None vit_relpos_medium_patch16_rpn_224.sw_in1k,None vit_small_r26_s32_224.augreg_in21k,None vit_so400m_patch14_siglip_224.webli,None +vit_so400m_patch14_siglip_gap_224.pali_mix,None,skip,Access to model google/paligemma-3b-mix-224-jax is restricted vit_srelpos_small_patch16_224.sw_in1k,None vit_tiny_r_s16_p8_224.augreg_in21k,None -volo_d1_224.sail_in1k,None -volo_d2_224.sail_in1k,None -volo_d3_224.sail_in1k,None -volo_d4_224.sail_in1k,None -volo_d5_224.sail_in1k,None +vit_wee_patch16_reg1_gap_256.sbb_in1k,None +vit_xsmall_patch16_clip_224.tinyclip_yfcc15m,None +vitamin_base_224.datacomp1b_clip,None,xfail,RuntimeError Error in loading state_dict for VisionTransformer +vitamin_large2_224.datacomp1b_clip,None +volo_d1_224.sail_in1k,None,xfail,Cannot get length of dynamic dimension +volo_d2_224.sail_in1k,None,xfail,Cannot get length of dynamic dimension +volo_d3_224.sail_in1k,None,xfail,Cannot get length of dynamic dimension +volo_d4_224.sail_in1k,None,xfail,Cannot get length of dynamic dimension +volo_d5_224.sail_in1k,None,xfail,Cannot get length of dynamic dimension wide_resnet101_2.tv2_in1k,None wide_resnet50_2.racm_in1k,None xception41.tf_in1k,None diff --git a/tests/model_hub_tests/pytorch/torchvision_models b/tests/model_hub_tests/pytorch/torchvision_models index 35e6805bd18152..a045925ed54f4a 100644 --- a/tests/model_hub_tests/pytorch/torchvision_models +++ b/tests/model_hub_tests/pytorch/torchvision_models @@ -3,9 +3,9 @@ convnext_base,none convnext_large,none convnext_small,none convnext_tiny,none -deeplabv3_mobilenet_v3_large,none -deeplabv3_resnet101,none -deeplabv3_resnet50,none +deeplabv3_mobilenet_v3_large,none,xfail_export,Requested None inlined input +deeplabv3_resnet101,none,xfail_export,Requested None inlined input +deeplabv3_resnet50,none,xfail_export,Requested None inlined input densenet121,none densenet161,none densenet169,none @@ -21,11 +21,11 @@ efficientnet_b7,none 
efficientnet_v2_l,none efficientnet_v2_m,none efficientnet_v2_s,none -fcn_resnet101,none -fcn_resnet50,none +fcn_resnet101,none,xfail_export,Requested None inlined input +fcn_resnet50,none,xfail_export,Requested None inlined input googlenet,none inception_v3,none -lraspp_mobilenet_v3_large,none +lraspp_mobilenet_v3_large,none,xfail_export,Requested None inlined input maxvit_t,none mc3_18,none mnasnet0_5,none @@ -39,8 +39,8 @@ mvit_v1_b,none mvit_v2_s,none r2plus1d_18,none r3d_18,none -raft_large,none -raft_small,none +raft_large,none,xfail_export,Mutating module attribute corr_pyramid during export +raft_small,none,xfail_export,Mutating module attribute corr_pyramid during export regnet_x_16gf,none regnet_x_1_6gf,none regnet_x_32gf,none diff --git a/thirdparty/open_model_zoo b/thirdparty/open_model_zoo index 9c6d95a2a668d6..cec8d2be4baf81 160000 --- a/thirdparty/open_model_zoo +++ b/thirdparty/open_model_zoo @@ -1 +1 @@ -Subproject commit 9c6d95a2a668d6ae41aebda42b15608db7dd3fa0 +Subproject commit cec8d2be4baf81c191091abd83c59507fc12d2e8 diff --git a/tools/constraints.txt b/tools/constraints.txt index 21961ea88f9e14..258ed7a8b3208c 100644 --- a/tools/constraints.txt +++ b/tools/constraints.txt @@ -7,7 +7,7 @@ mxnet~=1.2.0; sys_platform == 'win32' mxnet>=1.7.0.post2,<=1.9.1; sys_platform != 'win32' onnx>=1.8.1,<=1.15.0 networkx<=3.1.0 -pytest>=5.0,<7.3 +pytest>=5.0,<8.4 protobuf>=3.18.1,<4.0.0 defusedxml>=0.7.1 requests>=2.25.1
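Note (illustrative only, not part of the change set above): a minimal sketch of how the new npu::utils::joinTensors helper declared in src/plugins/intel_npu/tools/common/include/tensor_utils.hpp could be exercised. The include paths, shape, and layout used here are assumptions for the example, not code from the diff.

#include <list>
#include <openvino/core/layout.hpp>
#include <openvino/openvino.hpp>
#include "tensor_utils.hpp"  // npu::utils::joinTensors; assumes the tools/common include dir is on the include path

int main() {
    // Three identical non-batched tensors (batch dimension of size 1, layout "NCHW").
    std::list<ov::Tensor> parts;
    for (int i = 0; i < 3; ++i) {
        parts.emplace_back(ov::element::f32, ov::Shape{1, 3, 224, 224});
    }
    // joinTensors copies the tensors back to back along the batch axis of the given layout,
    // so the result keeps the precision and, per the implementation above, has shape {3, 3, 224, 224}.
    const ov::Tensor batched = npu::utils::joinTensors(parts, ov::Layout("NCHW"));
    return batched.get_shape()[0] == 3 ? 0 : 1;
}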